/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/channel.h"

#include <algorithm>
#include <utility>

#include "webrtc/base/checks.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/format_macros.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/rate_limiter.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/call/rtc_event_log.h"
#include "webrtc/common.h"
#include "webrtc/config.h"
#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/pacing/packet_router.h"
#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/utility/include/audio_frame_operations.h"
#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/utility.h"

namespace webrtc {
namespace voe {

namespace {

constexpr int64_t kMaxRetransmissionWindowMs = 1000;
constexpr int64_t kMinRetransmissionWindowMs = 30;

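// Helper: registers |ci| as a receive codec in |acm|, letting the ACM rent an
// iSAC decoder from |rac| on demand for the given sample rate. Returns true
// on success.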
bool RegisterReceiveCodec(std::unique_ptr<AudioCodingModule>* acm,
                          acm2::RentACodec* rac,
                          const CodecInst& ci) {
  const int result = (*acm)->RegisterReceiveCodec(
      ci, [&] { return rac->RentIsacDecoder(ci.plfreq); });
  return result == 0;
}

}  // namespace

const int kTelephoneEventAttenuationdB = 10;

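// Forwards RtcEventLog calls to an event log that can be attached (and
// detached) at runtime via SetEventLog(), guarded by a lock. Logging is not
// expected to be started or stopped through this proxy.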
class RtcEventLogProxy final : public webrtc::RtcEventLog {
 public:
  RtcEventLogProxy() : event_log_(nullptr) {}

  bool StartLogging(const std::string& file_name,
                    int64_t max_size_bytes) override {
    RTC_NOTREACHED();
    return false;
  }

  bool StartLogging(rtc::PlatformFile log_file,
                    int64_t max_size_bytes) override {
    RTC_NOTREACHED();
    return false;
  }

  void StopLogging() override { RTC_NOTREACHED(); }

  void LogVideoReceiveStreamConfig(
      const webrtc::VideoReceiveStream::Config& config) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogVideoReceiveStreamConfig(config);
    }
  }

  void LogVideoSendStreamConfig(
      const webrtc::VideoSendStream::Config& config) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogVideoSendStreamConfig(config);
    }
  }

  void LogRtpHeader(webrtc::PacketDirection direction,
                    webrtc::MediaType media_type,
                    const uint8_t* header,
                    size_t packet_length) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogRtpHeader(direction, media_type, header, packet_length);
    }
  }

  void LogRtcpPacket(webrtc::PacketDirection direction,
                     webrtc::MediaType media_type,
                     const uint8_t* packet,
                     size_t length) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogRtcpPacket(direction, media_type, packet, length);
    }
  }

  void LogAudioPlayout(uint32_t ssrc) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogAudioPlayout(ssrc);
    }
  }

  void LogBwePacketLossEvent(int32_t bitrate,
                             uint8_t fraction_loss,
                             int32_t total_packets) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogBwePacketLossEvent(bitrate, fraction_loss, total_packets);
    }
  }

  void SetEventLog(RtcEventLog* event_log) {
    rtc::CritScope lock(&crit_);
    event_log_ = event_log;
  }

 private:
  rtc::CriticalSection crit_;
  RtcEventLog* event_log_ GUARDED_BY(crit_);
  RTC_DISALLOW_COPY_AND_ASSIGN(RtcEventLogProxy);
};

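// Proxy that lets the transport feedback observer be attached after the
// RTP/RTCP module has been constructed. Calls are thread-checked against the
// pacer and network threads and forwarded under a lock.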
class TransportFeedbackProxy : public TransportFeedbackObserver {
 public:
  TransportFeedbackProxy() : feedback_observer_(nullptr) {
    pacer_thread_.DetachFromThread();
    network_thread_.DetachFromThread();
  }

  void SetTransportFeedbackObserver(
      TransportFeedbackObserver* feedback_observer) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    feedback_observer_ = feedback_observer;
  }

  // Implements TransportFeedbackObserver.
  void AddPacket(uint16_t sequence_number,
                 size_t length,
                 int probe_cluster_id) override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->AddPacket(sequence_number, length, probe_cluster_id);
  }
  void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
    RTC_DCHECK(network_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->OnTransportFeedback(feedback);
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  rtc::ThreadChecker network_thread_;
  TransportFeedbackObserver* feedback_observer_ GUARDED_BY(&crit_);
};

class TransportSequenceNumberProxy : public TransportSequenceNumberAllocator {
 public:
  TransportSequenceNumberProxy() : seq_num_allocator_(nullptr) {
    pacer_thread_.DetachFromThread();
  }

  void SetSequenceNumberAllocator(
      TransportSequenceNumberAllocator* seq_num_allocator) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    seq_num_allocator_ = seq_num_allocator;
  }

  // Implements TransportSequenceNumberAllocator.
  uint16_t AllocateSequenceNumber() override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (!seq_num_allocator_)
      return 0;
    return seq_num_allocator_->AllocateSequenceNumber();
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
};

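// Proxy that forwards InsertPacket() to a pacer (RtpPacketSender) which may be
// attached after construction; packets are silently dropped while no sender
// is set.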
class RtpPacketSenderProxy : public RtpPacketSender {
 public:
  RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {}

  void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    rtp_packet_sender_ = rtp_packet_sender;
  }

  // Implements RtpPacketSender.
  void InsertPacket(Priority priority,
                    uint32_t ssrc,
                    uint16_t sequence_number,
                    int64_t capture_time_ms,
                    size_t bytes,
                    bool retransmission) override {
    rtc::CritScope lock(&crit_);
    if (rtp_packet_sender_) {
      rtp_packet_sender_->InsertPacket(priority, ssrc, sequence_number,
                                       capture_time_ms, bytes, retransmission);
    }
  }

 private:
  rtc::ThreadChecker thread_checker_;
  rtc::CriticalSection crit_;
  RtpPacketSender* rtp_packet_sender_ GUARDED_BY(&crit_);
};

// Extend the default RTCP statistics struct with max_jitter, defined as the
// maximum jitter value seen in an RTCP report block.
struct ChannelStatistics : public RtcpStatistics {
  ChannelStatistics() : rtcp(), max_jitter(0) {}

  RtcpStatistics rtcp;
  uint32_t max_jitter;
};

// Statistics callback, called at each generation of a new RTCP report block.
class StatisticsProxy : public RtcpStatisticsCallback {
 public:
  StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {}
  virtual ~StatisticsProxy() {}

  void StatisticsUpdated(const RtcpStatistics& statistics,
                         uint32_t ssrc) override {
    if (ssrc != ssrc_)
      return;

    rtc::CritScope cs(&stats_lock_);
    stats_.rtcp = statistics;
    if (statistics.jitter > stats_.max_jitter) {
      stats_.max_jitter = statistics.jitter;
    }
  }

  void CNameChanged(const char* cname, uint32_t ssrc) override {}

  ChannelStatistics GetStats() {
    rtc::CritScope cs(&stats_lock_);
    return stats_;
  }

 private:
  // StatisticsUpdated calls are triggered from threads in the RTP module,
  // while GetStats calls can be triggered from the public voice engine API,
  // hence synchronization is needed.
  rtc::CriticalSection stats_lock_;
  const uint32_t ssrc_;
  ChannelStatistics stats_;
};

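// Bandwidth observer that condenses incoming RTCP receiver reports into a
// single fraction-lost value, weighted by the number of packets each report
// block covers, and hands the result to the owning Channel.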
class VoERtcpObserver : public RtcpBandwidthObserver {
 public:
  explicit VoERtcpObserver(Channel* owner) : owner_(owner) {}
  virtual ~VoERtcpObserver() {}

  void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
    // Not used for Voice Engine.
  }

  void OnReceivedRtcpReceiverReport(const ReportBlockList& report_blocks,
                                    int64_t rtt,
                                    int64_t now_ms) override {
    // TODO(mflodman): Do we need to aggregate reports here or can we just send
    // what we get? I.e. do we ever get multiple reports bundled into one RTCP
    // report for VoiceEngine?
    if (report_blocks.empty())
      return;

    int fraction_lost_aggregate = 0;
    int total_number_of_packets = 0;

    // If receiving multiple report blocks, calculate the weighted average
    // based on the number of packets a report refers to.
    for (ReportBlockList::const_iterator block_it = report_blocks.begin();
         block_it != report_blocks.end(); ++block_it) {
      // Find the previous extended high sequence number for this remote SSRC,
      // to calculate the number of RTP packets this report refers to. Ignore
      // if we haven't seen this SSRC before.
      std::map<uint32_t, uint32_t>::iterator seq_num_it =
          extended_max_sequence_number_.find(block_it->sourceSSRC);
      int number_of_packets = 0;
      if (seq_num_it != extended_max_sequence_number_.end()) {
        number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second;
      }
      fraction_lost_aggregate += number_of_packets * block_it->fractionLost;
      total_number_of_packets += number_of_packets;

      extended_max_sequence_number_[block_it->sourceSSRC] =
          block_it->extendedHighSeqNum;
    }
    int weighted_fraction_lost = 0;
    if (total_number_of_packets > 0) {
      // Round to the nearest integer.
      weighted_fraction_lost =
          (fraction_lost_aggregate + total_number_of_packets / 2) /
          total_number_of_packets;
    }
    owner_->OnIncomingFractionLoss(weighted_fraction_lost);
  }

 private:
  Channel* owner_;
  // Maps remote side ssrc to extended highest sequence number received.
  std::map<uint32_t, uint32_t> extended_max_sequence_number_;
};

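// Called by the ACM (via the transport callback registered in Init()) with
// each encoded audio frame; the frame is handed to the RTP/RTCP module for
// packetization and sending.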
int32_t Channel::SendData(FrameType frameType,
                          uint8_t payloadType,
                          uint32_t timeStamp,
                          const uint8_t* payloadData,
                          size_t payloadSize,
                          const RTPFragmentationHeader* fragmentation) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
               " payloadSize=%" PRIuS ", fragmentation=0x%x)",
               frameType, payloadType, timeStamp, payloadSize, fragmentation);

  if (_includeAudioLevelIndication) {
    // Store current audio level in the RTP/RTCP module.
    // The level will be used in combination with voice-activity state
    // (frameType) to add an RTP header extension.
    _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
  }

  // Push data from ACM to RTP/RTCP-module to deliver audio frame for
  // packetization.
  // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
  if (_rtpRtcpModule->SendOutgoingData(
          (FrameType&)frameType, payloadType, timeStamp,
          // Leaving the time when this frame was
          // received from the capture device as
          // undefined for voice for now.
          -1, payloadData, payloadSize, fragmentation) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "Channel::SendData() failed to send data to RTP/RTCP module");
    return -1;
  }

  _lastLocalTimeStamp = timeStamp;
  _lastPayloadType = payloadType;

  return 0;
}

int32_t Channel::InFrameType(FrameType frame_type) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::InFrameType(frame_type=%d)", frame_type);

  rtc::CritScope cs(&_callbackCritSect);
  _sendFrameType = (frame_type == kAudioFrameSpeech);
  return 0;
}

int32_t Channel::OnRxVadDetected(int vadDecision) {
  rtc::CritScope cs(&_callbackCritSect);
  if (_rxVadObserverPtr) {
    _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
  }

  return 0;
}

bool Channel::SendRtp(const uint8_t* data,
                      size_t len,
                      const PacketOptions& options) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendPacket(channel=%d, len=%" PRIuS ")", _channelId,
               len);

  rtc::CritScope cs(&_callbackCritSect);

  if (_transportPtr == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendPacket() failed to send RTP packet due to"
                 " invalid transport object");
    return false;
  }

  uint8_t* bufferToSendPtr = (uint8_t*)data;
  size_t bufferLength = len;

  if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) {
    std::string transport_name =
        _externalTransport ? "external transport" : "WebRtc sockets";
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendPacket() RTP transmission using %s failed",
                 transport_name.c_str());
    return false;
  }
  return true;
}

bool Channel::SendRtcp(const uint8_t* data, size_t len) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendRtcp(len=%" PRIuS ")", len);

  rtc::CritScope cs(&_callbackCritSect);
  if (_transportPtr == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendRtcp() failed to send RTCP packet"
                 " due to invalid transport object");
    return false;
  }

  uint8_t* bufferToSendPtr = (uint8_t*)data;
  size_t bufferLength = len;

  int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength);
  if (n < 0) {
    std::string transport_name =
        _externalTransport ? "external transport" : "WebRtc sockets";
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendRtcp() transmission using %s failed",
                 transport_name.c_str());
    return false;
  }
  return true;
}

void Channel::OnIncomingSSRCChanged(uint32_t ssrc) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);

  // Update ssrc so that NTP for AV sync can be updated.
  _rtpRtcpModule->SetRemoteSSRC(ssrc);
}

void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC,
               added);
}

int32_t Channel::OnInitializeDecoder(
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    size_t channels,
    uint32_t rate) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnInitializeDecoder(payloadType=%d, "
               "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
               payloadType, payloadName, frequency, channels, rate);

  CodecInst receiveCodec = {0};
  CodecInst dummyCodec = {0};

  receiveCodec.pltype = payloadType;
  receiveCodec.plfreq = frequency;
  receiveCodec.channels = channels;
  receiveCodec.rate = rate;
  strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

  audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
  receiveCodec.pacsize = dummyCodec.pacsize;

  // Register the new codec with the ACM.
  if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, receiveCodec)) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnInitializeDecoder() invalid codec ("
                 "pt=%d, name=%s) received - 1",
                 payloadType, payloadName);
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
    return -1;
  }

  return 0;
}

int32_t Channel::OnReceivedPayloadData(const uint8_t* payloadData,
                                       size_t payloadSize,
                                       const WebRtcRTPHeader* rtpHeader) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS
               ","
               " payloadType=%u, audioChannel=%" PRIuS ")",
               payloadSize, rtpHeader->header.payloadType,
               rtpHeader->type.Audio.channel);

  if (!channel_state_.Get().playing) {
    // Avoid inserting into NetEQ when we are not playing. Count the
    // packet as discarded.
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
                 "received packet is discarded since playing is not"
                 " activated");
    _numberOfDiscardedPackets++;
    return 0;
  }

  // Push the incoming payload (parsed and ready for decoding) into the ACM.
  if (audio_coding_->IncomingPacket(payloadData, payloadSize, *rtpHeader) !=
      0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
        "Channel::OnReceivedPayloadData() unable to push data to the ACM");
    return -1;
  }

  // Update the packet delay.
  UpdatePacketDelay(rtpHeader->header.timestamp,
                    rtpHeader->header.sequenceNumber);

  int64_t round_trip_time = 0;
  _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, NULL, NULL,
                      NULL);

  std::vector<uint16_t> nack_list = audio_coding_->GetNackList(round_trip_time);
  if (!nack_list.empty()) {
    // Can't use nack_list.data() since it's not supported by all
    // compilers.
    ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
  }
  return 0;
}

bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
                                size_t rtp_packet_length) {
  RTPHeader header;
  if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
    WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "IncomingPacket invalid RTP header");
    return false;
  }
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return false;
  return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
}

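// Called by the output mixer to fetch 10 ms of playout audio: pulls decoded
// PCM from the ACM/NetEq, then applies the optional receive-side steps below
// (RX APM, sink callback, gain/panning, file mixing, external media and level
// measurement) before handing the frame back.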
MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
    int32_t id,
    AudioFrame* audioFrame) {
  unsigned int ssrc;
  RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
  event_log_proxy_->LogAudioPlayout(ssrc);
  // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
  bool muted;
  if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame,
                                     &muted) == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
    // In all likelihood, the audio in this frame is garbage. We return an
    // error so that the audio mixer module doesn't add it to the mix. As
    // a result, it won't be played out and the actions skipped here are
    // irrelevant.
    return MixerParticipant::AudioFrameInfo::kError;
  }

  if (muted) {
    // TODO(henrik.lundin): We should be able to do better than this. But we
    // will have to go through all the cases below where the audio samples may
    // be used, and handle the muted case in some way.
    audioFrame->Mute();
  }

  if (_RxVadDetection) {
    UpdateRxVadDetection(*audioFrame);
  }

  // Convert module ID to internal VoE channel ID
  audioFrame->id_ = VoEChannelId(audioFrame->id_);
  // Store speech type for dead-or-alive detection
  _outputSpeechType = audioFrame->speech_type_;

  ChannelState::State state = channel_state_.Get();

  if (state.rx_apm_is_enabled) {
    int err = rx_audioproc_->ProcessStream(audioFrame);
    if (err) {
      LOG(LS_ERROR) << "ProcessStream() error: " << err;
      assert(false);
    }
  }

  {
    // Pass the audio buffers to an optional sink callback, before applying
    // scaling/panning, as that applies to the mix operation.
    // External recipients of the audio (e.g. via AudioTrack), will do their
    // own mixing/dynamic processing.
    rtc::CritScope cs(&_callbackCritSect);
    if (audio_sink_) {
      AudioSinkInterface::Data data(
          &audioFrame->data_[0], audioFrame->samples_per_channel_,
          audioFrame->sample_rate_hz_, audioFrame->num_channels_,
          audioFrame->timestamp_);
      audio_sink_->OnData(data);
    }
  }

  float output_gain = 1.0f;
  float left_pan = 1.0f;
  float right_pan = 1.0f;
  {
    rtc::CritScope cs(&volume_settings_critsect_);
    output_gain = _outputGain;
    left_pan = _panLeft;
    right_pan = _panRight;
  }

  // Output volume scaling
  if (output_gain < 0.99f || output_gain > 1.01f) {
    AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
  }

  // Scale left and/or right channel(s) if stereo and master balance is
  // active

  if (left_pan != 1.0f || right_pan != 1.0f) {
    if (audioFrame->num_channels_ == 1) {
      // Emulate stereo mode since panning is active.
      // The mono signal is copied to both left and right channels here.
      AudioFrameOperations::MonoToStereo(audioFrame);
    }
    // For true stereo mode (when we are receiving a stereo signal), no
    // action is needed.

    // Do the panning operation (the audio frame contains stereo at this
    // stage)
    AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
  }

  // Mix decoded PCM output with file if file mixing is enabled
  if (state.output_file_playing) {
    MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
    muted = false;  // We may have added non-zero samples.
  }

  // External media
  if (_outputExternalMedia) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (audioFrame->num_channels_ == 2);
    if (_outputExternalMediaCallbackPtr) {
      _outputExternalMediaCallbackPtr->Process(
          _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
          audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
          isStereo);
    }
  }

  // Record playout if enabled
  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFileRecording && _outputFileRecorderPtr) {
      _outputFileRecorderPtr->RecordAudioToFile(*audioFrame);
    }
  }

  // Measure audio level (0-9)
  // TODO(henrik.lundin) Use the |muted| information here too.
  _outputAudioLevel.ComputeLevel(*audioFrame);

  if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
    // The first frame with a valid rtp timestamp.
    capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
  }

  if (capture_start_rtp_time_stamp_ >= 0) {
    // audioFrame.timestamp_ should be valid from now on.

    // Compute elapsed time.
    int64_t unwrap_timestamp =
        rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
    audioFrame->elapsed_time_ms_ =
        (unwrap_timestamp - capture_start_rtp_time_stamp_) /
        (GetPlayoutFrequency() / 1000);

    {
      rtc::CritScope lock(&ts_stats_lock_);
      // Compute ntp time.
      audioFrame->ntp_time_ms_ =
          ntp_estimator_.Estimate(audioFrame->timestamp_);
      // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
      if (audioFrame->ntp_time_ms_ > 0) {
        // Compute |capture_start_ntp_time_ms_| so that
        // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
        capture_start_ntp_time_ms_ =
            audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
      }
    }
  }

  return muted ? MixerParticipant::AudioFrameInfo::kMuted
               : MixerParticipant::AudioFrameInfo::kNormal;
}

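// Reports the highest sample rate this channel needs for playout: the larger
// of the ACM receive and playout frequencies and, if active, the output file
// player frequency.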
int32_t Channel::NeededFrequency(int32_t id) const {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::NeededFrequency(id=%d)", id);

  int highestNeeded = 0;

  // Determine the highest needed receive frequency.
  int32_t receiveFrequency = audio_coding_->ReceiveFrequency();

  // Return the bigger of playout and receive frequency in the ACM.
  if (audio_coding_->PlayoutFrequency() > receiveFrequency) {
    highestNeeded = audio_coding_->PlayoutFrequency();
  } else {
    highestNeeded = receiveFrequency;
  }

  // Special case: if we're playing a file on the playout side
  // we take that frequency into consideration as well.
  // This is not needed on the sending side, since the codec will
  // limit the spectrum anyway.
  if (channel_state_.Get().output_file_playing) {
    rtc::CritScope cs(&_fileCritSect);
    if (_outputFilePlayerPtr) {
      if (_outputFilePlayerPtr->Frequency() > highestNeeded) {
        highestNeeded = _outputFilePlayerPtr->Frequency();
      }
    }
  }

  return (highestNeeded);
}

int32_t Channel::CreateChannel(Channel*& channel,
                               int32_t channelId,
                               uint32_t instanceId,
                               const Config& config) {
  return CreateChannel(channel, channelId, instanceId, config,
                       CreateBuiltinAudioDecoderFactory());
}

int32_t Channel::CreateChannel(
    Channel*& channel,
    int32_t channelId,
    uint32_t instanceId,
    const Config& config,
    const rtc::scoped_refptr<AudioDecoderFactory>& decoder_factory) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
               "Channel::CreateChannel(channelId=%d, instanceId=%d)", channelId,
               instanceId);

  channel = new Channel(channelId, instanceId, config, decoder_factory);
  if (channel == NULL) {
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
                 "Channel::CreateChannel() unable to allocate memory for"
                 " channel");
    return -1;
  }
  return 0;
}

void Channel::PlayNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}

void Channel::RecordNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}

void Channel::PlayFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayFileEnded(id=%d)", id);

  if (id == _inputFilePlayerId) {
    channel_state_.SetInputFilePlaying(false);
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PlayFileEnded() => input file player module is"
                 " shutdown");
  } else if (id == _outputFilePlayerId) {
    channel_state_.SetOutputFilePlaying(false);
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PlayFileEnded() => output file player module is"
                 " shutdown");
  }
}

void Channel::RecordFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded(id=%d)", id);

  assert(id == _outputFileRecorderId);

  rtc::CritScope cs(&_fileCritSect);

  _outputFileRecording = false;
  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded() => output file recorder module is"
               " shutdown");
}

Channel::Channel(int32_t channelId,
                 uint32_t instanceId,
                 const Config& config,
                 const rtc::scoped_refptr<AudioDecoderFactory>& decoder_factory)
    : _instanceId(instanceId),
      _channelId(channelId),
      event_log_proxy_(new RtcEventLogProxy()),
      rtp_header_parser_(RtpHeaderParser::Create()),
      rtp_payload_registry_(
          new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
      rtp_receive_statistics_(
          ReceiveStatistics::Create(Clock::GetRealTimeClock())),
      rtp_receiver_(
          RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
                                           this,
                                           this,
                                           rtp_payload_registry_.get())),
      telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
      _outputAudioLevel(),
      _externalTransport(false),
      _inputFilePlayerPtr(NULL),
      _outputFilePlayerPtr(NULL),
      _outputFileRecorderPtr(NULL),
      // Avoid conflicts with other channels by adding 1024 - 1026;
      // we won't use as many as 1024 channels.
      _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
      _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
      _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
      _outputFileRecording(false),
      _outputExternalMedia(false),
      _inputExternalMediaCallbackPtr(NULL),
      _outputExternalMediaCallbackPtr(NULL),
      _timeStamp(0),  // This is just an offset; the RTP module will add its
                      // own random offset.
      ntp_estimator_(Clock::GetRealTimeClock()),
      playout_timestamp_rtp_(0),
      playout_timestamp_rtcp_(0),
      playout_delay_ms_(0),
      _numberOfDiscardedPackets(0),
      send_sequence_number_(0),
      rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
      capture_start_rtp_time_stamp_(-1),
      capture_start_ntp_time_ms_(-1),
      _engineStatisticsPtr(NULL),
      _outputMixerPtr(NULL),
      _transmitMixerPtr(NULL),
      _moduleProcessThreadPtr(NULL),
      _audioDeviceModulePtr(NULL),
      _voiceEngineObserverPtr(NULL),
      _callbackCritSectPtr(NULL),
      _transportPtr(NULL),
      _rxVadObserverPtr(NULL),
      _oldVadDecision(-1),
      _sendFrameType(0),
      _externalMixing(false),
      _mixFileWithMicrophone(false),
      input_mute_(false),
      previous_frame_muted_(false),
      _panLeft(1.0f),
      _panRight(1.0f),
      _outputGain(1.0f),
      _lastLocalTimeStamp(0),
      _lastPayloadType(0),
      _includeAudioLevelIndication(false),
      _outputSpeechType(AudioFrame::kNormalSpeech),
      _average_jitter_buffer_delay_us(0),
      _previousTimestamp(0),
      _recPacketDelayMs(20),
      _RxVadDetection(false),
      _rxAgcIsEnabled(false),
      _rxNsIsEnabled(false),
      restored_packet_in_use_(false),
      rtcp_observer_(new VoERtcpObserver(this)),
      network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
      associate_send_channel_(ChannelOwner(nullptr)),
      pacing_enabled_(config.Get<VoicePacing>().enabled),
      feedback_observer_proxy_(new TransportFeedbackProxy()),
      seq_num_allocator_proxy_(new TransportSequenceNumberProxy()),
      rtp_packet_sender_proxy_(new RtpPacketSenderProxy()),
      retransmission_rate_limiter_(new RateLimiter(Clock::GetRealTimeClock(),
                                                   kMaxRetransmissionWindowMs)),
      decoder_factory_(decoder_factory) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Channel() - ctor");
  AudioCodingModule::Config acm_config;
  acm_config.id = VoEModuleId(instanceId, channelId);
  if (config.Get<NetEqCapacityConfig>().enabled) {
    // Clamping the buffer capacity at 20 packets. While going lower will
    // probably work, it makes little sense.
    acm_config.neteq_config.max_packets_in_buffer =
        std::max(20, config.Get<NetEqCapacityConfig>().capacity);
  }
  acm_config.neteq_config.enable_fast_accelerate =
      config.Get<NetEqFastAccelerate>().enabled;
  acm_config.neteq_config.enable_muted_state = true;
  acm_config.decoder_factory = decoder_factory;
  audio_coding_.reset(AudioCodingModule::Create(acm_config));

  _outputAudioLevel.Clear();

  RtpRtcp::Configuration configuration;
  configuration.audio = true;
  configuration.outgoing_transport = this;
  configuration.receive_statistics = rtp_receive_statistics_.get();
  configuration.bandwidth_callback = rtcp_observer_.get();
  if (pacing_enabled_) {
    configuration.paced_sender = rtp_packet_sender_proxy_.get();
    configuration.transport_sequence_number_allocator =
        seq_num_allocator_proxy_.get();
    configuration.transport_feedback_callback = feedback_observer_proxy_.get();
  }
  configuration.event_log = &(*event_log_proxy_);
  configuration.retransmission_rate_limiter =
      retransmission_rate_limiter_.get();

  _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
  _rtpRtcpModule->SetSendingMediaStatus(false);

  statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
      statistics_proxy_.get());

  Config audioproc_config;
  audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
  rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
}

Channel::~Channel() {
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::~Channel() - dtor");

  if (_outputExternalMedia) {
    DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
  }
  if (channel_state_.Get().input_external_media) {
    DeRegisterExternalMediaProcessing(kRecordingPerChannel);
  }
  StopSend();
  StopPlayout();

  {
    rtc::CritScope cs(&_fileCritSect);
    if (_inputFilePlayerPtr) {
      _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _inputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
      _inputFilePlayerPtr = NULL;
    }
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }
    if (_outputFileRecorderPtr) {
      _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
      _outputFileRecorderPtr->StopRecording();
      FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
      _outputFileRecorderPtr = NULL;
    }
  }

  // The order to safely shut down modules in a channel is:
  // 1. De-register callbacks in modules
  // 2. De-register modules in process thread
  // 3. Destroy modules
  if (audio_coding_->RegisterTransportCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register transport callback"
                 " (Audio coding module)");
  }
  if (audio_coding_->RegisterVADCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register VAD callback"
                 " (Audio coding module)");
  }
  // De-register modules in process thread
  _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());

  // End of modules shutdown
}

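// Init() must run after SetEngineInformation(). It registers the RTP/RTCP
// module with the process thread, initializes the ACM receiver, enables RTCP,
// and registers every supported codec with the RTP receiver (plus default
// send-side handling for PCMU, telephone-event and CN).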
int32_t Channel::Init() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Init()");

  channel_state_.Reset();

  // --- Initial sanity

  if ((_engineStatisticsPtr == NULL) || (_moduleProcessThreadPtr == NULL)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::Init() must call SetEngineInformation() first");
    return -1;
  }

  // --- Add modules to process thread (for periodic scheduling)

  _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get());

  // --- ACM initialization

  if (audio_coding_->InitializeReceiver() == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "Channel::Init() unable to initialize the ACM - 1");
    return -1;
  }

  // --- RTP/RTCP module initialization

  // Ensure that RTCP is enabled by default for the created channel.
  // Note that the module will keep generating RTCP until it is explicitly
  // disabled by the user.
  // After StopListen (when no sockets exist), RTCP packets will no longer
  // be transmitted since the Transport object will then be invalid.
  telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
  // RTCP is enabled by default.
  _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
  // --- Register all permanent callbacks
  const bool fail = (audio_coding_->RegisterTransportCallback(this) == -1) ||
                    (audio_coding_->RegisterVADCallback(this) == -1);

  if (fail) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_INIT_CHANNEL, kTraceError,
        "Channel::Init() callbacks not registered");
    return -1;
  }

  // --- Register all supported codecs to the receiving side of the
  //     RTP/RTCP module

  CodecInst codec;
  const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

  for (int idx = 0; idx < nSupportedCodecs; idx++) {
    // Open up the RTP/RTCP receiver for all supported codecs
    if ((audio_coding_->Codec(idx, &codec) == -1) ||
        (rtp_receiver_->RegisterReceivePayload(
             codec.plname, codec.pltype, codec.plfreq, codec.channels,
             (codec.rate < 0) ? 0 : codec.rate) == -1)) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() unable to register %s "
                   "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() %s (%d/%d/%" PRIuS
                   "/%d) has been "
                   "added to the RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    }

    // Ensure that PCMU is used as the default codec on the sending side.
    if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1)) {
      SetSendCodec(codec);
    }

    // Register the default PT for out-of-band 'telephone-event'.
    if (!STR_CASE_CMP(codec.plname, "telephone-event")) {
      if (_rtpRtcpModule->RegisterSendPayload(codec) == -1 ||
          !RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register outband "
                     "'telephone-event' (%d/%d) correctly",
                     codec.pltype, codec.plfreq);
      }
    }

    if (!STR_CASE_CMP(codec.plname, "CN")) {
      if (!codec_manager_.RegisterEncoder(codec) ||
          !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get()) ||
          !RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec) ||
          _rtpRtcpModule->RegisterSendPayload(codec) == -1) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register CN (%d/%d) "
                     "correctly - 1",
                     codec.pltype, codec.plfreq);
      }
    }
  }

  if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
    LOG(LS_ERROR) << "noise_suppression()->set_level(kDefaultNsMode) failed.";
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
    LOG(LS_ERROR) << "gain_control()->set_mode(kDefaultRxAgcMode) failed.";
    return -1;
  }

  return 0;
}

kwiberg55b97fe2016-01-28 05:22:45 -08001128int32_t Channel::SetEngineInformation(Statistics& engineStatistics,
1129 OutputMixer& outputMixer,
1130 voe::TransmitMixer& transmitMixer,
1131 ProcessThread& moduleProcessThread,
1132 AudioDeviceModule& audioDeviceModule,
1133 VoiceEngineObserver* voiceEngineObserver,
1134 rtc::CriticalSection* callbackCritSect) {
1135 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1136 "Channel::SetEngineInformation()");
1137 _engineStatisticsPtr = &engineStatistics;
1138 _outputMixerPtr = &outputMixer;
1139 _transmitMixerPtr = &transmitMixer,
1140 _moduleProcessThreadPtr = &moduleProcessThread;
1141 _audioDeviceModulePtr = &audioDeviceModule;
1142 _voiceEngineObserverPtr = voiceEngineObserver;
1143 _callbackCritSectPtr = callbackCritSect;
1144 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001145}
1146
kwiberg55b97fe2016-01-28 05:22:45 -08001147int32_t Channel::UpdateLocalTimeStamp() {
1148 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
1149 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001150}
1151
kwibergb7f89d62016-02-17 10:04:18 -08001152void Channel::SetSink(std::unique_ptr<AudioSinkInterface> sink) {
tommi31fc21f2016-01-21 10:37:37 -08001153 rtc::CritScope cs(&_callbackCritSect);
deadbeef2d110be2016-01-13 12:00:26 -08001154 audio_sink_ = std::move(sink);
Tommif888bb52015-12-12 01:37:01 +01001155}
1156
ossu29b1a8d2016-06-13 07:34:51 -07001157const rtc::scoped_refptr<AudioDecoderFactory>&
1158Channel::GetAudioDecoderFactory() const {
1159 return decoder_factory_;
1160}
1161
kwiberg55b97fe2016-01-28 05:22:45 -08001162int32_t Channel::StartPlayout() {
1163 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1164 "Channel::StartPlayout()");
1165 if (channel_state_.Get().playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001166 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001167 }
1168
1169 if (!_externalMixing) {
1170 // Add participant as candidates for mixing.
1171 if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
1172 _engineStatisticsPtr->SetLastError(
1173 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1174 "StartPlayout() failed to add participant to mixer");
1175 return -1;
1176 }
1177 }
1178
1179 channel_state_.SetPlaying(true);
1180 if (RegisterFilePlayingToMixer() != 0)
1181 return -1;
1182
1183 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001184}
1185
kwiberg55b97fe2016-01-28 05:22:45 -08001186int32_t Channel::StopPlayout() {
1187 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1188 "Channel::StopPlayout()");
1189 if (!channel_state_.Get().playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001190 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001191 }
1192
1193 if (!_externalMixing) {
1194 // Remove participant as candidates for mixing
1195 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
1196 _engineStatisticsPtr->SetLastError(
1197 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1198 "StopPlayout() failed to remove participant from mixer");
1199 return -1;
1200 }
1201 }
1202
1203 channel_state_.SetPlaying(false);
1204 _outputAudioLevel.Clear();
1205
1206 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001207}
1208
kwiberg55b97fe2016-01-28 05:22:45 -08001209int32_t Channel::StartSend() {
1210 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1211 "Channel::StartSend()");
1212 // Resume the previous sequence number which was reset by StopSend().
1213 // This needs to be done before |sending| is set to true.
1214 if (send_sequence_number_)
1215 SetInitSequenceNumber(send_sequence_number_);
xians@webrtc.org09e8c472013-07-31 16:30:19 +00001216
kwiberg55b97fe2016-01-28 05:22:45 -08001217 if (channel_state_.Get().sending) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001218 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001219 }
1220 channel_state_.SetSending(true);
niklase@google.com470e71d2011-07-07 08:21:25 +00001221
Peter Boström3dd5d1d2016-02-25 16:56:48 +01001222 _rtpRtcpModule->SetSendingMediaStatus(true);
kwiberg55b97fe2016-01-28 05:22:45 -08001223 if (_rtpRtcpModule->SetSendingStatus(true) != 0) {
1224 _engineStatisticsPtr->SetLastError(
1225 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1226 "StartSend() RTP/RTCP failed to start sending");
Peter Boström3dd5d1d2016-02-25 16:56:48 +01001227 _rtpRtcpModule->SetSendingMediaStatus(false);
kwiberg55b97fe2016-01-28 05:22:45 -08001228 rtc::CritScope cs(&_callbackCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001229 channel_state_.SetSending(false);
kwiberg55b97fe2016-01-28 05:22:45 -08001230 return -1;
1231 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001232
kwiberg55b97fe2016-01-28 05:22:45 -08001233 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001234}
1235
kwiberg55b97fe2016-01-28 05:22:45 -08001236int32_t Channel::StopSend() {
1237 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1238 "Channel::StopSend()");
1239 if (!channel_state_.Get().sending) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001240 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001241 }
1242 channel_state_.SetSending(false);
1243
1244 // Store the sequence number to be able to pick up the same sequence for
1245 // the next StartSend(). This is needed when restarting the device; otherwise
1246 // it might cause libSRTP to complain about packets being replayed.
1247 // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1248 // CL is landed. See issue
1249 // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1250 send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1251
1252 // Reset the sending SSRC and sequence number and trigger direct transmission
1253 // of an RTCP BYE.
1254 if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
1255 _engineStatisticsPtr->SetLastError(
1256 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1257 "StartSend() RTP/RTCP failed to stop sending");
1258 }
Peter Boström3dd5d1d2016-02-25 16:56:48 +01001259 _rtpRtcpModule->SetSendingMediaStatus(false);
kwiberg55b97fe2016-01-28 05:22:45 -08001260
1261 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001262}
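// Usage note (illustrative sketch, not from the original source): because
// StopSend() caches _rtpRtcpModule->SequenceNumber() in send_sequence_number_
// and the next StartSend() restores it via SetInitSequenceNumber(), a device
// restart can keep a continuous RTP sequence, e.g. given a valid Channel*:
//
//   channel->StopSend();   // Sequence number is cached here.
//   // ... restart or reconfigure the audio device ...
//   channel->StartSend();  // Cached sequence number is re-applied before
//                          // |sending| is set, so libSRTP sees no replays.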
1263
kwiberg55b97fe2016-01-28 05:22:45 -08001264int32_t Channel::StartReceiving() {
1265 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1266 "Channel::StartReceiving()");
1267 if (channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001268 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001269 }
1270 channel_state_.SetReceiving(true);
1271 _numberOfDiscardedPackets = 0;
1272 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001273}
1274
kwiberg55b97fe2016-01-28 05:22:45 -08001275int32_t Channel::StopReceiving() {
1276 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1277 "Channel::StopReceiving()");
1278 if (!channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001279 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001280 }
1281
1282 channel_state_.SetReceiving(false);
1283 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001284}
1285
kwiberg55b97fe2016-01-28 05:22:45 -08001286int32_t Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
1287 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1288 "Channel::RegisterVoiceEngineObserver()");
1289 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001290
kwiberg55b97fe2016-01-28 05:22:45 -08001291 if (_voiceEngineObserverPtr) {
1292 _engineStatisticsPtr->SetLastError(
1293 VE_INVALID_OPERATION, kTraceError,
1294 "RegisterVoiceEngineObserver() observer already enabled");
1295 return -1;
1296 }
1297 _voiceEngineObserverPtr = &observer;
1298 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001299}
1300
kwiberg55b97fe2016-01-28 05:22:45 -08001301int32_t Channel::DeRegisterVoiceEngineObserver() {
1302 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1303 "Channel::DeRegisterVoiceEngineObserver()");
1304 rtc::CritScope cs(&_callbackCritSect);
1305
1306 if (!_voiceEngineObserverPtr) {
1307 _engineStatisticsPtr->SetLastError(
1308 VE_INVALID_OPERATION, kTraceWarning,
1309 "DeRegisterVoiceEngineObserver() observer already disabled");
1310 return 0;
1311 }
1312 _voiceEngineObserverPtr = NULL;
1313 return 0;
1314}
1315
1316int32_t Channel::GetSendCodec(CodecInst& codec) {
kwibergc8d071e2016-04-06 12:22:38 -07001317 auto send_codec = codec_manager_.GetCodecInst();
kwiberg1fd4a4a2015-11-03 11:20:50 -08001318 if (send_codec) {
1319 codec = *send_codec;
1320 return 0;
1321 }
1322 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001323}
1324
kwiberg55b97fe2016-01-28 05:22:45 -08001325int32_t Channel::GetRecCodec(CodecInst& codec) {
1326 return (audio_coding_->ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001327}
1328
kwiberg55b97fe2016-01-28 05:22:45 -08001329int32_t Channel::SetSendCodec(const CodecInst& codec) {
1330 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1331 "Channel::SetSendCodec()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001332
kwibergc8d071e2016-04-06 12:22:38 -07001333 if (!codec_manager_.RegisterEncoder(codec) ||
1334 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001335 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1336 "SetSendCodec() failed to register codec to ACM");
1337 return -1;
1338 }
1339
1340 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1341 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1342 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1343 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1344 "SetSendCodec() failed to register codec to"
1345 " RTP/RTCP module");
1346 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001347 }
kwiberg55b97fe2016-01-28 05:22:45 -08001348 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001349
kwiberg55b97fe2016-01-28 05:22:45 -08001350 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0) {
1351 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1352 "SetSendCodec() failed to set audio packet size");
1353 return -1;
1354 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001355
kwiberg55b97fe2016-01-28 05:22:45 -08001356 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001357}
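// Hypothetical usage sketch for SetSendCodec(): |channel| is assumed to be a
// fully initialized Channel, and the CodecInst field order
// {pltype, plname, plfreq, pacsize, channels, rate} follows its use elsewhere
// in this file. The payload type 111 is only an example value.
//
//   CodecInst opus = {111, "opus", 48000, 960, 2, 64000};
//   if (channel->SetSendCodec(opus) != 0) {
//     // Registration failed in the ACM or the RTP/RTCP module; details are
//     // reported through _engineStatisticsPtr->SetLastError().
//   }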
1358
Ivo Creusenadf89b72015-04-29 16:03:33 +02001359void Channel::SetBitRate(int bitrate_bps) {
1360 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1361 "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps);
1362 audio_coding_->SetBitRate(bitrate_bps);
Erik Språng737336d2016-07-29 12:59:36 +02001363 retransmission_rate_limiter_->SetMaxRate(bitrate_bps);
Ivo Creusenadf89b72015-04-29 16:03:33 +02001364}
1365
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001366void Channel::OnIncomingFractionLoss(int fraction_lost) {
minyue@webrtc.org74aaf292014-07-16 21:28:26 +00001367 network_predictor_->UpdatePacketLossRate(fraction_lost);
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001368 uint8_t average_fraction_loss = network_predictor_->GetLossRate();
1369
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001370 // Normalizes rate to 0 - 100.
kwiberg55b97fe2016-01-28 05:22:45 -08001371 if (audio_coding_->SetPacketLossRate(100 * average_fraction_loss / 255) !=
1372 0) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001373 assert(false); // This should not happen.
1374 }
1375}
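// Minimal sketch of the normalization above (hypothetical helper): RTCP
// reports fraction lost in the range 0-255, while SetPacketLossRate() expects
// a percentage in the range 0-100.
//
//   int NormalizedLossPercent(uint8_t average_fraction_loss) {
//     return 100 * average_fraction_loss / 255;  // e.g. 128 -> 50, 255 -> 100.
//   }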
1376
kwiberg55b97fe2016-01-28 05:22:45 -08001377int32_t Channel::SetVADStatus(bool enableVAD,
1378 ACMVADMode mode,
1379 bool disableDTX) {
1380 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1381 "Channel::SetVADStatus(mode=%d)", mode);
kwibergc8d071e2016-04-06 12:22:38 -07001382 RTC_DCHECK(!(disableDTX && enableVAD)); // disableDTX mode is deprecated.
1383 if (!codec_manager_.SetVAD(enableVAD, mode) ||
1384 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001385 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1386 kTraceError,
1387 "SetVADStatus() failed to set VAD");
1388 return -1;
1389 }
1390 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001391}
1392
kwiberg55b97fe2016-01-28 05:22:45 -08001393int32_t Channel::GetVADStatus(bool& enabledVAD,
1394 ACMVADMode& mode,
1395 bool& disabledDTX) {
kwibergc8d071e2016-04-06 12:22:38 -07001396 const auto* params = codec_manager_.GetStackParams();
1397 enabledVAD = params->use_cng;
1398 mode = params->vad_mode;
1399 disabledDTX = !params->use_cng;
kwiberg55b97fe2016-01-28 05:22:45 -08001400 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001401}
1402
kwiberg55b97fe2016-01-28 05:22:45 -08001403int32_t Channel::SetRecPayloadType(const CodecInst& codec) {
1404 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1405 "Channel::SetRecPayloadType()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001406
kwiberg55b97fe2016-01-28 05:22:45 -08001407 if (channel_state_.Get().playing) {
1408 _engineStatisticsPtr->SetLastError(
1409 VE_ALREADY_PLAYING, kTraceError,
1410 "SetRecPayloadType() unable to set PT while playing");
1411 return -1;
1412 }
1413 if (channel_state_.Get().receiving) {
1414 _engineStatisticsPtr->SetLastError(
1415 VE_ALREADY_LISTENING, kTraceError,
1416 "SetRecPayloadType() unable to set PT while listening");
1417 return -1;
1418 }
1419
1420 if (codec.pltype == -1) {
1421 // De-register the selected codec (RTP/RTCP module and ACM)
1422
1423 int8_t pltype(-1);
1424 CodecInst rxCodec = codec;
1425
1426 // Get payload type for the given codec
1427 rtp_payload_registry_->ReceivePayloadType(
1428 rxCodec.plname, rxCodec.plfreq, rxCodec.channels,
1429 (rxCodec.rate < 0) ? 0 : rxCodec.rate, &pltype);
1430 rxCodec.pltype = pltype;
1431
1432 if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0) {
1433 _engineStatisticsPtr->SetLastError(
1434 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1435 "SetRecPayloadType() RTP/RTCP-module deregistration "
1436 "failed");
1437 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001438 }
kwiberg55b97fe2016-01-28 05:22:45 -08001439 if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0) {
1440 _engineStatisticsPtr->SetLastError(
1441 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1442 "SetRecPayloadType() ACM deregistration failed - 1");
1443 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001444 }
kwiberg55b97fe2016-01-28 05:22:45 -08001445 return 0;
1446 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001447
kwiberg55b97fe2016-01-28 05:22:45 -08001448 if (rtp_receiver_->RegisterReceivePayload(
1449 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1450 (codec.rate < 0) ? 0 : codec.rate) != 0) {
1451 // First attempt to register failed => de-register and try again
kwibergc8d071e2016-04-06 12:22:38 -07001452 // TODO(kwiberg): Retrying is probably not necessary, since
1453 // AcmReceiver::AddCodec also retries.
kwiberg55b97fe2016-01-28 05:22:45 -08001454 rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001455 if (rtp_receiver_->RegisterReceivePayload(
kwiberg55b97fe2016-01-28 05:22:45 -08001456 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1457 (codec.rate < 0) ? 0 : codec.rate) != 0) {
1458 _engineStatisticsPtr->SetLastError(
1459 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1460 "SetRecPayloadType() RTP/RTCP-module registration failed");
1461 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001462 }
kwiberg55b97fe2016-01-28 05:22:45 -08001463 }
kwibergc8d071e2016-04-06 12:22:38 -07001464 if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
kwiberg55b97fe2016-01-28 05:22:45 -08001465 audio_coding_->UnregisterReceiveCodec(codec.pltype);
kwibergc8d071e2016-04-06 12:22:38 -07001466 if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
kwiberg55b97fe2016-01-28 05:22:45 -08001467 _engineStatisticsPtr->SetLastError(
1468 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1469 "SetRecPayloadType() ACM registration failed - 1");
1470 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001471 }
kwiberg55b97fe2016-01-28 05:22:45 -08001472 }
1473 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001474}
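// Hypothetical usage sketch: the pltype == -1 convention handled above turns
// SetRecPayloadType() into a de-registration. CodecInst field order
// {pltype, plname, plfreq, pacsize, channels, rate} follows its use in this
// file; the payload type 111 is only an example.
//
//   CodecInst opus = {111, "opus", 48000, 960, 2, 64000};
//   channel->SetRecPayloadType(opus);  // Register for reception.
//   opus.pltype = -1;
//   channel->SetRecPayloadType(opus);  // Looked up by name/rate, de-registered.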
1475
kwiberg55b97fe2016-01-28 05:22:45 -08001476int32_t Channel::GetRecPayloadType(CodecInst& codec) {
1477 int8_t payloadType(-1);
1478 if (rtp_payload_registry_->ReceivePayloadType(
1479 codec.plname, codec.plfreq, codec.channels,
1480 (codec.rate < 0) ? 0 : codec.rate, &payloadType) != 0) {
1481 _engineStatisticsPtr->SetLastError(
1482 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1483 "GetRecPayloadType() failed to retrieve RX payload type");
1484 return -1;
1485 }
1486 codec.pltype = payloadType;
1487 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001488}
1489
kwiberg55b97fe2016-01-28 05:22:45 -08001490int32_t Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency) {
1491 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1492 "Channel::SetSendCNPayloadType()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001493
kwiberg55b97fe2016-01-28 05:22:45 -08001494 CodecInst codec;
1495 int32_t samplingFreqHz(-1);
1496 const size_t kMono = 1;
1497 if (frequency == kFreq32000Hz)
1498 samplingFreqHz = 32000;
1499 else if (frequency == kFreq16000Hz)
1500 samplingFreqHz = 16000;
niklase@google.com470e71d2011-07-07 08:21:25 +00001501
kwiberg55b97fe2016-01-28 05:22:45 -08001502 if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1) {
1503 _engineStatisticsPtr->SetLastError(
1504 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1505 "SetSendCNPayloadType() failed to retrieve default CN codec "
1506 "settings");
1507 return -1;
1508 }
1509
1510 // Modify the payload type (must be set to dynamic range)
1511 codec.pltype = type;
1512
kwibergc8d071e2016-04-06 12:22:38 -07001513 if (!codec_manager_.RegisterEncoder(codec) ||
1514 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001515 _engineStatisticsPtr->SetLastError(
1516 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1517 "SetSendCNPayloadType() failed to register CN to ACM");
1518 return -1;
1519 }
1520
1521 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1522 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1523 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1524 _engineStatisticsPtr->SetLastError(
1525 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1526 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1527 "module");
1528 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001529 }
kwiberg55b97fe2016-01-28 05:22:45 -08001530 }
1531 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001532}
1533
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001534int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001535 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001536 "Channel::SetOpusMaxPlaybackRate()");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001537
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001538 if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001539 _engineStatisticsPtr->SetLastError(
1540 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001541 "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001542 return -1;
1543 }
1544 return 0;
1545}
1546
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001547int Channel::SetOpusDtx(bool enable_dtx) {
1548 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1549 "Channel::SetOpusDtx(%d)", enable_dtx);
Minyue Li092041c2015-05-11 12:19:35 +02001550 int ret = enable_dtx ? audio_coding_->EnableOpusDtx()
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001551 : audio_coding_->DisableOpusDtx();
1552 if (ret != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001553 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1554 kTraceError, "SetOpusDtx() failed");
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001555 return -1;
1556 }
1557 return 0;
1558}
1559
ivoc85228d62016-07-27 04:53:47 -07001560int Channel::GetOpusDtx(bool* enabled) {
1561 int success = -1;
1562 audio_coding_->QueryEncoder([&](AudioEncoder const* encoder) {
1563 if (encoder) {
1564 *enabled = encoder->GetDtx();
1565 success = 0;
1566 }
1567 });
1568 return success;
1569}
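// The QueryEncoder() callback pattern above generalizes to other read-only
// encoder queries. A sketch under the assumption that AudioEncoder exposes a
// SampleRateHz() accessor and that |acm| is a valid AudioCodingModule*; the
// lambda runs synchronously and |encoder| is null when no send codec is set.
//
//   int sample_rate_hz = -1;
//   acm->QueryEncoder([&](AudioEncoder const* encoder) {
//     if (encoder) {
//       sample_rate_hz = encoder->SampleRateHz();
//     }
//   });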
1570
mflodman3d7db262016-04-29 00:57:13 -07001571int32_t Channel::RegisterExternalTransport(Transport* transport) {
kwiberg55b97fe2016-01-28 05:22:45 -08001572 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00001573 "Channel::RegisterExternalTransport()");
1574
kwiberg55b97fe2016-01-28 05:22:45 -08001575 rtc::CritScope cs(&_callbackCritSect);
kwiberg55b97fe2016-01-28 05:22:45 -08001576 if (_externalTransport) {
1577 _engineStatisticsPtr->SetLastError(
1578 VE_INVALID_OPERATION, kTraceError,
1579 "RegisterExternalTransport() external transport already enabled");
1580 return -1;
1581 }
1582 _externalTransport = true;
mflodman3d7db262016-04-29 00:57:13 -07001583 _transportPtr = transport;
kwiberg55b97fe2016-01-28 05:22:45 -08001584 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001585}
1586
kwiberg55b97fe2016-01-28 05:22:45 -08001587int32_t Channel::DeRegisterExternalTransport() {
1588 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1589 "Channel::DeRegisterExternalTransport()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001590
kwiberg55b97fe2016-01-28 05:22:45 -08001591 rtc::CritScope cs(&_callbackCritSect);
mflodman3d7db262016-04-29 00:57:13 -07001592 if (_transportPtr) {
1593 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1594 "DeRegisterExternalTransport() all transport is disabled");
1595 } else {
kwiberg55b97fe2016-01-28 05:22:45 -08001596 _engineStatisticsPtr->SetLastError(
1597 VE_INVALID_OPERATION, kTraceWarning,
1598 "DeRegisterExternalTransport() external transport already "
1599 "disabled");
kwiberg55b97fe2016-01-28 05:22:45 -08001600 }
1601 _externalTransport = false;
1602 _transportPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08001603 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001604}
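// Hypothetical usage sketch: routing outgoing RTP/RTCP through a caller-owned
// transport. MyTransport is an assumed class implementing webrtc::Transport;
// only the pointer is stored, so the transport must outlive the registration.
//
//   MyTransport transport;
//   channel->RegisterExternalTransport(&transport);
//   // ... packets now leave through the registered transport ...
//   channel->DeRegisterExternalTransport();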
1605
mflodman3d7db262016-04-29 00:57:13 -07001606int32_t Channel::ReceivedRTPPacket(const uint8_t* received_packet,
kwiberg55b97fe2016-01-28 05:22:45 -08001607 size_t length,
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00001608 const PacketTime& packet_time) {
kwiberg55b97fe2016-01-28 05:22:45 -08001609 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001610 "Channel::ReceivedRTPPacket()");
1611
1612 // Store playout timestamp for the received RTP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00001613 UpdatePlayoutTimestamp(false);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001614
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00001615 RTPHeader header;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001616 if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
1617 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1618 "Incoming packet: invalid RTP header");
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00001619 return -1;
1620 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001621 header.payload_type_frequency =
1622 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001623 if (header.payload_type_frequency < 0)
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001624 return -1;
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001625 bool in_order = IsPacketInOrder(header);
kwiberg55b97fe2016-01-28 05:22:45 -08001626 rtp_receive_statistics_->IncomingPacket(
1627 header, length, IsPacketRetransmitted(header, in_order));
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001628 rtp_payload_registry_->SetIncomingPayloadType(header);
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00001629
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001630 return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001631}
1632
1633bool Channel::ReceivePacket(const uint8_t* packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001634 size_t packet_length,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001635 const RTPHeader& header,
1636 bool in_order) {
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001637 if (rtp_payload_registry_->IsRtx(header)) {
1638 return HandleRtxPacket(packet, packet_length, header);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001639 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001640 const uint8_t* payload = packet + header.headerLength;
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001641 assert(packet_length >= header.headerLength);
1642 size_t payload_length = packet_length - header.headerLength;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001643 PayloadUnion payload_specific;
1644 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001645 &payload_specific)) {
1646 return false;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001647 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001648 return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
1649 payload_specific, in_order);
1650}
1651
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001652bool Channel::HandleRtxPacket(const uint8_t* packet,
1653 size_t packet_length,
1654 const RTPHeader& header) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001655 if (!rtp_payload_registry_->IsRtx(header))
1656 return false;
1657
1658 // Remove the RTX header and parse the original RTP header.
1659 if (packet_length < header.headerLength)
1660 return false;
1661 if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
1662 return false;
1663 if (restored_packet_in_use_) {
1664 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1665 "Multiple RTX headers detected, dropping packet");
1666 return false;
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001667 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001668 if (!rtp_payload_registry_->RestoreOriginalPacket(
noahric65220a72015-10-14 11:29:49 -07001669 restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
1670 header)) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001671 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1672 "Incoming RTX packet: invalid RTP header");
1673 return false;
1674 }
1675 restored_packet_in_use_ = true;
noahric65220a72015-10-14 11:29:49 -07001676 bool ret = OnRecoveredPacket(restored_packet_, packet_length);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001677 restored_packet_in_use_ = false;
1678 return ret;
1679}
1680
1681bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1682 StreamStatistician* statistician =
1683 rtp_receive_statistics_->GetStatistician(header.ssrc);
1684 if (!statistician)
1685 return false;
1686 return statistician->IsPacketInOrder(header.sequenceNumber);
niklase@google.com470e71d2011-07-07 08:21:25 +00001687}
1688
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001689bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1690 bool in_order) const {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001691 // Retransmissions are handled separately if RTX is enabled.
1692 if (rtp_payload_registry_->RtxEnabled())
1693 return false;
1694 StreamStatistician* statistician =
1695 rtp_receive_statistics_->GetStatistician(header.ssrc);
1696 if (!statistician)
1697 return false;
1698 // Check if this is a retransmission.
pkasting@chromium.org16825b12015-01-12 21:51:21 +00001699 int64_t min_rtt = 0;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001700 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
kwiberg55b97fe2016-01-28 05:22:45 -08001701 return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001702}
1703
mflodman3d7db262016-04-29 00:57:13 -07001704int32_t Channel::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
kwiberg55b97fe2016-01-28 05:22:45 -08001705 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001706 "Channel::ReceivedRTCPPacket()");
1707 // Store playout timestamp for the received RTCP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00001708 UpdatePlayoutTimestamp(true);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001709
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001710 // Deliver RTCP packet to RTP/RTCP module for parsing
mflodman3d7db262016-04-29 00:57:13 -07001711 if (_rtpRtcpModule->IncomingRtcpPacket(data, length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001712 _engineStatisticsPtr->SetLastError(
1713 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
1714 "Channel::IncomingRTPPacket() RTCP packet is invalid");
1715 }
wu@webrtc.org82c4b852014-05-20 22:55:01 +00001716
Minyue2013aec2015-05-13 14:14:42 +02001717 int64_t rtt = GetRTT(true);
1718 if (rtt == 0) {
1719 // Waiting for valid RTT.
1720 return 0;
1721 }
Erik Språng737336d2016-07-29 12:59:36 +02001722
1723 int64_t nack_window_ms = rtt;
1724 if (nack_window_ms < kMinRetransmissionWindowMs) {
1725 nack_window_ms = kMinRetransmissionWindowMs;
1726 } else if (nack_window_ms > kMaxRetransmissionWindowMs) {
1727 nack_window_ms = kMaxRetransmissionWindowMs;
1728 }
1729 retransmission_rate_limiter_->SetWindowSize(nack_window_ms);
1730
Minyue2013aec2015-05-13 14:14:42 +02001731 uint32_t ntp_secs = 0;
1732 uint32_t ntp_frac = 0;
1733 uint32_t rtp_timestamp = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001734 if (0 !=
1735 _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
1736 &rtp_timestamp)) {
Minyue2013aec2015-05-13 14:14:42 +02001737 // Waiting for RTCP.
1738 return 0;
1739 }
1740
stefan@webrtc.org8e24d872014-09-02 18:58:24 +00001741 {
tommi31fc21f2016-01-21 10:37:37 -08001742 rtc::CritScope lock(&ts_stats_lock_);
minyue@webrtc.org2c0cdbc2014-10-09 10:52:43 +00001743 ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
stefan@webrtc.org8e24d872014-09-02 18:58:24 +00001744 }
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001745 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001746}
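// Sketch of the retransmission-window clamping performed above (hypothetical
// helper; assumes both window constants are int64_t, as
// kMaxRetransmissionWindowMs is, and that <algorithm> is already included).
//
//   int64_t ClampNackWindowMs(int64_t rtt_ms) {
//     return std::min(std::max(rtt_ms, kMinRetransmissionWindowMs),
//                     kMaxRetransmissionWindowMs);
//   }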
1747
niklase@google.com470e71d2011-07-07 08:21:25 +00001748int Channel::StartPlayingFileLocally(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001749 bool loop,
1750 FileFormats format,
1751 int startPosition,
1752 float volumeScaling,
1753 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001754 const CodecInst* codecInst) {
1755 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1756 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
1757 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
1758 "stopPosition=%d)",
1759 fileName, loop, format, volumeScaling, startPosition,
1760 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001761
kwiberg55b97fe2016-01-28 05:22:45 -08001762 if (channel_state_.Get().output_file_playing) {
1763 _engineStatisticsPtr->SetLastError(
1764 VE_ALREADY_PLAYING, kTraceError,
1765 "StartPlayingFileLocally() is already playing");
1766 return -1;
1767 }
1768
1769 {
1770 rtc::CritScope cs(&_fileCritSect);
1771
1772 if (_outputFilePlayerPtr) {
1773 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1774 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1775 _outputFilePlayerPtr = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +00001776 }
1777
kwiberg55b97fe2016-01-28 05:22:45 -08001778 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1779 _outputFilePlayerId, (const FileFormats)format);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001780
kwiberg55b97fe2016-01-28 05:22:45 -08001781 if (_outputFilePlayerPtr == NULL) {
1782 _engineStatisticsPtr->SetLastError(
1783 VE_INVALID_ARGUMENT, kTraceError,
1784 "StartPlayingFileLocally() filePlayer format is not correct");
1785 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001786 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001787
kwiberg55b97fe2016-01-28 05:22:45 -08001788 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00001789
kwiberg55b97fe2016-01-28 05:22:45 -08001790 if (_outputFilePlayerPtr->StartPlayingFile(
1791 fileName, loop, startPosition, volumeScaling, notificationTime,
1792 stopPosition, (const CodecInst*)codecInst) != 0) {
1793 _engineStatisticsPtr->SetLastError(
1794 VE_BAD_FILE, kTraceError,
1795 "StartPlayingFile() failed to start file playout");
1796 _outputFilePlayerPtr->StopPlayingFile();
1797 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1798 _outputFilePlayerPtr = NULL;
1799 return -1;
1800 }
1801 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1802 channel_state_.SetOutputFilePlaying(true);
1803 }
1804
1805 if (RegisterFilePlayingToMixer() != 0)
1806 return -1;
1807
1808 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001809}
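// Hypothetical usage sketch: loop a local 16 kHz PCM prompt into this
// channel's playout mix. The file path is an example, and passing NULL for
// |codecInst| is assumed to be acceptable for the PCM file formats.
//
//   channel->StartPlayingFileLocally("/tmp/prompt.pcm", true /*loop*/,
//                                    kFileFormatPcm16kHzFile,
//                                    0 /*startPosition*/, 1.0f /*volumeScaling*/,
//                                    0 /*stopPosition*/, NULL /*codecInst*/);
//   // ... later ...
//   channel->StopPlayingFileLocally();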
1810
1811int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001812 FileFormats format,
1813 int startPosition,
1814 float volumeScaling,
1815 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001816 const CodecInst* codecInst) {
1817 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1818 "Channel::StartPlayingFileLocally(format=%d,"
1819 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1820 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001821
kwiberg55b97fe2016-01-28 05:22:45 -08001822 if (stream == NULL) {
1823 _engineStatisticsPtr->SetLastError(
1824 VE_BAD_FILE, kTraceError,
1825 "StartPlayingFileLocally() NULL as input stream");
1826 return -1;
1827 }
1828
1829 if (channel_state_.Get().output_file_playing) {
1830 _engineStatisticsPtr->SetLastError(
1831 VE_ALREADY_PLAYING, kTraceError,
1832 "StartPlayingFileLocally() is already playing");
1833 return -1;
1834 }
1835
1836 {
1837 rtc::CritScope cs(&_fileCritSect);
1838
1839 // Destroy the old instance
1840 if (_outputFilePlayerPtr) {
1841 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1842 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1843 _outputFilePlayerPtr = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +00001844 }
1845
kwiberg55b97fe2016-01-28 05:22:45 -08001846 // Create the instance
1847 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1848 _outputFilePlayerId, (const FileFormats)format);
niklase@google.com470e71d2011-07-07 08:21:25 +00001849
kwiberg55b97fe2016-01-28 05:22:45 -08001850 if (_outputFilePlayerPtr == NULL) {
1851 _engineStatisticsPtr->SetLastError(
1852 VE_INVALID_ARGUMENT, kTraceError,
1853 "StartPlayingFileLocally() filePlayer format isnot correct");
1854 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001855 }
1856
kwiberg55b97fe2016-01-28 05:22:45 -08001857 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001858
kwiberg55b97fe2016-01-28 05:22:45 -08001859 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1860 volumeScaling, notificationTime,
1861 stopPosition, codecInst) != 0) {
1862 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1863 "StartPlayingFile() failed to "
1864 "start file playout");
1865 _outputFilePlayerPtr->StopPlayingFile();
1866 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1867 _outputFilePlayerPtr = NULL;
1868 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001869 }
kwiberg55b97fe2016-01-28 05:22:45 -08001870 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1871 channel_state_.SetOutputFilePlaying(true);
1872 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001873
kwiberg55b97fe2016-01-28 05:22:45 -08001874 if (RegisterFilePlayingToMixer() != 0)
1875 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001876
kwiberg55b97fe2016-01-28 05:22:45 -08001877 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001878}
1879
kwiberg55b97fe2016-01-28 05:22:45 -08001880int Channel::StopPlayingFileLocally() {
1881 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1882 "Channel::StopPlayingFileLocally()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001883
kwiberg55b97fe2016-01-28 05:22:45 -08001884 if (!channel_state_.Get().output_file_playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001885 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001886 }
1887
1888 {
1889 rtc::CritScope cs(&_fileCritSect);
1890
1891 if (_outputFilePlayerPtr->StopPlayingFile() != 0) {
1892 _engineStatisticsPtr->SetLastError(
1893 VE_STOP_RECORDING_FAILED, kTraceError,
1894 "StopPlayingFile() could not stop playing");
1895 return -1;
1896 }
1897 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1898 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1899 _outputFilePlayerPtr = NULL;
1900 channel_state_.SetOutputFilePlaying(false);
1901 }
1902 // _fileCritSect cannot be taken while calling
1903 // SetAnonymousMixabilityStatus. Refer to the comments in
1904 // StartPlayingFileLocally(const char* ...) for more details.
1905 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0) {
1906 _engineStatisticsPtr->SetLastError(
1907 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1908 "StopPlayingFile() failed to stop participant from playing as"
1909 "file in the mixer");
1910 return -1;
1911 }
1912
1913 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001914}
1915
kwiberg55b97fe2016-01-28 05:22:45 -08001916int Channel::IsPlayingFileLocally() const {
1917 return channel_state_.Get().output_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001918}
1919
kwiberg55b97fe2016-01-28 05:22:45 -08001920int Channel::RegisterFilePlayingToMixer() {
1921 // Return success without registering file playing to the mixer if:
1922 // 1. a file starts playing before playout has started on this channel, or
1923 // 2. playout starts without a file playing on this channel.
1924 if (!channel_state_.Get().playing ||
1925 !channel_state_.Get().output_file_playing) {
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001926 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001927 }
1928
1929 // |_fileCritSect| cannot be taken while calling
1930 // SetAnonymousMixabilityStatus() since as soon as the participant is added
1931 // frames can be pulled by the mixer. Since the frames are generated from
1932 // the file, _fileCritSect will be taken. This would result in a deadlock.
1933 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) {
1934 channel_state_.SetOutputFilePlaying(false);
1935 rtc::CritScope cs(&_fileCritSect);
1936 _engineStatisticsPtr->SetLastError(
1937 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1938 "StartPlayingFile() failed to add participant as file to mixer");
1939 _outputFilePlayerPtr->StopPlayingFile();
1940 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1941 _outputFilePlayerPtr = NULL;
1942 return -1;
1943 }
1944
1945 return 0;
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001946}
1947
niklase@google.com470e71d2011-07-07 08:21:25 +00001948int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001949 bool loop,
1950 FileFormats format,
1951 int startPosition,
1952 float volumeScaling,
1953 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001954 const CodecInst* codecInst) {
1955 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1956 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
1957 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
1958 "stopPosition=%d)",
1959 fileName, loop, format, volumeScaling, startPosition,
1960 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001961
kwiberg55b97fe2016-01-28 05:22:45 -08001962 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001963
kwiberg55b97fe2016-01-28 05:22:45 -08001964 if (channel_state_.Get().input_file_playing) {
1965 _engineStatisticsPtr->SetLastError(
1966 VE_ALREADY_PLAYING, kTraceWarning,
1967 "StartPlayingFileAsMicrophone() filePlayer is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00001968 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001969 }
1970
1971 // Destroy the old instance
1972 if (_inputFilePlayerPtr) {
1973 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1974 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1975 _inputFilePlayerPtr = NULL;
1976 }
1977
1978 // Create the instance
1979 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
1980 (const FileFormats)format);
1981
1982 if (_inputFilePlayerPtr == NULL) {
1983 _engineStatisticsPtr->SetLastError(
1984 VE_INVALID_ARGUMENT, kTraceError,
1985 "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
1986 return -1;
1987 }
1988
1989 const uint32_t notificationTime(0);
1990
1991 if (_inputFilePlayerPtr->StartPlayingFile(
1992 fileName, loop, startPosition, volumeScaling, notificationTime,
1993 stopPosition, (const CodecInst*)codecInst) != 0) {
1994 _engineStatisticsPtr->SetLastError(
1995 VE_BAD_FILE, kTraceError,
1996 "StartPlayingFile() failed to start file playout");
1997 _inputFilePlayerPtr->StopPlayingFile();
1998 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1999 _inputFilePlayerPtr = NULL;
2000 return -1;
2001 }
2002 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2003 channel_state_.SetInputFilePlaying(true);
2004
2005 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002006}
2007
2008int Channel::StartPlayingFileAsMicrophone(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002009 FileFormats format,
2010 int startPosition,
2011 float volumeScaling,
2012 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08002013 const CodecInst* codecInst) {
2014 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2015 "Channel::StartPlayingFileAsMicrophone(format=%d, "
2016 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2017 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00002018
kwiberg55b97fe2016-01-28 05:22:45 -08002019 if (stream == NULL) {
2020 _engineStatisticsPtr->SetLastError(
2021 VE_BAD_FILE, kTraceError,
2022 "StartPlayingFileAsMicrophone NULL as input stream");
2023 return -1;
2024 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002025
kwiberg55b97fe2016-01-28 05:22:45 -08002026 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00002027
kwiberg55b97fe2016-01-28 05:22:45 -08002028 if (channel_state_.Get().input_file_playing) {
2029 _engineStatisticsPtr->SetLastError(
2030 VE_ALREADY_PLAYING, kTraceWarning,
2031 "StartPlayingFileAsMicrophone() is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00002032 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002033 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002034
kwiberg55b97fe2016-01-28 05:22:45 -08002035 // Destroy the old instance
2036 if (_inputFilePlayerPtr) {
niklase@google.com470e71d2011-07-07 08:21:25 +00002037 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2038 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2039 _inputFilePlayerPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08002040 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002041
kwiberg55b97fe2016-01-28 05:22:45 -08002042 // Create the instance
2043 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
2044 (const FileFormats)format);
2045
2046 if (_inputFilePlayerPtr == NULL) {
2047 _engineStatisticsPtr->SetLastError(
2048 VE_INVALID_ARGUMENT, kTraceError,
2049 "StartPlayingInputFile() filePlayer format isnot correct");
2050 return -1;
2051 }
2052
2053 const uint32_t notificationTime(0);
2054
2055 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2056 volumeScaling, notificationTime,
2057 stopPosition, codecInst) != 0) {
2058 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2059 "StartPlayingFile() failed to start "
2060 "file playout");
2061 _inputFilePlayerPtr->StopPlayingFile();
2062 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2063 _inputFilePlayerPtr = NULL;
2064 return -1;
2065 }
2066
2067 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2068 channel_state_.SetInputFilePlaying(true);
2069
2070 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002071}
2072
kwiberg55b97fe2016-01-28 05:22:45 -08002073int Channel::StopPlayingFileAsMicrophone() {
2074 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2075 "Channel::StopPlayingFileAsMicrophone()");
2076
2077 rtc::CritScope cs(&_fileCritSect);
2078
2079 if (!channel_state_.Get().input_file_playing) {
2080 return 0;
2081 }
2082
2083 if (_inputFilePlayerPtr->StopPlayingFile() != 0) {
2084 _engineStatisticsPtr->SetLastError(
2085 VE_STOP_RECORDING_FAILED, kTraceError,
2086 "StopPlayingFile() could not stop playing");
2087 return -1;
2088 }
2089 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2090 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2091 _inputFilePlayerPtr = NULL;
2092 channel_state_.SetInputFilePlaying(false);
2093
2094 return 0;
2095}
2096
2097int Channel::IsPlayingFileAsMicrophone() const {
2098 return channel_state_.Get().input_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00002099}
2100
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00002101int Channel::StartRecordingPlayout(const char* fileName,
kwiberg55b97fe2016-01-28 05:22:45 -08002102 const CodecInst* codecInst) {
2103 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2104 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
niklase@google.com470e71d2011-07-07 08:21:25 +00002105
kwiberg55b97fe2016-01-28 05:22:45 -08002106 if (_outputFileRecording) {
2107 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
2108 "StartRecordingPlayout() is already recording");
niklase@google.com470e71d2011-07-07 08:21:25 +00002109 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002110 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002111
kwiberg55b97fe2016-01-28 05:22:45 -08002112 FileFormats format;
2113 const uint32_t notificationTime(0); // Not supported in VoE
2114 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
niklase@google.com470e71d2011-07-07 08:21:25 +00002115
kwiberg55b97fe2016-01-28 05:22:45 -08002116 if ((codecInst != NULL) &&
2117 ((codecInst->channels < 1) || (codecInst->channels > 2))) {
2118 _engineStatisticsPtr->SetLastError(
2119 VE_BAD_ARGUMENT, kTraceError,
2120 "StartRecordingPlayout() invalid compression");
2121 return (-1);
2122 }
2123 if (codecInst == NULL) {
2124 format = kFileFormatPcm16kHzFile;
2125 codecInst = &dummyCodec;
2126 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
2127 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
2128 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
2129 format = kFileFormatWavFile;
2130 } else {
2131 format = kFileFormatCompressedFile;
2132 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002133
kwiberg55b97fe2016-01-28 05:22:45 -08002134 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002135
kwiberg55b97fe2016-01-28 05:22:45 -08002136 // Destroy the old instance
2137 if (_outputFileRecorderPtr) {
niklase@google.com470e71d2011-07-07 08:21:25 +00002138 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2139 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2140 _outputFileRecorderPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08002141 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002142
kwiberg55b97fe2016-01-28 05:22:45 -08002143 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2144 _outputFileRecorderId, (const FileFormats)format);
2145 if (_outputFileRecorderPtr == NULL) {
2146 _engineStatisticsPtr->SetLastError(
2147 VE_INVALID_ARGUMENT, kTraceError,
2148 "StartRecordingPlayout() fileRecorder format isnot correct");
2149 return -1;
2150 }
2151
2152 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2153 fileName, (const CodecInst&)*codecInst, notificationTime) != 0) {
2154 _engineStatisticsPtr->SetLastError(
2155 VE_BAD_FILE, kTraceError,
2156 "StartRecordingAudioFile() failed to start file recording");
2157 _outputFileRecorderPtr->StopRecording();
2158 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2159 _outputFileRecorderPtr = NULL;
2160 return -1;
2161 }
2162 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2163 _outputFileRecording = true;
2164
2165 return 0;
2166}
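// Hypothetical usage sketch: record this channel's playout to a WAV file.
// Passing an explicit L16/PCMU/PCMA codec selects kFileFormatWavFile above;
// a NULL |codecInst| instead falls back to the 16 kHz L16 default and
// kFileFormatPcm16kHzFile. The file path is an example.
//
//   CodecInst l16 = {100, "L16", 16000, 320, 1, 320000};
//   channel->StartRecordingPlayout("/tmp/playout.wav", &l16);
//   // ... later ...
//   channel->StopRecordingPlayout();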
2167
2168int Channel::StartRecordingPlayout(OutStream* stream,
2169 const CodecInst* codecInst) {
2170 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2171 "Channel::StartRecordingPlayout()");
2172
2173 if (_outputFileRecording) {
2174 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
2175 "StartRecordingPlayout() is already recording");
niklase@google.com470e71d2011-07-07 08:21:25 +00002176 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002177 }
2178
2179 FileFormats format;
2180 const uint32_t notificationTime(0); // Not supported in VoE
2181 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2182
2183 if (codecInst != NULL && codecInst->channels != 1) {
2184 _engineStatisticsPtr->SetLastError(
2185 VE_BAD_ARGUMENT, kTraceError,
2186 "StartRecordingPlayout() invalid compression");
2187 return (-1);
2188 }
2189 if (codecInst == NULL) {
2190 format = kFileFormatPcm16kHzFile;
2191 codecInst = &dummyCodec;
2192 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
2193 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
2194 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
2195 format = kFileFormatWavFile;
2196 } else {
2197 format = kFileFormatCompressedFile;
2198 }
2199
2200 rtc::CritScope cs(&_fileCritSect);
2201
2202 // Destroy the old instance
2203 if (_outputFileRecorderPtr) {
2204 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2205 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2206 _outputFileRecorderPtr = NULL;
2207 }
2208
2209 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2210 _outputFileRecorderId, (const FileFormats)format);
2211 if (_outputFileRecorderPtr == NULL) {
2212 _engineStatisticsPtr->SetLastError(
2213 VE_INVALID_ARGUMENT, kTraceError,
2214 "StartRecordingPlayout() fileRecorder format isnot correct");
2215 return -1;
2216 }
2217
2218 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2219 notificationTime) != 0) {
2220 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2221 "StartRecordingPlayout() failed to "
2222 "start file recording");
2223 _outputFileRecorderPtr->StopRecording();
2224 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2225 _outputFileRecorderPtr = NULL;
2226 return -1;
2227 }
2228
2229 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2230 _outputFileRecording = true;
2231
2232 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002233}
2234
kwiberg55b97fe2016-01-28 05:22:45 -08002235int Channel::StopRecordingPlayout() {
2236 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
2237 "Channel::StopRecordingPlayout()");
2238
2239 if (!_outputFileRecording) {
2240 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
2241 "StopRecordingPlayout() isnot recording");
2242 return -1;
2243 }
2244
2245 rtc::CritScope cs(&_fileCritSect);
2246
2247 if (_outputFileRecorderPtr->StopRecording() != 0) {
2248 _engineStatisticsPtr->SetLastError(
2249 VE_STOP_RECORDING_FAILED, kTraceError,
2250 "StopRecording() could not stop recording");
2251 return (-1);
2252 }
2253 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2254 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2255 _outputFileRecorderPtr = NULL;
2256 _outputFileRecording = false;
2257
2258 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002259}
2260
kwiberg55b97fe2016-01-28 05:22:45 -08002261void Channel::SetMixWithMicStatus(bool mix) {
2262 rtc::CritScope cs(&_fileCritSect);
2263 _mixFileWithMicrophone = mix;
niklase@google.com470e71d2011-07-07 08:21:25 +00002264}
2265
kwiberg55b97fe2016-01-28 05:22:45 -08002266int Channel::GetSpeechOutputLevel(uint32_t& level) const {
2267 int8_t currentLevel = _outputAudioLevel.Level();
2268 level = static_cast<int32_t>(currentLevel);
2269 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002270}
2271
kwiberg55b97fe2016-01-28 05:22:45 -08002272int Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const {
2273 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2274 level = static_cast<int32_t>(currentLevel);
2275 return 0;
2276}
2277
solenberg1c2af8e2016-03-24 10:36:00 -07002278int Channel::SetInputMute(bool enable) {
kwiberg55b97fe2016-01-28 05:22:45 -08002279 rtc::CritScope cs(&volume_settings_critsect_);
2280 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002281 "Channel::SetMute(enable=%d)", enable);
solenberg1c2af8e2016-03-24 10:36:00 -07002282 input_mute_ = enable;
kwiberg55b97fe2016-01-28 05:22:45 -08002283 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002284}
2285
solenberg1c2af8e2016-03-24 10:36:00 -07002286bool Channel::InputMute() const {
kwiberg55b97fe2016-01-28 05:22:45 -08002287 rtc::CritScope cs(&volume_settings_critsect_);
solenberg1c2af8e2016-03-24 10:36:00 -07002288 return input_mute_;
niklase@google.com470e71d2011-07-07 08:21:25 +00002289}
2290
kwiberg55b97fe2016-01-28 05:22:45 -08002291int Channel::SetOutputVolumePan(float left, float right) {
2292 rtc::CritScope cs(&volume_settings_critsect_);
2293 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002294 "Channel::SetOutputVolumePan()");
kwiberg55b97fe2016-01-28 05:22:45 -08002295 _panLeft = left;
2296 _panRight = right;
2297 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002298}
2299
kwiberg55b97fe2016-01-28 05:22:45 -08002300int Channel::GetOutputVolumePan(float& left, float& right) const {
2301 rtc::CritScope cs(&volume_settings_critsect_);
2302 left = _panLeft;
2303 right = _panRight;
2304 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002305}
2306
kwiberg55b97fe2016-01-28 05:22:45 -08002307int Channel::SetChannelOutputVolumeScaling(float scaling) {
2308 rtc::CritScope cs(&volume_settings_critsect_);
2309 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002310 "Channel::SetChannelOutputVolumeScaling()");
kwiberg55b97fe2016-01-28 05:22:45 -08002311 _outputGain = scaling;
2312 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002313}
2314
kwiberg55b97fe2016-01-28 05:22:45 -08002315int Channel::GetChannelOutputVolumeScaling(float& scaling) const {
2316 rtc::CritScope cs(&volume_settings_critsect_);
2317 scaling = _outputGain;
2318 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002319}
2320
solenberg8842c3e2016-03-11 03:06:41 -08002321int Channel::SendTelephoneEventOutband(int event, int duration_ms) {
kwiberg55b97fe2016-01-28 05:22:45 -08002322 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
solenberg8842c3e2016-03-11 03:06:41 -08002323 "Channel::SendTelephoneEventOutband(...)");
2324 RTC_DCHECK_LE(0, event);
2325 RTC_DCHECK_GE(255, event);
2326 RTC_DCHECK_LE(0, duration_ms);
2327 RTC_DCHECK_GE(65535, duration_ms);
kwiberg55b97fe2016-01-28 05:22:45 -08002328 if (!Sending()) {
2329 return -1;
2330 }
solenberg8842c3e2016-03-11 03:06:41 -08002331 if (_rtpRtcpModule->SendTelephoneEventOutband(
2332 event, duration_ms, kTelephoneEventAttenuationdB) != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08002333 _engineStatisticsPtr->SetLastError(
2334 VE_SEND_DTMF_FAILED, kTraceWarning,
2335 "SendTelephoneEventOutband() failed to send event");
2336 return -1;
2337 }
2338 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002339}
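// Hypothetical usage sketch: send DTMF digit 5 for 160 ms. The DCHECKs above
// bound |event| to [0, 255] and |duration_ms| to [0, 65535], and the call
// returns -1 unless the channel is sending; the payload type 106 used with
// SetSendTelephoneEventPayloadType() is only an example value.
//
//   channel->SetSendTelephoneEventPayloadType(106);
//   channel->SendTelephoneEventOutband(5 /*event*/, 160 /*duration_ms*/);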
2340
solenberg31642aa2016-03-14 08:00:37 -07002341int Channel::SetSendTelephoneEventPayloadType(int payload_type) {
kwiberg55b97fe2016-01-28 05:22:45 -08002342 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002343 "Channel::SetSendTelephoneEventPayloadType()");
solenberg31642aa2016-03-14 08:00:37 -07002344 RTC_DCHECK_LE(0, payload_type);
2345 RTC_DCHECK_GE(127, payload_type);
2346 CodecInst codec = {0};
kwiberg55b97fe2016-01-28 05:22:45 -08002347 codec.plfreq = 8000;
solenberg31642aa2016-03-14 08:00:37 -07002348 codec.pltype = payload_type;
kwiberg55b97fe2016-01-28 05:22:45 -08002349 memcpy(codec.plname, "telephone-event", 16);
2350 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2351 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
2352 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2353 _engineStatisticsPtr->SetLastError(
2354 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2355 "SetSendTelephoneEventPayloadType() failed to register send"
2356 "payload type");
2357 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002358 }
kwiberg55b97fe2016-01-28 05:22:45 -08002359 }
kwiberg55b97fe2016-01-28 05:22:45 -08002360 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002361}
2362
kwiberg55b97fe2016-01-28 05:22:45 -08002363int Channel::UpdateRxVadDetection(AudioFrame& audioFrame) {
2364 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2365 "Channel::UpdateRxVadDetection()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002366
kwiberg55b97fe2016-01-28 05:22:45 -08002367 int vadDecision = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002368
kwiberg55b97fe2016-01-28 05:22:45 -08002369 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002370
kwiberg55b97fe2016-01-28 05:22:45 -08002371 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr) {
2372 OnRxVadDetected(vadDecision);
2373 _oldVadDecision = vadDecision;
2374 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002375
kwiberg55b97fe2016-01-28 05:22:45 -08002376 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2377 "Channel::UpdateRxVadDetection() => vadDecision=%d",
2378 vadDecision);
2379 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002380}
2381
kwiberg55b97fe2016-01-28 05:22:45 -08002382int Channel::RegisterRxVadObserver(VoERxVadCallback& observer) {
2383 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2384 "Channel::RegisterRxVadObserver()");
2385 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002386
kwiberg55b97fe2016-01-28 05:22:45 -08002387 if (_rxVadObserverPtr) {
2388 _engineStatisticsPtr->SetLastError(
2389 VE_INVALID_OPERATION, kTraceError,
2390 "RegisterRxVadObserver() observer already enabled");
2391 return -1;
2392 }
2393 _rxVadObserverPtr = &observer;
2394 _RxVadDetection = true;
2395 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002396}
2397
kwiberg55b97fe2016-01-28 05:22:45 -08002398int Channel::DeRegisterRxVadObserver() {
2399 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2400 "Channel::DeRegisterRxVadObserver()");
2401 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002402
kwiberg55b97fe2016-01-28 05:22:45 -08002403 if (!_rxVadObserverPtr) {
2404 _engineStatisticsPtr->SetLastError(
2405 VE_INVALID_OPERATION, kTraceWarning,
2406 "DeRegisterRxVadObserver() observer already disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002407 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002408 }
2409 _rxVadObserverPtr = NULL;
2410 _RxVadDetection = false;
2411 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002412}
2413
kwiberg55b97fe2016-01-28 05:22:45 -08002414int Channel::VoiceActivityIndicator(int& activity) {
2415 activity = _sendFrameType;
2416 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002417}
2418
2419#ifdef WEBRTC_VOICE_ENGINE_AGC
2420
kwiberg55b97fe2016-01-28 05:22:45 -08002421int Channel::SetRxAgcStatus(bool enable, AgcModes mode) {
2422 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2423 "Channel::SetRxAgcStatus(enable=%d, mode=%d)", (int)enable,
2424 (int)mode);
niklase@google.com470e71d2011-07-07 08:21:25 +00002425
kwiberg55b97fe2016-01-28 05:22:45 -08002426 GainControl::Mode agcMode = kDefaultRxAgcMode;
2427 switch (mode) {
2428 case kAgcDefault:
2429 break;
2430 case kAgcUnchanged:
2431 agcMode = rx_audioproc_->gain_control()->mode();
2432 break;
2433 case kAgcFixedDigital:
2434 agcMode = GainControl::kFixedDigital;
2435 break;
2436 case kAgcAdaptiveDigital:
2437 agcMode = GainControl::kAdaptiveDigital;
2438 break;
2439 default:
2440 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
2441 "SetRxAgcStatus() invalid Agc mode");
2442 return -1;
2443 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002444
kwiberg55b97fe2016-01-28 05:22:45 -08002445 if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0) {
2446 _engineStatisticsPtr->SetLastError(
2447 VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc mode");
2448 return -1;
2449 }
2450 if (rx_audioproc_->gain_control()->Enable(enable) != 0) {
2451 _engineStatisticsPtr->SetLastError(
2452 VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc state");
2453 return -1;
2454 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002455
kwiberg55b97fe2016-01-28 05:22:45 -08002456 _rxAgcIsEnabled = enable;
2457 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
niklase@google.com470e71d2011-07-07 08:21:25 +00002458
kwiberg55b97fe2016-01-28 05:22:45 -08002459 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002460}
2461
kwiberg55b97fe2016-01-28 05:22:45 -08002462int Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode) {
2463 bool enable = rx_audioproc_->gain_control()->is_enabled();
2464 GainControl::Mode agcMode = rx_audioproc_->gain_control()->mode();
niklase@google.com470e71d2011-07-07 08:21:25 +00002465
kwiberg55b97fe2016-01-28 05:22:45 -08002466 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002467
kwiberg55b97fe2016-01-28 05:22:45 -08002468 switch (agcMode) {
2469 case GainControl::kFixedDigital:
2470 mode = kAgcFixedDigital;
2471 break;
2472 case GainControl::kAdaptiveDigital:
2473 mode = kAgcAdaptiveDigital;
2474 break;
2475 default:
2476 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceError,
2477 "GetRxAgcStatus() invalid Agc mode");
2478 return -1;
2479 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002480
kwiberg55b97fe2016-01-28 05:22:45 -08002481 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002482}
2483
kwiberg55b97fe2016-01-28 05:22:45 -08002484int Channel::SetRxAgcConfig(AgcConfig config) {
2485 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2486 "Channel::SetRxAgcConfig()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002487
kwiberg55b97fe2016-01-28 05:22:45 -08002488 if (rx_audioproc_->gain_control()->set_target_level_dbfs(
2489 config.targetLeveldBOv) != 0) {
2490 _engineStatisticsPtr->SetLastError(
2491 VE_APM_ERROR, kTraceError,
2492         "SetRxAgcConfig() failed to set target peak |level| "
2493 "(or envelope) of the Agc");
2494 return -1;
2495 }
2496 if (rx_audioproc_->gain_control()->set_compression_gain_db(
2497 config.digitalCompressionGaindB) != 0) {
2498 _engineStatisticsPtr->SetLastError(
2499 VE_APM_ERROR, kTraceError,
2500 "SetRxAgcConfig() failed to set the range in |gain| the"
2501 " digital compression stage may apply");
2502 return -1;
2503 }
2504 if (rx_audioproc_->gain_control()->enable_limiter(config.limiterEnable) !=
2505 0) {
2506 _engineStatisticsPtr->SetLastError(
2507 VE_APM_ERROR, kTraceError,
2508 "SetRxAgcConfig() failed to set hard limiter to the signal");
2509 return -1;
2510 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002511
kwiberg55b97fe2016-01-28 05:22:45 -08002512 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002513}
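// Illustrative sketch only; the values below are assumptions, not taken from
// this file. A caller typically fills the same AgcConfig fields that
// GetRxAgcConfig() reads back:
//
//   webrtc::AgcConfig config;
//   config.targetLeveldBOv = 3;           // target level, dB below full scale
//   config.digitalCompressionGaindB = 9;  // maximum digital gain to apply
//   config.limiterEnable = true;
//   channel->SetRxAgcConfig(config);      // |channel| is a voe::Channel*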
2514
kwiberg55b97fe2016-01-28 05:22:45 -08002515int Channel::GetRxAgcConfig(AgcConfig& config) {
2516 config.targetLeveldBOv = rx_audioproc_->gain_control()->target_level_dbfs();
2517 config.digitalCompressionGaindB =
2518 rx_audioproc_->gain_control()->compression_gain_db();
2519 config.limiterEnable = rx_audioproc_->gain_control()->is_limiter_enabled();
niklase@google.com470e71d2011-07-07 08:21:25 +00002520
kwiberg55b97fe2016-01-28 05:22:45 -08002521 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002522}
2523
kwiberg55b97fe2016-01-28 05:22:45 -08002524#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
niklase@google.com470e71d2011-07-07 08:21:25 +00002525
2526#ifdef WEBRTC_VOICE_ENGINE_NR
2527
kwiberg55b97fe2016-01-28 05:22:45 -08002528int Channel::SetRxNsStatus(bool enable, NsModes mode) {
2529 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2530 "Channel::SetRxNsStatus(enable=%d, mode=%d)", (int)enable,
2531 (int)mode);
niklase@google.com470e71d2011-07-07 08:21:25 +00002532
kwiberg55b97fe2016-01-28 05:22:45 -08002533 NoiseSuppression::Level nsLevel = kDefaultNsMode;
2534 switch (mode) {
2535 case kNsDefault:
2536 break;
2537 case kNsUnchanged:
2538 nsLevel = rx_audioproc_->noise_suppression()->level();
2539 break;
2540 case kNsConference:
2541 nsLevel = NoiseSuppression::kHigh;
2542 break;
2543 case kNsLowSuppression:
2544 nsLevel = NoiseSuppression::kLow;
2545 break;
2546 case kNsModerateSuppression:
2547 nsLevel = NoiseSuppression::kModerate;
2548 break;
2549 case kNsHighSuppression:
2550 nsLevel = NoiseSuppression::kHigh;
2551 break;
2552 case kNsVeryHighSuppression:
2553 nsLevel = NoiseSuppression::kVeryHigh;
2554 break;
2555 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002556
kwiberg55b97fe2016-01-28 05:22:45 -08002557 if (rx_audioproc_->noise_suppression()->set_level(nsLevel) != 0) {
2558 _engineStatisticsPtr->SetLastError(
2559 VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS level");
2560 return -1;
2561 }
2562 if (rx_audioproc_->noise_suppression()->Enable(enable) != 0) {
2563 _engineStatisticsPtr->SetLastError(
2564 VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS state");
2565 return -1;
2566 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002567
kwiberg55b97fe2016-01-28 05:22:45 -08002568 _rxNsIsEnabled = enable;
2569 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
niklase@google.com470e71d2011-07-07 08:21:25 +00002570
kwiberg55b97fe2016-01-28 05:22:45 -08002571 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002572}
2573
kwiberg55b97fe2016-01-28 05:22:45 -08002574int Channel::GetRxNsStatus(bool& enabled, NsModes& mode) {
2575 bool enable = rx_audioproc_->noise_suppression()->is_enabled();
2576 NoiseSuppression::Level ncLevel = rx_audioproc_->noise_suppression()->level();
niklase@google.com470e71d2011-07-07 08:21:25 +00002577
kwiberg55b97fe2016-01-28 05:22:45 -08002578 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002579
kwiberg55b97fe2016-01-28 05:22:45 -08002580 switch (ncLevel) {
2581 case NoiseSuppression::kLow:
2582 mode = kNsLowSuppression;
2583 break;
2584 case NoiseSuppression::kModerate:
2585 mode = kNsModerateSuppression;
2586 break;
2587 case NoiseSuppression::kHigh:
2588 mode = kNsHighSuppression;
2589 break;
2590 case NoiseSuppression::kVeryHigh:
2591 mode = kNsVeryHighSuppression;
2592 break;
2593 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002594
kwiberg55b97fe2016-01-28 05:22:45 -08002595 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002596}
2597
kwiberg55b97fe2016-01-28 05:22:45 -08002598#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
niklase@google.com470e71d2011-07-07 08:21:25 +00002599
kwiberg55b97fe2016-01-28 05:22:45 -08002600int Channel::SetLocalSSRC(unsigned int ssrc) {
2601 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2602 "Channel::SetLocalSSRC()");
2603 if (channel_state_.Get().sending) {
2604 _engineStatisticsPtr->SetLastError(VE_ALREADY_SENDING, kTraceError,
2605 "SetLocalSSRC() already sending");
2606 return -1;
2607 }
2608 _rtpRtcpModule->SetSSRC(ssrc);
2609 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002610}
2611
kwiberg55b97fe2016-01-28 05:22:45 -08002612int Channel::GetLocalSSRC(unsigned int& ssrc) {
2613 ssrc = _rtpRtcpModule->SSRC();
2614 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002615}
2616
kwiberg55b97fe2016-01-28 05:22:45 -08002617int Channel::GetRemoteSSRC(unsigned int& ssrc) {
2618 ssrc = rtp_receiver_->SSRC();
2619 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002620}
2621
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002622int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002623 _includeAudioLevelIndication = enable;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002624 return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
niklase@google.com470e71d2011-07-07 08:21:25 +00002625}
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002626
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002627int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
2628 unsigned char id) {
kwiberg55b97fe2016-01-28 05:22:45 -08002629 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
2630 if (enable &&
2631 !rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
2632 id)) {
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002633 return -1;
2634 }
2635 return 0;
2636}
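// The |id| passed to the two audio-level functions above is the RTP header
// extension id negotiated in signaling for the extension defined in RFC 6464
// (urn:ietf:params:rtp-hdrext:ssrc-audio-level). Both ends must agree on the
// id; with the one-byte extension header only ids 1-14 are usable.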
2637
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002638int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2639 return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
2640}
2641
2642int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2643 rtp_header_parser_->DeregisterRtpHeaderExtension(
2644 kRtpExtensionAbsoluteSendTime);
kwiberg55b97fe2016-01-28 05:22:45 -08002645 if (enable &&
2646 !rtp_header_parser_->RegisterRtpHeaderExtension(
2647 kRtpExtensionAbsoluteSendTime, id)) {
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00002648 return -1;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002649 }
2650 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002651}
2652
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002653void Channel::EnableSendTransportSequenceNumber(int id) {
2654 int ret =
2655 SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
2656 RTC_DCHECK_EQ(0, ret);
2657}
2658
stefan3313ec92016-01-21 06:32:43 -08002659void Channel::EnableReceiveTransportSequenceNumber(int id) {
2660 rtp_header_parser_->DeregisterRtpHeaderExtension(
2661 kRtpExtensionTransportSequenceNumber);
2662 bool ret = rtp_header_parser_->RegisterRtpHeaderExtension(
2663 kRtpExtensionTransportSequenceNumber, id);
2664 RTC_DCHECK(ret);
2665}
2666
stefanbba9dec2016-02-01 04:39:55 -08002667void Channel::RegisterSenderCongestionControlObjects(
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002668 RtpPacketSender* rtp_packet_sender,
2669 TransportFeedbackObserver* transport_feedback_observer,
2670 PacketRouter* packet_router) {
stefanbba9dec2016-02-01 04:39:55 -08002671 RTC_DCHECK(rtp_packet_sender);
2672 RTC_DCHECK(transport_feedback_observer);
2673 RTC_DCHECK(packet_router && !packet_router_);
2674 feedback_observer_proxy_->SetTransportFeedbackObserver(
2675 transport_feedback_observer);
2676 seq_num_allocator_proxy_->SetSequenceNumberAllocator(packet_router);
2677 rtp_packet_sender_proxy_->SetPacketSender(rtp_packet_sender);
2678 _rtpRtcpModule->SetStorePacketsStatus(true, 600);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002679 packet_router->AddRtpModule(_rtpRtcpModule.get());
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002680 packet_router_ = packet_router;
2681}
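// Rough sizing note, assuming 20 ms audio packets (50 packets/s): the
// 600-packet history enabled above covers roughly 12 seconds of sent audio,
// which is the window that paced sending and NACK retransmission can draw
// from.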
2682
stefanbba9dec2016-02-01 04:39:55 -08002683void Channel::RegisterReceiverCongestionControlObjects(
2684 PacketRouter* packet_router) {
2685 RTC_DCHECK(packet_router && !packet_router_);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002686 packet_router->AddRtpModule(_rtpRtcpModule.get());
stefanbba9dec2016-02-01 04:39:55 -08002687 packet_router_ = packet_router;
2688}
2689
2690void Channel::ResetCongestionControlObjects() {
2691 RTC_DCHECK(packet_router_);
2692 _rtpRtcpModule->SetStorePacketsStatus(false, 600);
2693 feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
2694 seq_num_allocator_proxy_->SetSequenceNumberAllocator(nullptr);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002695 packet_router_->RemoveRtpModule(_rtpRtcpModule.get());
stefanbba9dec2016-02-01 04:39:55 -08002696 packet_router_ = nullptr;
2697 rtp_packet_sender_proxy_->SetPacketSender(nullptr);
2698}
2699
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002700void Channel::SetRTCPStatus(bool enable) {
2701 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2702 "Channel::SetRTCPStatus()");
pbosda903ea2015-10-02 02:36:56 -07002703 _rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff);
niklase@google.com470e71d2011-07-07 08:21:25 +00002704}
2705
kwiberg55b97fe2016-01-28 05:22:45 -08002706int Channel::GetRTCPStatus(bool& enabled) {
pbosda903ea2015-10-02 02:36:56 -07002707 RtcpMode method = _rtpRtcpModule->RTCP();
2708 enabled = (method != RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002709 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002710}
2711
kwiberg55b97fe2016-01-28 05:22:45 -08002712int Channel::SetRTCP_CNAME(const char cName[256]) {
2713 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2714 "Channel::SetRTCP_CNAME()");
2715 if (_rtpRtcpModule->SetCNAME(cName) != 0) {
2716 _engineStatisticsPtr->SetLastError(
2717 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2718 "SetRTCP_CNAME() failed to set RTCP CNAME");
2719 return -1;
2720 }
2721 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002722}
2723
kwiberg55b97fe2016-01-28 05:22:45 -08002724int Channel::GetRemoteRTCP_CNAME(char cName[256]) {
2725 if (cName == NULL) {
2726 _engineStatisticsPtr->SetLastError(
2727 VE_INVALID_ARGUMENT, kTraceError,
2728 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
2729 return -1;
2730 }
2731 char cname[RTCP_CNAME_SIZE];
2732 const uint32_t remoteSSRC = rtp_receiver_->SSRC();
2733 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) {
2734 _engineStatisticsPtr->SetLastError(
2735 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
2736 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
2737 return -1;
2738 }
2739 strcpy(cName, cname);
2740 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002741}
2742
kwiberg55b97fe2016-01-28 05:22:45 -08002743int Channel::GetRemoteRTCPData(unsigned int& NTPHigh,
2744 unsigned int& NTPLow,
2745 unsigned int& timestamp,
2746 unsigned int& playoutTimestamp,
2747 unsigned int* jitter,
2748 unsigned short* fractionLost) {
2749 // --- Information from sender info in received Sender Reports
niklase@google.com470e71d2011-07-07 08:21:25 +00002750
kwiberg55b97fe2016-01-28 05:22:45 -08002751 RTCPSenderInfo senderInfo;
2752 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) {
2753 _engineStatisticsPtr->SetLastError(
2754 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2755 "GetRemoteRTCPData() failed to retrieve sender info for remote "
2756 "side");
2757 return -1;
2758 }
2759
2760  // We only utilize 12 out of 20 bytes in the sender info (the packet and
2761  // octet counts are ignored).
2762 NTPHigh = senderInfo.NTPseconds;
2763 NTPLow = senderInfo.NTPfraction;
2764 timestamp = senderInfo.RTPtimeStamp;
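  // NTPseconds/NTPfraction follow the 64-bit NTP format used by RTCP sender
  // reports (RFC 3550): the high word is whole seconds and the low word is the
  // fractional part in units of 1/2^32 s, e.g. 0x80000000 equals 0.5 s.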
2765
2766 // --- Locally derived information
2767
2768 // This value is updated on each incoming RTCP packet (0 when no packet
2769 // has been received)
2770 playoutTimestamp = playout_timestamp_rtcp_;
2771
2772 if (NULL != jitter || NULL != fractionLost) {
2773 // Get all RTCP receiver report blocks that have been received on this
2774 // channel. If we receive RTP packets from a remote source we know the
2775    // remote SSRC and use the report block from that source.
2776 // Otherwise use the first report block.
2777 std::vector<RTCPReportBlock> remote_stats;
2778 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
2779 remote_stats.empty()) {
2780 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2781 "GetRemoteRTCPData() failed to measure statistics due"
2782 " to lack of received RTP and/or RTCP packets");
2783 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002784 }
2785
kwiberg55b97fe2016-01-28 05:22:45 -08002786 uint32_t remoteSSRC = rtp_receiver_->SSRC();
2787 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
2788 for (; it != remote_stats.end(); ++it) {
2789 if (it->remoteSSRC == remoteSSRC)
2790 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00002791 }
kwiberg55b97fe2016-01-28 05:22:45 -08002792
2793 if (it == remote_stats.end()) {
2794 // If we have not received any RTCP packets from this SSRC it probably
2795 // means that we have not received any RTP packets.
2796 // Use the first received report block instead.
2797 it = remote_stats.begin();
2798 remoteSSRC = it->remoteSSRC;
2799 }
2800
2801 if (jitter) {
2802 *jitter = it->jitter;
2803 }
2804
2805 if (fractionLost) {
2806 *fractionLost = it->fractionLost;
2807 }
2808 }
2809 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002810}
2811
kwiberg55b97fe2016-01-28 05:22:45 -08002812int Channel::SendApplicationDefinedRTCPPacket(
2813 unsigned char subType,
2814 unsigned int name,
2815 const char* data,
2816 unsigned short dataLengthInBytes) {
2817 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2818 "Channel::SendApplicationDefinedRTCPPacket()");
2819 if (!channel_state_.Get().sending) {
2820 _engineStatisticsPtr->SetLastError(
2821 VE_NOT_SENDING, kTraceError,
2822 "SendApplicationDefinedRTCPPacket() not sending");
2823 return -1;
2824 }
2825 if (NULL == data) {
2826 _engineStatisticsPtr->SetLastError(
2827 VE_INVALID_ARGUMENT, kTraceError,
2828 "SendApplicationDefinedRTCPPacket() invalid data value");
2829 return -1;
2830 }
2831 if (dataLengthInBytes % 4 != 0) {
2832 _engineStatisticsPtr->SetLastError(
2833 VE_INVALID_ARGUMENT, kTraceError,
2834 "SendApplicationDefinedRTCPPacket() invalid length value");
2835 return -1;
2836 }
2837 RtcpMode status = _rtpRtcpModule->RTCP();
2838 if (status == RtcpMode::kOff) {
2839 _engineStatisticsPtr->SetLastError(
2840 VE_RTCP_ERROR, kTraceError,
2841 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
2842 return -1;
2843 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002844
kwiberg55b97fe2016-01-28 05:22:45 -08002845 // Create and schedule the RTCP APP packet for transmission
2846 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
2847 subType, name, (const unsigned char*)data, dataLengthInBytes) != 0) {
2848 _engineStatisticsPtr->SetLastError(
2849 VE_SEND_ERROR, kTraceError,
2850 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
2851 return -1;
2852 }
2853 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002854}
2855
kwiberg55b97fe2016-01-28 05:22:45 -08002856int Channel::GetRTPStatistics(unsigned int& averageJitterMs,
2857 unsigned int& maxJitterMs,
2858 unsigned int& discardedPackets) {
2859 // The jitter statistics is updated for each received RTP packet and is
2860 // based on received packets.
2861 if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
2862 // If RTCP is off, there is no timed thread in the RTCP module regularly
2863    // generating new stats, so trigger the update manually here instead.
2864 StreamStatistician* statistician =
2865 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
2866 if (statistician) {
2867      // Don't use the returned statistics; use data from the proxy instead
2868      // so that max jitter can be fetched atomically.
2869 RtcpStatistics s;
2870 statistician->GetStatistics(&s, true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002871 }
kwiberg55b97fe2016-01-28 05:22:45 -08002872 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002873
kwiberg55b97fe2016-01-28 05:22:45 -08002874 ChannelStatistics stats = statistics_proxy_->GetStats();
2875 const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
2876 if (playoutFrequency > 0) {
2877 // Scale RTP statistics given the current playout frequency
2878 maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
2879 averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
2880 }
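  // Worked example: the jitter values above are reported in RTP timestamp
  // units, so at a 16000 Hz playout frequency a max_jitter of 800 units maps
  // to 800 / (16000 / 1000) = 50 ms.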
niklase@google.com470e71d2011-07-07 08:21:25 +00002881
kwiberg55b97fe2016-01-28 05:22:45 -08002882 discardedPackets = _numberOfDiscardedPackets;
niklase@google.com470e71d2011-07-07 08:21:25 +00002883
kwiberg55b97fe2016-01-28 05:22:45 -08002884 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002885}
2886
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002887int Channel::GetRemoteRTCPReportBlocks(
2888 std::vector<ReportBlock>* report_blocks) {
2889 if (report_blocks == NULL) {
kwiberg55b97fe2016-01-28 05:22:45 -08002890 _engineStatisticsPtr->SetLastError(
2891 VE_INVALID_ARGUMENT, kTraceError,
2892         "GetRemoteRTCPReportBlocks() invalid report_blocks.");
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002893 return -1;
2894 }
2895
2896 // Get the report blocks from the latest received RTCP Sender or Receiver
2897 // Report. Each element in the vector contains the sender's SSRC and a
2898 // report block according to RFC 3550.
2899 std::vector<RTCPReportBlock> rtcp_report_blocks;
2900 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002901 return -1;
2902 }
2903
2904 if (rtcp_report_blocks.empty())
2905 return 0;
2906
2907 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
2908 for (; it != rtcp_report_blocks.end(); ++it) {
2909 ReportBlock report_block;
2910 report_block.sender_SSRC = it->remoteSSRC;
2911 report_block.source_SSRC = it->sourceSSRC;
2912 report_block.fraction_lost = it->fractionLost;
2913 report_block.cumulative_num_packets_lost = it->cumulativeLost;
2914 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
2915 report_block.interarrival_jitter = it->jitter;
2916 report_block.last_SR_timestamp = it->lastSR;
2917 report_block.delay_since_last_SR = it->delaySinceLastSR;
2918 report_blocks->push_back(report_block);
2919 }
2920 return 0;
2921}
2922
kwiberg55b97fe2016-01-28 05:22:45 -08002923int Channel::GetRTPStatistics(CallStatistics& stats) {
2924 // --- RtcpStatistics
niklase@google.com470e71d2011-07-07 08:21:25 +00002925
kwiberg55b97fe2016-01-28 05:22:45 -08002926  // The jitter statistics are updated for each received RTP packet and are
2927  // therefore based on received packets only.
2928 RtcpStatistics statistics;
2929 StreamStatistician* statistician =
2930 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
Peter Boström59013bc2016-02-12 11:35:08 +01002931 if (statistician) {
2932 statistician->GetStatistics(&statistics,
2933 _rtpRtcpModule->RTCP() == RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002934 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002935
kwiberg55b97fe2016-01-28 05:22:45 -08002936 stats.fractionLost = statistics.fraction_lost;
2937 stats.cumulativeLost = statistics.cumulative_lost;
2938 stats.extendedMax = statistics.extended_max_sequence_number;
2939 stats.jitterSamples = statistics.jitter;
niklase@google.com470e71d2011-07-07 08:21:25 +00002940
kwiberg55b97fe2016-01-28 05:22:45 -08002941 // --- RTT
2942 stats.rttMs = GetRTT(true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002943
kwiberg55b97fe2016-01-28 05:22:45 -08002944 // --- Data counters
niklase@google.com470e71d2011-07-07 08:21:25 +00002945
kwiberg55b97fe2016-01-28 05:22:45 -08002946 size_t bytesSent(0);
2947 uint32_t packetsSent(0);
2948 size_t bytesReceived(0);
2949 uint32_t packetsReceived(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002950
kwiberg55b97fe2016-01-28 05:22:45 -08002951 if (statistician) {
2952 statistician->GetDataCounters(&bytesReceived, &packetsReceived);
2953 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002954
kwiberg55b97fe2016-01-28 05:22:45 -08002955 if (_rtpRtcpModule->DataCountersRTP(&bytesSent, &packetsSent) != 0) {
2956 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2957                 "GetRTPStatistics() failed to retrieve RTP data counters =>"
2958 " output will not be complete");
2959 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002960
kwiberg55b97fe2016-01-28 05:22:45 -08002961 stats.bytesSent = bytesSent;
2962 stats.packetsSent = packetsSent;
2963 stats.bytesReceived = bytesReceived;
2964 stats.packetsReceived = packetsReceived;
niklase@google.com470e71d2011-07-07 08:21:25 +00002965
kwiberg55b97fe2016-01-28 05:22:45 -08002966 // --- Timestamps
2967 {
2968 rtc::CritScope lock(&ts_stats_lock_);
2969 stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
2970 }
2971 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002972}
2973
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002974int Channel::SetCodecFECStatus(bool enable) {
2975 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2976 "Channel::SetCodecFECStatus()");
2977
kwibergc8d071e2016-04-06 12:22:38 -07002978 if (!codec_manager_.SetCodecFEC(enable) ||
2979 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002980 _engineStatisticsPtr->SetLastError(
2981 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2982 "SetCodecFECStatus() failed to set FEC state");
2983 return -1;
2984 }
2985 return 0;
2986}
2987
2988bool Channel::GetCodecFECStatus() {
kwibergc8d071e2016-04-06 12:22:38 -07002989 return codec_manager_.GetStackParams()->use_codec_fec;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002990}
2991
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002992void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
2993 // None of these functions can fail.
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002994 // If pacing is enabled we always store packets.
2995 if (!pacing_enabled_)
2996 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002997 rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002998 if (enable)
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002999 audio_coding_->EnableNack(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00003000 else
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00003001 audio_coding_->DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00003002}
3003
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00003004// Called when we are missing one or more packets.
3005int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00003006 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
3007}
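// Hypothetical usage sketch (the sequence numbers are made up): if packets
// 4001 and 4003 have arrived but 4002 has not, the missing number is handed to
// ResendPackets() so that a NACK requesting retransmission can be sent:
//
//   const uint16_t missing[] = {4002};
//   channel->ResendPackets(missing, 1);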
3008
kwiberg55b97fe2016-01-28 05:22:45 -08003009uint32_t Channel::Demultiplex(const AudioFrame& audioFrame) {
3010 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3011 "Channel::Demultiplex()");
3012 _audioFrame.CopyFrom(audioFrame);
3013 _audioFrame.id_ = _channelId;
3014 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003015}
3016
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00003017void Channel::Demultiplex(const int16_t* audio_data,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00003018 int sample_rate,
Peter Kastingdce40cf2015-08-24 14:52:23 -07003019 size_t number_of_frames,
Peter Kasting69558702016-01-12 16:26:35 -08003020 size_t number_of_channels) {
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00003021 CodecInst codec;
3022 GetSendCodec(codec);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00003023
Alejandro Luebscdfe20b2015-09-23 12:49:12 -07003024 // Never upsample or upmix the capture signal here. This should be done at the
3025 // end of the send chain.
3026 _audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate);
3027 _audioFrame.num_channels_ = std::min(number_of_channels, codec.channels);
3028 RemixAndResample(audio_data, number_of_frames, number_of_channels,
3029 sample_rate, &input_resampler_, &_audioFrame);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00003030}
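// Example of the rule above (illustrative numbers): with 48000 Hz stereo
// capture and an 8000 Hz mono send codec the frame produced here is 8000 Hz
// mono, while with 16000 Hz mono capture and a 48000 Hz stereo codec it stays
// 16000 Hz mono, since the capture signal is never upsampled or upmixed here.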
3031
kwiberg55b97fe2016-01-28 05:22:45 -08003032uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
3033 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3034 "Channel::PrepareEncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003035
kwiberg55b97fe2016-01-28 05:22:45 -08003036 if (_audioFrame.samples_per_channel_ == 0) {
3037 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3038 "Channel::PrepareEncodeAndSend() invalid audio frame");
3039 return 0xFFFFFFFF;
3040 }
3041
3042 if (channel_state_.Get().input_file_playing) {
3043 MixOrReplaceAudioWithFile(mixingFrequency);
3044 }
3045
solenberg1c2af8e2016-03-24 10:36:00 -07003046 bool is_muted = InputMute(); // Cache locally as InputMute() takes a lock.
3047 AudioFrameOperations::Mute(&_audioFrame, previous_frame_muted_, is_muted);
kwiberg55b97fe2016-01-28 05:22:45 -08003048
3049 if (channel_state_.Get().input_external_media) {
3050 rtc::CritScope cs(&_callbackCritSect);
3051 const bool isStereo = (_audioFrame.num_channels_ == 2);
3052 if (_inputExternalMediaCallbackPtr) {
3053 _inputExternalMediaCallbackPtr->Process(
3054 _channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
3055 _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
3056 isStereo);
niklase@google.com470e71d2011-07-07 08:21:25 +00003057 }
kwiberg55b97fe2016-01-28 05:22:45 -08003058 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003059
kwiberg55b97fe2016-01-28 05:22:45 -08003060 if (_includeAudioLevelIndication) {
3061 size_t length =
3062 _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
Tommi60c4e0a2016-05-26 21:35:27 +02003063 RTC_CHECK_LE(length, sizeof(_audioFrame.data_));
solenberg1c2af8e2016-03-24 10:36:00 -07003064 if (is_muted && previous_frame_muted_) {
kwiberg55b97fe2016-01-28 05:22:45 -08003065 rms_level_.ProcessMuted(length);
3066 } else {
3067 rms_level_.Process(_audioFrame.data_, length);
niklase@google.com470e71d2011-07-07 08:21:25 +00003068 }
kwiberg55b97fe2016-01-28 05:22:45 -08003069 }
solenberg1c2af8e2016-03-24 10:36:00 -07003070 previous_frame_muted_ = is_muted;
niklase@google.com470e71d2011-07-07 08:21:25 +00003071
kwiberg55b97fe2016-01-28 05:22:45 -08003072 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003073}
3074
kwiberg55b97fe2016-01-28 05:22:45 -08003075uint32_t Channel::EncodeAndSend() {
3076 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3077 "Channel::EncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003078
kwiberg55b97fe2016-01-28 05:22:45 -08003079 assert(_audioFrame.num_channels_ <= 2);
3080 if (_audioFrame.samples_per_channel_ == 0) {
3081 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3082 "Channel::EncodeAndSend() invalid audio frame");
3083 return 0xFFFFFFFF;
3084 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003085
kwiberg55b97fe2016-01-28 05:22:45 -08003086 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00003087
kwiberg55b97fe2016-01-28 05:22:45 -08003088  // --- Add 10 ms of raw (PCM) audio data to the encoder.
niklase@google.com470e71d2011-07-07 08:21:25 +00003089
kwiberg55b97fe2016-01-28 05:22:45 -08003090 // The ACM resamples internally.
3091 _audioFrame.timestamp_ = _timeStamp;
3092 // This call will trigger AudioPacketizationCallback::SendData if encoding
3093 // is done and payload is ready for packetization and transmission.
3094 // Otherwise, it will return without invoking the callback.
3095 if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) {
3096 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
3097 "Channel::EncodeAndSend() ACM encoding failed");
3098 return 0xFFFFFFFF;
3099 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003100
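  // _timeStamp advances by samples_per_channel_ per 10 ms frame, e.g. by 160
  // at 16 kHz or by 480 at 48 kHz, so consecutive frames get RTP timestamps
  // that advance on the sample clock.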
kwiberg55b97fe2016-01-28 05:22:45 -08003101 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
3102 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003103}
3104
Minyue2013aec2015-05-13 14:14:42 +02003105void Channel::DisassociateSendChannel(int channel_id) {
tommi31fc21f2016-01-21 10:37:37 -08003106 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003107 Channel* channel = associate_send_channel_.channel();
3108 if (channel && channel->ChannelId() == channel_id) {
3109 // If this channel is associated with a send channel of the specified
3110    // Channel ID, disassociate from it.
3111 ChannelOwner ref(NULL);
3112 associate_send_channel_ = ref;
3113 }
3114}
3115
ivoc14d5dbe2016-07-04 07:06:55 -07003116void Channel::SetRtcEventLog(RtcEventLog* event_log) {
3117 event_log_proxy_->SetEventLog(event_log);
3118}
3119
kwiberg55b97fe2016-01-28 05:22:45 -08003120int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
3121 VoEMediaProcess& processObject) {
3122 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3123 "Channel::RegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003124
kwiberg55b97fe2016-01-28 05:22:45 -08003125 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003126
kwiberg55b97fe2016-01-28 05:22:45 -08003127 if (kPlaybackPerChannel == type) {
3128 if (_outputExternalMediaCallbackPtr) {
3129 _engineStatisticsPtr->SetLastError(
3130 VE_INVALID_OPERATION, kTraceError,
3131 "Channel::RegisterExternalMediaProcessing() "
3132 "output external media already enabled");
3133 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003134 }
kwiberg55b97fe2016-01-28 05:22:45 -08003135 _outputExternalMediaCallbackPtr = &processObject;
3136 _outputExternalMedia = true;
3137 } else if (kRecordingPerChannel == type) {
3138 if (_inputExternalMediaCallbackPtr) {
3139 _engineStatisticsPtr->SetLastError(
3140 VE_INVALID_OPERATION, kTraceError,
3141 "Channel::RegisterExternalMediaProcessing() "
3142          "input external media already enabled");
3143 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003144 }
kwiberg55b97fe2016-01-28 05:22:45 -08003145 _inputExternalMediaCallbackPtr = &processObject;
3146 channel_state_.SetInputExternalMedia(true);
3147 }
3148 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003149}
3150
kwiberg55b97fe2016-01-28 05:22:45 -08003151int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
3152 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3153 "Channel::DeRegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003154
kwiberg55b97fe2016-01-28 05:22:45 -08003155 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003156
kwiberg55b97fe2016-01-28 05:22:45 -08003157 if (kPlaybackPerChannel == type) {
3158 if (!_outputExternalMediaCallbackPtr) {
3159 _engineStatisticsPtr->SetLastError(
3160 VE_INVALID_OPERATION, kTraceWarning,
3161 "Channel::DeRegisterExternalMediaProcessing() "
3162 "output external media already disabled");
3163 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003164 }
kwiberg55b97fe2016-01-28 05:22:45 -08003165 _outputExternalMedia = false;
3166 _outputExternalMediaCallbackPtr = NULL;
3167 } else if (kRecordingPerChannel == type) {
3168 if (!_inputExternalMediaCallbackPtr) {
3169 _engineStatisticsPtr->SetLastError(
3170 VE_INVALID_OPERATION, kTraceWarning,
3171 "Channel::DeRegisterExternalMediaProcessing() "
3172 "input external media already disabled");
3173 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003174 }
kwiberg55b97fe2016-01-28 05:22:45 -08003175 channel_state_.SetInputExternalMedia(false);
3176 _inputExternalMediaCallbackPtr = NULL;
3177 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003178
kwiberg55b97fe2016-01-28 05:22:45 -08003179 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003180}
3181
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003182int Channel::SetExternalMixing(bool enabled) {
kwiberg55b97fe2016-01-28 05:22:45 -08003183 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3184 "Channel::SetExternalMixing(enabled=%d)", enabled);
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003185
kwiberg55b97fe2016-01-28 05:22:45 -08003186 if (channel_state_.Get().playing) {
3187 _engineStatisticsPtr->SetLastError(
3188 VE_INVALID_OPERATION, kTraceError,
3189 "Channel::SetExternalMixing() "
3190 "external mixing cannot be changed while playing.");
3191 return -1;
3192 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003193
kwiberg55b97fe2016-01-28 05:22:45 -08003194 _externalMixing = enabled;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003195
kwiberg55b97fe2016-01-28 05:22:45 -08003196 return 0;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003197}
3198
kwiberg55b97fe2016-01-28 05:22:45 -08003199int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
3200 return audio_coding_->GetNetworkStatistics(&stats);
niklase@google.com470e71d2011-07-07 08:21:25 +00003201}
3202
wu@webrtc.org24301a62013-12-13 19:17:43 +00003203void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
3204 audio_coding_->GetDecodingCallStatistics(stats);
3205}
3206
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003207bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
3208 int* playout_buffer_delay_ms) const {
tommi31fc21f2016-01-21 10:37:37 -08003209 rtc::CritScope lock(&video_sync_lock_);
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003210 if (_average_jitter_buffer_delay_us == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003211 return false;
3212 }
kwiberg55b97fe2016-01-28 05:22:45 -08003213 *jitter_buffer_delay_ms =
3214 (_average_jitter_buffer_delay_us + 500) / 1000 + _recPacketDelayMs;
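  // The +500 rounds the microsecond average to the nearest millisecond, e.g.
  // an average of 41062 us becomes (41062 + 500) / 1000 = 41 ms before the
  // packet delay is added.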
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003215 *playout_buffer_delay_ms = playout_delay_ms_;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003216 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00003217}
3218
solenberg358057b2015-11-27 10:46:42 -08003219uint32_t Channel::GetDelayEstimate() const {
3220 int jitter_buffer_delay_ms = 0;
3221 int playout_buffer_delay_ms = 0;
3222 GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
3223 return jitter_buffer_delay_ms + playout_buffer_delay_ms;
3224}
3225
deadbeef74375882015-08-13 12:09:10 -07003226int Channel::LeastRequiredDelayMs() const {
3227 return audio_coding_->LeastRequiredDelayMs();
3228}
3229
kwiberg55b97fe2016-01-28 05:22:45 -08003230int Channel::SetMinimumPlayoutDelay(int delayMs) {
3231 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3232 "Channel::SetMinimumPlayoutDelay()");
3233 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
3234 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {
3235 _engineStatisticsPtr->SetLastError(
3236 VE_INVALID_ARGUMENT, kTraceError,
3237 "SetMinimumPlayoutDelay() invalid min delay");
3238 return -1;
3239 }
3240 if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) {
3241 _engineStatisticsPtr->SetLastError(
3242 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3243 "SetMinimumPlayoutDelay() failed to set min playout delay");
3244 return -1;
3245 }
3246 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003247}
3248
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003249int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
deadbeef74375882015-08-13 12:09:10 -07003250 uint32_t playout_timestamp_rtp = 0;
3251 {
tommi31fc21f2016-01-21 10:37:37 -08003252 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003253 playout_timestamp_rtp = playout_timestamp_rtp_;
3254 }
kwiberg55b97fe2016-01-28 05:22:45 -08003255 if (playout_timestamp_rtp == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003256 _engineStatisticsPtr->SetLastError(
skvlad4c0536b2016-07-07 13:06:26 -07003257 VE_CANNOT_RETRIEVE_VALUE, kTraceStateInfo,
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003258 "GetPlayoutTimestamp() failed to retrieve timestamp");
3259 return -1;
3260 }
deadbeef74375882015-08-13 12:09:10 -07003261 timestamp = playout_timestamp_rtp;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003262 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003263}
3264
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003265int Channel::SetInitTimestamp(unsigned int timestamp) {
3266 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00003267 "Channel::SetInitTimestamp()");
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003268 if (channel_state_.Get().sending) {
3269 _engineStatisticsPtr->SetLastError(VE_SENDING, kTraceError,
3270 "SetInitTimestamp() already sending");
3271 return -1;
3272 }
3273 _rtpRtcpModule->SetStartTimestamp(timestamp);
3274 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003275}
3276
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003277int Channel::SetInitSequenceNumber(short sequenceNumber) {
3278 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3279 "Channel::SetInitSequenceNumber()");
3280 if (channel_state_.Get().sending) {
3281 _engineStatisticsPtr->SetLastError(
3282 VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending");
3283 return -1;
3284 }
3285 _rtpRtcpModule->SetSequenceNumber(sequenceNumber);
3286 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003287}
3288
kwiberg55b97fe2016-01-28 05:22:45 -08003289int Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule,
3290 RtpReceiver** rtp_receiver) const {
3291 *rtpRtcpModule = _rtpRtcpModule.get();
3292 *rtp_receiver = rtp_receiver_.get();
3293 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003294}
3295
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00003296// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
3297// a shared helper.
kwiberg55b97fe2016-01-28 05:22:45 -08003298int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
kwibergb7f89d62016-02-17 10:04:18 -08003299 std::unique_ptr<int16_t[]> fileBuffer(new int16_t[640]);
kwiberg55b97fe2016-01-28 05:22:45 -08003300 size_t fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00003301
kwiberg55b97fe2016-01-28 05:22:45 -08003302 {
3303 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003304
kwiberg55b97fe2016-01-28 05:22:45 -08003305 if (_inputFilePlayerPtr == NULL) {
3306 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3307 "Channel::MixOrReplaceAudioWithFile() fileplayer"
3308                    " doesn't exist");
3309 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003310 }
3311
kwiberg55b97fe2016-01-28 05:22:45 -08003312 if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(), fileSamples,
3313 mixingFrequency) == -1) {
3314 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3315 "Channel::MixOrReplaceAudioWithFile() file mixing "
3316 "failed");
3317 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003318 }
kwiberg55b97fe2016-01-28 05:22:45 -08003319 if (fileSamples == 0) {
3320 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3321 "Channel::MixOrReplaceAudioWithFile() file is ended");
3322 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003323 }
kwiberg55b97fe2016-01-28 05:22:45 -08003324 }
3325
3326 assert(_audioFrame.samples_per_channel_ == fileSamples);
3327
3328 if (_mixFileWithMicrophone) {
3329 // Currently file stream is always mono.
3330 // TODO(xians): Change the code when FilePlayer supports real stereo.
3331 MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
3332 1, fileSamples);
3333 } else {
3334 // Replace ACM audio with file.
3335 // Currently file stream is always mono.
3336 // TODO(xians): Change the code when FilePlayer supports real stereo.
3337 _audioFrame.UpdateFrame(
3338 _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency,
3339 AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1);
3340 }
3341 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003342}
3343
kwiberg55b97fe2016-01-28 05:22:45 -08003344int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
3345 assert(mixingFrequency <= 48000);
niklase@google.com470e71d2011-07-07 08:21:25 +00003346
kwibergb7f89d62016-02-17 10:04:18 -08003347 std::unique_ptr<int16_t[]> fileBuffer(new int16_t[960]);
kwiberg55b97fe2016-01-28 05:22:45 -08003348 size_t fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00003349
kwiberg55b97fe2016-01-28 05:22:45 -08003350 {
3351 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003352
kwiberg55b97fe2016-01-28 05:22:45 -08003353 if (_outputFilePlayerPtr == NULL) {
3354 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3355 "Channel::MixAudioWithFile() file mixing failed");
3356 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003357 }
3358
kwiberg55b97fe2016-01-28 05:22:45 -08003359 // We should get the frequency we ask for.
3360 if (_outputFilePlayerPtr->Get10msAudioFromFile(
3361 fileBuffer.get(), fileSamples, mixingFrequency) == -1) {
3362 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3363 "Channel::MixAudioWithFile() file mixing failed");
3364 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003365 }
kwiberg55b97fe2016-01-28 05:22:45 -08003366 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003367
kwiberg55b97fe2016-01-28 05:22:45 -08003368 if (audioFrame.samples_per_channel_ == fileSamples) {
3369 // Currently file stream is always mono.
3370 // TODO(xians): Change the code when FilePlayer supports real stereo.
3371 MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
3372 fileSamples);
3373 } else {
3374 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3375 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS
3376 ") != "
3377 "fileSamples(%" PRIuS ")",
3378 audioFrame.samples_per_channel_, fileSamples);
3379 return -1;
3380 }
3381
3382 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003383}
3384
deadbeef74375882015-08-13 12:09:10 -07003385void Channel::UpdatePlayoutTimestamp(bool rtcp) {
henrik.lundin96bd5022016-04-06 04:13:56 -07003386 jitter_buffer_playout_timestamp_ = audio_coding_->PlayoutTimestamp();
deadbeef74375882015-08-13 12:09:10 -07003387
henrik.lundin96bd5022016-04-06 04:13:56 -07003388 if (!jitter_buffer_playout_timestamp_) {
3389 // This can happen if this channel has not received any RTP packets. In
3390 // this case, NetEq is not capable of computing a playout timestamp.
deadbeef74375882015-08-13 12:09:10 -07003391 return;
3392 }
3393
3394 uint16_t delay_ms = 0;
3395 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
kwiberg55b97fe2016-01-28 05:22:45 -08003396 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
deadbeef74375882015-08-13 12:09:10 -07003397 "Channel::UpdatePlayoutTimestamp() failed to read playout"
3398 " delay from the ADM");
3399 _engineStatisticsPtr->SetLastError(
3400 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3401 "UpdatePlayoutTimestamp() failed to retrieve playout delay");
3402 return;
3403 }
3404
henrik.lundin96bd5022016-04-06 04:13:56 -07003405 RTC_DCHECK(jitter_buffer_playout_timestamp_);
3406 uint32_t playout_timestamp = *jitter_buffer_playout_timestamp_;
deadbeef74375882015-08-13 12:09:10 -07003407
3408 // Remove the playout delay.
henrik.lundin96bd5022016-04-06 04:13:56 -07003409 playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));
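  // For example (illustrative numbers): at a 48000 Hz playout frequency a
  // reported playout delay of 40 ms rewinds the timestamp by
  // 40 * (48000 / 1000) = 1920 ticks.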
deadbeef74375882015-08-13 12:09:10 -07003410
kwiberg55b97fe2016-01-28 05:22:45 -08003411 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
deadbeef74375882015-08-13 12:09:10 -07003412 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
henrik.lundin96bd5022016-04-06 04:13:56 -07003413 playout_timestamp);
deadbeef74375882015-08-13 12:09:10 -07003414
3415 {
tommi31fc21f2016-01-21 10:37:37 -08003416 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003417 if (rtcp) {
henrik.lundin96bd5022016-04-06 04:13:56 -07003418 playout_timestamp_rtcp_ = playout_timestamp;
deadbeef74375882015-08-13 12:09:10 -07003419 } else {
henrik.lundin96bd5022016-04-06 04:13:56 -07003420 playout_timestamp_rtp_ = playout_timestamp;
deadbeef74375882015-08-13 12:09:10 -07003421 }
3422 playout_delay_ms_ = delay_ms;
3423 }
3424}
3425
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003426// Called for incoming RTP packets after successful RTP header parsing.
3427void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
3428 uint16_t sequence_number) {
kwiberg55b97fe2016-01-28 05:22:45 -08003429 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003430 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
3431 rtp_timestamp, sequence_number);
niklase@google.com470e71d2011-07-07 08:21:25 +00003432
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003433 // Get frequency of last received payload
wu@webrtc.org94454b72014-06-05 20:34:08 +00003434 int rtp_receive_frequency = GetPlayoutFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +00003435
turaj@webrtc.org167b6df2013-12-13 21:05:07 +00003436  // |jitter_buffer_playout_timestamp_| is updated in UpdatePlayoutTimestamp for
henrik.lundin96bd5022016-04-06 04:13:56 -07003437 // every incoming packet. May be empty if no valid playout timestamp is
3438 // available.
3439 // If |rtp_timestamp| is newer than |jitter_buffer_playout_timestamp_|, the
3440 // resulting difference is positive and will be used. When the inverse is
3441 // true (can happen when a network glitch causes a packet to arrive late,
3442 // and during long comfort noise periods with clock drift), or when
3443 // |jitter_buffer_playout_timestamp_| has no value, the difference is not
3444 // changed from the initial 0.
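  // For example (illustrative numbers): an |rtp_timestamp| that is 1600 ticks
  // newer than the jitter buffer timestamp corresponds to
  // 1600 / (16000 / 1000) = 100 ms at a 16 kHz receive frequency.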
3445 uint32_t timestamp_diff_ms = 0;
3446 if (jitter_buffer_playout_timestamp_ &&
3447 IsNewerTimestamp(rtp_timestamp, *jitter_buffer_playout_timestamp_)) {
3448 timestamp_diff_ms = (rtp_timestamp - *jitter_buffer_playout_timestamp_) /
3449 (rtp_receive_frequency / 1000);
3450 if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
3451 // Diff is too large; set it to zero instead.
3452 timestamp_diff_ms = 0;
3453 }
henrik.lundin@webrtc.orgd6692992014-03-20 12:04:09 +00003454 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003455
kwiberg55b97fe2016-01-28 05:22:45 -08003456 uint16_t packet_delay_ms =
3457 (rtp_timestamp - _previousTimestamp) / (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00003458
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003459 _previousTimestamp = rtp_timestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00003460
kwiberg55b97fe2016-01-28 05:22:45 -08003461 if (timestamp_diff_ms == 0)
3462 return;
niklase@google.com470e71d2011-07-07 08:21:25 +00003463
deadbeef74375882015-08-13 12:09:10 -07003464 {
tommi31fc21f2016-01-21 10:37:37 -08003465 rtc::CritScope lock(&video_sync_lock_);
niklase@google.com470e71d2011-07-07 08:21:25 +00003466
deadbeef74375882015-08-13 12:09:10 -07003467 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
3468 _recPacketDelayMs = packet_delay_ms;
3469 }
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003470
deadbeef74375882015-08-13 12:09:10 -07003471 if (_average_jitter_buffer_delay_us == 0) {
3472 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
3473 return;
3474 }
3475
3476 // Filter average delay value using exponential filter (alpha is
3477 // 7/8). We derive 1000 *_average_jitter_buffer_delay_us here (reduces
3478 // risk of rounding error) and compensate for it in GetDelayEstimate()
3479 // later.
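    // Worked example (illustrative numbers): with a previous average of
    // 40000 us and a new timestamp_diff_ms of 48 ms, the update yields
    // (40000 * 7 + 1000 * 48 + 500) / 8 = 41062 us, i.e. roughly 41 ms.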
kwiberg55b97fe2016-01-28 05:22:45 -08003480 _average_jitter_buffer_delay_us =
3481 (_average_jitter_buffer_delay_us * 7 + 1000 * timestamp_diff_ms + 500) /
3482 8;
deadbeef74375882015-08-13 12:09:10 -07003483 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003484}
3485
kwiberg55b97fe2016-01-28 05:22:45 -08003486void Channel::RegisterReceiveCodecsToRTPModule() {
3487 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3488 "Channel::RegisterReceiveCodecsToRTPModule()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003489
kwiberg55b97fe2016-01-28 05:22:45 -08003490 CodecInst codec;
3491 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00003492
kwiberg55b97fe2016-01-28 05:22:45 -08003493 for (int idx = 0; idx < nSupportedCodecs; idx++) {
3494 // Open up the RTP/RTCP receiver for all supported codecs
3495 if ((audio_coding_->Codec(idx, &codec) == -1) ||
3496 (rtp_receiver_->RegisterReceivePayload(
3497 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3498 (codec.rate < 0) ? 0 : codec.rate) == -1)) {
3499 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3500 "Channel::RegisterReceiveCodecsToRTPModule() unable"
3501 " to register %s (%d/%d/%" PRIuS
3502 "/%d) to RTP/RTCP "
3503 "receiver",
3504 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3505 codec.rate);
3506 } else {
3507 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3508 "Channel::RegisterReceiveCodecsToRTPModule() %s "
3509 "(%d/%d/%" PRIuS
3510 "/%d) has been added to the RTP/RTCP "
3511 "receiver",
3512 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3513 codec.rate);
niklase@google.com470e71d2011-07-07 08:21:25 +00003514 }
kwiberg55b97fe2016-01-28 05:22:45 -08003515 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003516}
3517
kwiberg55b97fe2016-01-28 05:22:45 -08003518int Channel::SetSendRtpHeaderExtension(bool enable,
3519 RTPExtensionType type,
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00003520 unsigned char id) {
3521 int error = 0;
3522 _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
3523 if (enable) {
3524 error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
3525 }
3526 return error;
3527}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00003528
wu@webrtc.org94454b72014-06-05 20:34:08 +00003529int32_t Channel::GetPlayoutFrequency() {
3530 int32_t playout_frequency = audio_coding_->PlayoutFrequency();
3531  CodecInst current_receive_codec;
3532  if (audio_coding_->ReceiveCodec(&current_receive_codec) == 0) {
3533    if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
3534 // Even though the actual sampling rate for G.722 audio is
3535 // 16,000 Hz, the RTP clock rate for the G722 payload format is
3536 // 8,000 Hz because that value was erroneously assigned in
3537 // RFC 1890 and must remain unchanged for backward compatibility.
3538 playout_frequency = 8000;
3539    } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
3540 // We are resampling Opus internally to 32,000 Hz until all our
3541 // DSP routines can operate at 48,000 Hz, but the RTP clock
3542 // rate for the Opus payload format is standardized to 48,000 Hz,
3543 // because that is the maximum supported decoding sampling rate.
3544 playout_frequency = 48000;
3545 }
3546 }
3547 return playout_frequency;
3548}
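// Concrete consequence of the special cases above: a 20 ms G.722 packet
// advances the RTP timestamp by 160 ticks (8 kHz clock) even though 320
// samples are decoded, and Opus always uses a 48 kHz RTP clock regardless of
// the rate the audio is actually decoded at.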
3549
Minyue2013aec2015-05-13 14:14:42 +02003550int64_t Channel::GetRTT(bool allow_associate_channel) const {
pbosda903ea2015-10-02 02:36:56 -07003551 RtcpMode method = _rtpRtcpModule->RTCP();
3552 if (method == RtcpMode::kOff) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003553 return 0;
3554 }
3555 std::vector<RTCPReportBlock> report_blocks;
3556 _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
Minyue2013aec2015-05-13 14:14:42 +02003557
3558 int64_t rtt = 0;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003559 if (report_blocks.empty()) {
Minyue2013aec2015-05-13 14:14:42 +02003560 if (allow_associate_channel) {
tommi31fc21f2016-01-21 10:37:37 -08003561 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003562 Channel* channel = associate_send_channel_.channel();
3563 // Tries to get RTT from an associated channel. This is important for
3564 // receive-only channels.
3565 if (channel) {
3566 // To prevent infinite recursion and deadlock, calling GetRTT of
3567 // associate channel should always use "false" for argument:
3568 // |allow_associate_channel|.
3569 rtt = channel->GetRTT(false);
3570 }
3571 }
3572 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003573 }
3574
3575 uint32_t remoteSSRC = rtp_receiver_->SSRC();
3576 std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
3577 for (; it != report_blocks.end(); ++it) {
3578 if (it->remoteSSRC == remoteSSRC)
3579 break;
3580 }
3581 if (it == report_blocks.end()) {
3582 // We have not received packets with SSRC matching the report blocks.
3583 // To calculate RTT we try with the SSRC of the first report block.
3584 // This is very important for send-only channels where we don't know
3585 // the SSRC of the other end.
3586 remoteSSRC = report_blocks[0].remoteSSRC;
3587 }
Minyue2013aec2015-05-13 14:14:42 +02003588
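  // The call below is expected to derive RTT the standard RTCP way
  // (RFC 3550): the arrival time of a report block minus its last-SR (LSR)
  // timestamp and the delay-since-last-SR (DLSR) reported by the remote side.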
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003589 int64_t avg_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003590 int64_t max_rtt = 0;
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003591 int64_t min_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003592 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
3593 0) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003594 return 0;
3595 }
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003596 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003597}
3598
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00003599} // namespace voe
3600} // namespace webrtc