blob: bdf6fb538798ed8bc1851ab845389aa15b7fcb37 [file] [log] [blame]
niklase@google.com470e71d2011-07-07 08:21:25 +00001/*
henrika@webrtc.org2919e952012-01-31 08:45:03 +00002 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
niklase@google.com470e71d2011-07-07 08:21:25 +00003 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000011#include "webrtc/voice_engine/channel.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000012
Henrik Lundin64dad832015-05-11 12:44:23 +020013#include <algorithm>
Tommif888bb52015-12-12 01:37:01 +010014#include <utility>
Henrik Lundin64dad832015-05-11 12:44:23 +020015
Ivo Creusenae856f22015-09-17 16:30:16 +020016#include "webrtc/base/checks.h"
tommi31fc21f2016-01-21 10:37:37 -080017#include "webrtc/base/criticalsection.h"
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +000018#include "webrtc/base/format_macros.h"
pbosad856222015-11-27 09:48:36 -080019#include "webrtc/base/logging.h"
Erik Språng737336d2016-07-29 12:59:36 +020020#include "webrtc/base/rate_limiter.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010021#include "webrtc/base/thread_checker.h"
wu@webrtc.org94454b72014-06-05 20:34:08 +000022#include "webrtc/base/timeutils.h"
Henrik Lundin64dad832015-05-11 12:44:23 +020023#include "webrtc/config.h"
skvladcc91d282016-10-03 18:31:22 -070024#include "webrtc/logging/rtc_event_log/rtc_event_log.h"
kwibergda2bf4e2016-10-24 13:47:09 -070025#include "webrtc/modules/audio_coding/codecs/audio_format_conversion.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000026#include "webrtc/modules/audio_device/include/audio_device.h"
27#include "webrtc/modules/audio_processing/include/audio_processing.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010028#include "webrtc/modules/include/module_common_types.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010029#include "webrtc/modules/pacing/packet_router.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010030#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
31#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
32#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
wu@webrtc.org822fbd82013-08-15 23:38:54 +000033#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010034#include "webrtc/modules/utility/include/audio_frame_operations.h"
35#include "webrtc/modules/utility/include/process_thread.h"
Henrik Kjellander98f53512015-10-28 18:17:40 +010036#include "webrtc/system_wrappers/include/trace.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000037#include "webrtc/voice_engine/include/voe_external_media.h"
38#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
39#include "webrtc/voice_engine/output_mixer.h"
40#include "webrtc/voice_engine/statistics.h"
41#include "webrtc/voice_engine/transmit_mixer.h"
42#include "webrtc/voice_engine/utility.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000043
andrew@webrtc.org50419b02012-11-14 19:07:54 +000044namespace webrtc {
45namespace voe {
niklase@google.com470e71d2011-07-07 08:21:25 +000046
namespace {

// Clamp bounds for the audio retransmission (NACK) window, in milliseconds.
constexpr int64_t kMaxRetransmissionWindowMs = 1000;
constexpr int64_t kMinRetransmissionWindowMs = 30;

}  // namespace

// Attenuation applied to telephone-event (DTMF) tones, in dB.
// Made constexpr for consistency with the constants above.
constexpr int kTelephoneEventAttenuationdB = 10;
55
// Forwarding wrapper around a webrtc::RtcEventLog. The real log can be
// attached or detached at any time via SetEventLog(); every Log*() call is
// forwarded to the attached log under |crit_|, and silently dropped when no
// log is attached. Start/StopLogging must be performed on the underlying
// log directly, never through this proxy (hence RTC_NOTREACHED below).
class RtcEventLogProxy final : public webrtc::RtcEventLog {
 public:
  RtcEventLogProxy() : event_log_(nullptr) {}

  // Logging lifetime is not managed through the proxy.
  bool StartLogging(const std::string& file_name,
                    int64_t max_size_bytes) override {
    RTC_NOTREACHED();
    return false;
  }

  bool StartLogging(rtc::PlatformFile log_file,
                    int64_t max_size_bytes) override {
    RTC_NOTREACHED();
    return false;
  }

  void StopLogging() override { RTC_NOTREACHED(); }

  // Each forwarding method takes |crit_| so that SetEventLog() can swap the
  // target log concurrently with logging calls.
  void LogVideoReceiveStreamConfig(
      const webrtc::VideoReceiveStream::Config& config) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogVideoReceiveStreamConfig(config);
    }
  }

  void LogVideoSendStreamConfig(
      const webrtc::VideoSendStream::Config& config) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogVideoSendStreamConfig(config);
    }
  }

  void LogAudioReceiveStreamConfig(
      const webrtc::AudioReceiveStream::Config& config) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogAudioReceiveStreamConfig(config);
    }
  }

  void LogAudioSendStreamConfig(
      const webrtc::AudioSendStream::Config& config) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogAudioSendStreamConfig(config);
    }
  }

  void LogRtpHeader(webrtc::PacketDirection direction,
                    webrtc::MediaType media_type,
                    const uint8_t* header,
                    size_t packet_length) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogRtpHeader(direction, media_type, header, packet_length);
    }
  }

  void LogRtcpPacket(webrtc::PacketDirection direction,
                     webrtc::MediaType media_type,
                     const uint8_t* packet,
                     size_t length) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogRtcpPacket(direction, media_type, packet, length);
    }
  }

  void LogAudioPlayout(uint32_t ssrc) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogAudioPlayout(ssrc);
    }
  }

  void LogBwePacketLossEvent(int32_t bitrate,
                             uint8_t fraction_loss,
                             int32_t total_packets) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogBwePacketLossEvent(bitrate, fraction_loss, total_packets);
    }
  }

  // Attaches the log to forward to, or detaches it when |event_log| is null.
  void SetEventLog(RtcEventLog* event_log) {
    rtc::CritScope lock(&crit_);
    event_log_ = event_log;
  }

 private:
  rtc::CriticalSection crit_;
  RtcEventLog* event_log_ GUARDED_BY(crit_);
  RTC_DISALLOW_COPY_AND_ASSIGN(RtcEventLogProxy);
};
152
// Forwarding wrapper around a TransportFeedbackObserver. The observer can be
// swapped at runtime via SetTransportFeedbackObserver(); calls arriving while
// no observer is attached are dropped. Thread checkers document the expected
// calling threads: AddPacket from the pacer thread, OnTransportFeedback from
// the network thread, and SetTransportFeedbackObserver from the thread that
// created the proxy.
class TransportFeedbackProxy : public TransportFeedbackObserver {
 public:
  TransportFeedbackProxy() : feedback_observer_(nullptr) {
    // The pacer/network threads are not known yet; bind the checkers to the
    // first thread that actually calls in.
    pacer_thread_.DetachFromThread();
    network_thread_.DetachFromThread();
  }

  // Attaches (or detaches, with nullptr) the real observer.
  void SetTransportFeedbackObserver(
      TransportFeedbackObserver* feedback_observer) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    feedback_observer_ = feedback_observer;
  }

  // Implements TransportFeedbackObserver.
  void AddPacket(uint16_t sequence_number,
                 size_t length,
                 int probe_cluster_id) override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->AddPacket(sequence_number, length, probe_cluster_id);
  }
  void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
    RTC_DCHECK(network_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->OnTransportFeedback(feedback);
  }
  // Not supported through the proxy.
  std::vector<PacketInfo> GetTransportFeedbackVector() const override {
    RTC_NOTREACHED();
    return std::vector<PacketInfo>();
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  rtc::ThreadChecker network_thread_;
  TransportFeedbackObserver* feedback_observer_ GUARDED_BY(&crit_);
};
194
195class TransportSequenceNumberProxy : public TransportSequenceNumberAllocator {
196 public:
197 TransportSequenceNumberProxy() : seq_num_allocator_(nullptr) {
198 pacer_thread_.DetachFromThread();
199 }
200
201 void SetSequenceNumberAllocator(
202 TransportSequenceNumberAllocator* seq_num_allocator) {
203 RTC_DCHECK(thread_checker_.CalledOnValidThread());
204 rtc::CritScope lock(&crit_);
205 seq_num_allocator_ = seq_num_allocator;
206 }
207
208 // Implements TransportSequenceNumberAllocator.
209 uint16_t AllocateSequenceNumber() override {
210 RTC_DCHECK(pacer_thread_.CalledOnValidThread());
211 rtc::CritScope lock(&crit_);
212 if (!seq_num_allocator_)
213 return 0;
214 return seq_num_allocator_->AllocateSequenceNumber();
215 }
216
217 private:
218 rtc::CriticalSection crit_;
219 rtc::ThreadChecker thread_checker_;
220 rtc::ThreadChecker pacer_thread_;
221 TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
222};
223
224class RtpPacketSenderProxy : public RtpPacketSender {
225 public:
kwiberg55b97fe2016-01-28 05:22:45 -0800226 RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {}
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100227
228 void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
229 RTC_DCHECK(thread_checker_.CalledOnValidThread());
230 rtc::CritScope lock(&crit_);
231 rtp_packet_sender_ = rtp_packet_sender;
232 }
233
234 // Implements RtpPacketSender.
235 void InsertPacket(Priority priority,
236 uint32_t ssrc,
237 uint16_t sequence_number,
238 int64_t capture_time_ms,
239 size_t bytes,
240 bool retransmission) override {
241 rtc::CritScope lock(&crit_);
242 if (rtp_packet_sender_) {
243 rtp_packet_sender_->InsertPacket(priority, ssrc, sequence_number,
244 capture_time_ms, bytes, retransmission);
245 }
246 }
247
248 private:
249 rtc::ThreadChecker thread_checker_;
250 rtc::CriticalSection crit_;
251 RtpPacketSender* rtp_packet_sender_ GUARDED_BY(&crit_);
252};
253
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000254// Extend the default RTCP statistics struct with max_jitter, defined as the
255// maximum jitter value seen in an RTCP report block.
256struct ChannelStatistics : public RtcpStatistics {
257 ChannelStatistics() : rtcp(), max_jitter(0) {}
258
259 RtcpStatistics rtcp;
260 uint32_t max_jitter;
261};
262
263// Statistics callback, called at each generation of a new RTCP report block.
264class StatisticsProxy : public RtcpStatisticsCallback {
265 public:
tommi31fc21f2016-01-21 10:37:37 -0800266 StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {}
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000267 virtual ~StatisticsProxy() {}
268
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000269 void StatisticsUpdated(const RtcpStatistics& statistics,
270 uint32_t ssrc) override {
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000271 if (ssrc != ssrc_)
272 return;
273
tommi31fc21f2016-01-21 10:37:37 -0800274 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000275 stats_.rtcp = statistics;
276 if (statistics.jitter > stats_.max_jitter) {
277 stats_.max_jitter = statistics.jitter;
278 }
279 }
280
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000281 void CNameChanged(const char* cname, uint32_t ssrc) override {}
pbos@webrtc.orgce4e9a32014-12-18 13:50:16 +0000282
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000283 ChannelStatistics GetStats() {
tommi31fc21f2016-01-21 10:37:37 -0800284 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000285 return stats_;
286 }
287
288 private:
289 // StatisticsUpdated calls are triggered from threads in the RTP module,
290 // while GetStats calls can be triggered from the public voice engine API,
291 // hence synchronization is needed.
tommi31fc21f2016-01-21 10:37:37 -0800292 rtc::CriticalSection stats_lock_;
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000293 const uint32_t ssrc_;
294 ChannelStatistics stats_;
295};
296
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000297class VoERtcpObserver : public RtcpBandwidthObserver {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000298 public:
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000299 explicit VoERtcpObserver(Channel* owner) : owner_(owner) {}
300 virtual ~VoERtcpObserver() {}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000301
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000302 void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
303 // Not used for Voice Engine.
304 }
305
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000306 void OnReceivedRtcpReceiverReport(const ReportBlockList& report_blocks,
307 int64_t rtt,
308 int64_t now_ms) override {
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000309 // TODO(mflodman): Do we need to aggregate reports here or can we jut send
310 // what we get? I.e. do we ever get multiple reports bundled into one RTCP
311 // report for VoiceEngine?
312 if (report_blocks.empty())
313 return;
314
315 int fraction_lost_aggregate = 0;
316 int total_number_of_packets = 0;
317
318 // If receiving multiple report blocks, calculate the weighted average based
319 // on the number of packets a report refers to.
320 for (ReportBlockList::const_iterator block_it = report_blocks.begin();
321 block_it != report_blocks.end(); ++block_it) {
322 // Find the previous extended high sequence number for this remote SSRC,
323 // to calculate the number of RTP packets this report refers to. Ignore if
324 // we haven't seen this SSRC before.
325 std::map<uint32_t, uint32_t>::iterator seq_num_it =
326 extended_max_sequence_number_.find(block_it->sourceSSRC);
327 int number_of_packets = 0;
328 if (seq_num_it != extended_max_sequence_number_.end()) {
329 number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second;
330 }
331 fraction_lost_aggregate += number_of_packets * block_it->fractionLost;
332 total_number_of_packets += number_of_packets;
333
334 extended_max_sequence_number_[block_it->sourceSSRC] =
335 block_it->extendedHighSeqNum;
336 }
337 int weighted_fraction_lost = 0;
338 if (total_number_of_packets > 0) {
kwiberg55b97fe2016-01-28 05:22:45 -0800339 weighted_fraction_lost =
340 (fraction_lost_aggregate + total_number_of_packets / 2) /
341 total_number_of_packets;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000342 }
343 owner_->OnIncomingFractionLoss(weighted_fraction_lost);
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000344 }
345
346 private:
347 Channel* owner_;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000348 // Maps remote side ssrc to extended highest sequence number received.
349 std::map<uint32_t, uint32_t> extended_max_sequence_number_;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000350};
351
kwiberg55b97fe2016-01-28 05:22:45 -0800352int32_t Channel::SendData(FrameType frameType,
353 uint8_t payloadType,
354 uint32_t timeStamp,
355 const uint8_t* payloadData,
356 size_t payloadSize,
357 const RTPFragmentationHeader* fragmentation) {
358 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
359 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
360 " payloadSize=%" PRIuS ", fragmentation=0x%x)",
361 frameType, payloadType, timeStamp, payloadSize, fragmentation);
niklase@google.com470e71d2011-07-07 08:21:25 +0000362
kwiberg55b97fe2016-01-28 05:22:45 -0800363 if (_includeAudioLevelIndication) {
364 // Store current audio level in the RTP/RTCP module.
365 // The level will be used in combination with voice-activity state
366 // (frameType) to add an RTP header extension
367 _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
368 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000369
kwiberg55b97fe2016-01-28 05:22:45 -0800370 // Push data from ACM to RTP/RTCP-module to deliver audio frame for
371 // packetization.
372 // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
Sergey Ulanov525df3f2016-08-02 17:46:41 -0700373 if (!_rtpRtcpModule->SendOutgoingData(
kwiberg55b97fe2016-01-28 05:22:45 -0800374 (FrameType&)frameType, payloadType, timeStamp,
375 // Leaving the time when this frame was
376 // received from the capture device as
377 // undefined for voice for now.
Sergey Ulanov525df3f2016-08-02 17:46:41 -0700378 -1, payloadData, payloadSize, fragmentation, nullptr, nullptr)) {
kwiberg55b97fe2016-01-28 05:22:45 -0800379 _engineStatisticsPtr->SetLastError(
380 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
381 "Channel::SendData() failed to send data to RTP/RTCP module");
382 return -1;
383 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000384
kwiberg55b97fe2016-01-28 05:22:45 -0800385 _lastLocalTimeStamp = timeStamp;
386 _lastPayloadType = payloadType;
niklase@google.com470e71d2011-07-07 08:21:25 +0000387
kwiberg55b97fe2016-01-28 05:22:45 -0800388 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000389}
390
kwiberg55b97fe2016-01-28 05:22:45 -0800391int32_t Channel::InFrameType(FrameType frame_type) {
392 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
393 "Channel::InFrameType(frame_type=%d)", frame_type);
niklase@google.com470e71d2011-07-07 08:21:25 +0000394
kwiberg55b97fe2016-01-28 05:22:45 -0800395 rtc::CritScope cs(&_callbackCritSect);
396 _sendFrameType = (frame_type == kAudioFrameSpeech);
397 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000398}
399
stefan1d8a5062015-10-02 03:39:33 -0700400bool Channel::SendRtp(const uint8_t* data,
401 size_t len,
402 const PacketOptions& options) {
kwiberg55b97fe2016-01-28 05:22:45 -0800403 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
404 "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len);
niklase@google.com470e71d2011-07-07 08:21:25 +0000405
kwiberg55b97fe2016-01-28 05:22:45 -0800406 rtc::CritScope cs(&_callbackCritSect);
wu@webrtc.orgfb648da2013-10-18 21:10:51 +0000407
kwiberg55b97fe2016-01-28 05:22:45 -0800408 if (_transportPtr == NULL) {
409 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
410 "Channel::SendPacket() failed to send RTP packet due to"
411 " invalid transport object");
412 return false;
413 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000414
kwiberg55b97fe2016-01-28 05:22:45 -0800415 uint8_t* bufferToSendPtr = (uint8_t*)data;
416 size_t bufferLength = len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000417
kwiberg55b97fe2016-01-28 05:22:45 -0800418 if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) {
419 std::string transport_name =
420 _externalTransport ? "external transport" : "WebRtc sockets";
421 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
422 "Channel::SendPacket() RTP transmission using %s failed",
423 transport_name.c_str());
424 return false;
425 }
426 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +0000427}
428
kwiberg55b97fe2016-01-28 05:22:45 -0800429bool Channel::SendRtcp(const uint8_t* data, size_t len) {
430 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
431 "Channel::SendRtcp(len=%" PRIuS ")", len);
niklase@google.com470e71d2011-07-07 08:21:25 +0000432
kwiberg55b97fe2016-01-28 05:22:45 -0800433 rtc::CritScope cs(&_callbackCritSect);
434 if (_transportPtr == NULL) {
435 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
436 "Channel::SendRtcp() failed to send RTCP packet"
437 " due to invalid transport object");
438 return false;
439 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000440
kwiberg55b97fe2016-01-28 05:22:45 -0800441 uint8_t* bufferToSendPtr = (uint8_t*)data;
442 size_t bufferLength = len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000443
kwiberg55b97fe2016-01-28 05:22:45 -0800444 int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength);
445 if (n < 0) {
446 std::string transport_name =
447 _externalTransport ? "external transport" : "WebRtc sockets";
448 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
449 "Channel::SendRtcp() transmission using %s failed",
450 transport_name.c_str());
451 return false;
452 }
453 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +0000454}
455
// RTP receiver callback: the SSRC of the incoming stream changed. Forwards
// the new remote SSRC to the RTP/RTCP module.
void Channel::OnIncomingSSRCChanged(uint32_t ssrc) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);

  // Update ssrc so that NTP for AV sync can be updated.
  _rtpRtcpModule->SetRemoteSSRC(ssrc);
}
463
// RTP receiver callback: a CSRC was added to or removed from the incoming
// stream. Only logged; no state is updated.
void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC,
               added);
}
469
// RTP receiver callback: builds a CodecInst from the signaled RTP payload
// parameters and registers the corresponding receive codec with the ACM.
// Returns 0 on success, -1 if the ACM rejects the codec.
int32_t Channel::OnInitializeDecoder(
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    size_t channels,
    uint32_t rate) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnInitializeDecoder(payloadType=%d, "
               "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
               payloadType, payloadName, frequency, channels, rate);

  // Zero-initialization guarantees plname stays NUL-terminated after the
  // bounded strncpy below.
  CodecInst receiveCodec = {0};
  CodecInst dummyCodec = {0};

  receiveCodec.pltype = payloadType;
  receiveCodec.plfreq = frequency;
  receiveCodec.channels = channels;
  receiveCodec.rate = rate;
  strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

  // Look up the codec in the ACM only to obtain its default packet size
  // (pacsize), which is not signaled over RTP.
  audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
  receiveCodec.pacsize = dummyCodec.pacsize;

  // Register the new codec to the ACM
  if (!audio_coding_->RegisterReceiveCodec(receiveCodec.pltype,
                                           CodecInstToSdp(receiveCodec))) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnInitializeDecoder() invalid codec ("
                 "pt=%d, name=%s) received - 1",
                 payloadType, payloadName);
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
    return -1;
  }

  return 0;
}
506
kwiberg55b97fe2016-01-28 05:22:45 -0800507int32_t Channel::OnReceivedPayloadData(const uint8_t* payloadData,
508 size_t payloadSize,
509 const WebRtcRTPHeader* rtpHeader) {
510 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
511 "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS
512 ","
513 " payloadType=%u, audioChannel=%" PRIuS ")",
514 payloadSize, rtpHeader->header.payloadType,
515 rtpHeader->type.Audio.channel);
niklase@google.com470e71d2011-07-07 08:21:25 +0000516
kwiberg55b97fe2016-01-28 05:22:45 -0800517 if (!channel_state_.Get().playing) {
518 // Avoid inserting into NetEQ when we are not playing. Count the
519 // packet as discarded.
520 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
521 "received packet is discarded since playing is not"
522 " activated");
523 _numberOfDiscardedPackets++;
niklase@google.com470e71d2011-07-07 08:21:25 +0000524 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -0800525 }
526
527 // Push the incoming payload (parsed and ready for decoding) into the ACM
528 if (audio_coding_->IncomingPacket(payloadData, payloadSize, *rtpHeader) !=
529 0) {
530 _engineStatisticsPtr->SetLastError(
531 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
532 "Channel::OnReceivedPayloadData() unable to push data to the ACM");
533 return -1;
534 }
535
kwiberg55b97fe2016-01-28 05:22:45 -0800536 int64_t round_trip_time = 0;
537 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, NULL, NULL,
538 NULL);
539
540 std::vector<uint16_t> nack_list = audio_coding_->GetNackList(round_trip_time);
541 if (!nack_list.empty()) {
542 // Can't use nack_list.data() since it's not supported by all
543 // compilers.
544 ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
545 }
546 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000547}
548
// Feeds a recovered RTP packet (NOTE(review): presumably produced by FEC
// recovery — confirm against the caller) back into the normal receive path.
// Returns false if the RTP header cannot be parsed or the payload type is
// not registered.
bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
                                size_t rtp_packet_length) {
  RTPHeader header;
  if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
    WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "IncomingPacket invalid RTP header");
    return false;
  }
  // A negative frequency means the payload type is unknown to the registry.
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return false;
  // NOTE(review): the final bool flag's semantics are defined by
  // ReceivePacket(); confirm |false| marks the packet as recovered rather
  // than freshly received.
  return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
}
563
// Mixer callback: produces the next 10 ms of playout audio for this channel.
// Pulls decoded PCM from the ACM, then applies (in order): optional sink
// callback, output gain, panning, file mixing, external media processing,
// file recording, level measurement, and elapsed/NTP timestamp bookkeeping.
// Returns kError if the ACM fails, kMuted if the frame is muted, kNormal
// otherwise. |id| is unused here.
MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
    int32_t id,
    AudioFrame* audioFrame) {
  // Log the playout event for this channel's local SSRC.
  unsigned int ssrc;
  RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
  event_log_proxy_->LogAudioPlayout(ssrc);
  // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
  bool muted;
  if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame,
                                     &muted) == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
    // In all likelihood, the audio in this frame is garbage. We return an
    // error so that the audio mixer module doesn't add it to the mix. As
    // a result, it won't be played out and the actions skipped here are
    // irrelevant.
    return MixerParticipant::AudioFrameInfo::kError;
  }

  if (muted) {
    // TODO(henrik.lundin): We should be able to do better than this. But we
    // will have to go through all the cases below where the audio samples may
    // be used, and handle the muted case in some way.
    audioFrame->Mute();
  }

  // Convert module ID to internal VoE channel ID
  audioFrame->id_ = VoEChannelId(audioFrame->id_);
  // Store speech type for dead-or-alive detection
  _outputSpeechType = audioFrame->speech_type_;

  // Snapshot the channel state once; used below for file-playing checks.
  ChannelState::State state = channel_state_.Get();

  {
    // Pass the audio buffers to an optional sink callback, before applying
    // scaling/panning, as that applies to the mix operation.
    // External recipients of the audio (e.g. via AudioTrack), will do their
    // own mixing/dynamic processing.
    rtc::CritScope cs(&_callbackCritSect);
    if (audio_sink_) {
      AudioSinkInterface::Data data(
          &audioFrame->data_[0], audioFrame->samples_per_channel_,
          audioFrame->sample_rate_hz_, audioFrame->num_channels_,
          audioFrame->timestamp_);
      audio_sink_->OnData(data);
    }
  }

  // Copy the volume settings under their lock so the audio operations below
  // run without holding it.
  float output_gain = 1.0f;
  float left_pan = 1.0f;
  float right_pan = 1.0f;
  {
    rtc::CritScope cs(&volume_settings_critsect_);
    output_gain = _outputGain;
    left_pan = _panLeft;
    right_pan = _panRight;
  }

  // Output volume scaling; skipped when the gain is within ~1% of unity.
  if (output_gain < 0.99f || output_gain > 1.01f) {
    AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
  }

  // Scale left and/or right channel(s) if stereo and master balance is
  // active

  if (left_pan != 1.0f || right_pan != 1.0f) {
    if (audioFrame->num_channels_ == 1) {
      // Emulate stereo mode since panning is active.
      // The mono signal is copied to both left and right channels here.
      AudioFrameOperations::MonoToStereo(audioFrame);
    }
    // For true stereo mode (when we are receiving a stereo signal), no
    // action is needed.

    // Do the panning operation (the audio frame contains stereo at this
    // stage)
    AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
  }

  // Mix decoded PCM output with file if file mixing is enabled
  if (state.output_file_playing) {
    MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
    muted = false;  // We may have added non-zero samples.
  }

  // External media
  if (_outputExternalMedia) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (audioFrame->num_channels_ == 2);
    if (_outputExternalMediaCallbackPtr) {
      _outputExternalMediaCallbackPtr->Process(
          _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
          audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
          isStereo);
    }
  }

  // Record playout if enabled
  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFileRecording && output_file_recorder_) {
      output_file_recorder_->RecordAudioToFile(*audioFrame);
    }
  }

  // Measure audio level (0-9)
  // TODO(henrik.lundin) Use the |muted| information here too.
  _outputAudioLevel.ComputeLevel(*audioFrame);

  if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
    // The first frame with a valid rtp timestamp.
    capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
  }

  if (capture_start_rtp_time_stamp_ >= 0) {
    // audioFrame.timestamp_ should be valid from now on.

    // Compute elapsed time.
    int64_t unwrap_timestamp =
        rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
    audioFrame->elapsed_time_ms_ =
        (unwrap_timestamp - capture_start_rtp_time_stamp_) /
        (GetRtpTimestampRateHz() / 1000);

    {
      rtc::CritScope lock(&ts_stats_lock_);
      // Compute ntp time.
      audioFrame->ntp_time_ms_ =
          ntp_estimator_.Estimate(audioFrame->timestamp_);
      // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
      if (audioFrame->ntp_time_ms_ > 0) {
        // Compute |capture_start_ntp_time_ms_| so that
        // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
        capture_start_ntp_time_ms_ =
            audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
      }
    }
  }

  return muted ? MixerParticipant::AudioFrameInfo::kMuted
               : MixerParticipant::AudioFrameInfo::kNormal;
}
708
aleloi6c278492016-10-20 14:24:39 -0700709AudioMixer::Source::AudioFrameInfo Channel::GetAudioFrameWithInfo(
710 int sample_rate_hz,
711 AudioFrame* audio_frame) {
712 audio_frame->sample_rate_hz_ = sample_rate_hz;
aleloiaed581a2016-10-20 06:32:39 -0700713
aleloi6c278492016-10-20 14:24:39 -0700714 const auto frame_info = GetAudioFrameWithMuted(-1, audio_frame);
aleloiaed581a2016-10-20 06:32:39 -0700715
716 using FrameInfo = AudioMixer::Source::AudioFrameInfo;
717 FrameInfo new_audio_frame_info = FrameInfo::kError;
718 switch (frame_info) {
719 case MixerParticipant::AudioFrameInfo::kNormal:
720 new_audio_frame_info = FrameInfo::kNormal;
721 break;
722 case MixerParticipant::AudioFrameInfo::kMuted:
723 new_audio_frame_info = FrameInfo::kMuted;
724 break;
725 case MixerParticipant::AudioFrameInfo::kError:
726 new_audio_frame_info = FrameInfo::kError;
727 break;
728 }
aleloi6c278492016-10-20 14:24:39 -0700729 return new_audio_frame_info;
aleloiaed581a2016-10-20 06:32:39 -0700730}
731
kwiberg55b97fe2016-01-28 05:22:45 -0800732int32_t Channel::NeededFrequency(int32_t id) const {
733 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
734 "Channel::NeededFrequency(id=%d)", id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000735
kwiberg55b97fe2016-01-28 05:22:45 -0800736 int highestNeeded = 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000737
kwiberg55b97fe2016-01-28 05:22:45 -0800738 // Determine highest needed receive frequency
739 int32_t receiveFrequency = audio_coding_->ReceiveFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +0000740
kwiberg55b97fe2016-01-28 05:22:45 -0800741 // Return the bigger of playout and receive frequency in the ACM.
742 if (audio_coding_->PlayoutFrequency() > receiveFrequency) {
743 highestNeeded = audio_coding_->PlayoutFrequency();
744 } else {
745 highestNeeded = receiveFrequency;
746 }
747
748 // Special case, if we're playing a file on the playout side
749 // we take that frequency into consideration as well
750 // This is not needed on sending side, since the codec will
751 // limit the spectrum anyway.
752 if (channel_state_.Get().output_file_playing) {
753 rtc::CritScope cs(&_fileCritSect);
kwiberg5a25d952016-08-17 07:31:12 -0700754 if (output_file_player_) {
755 if (output_file_player_->Frequency() > highestNeeded) {
756 highestNeeded = output_file_player_->Frequency();
kwiberg55b97fe2016-01-28 05:22:45 -0800757 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000758 }
kwiberg55b97fe2016-01-28 05:22:45 -0800759 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000760
kwiberg55b97fe2016-01-28 05:22:45 -0800761 return (highestNeeded);
niklase@google.com470e71d2011-07-07 08:21:25 +0000762}
763
ossu5f7cfa52016-05-30 08:11:28 -0700764int32_t Channel::CreateChannel(
765 Channel*& channel,
766 int32_t channelId,
767 uint32_t instanceId,
solenberg88499ec2016-09-07 07:34:41 -0700768 const VoEBase::ChannelConfig& config) {
kwiberg55b97fe2016-01-28 05:22:45 -0800769 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
770 "Channel::CreateChannel(channelId=%d, instanceId=%d)", channelId,
771 instanceId);
niklase@google.com470e71d2011-07-07 08:21:25 +0000772
solenberg88499ec2016-09-07 07:34:41 -0700773 channel = new Channel(channelId, instanceId, config);
kwiberg55b97fe2016-01-28 05:22:45 -0800774 if (channel == NULL) {
775 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
776 "Channel::CreateChannel() unable to allocate memory for"
777 " channel");
778 return -1;
779 }
780 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000781}
782
kwiberg55b97fe2016-01-28 05:22:45 -0800783void Channel::PlayNotification(int32_t id, uint32_t durationMs) {
784 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
785 "Channel::PlayNotification(id=%d, durationMs=%d)", id,
786 durationMs);
niklase@google.com470e71d2011-07-07 08:21:25 +0000787
kwiberg55b97fe2016-01-28 05:22:45 -0800788 // Not implement yet
niklase@google.com470e71d2011-07-07 08:21:25 +0000789}
790
kwiberg55b97fe2016-01-28 05:22:45 -0800791void Channel::RecordNotification(int32_t id, uint32_t durationMs) {
792 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
793 "Channel::RecordNotification(id=%d, durationMs=%d)", id,
794 durationMs);
niklase@google.com470e71d2011-07-07 08:21:25 +0000795
kwiberg55b97fe2016-01-28 05:22:45 -0800796 // Not implement yet
niklase@google.com470e71d2011-07-07 08:21:25 +0000797}
798
kwiberg55b97fe2016-01-28 05:22:45 -0800799void Channel::PlayFileEnded(int32_t id) {
800 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
801 "Channel::PlayFileEnded(id=%d)", id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000802
kwiberg55b97fe2016-01-28 05:22:45 -0800803 if (id == _inputFilePlayerId) {
804 channel_state_.SetInputFilePlaying(false);
805 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
806 "Channel::PlayFileEnded() => input file player module is"
niklase@google.com470e71d2011-07-07 08:21:25 +0000807 " shutdown");
kwiberg55b97fe2016-01-28 05:22:45 -0800808 } else if (id == _outputFilePlayerId) {
809 channel_state_.SetOutputFilePlaying(false);
810 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
811 "Channel::PlayFileEnded() => output file player module is"
812 " shutdown");
813 }
814}
815
816void Channel::RecordFileEnded(int32_t id) {
817 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
818 "Channel::RecordFileEnded(id=%d)", id);
819
820 assert(id == _outputFileRecorderId);
821
822 rtc::CritScope cs(&_fileCritSect);
823
824 _outputFileRecording = false;
825 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
826 "Channel::RecordFileEnded() => output file recorder module is"
827 " shutdown");
niklase@google.com470e71d2011-07-07 08:21:25 +0000828}
829
pbos@webrtc.org92135212013-05-14 08:31:39 +0000830Channel::Channel(int32_t channelId,
minyue@webrtc.orge509f942013-09-12 17:03:00 +0000831 uint32_t instanceId,
solenberg88499ec2016-09-07 07:34:41 -0700832 const VoEBase::ChannelConfig& config)
tommi31fc21f2016-01-21 10:37:37 -0800833 : _instanceId(instanceId),
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100834 _channelId(channelId),
ivoc14d5dbe2016-07-04 07:06:55 -0700835 event_log_proxy_(new RtcEventLogProxy()),
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100836 rtp_header_parser_(RtpHeaderParser::Create()),
magjed33c81d02016-11-24 11:08:39 -0800837 rtp_payload_registry_(
838 new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100839 rtp_receive_statistics_(
840 ReceiveStatistics::Create(Clock::GetRealTimeClock())),
841 rtp_receiver_(
842 RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100843 this,
844 this,
845 rtp_payload_registry_.get())),
danilchap799a9d02016-09-22 03:36:27 -0700846 telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100847 _outputAudioLevel(),
848 _externalTransport(false),
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100849 // Avoid conflict with other channels by adding 1024 - 1026,
850 // won't use as much as 1024 channels.
851 _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
852 _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
853 _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
854 _outputFileRecording(false),
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100855 _outputExternalMedia(false),
856 _inputExternalMediaCallbackPtr(NULL),
857 _outputExternalMediaCallbackPtr(NULL),
858 _timeStamp(0), // This is just an offset, RTP module will add it's own
859 // random offset
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100860 ntp_estimator_(Clock::GetRealTimeClock()),
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100861 playout_timestamp_rtp_(0),
862 playout_timestamp_rtcp_(0),
863 playout_delay_ms_(0),
864 _numberOfDiscardedPackets(0),
865 send_sequence_number_(0),
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100866 rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
867 capture_start_rtp_time_stamp_(-1),
868 capture_start_ntp_time_ms_(-1),
869 _engineStatisticsPtr(NULL),
870 _outputMixerPtr(NULL),
871 _transmitMixerPtr(NULL),
872 _moduleProcessThreadPtr(NULL),
873 _audioDeviceModulePtr(NULL),
874 _voiceEngineObserverPtr(NULL),
875 _callbackCritSectPtr(NULL),
876 _transportPtr(NULL),
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100877 _sendFrameType(0),
878 _externalMixing(false),
879 _mixFileWithMicrophone(false),
solenberg1c2af8e2016-03-24 10:36:00 -0700880 input_mute_(false),
881 previous_frame_muted_(false),
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100882 _panLeft(1.0f),
883 _panRight(1.0f),
884 _outputGain(1.0f),
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100885 _lastLocalTimeStamp(0),
886 _lastPayloadType(0),
887 _includeAudioLevelIndication(false),
888 _outputSpeechType(AudioFrame::kNormalSpeech),
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100889 restored_packet_in_use_(false),
890 rtcp_observer_(new VoERtcpObserver(this)),
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100891 associate_send_channel_(ChannelOwner(nullptr)),
solenberg88499ec2016-09-07 07:34:41 -0700892 pacing_enabled_(config.enable_voice_pacing),
stefanbba9dec2016-02-01 04:39:55 -0800893 feedback_observer_proxy_(new TransportFeedbackProxy()),
894 seq_num_allocator_proxy_(new TransportSequenceNumberProxy()),
ossu29b1a8d2016-06-13 07:34:51 -0700895 rtp_packet_sender_proxy_(new RtpPacketSenderProxy()),
Erik Språng737336d2016-07-29 12:59:36 +0200896 retransmission_rate_limiter_(new RateLimiter(Clock::GetRealTimeClock(),
897 kMaxRetransmissionWindowMs)),
solenberg88499ec2016-09-07 07:34:41 -0700898 decoder_factory_(config.acm_config.decoder_factory) {
kwiberg55b97fe2016-01-28 05:22:45 -0800899 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
900 "Channel::Channel() - ctor");
solenberg88499ec2016-09-07 07:34:41 -0700901 AudioCodingModule::Config acm_config(config.acm_config);
kwiberg55b97fe2016-01-28 05:22:45 -0800902 acm_config.id = VoEModuleId(instanceId, channelId);
henrik.lundina89ab962016-05-18 08:52:45 -0700903 acm_config.neteq_config.enable_muted_state = true;
kwiberg55b97fe2016-01-28 05:22:45 -0800904 audio_coding_.reset(AudioCodingModule::Create(acm_config));
Henrik Lundin64dad832015-05-11 12:44:23 +0200905
kwiberg55b97fe2016-01-28 05:22:45 -0800906 _outputAudioLevel.Clear();
niklase@google.com470e71d2011-07-07 08:21:25 +0000907
kwiberg55b97fe2016-01-28 05:22:45 -0800908 RtpRtcp::Configuration configuration;
909 configuration.audio = true;
910 configuration.outgoing_transport = this;
kwiberg55b97fe2016-01-28 05:22:45 -0800911 configuration.receive_statistics = rtp_receive_statistics_.get();
912 configuration.bandwidth_callback = rtcp_observer_.get();
stefanbba9dec2016-02-01 04:39:55 -0800913 if (pacing_enabled_) {
914 configuration.paced_sender = rtp_packet_sender_proxy_.get();
915 configuration.transport_sequence_number_allocator =
916 seq_num_allocator_proxy_.get();
917 configuration.transport_feedback_callback = feedback_observer_proxy_.get();
918 }
ivoc14d5dbe2016-07-04 07:06:55 -0700919 configuration.event_log = &(*event_log_proxy_);
Erik Språng737336d2016-07-29 12:59:36 +0200920 configuration.retransmission_rate_limiter =
921 retransmission_rate_limiter_.get();
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +0000922
kwiberg55b97fe2016-01-28 05:22:45 -0800923 _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
Peter Boström3dd5d1d2016-02-25 16:56:48 +0100924 _rtpRtcpModule->SetSendingMediaStatus(false);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000925
kwiberg55b97fe2016-01-28 05:22:45 -0800926 statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
927 rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
928 statistics_proxy_.get());
niklase@google.com470e71d2011-07-07 08:21:25 +0000929}
930
kwiberg55b97fe2016-01-28 05:22:45 -0800931Channel::~Channel() {
932 rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
933 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
934 "Channel::~Channel() - dtor");
niklase@google.com470e71d2011-07-07 08:21:25 +0000935
kwiberg55b97fe2016-01-28 05:22:45 -0800936 if (_outputExternalMedia) {
937 DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
938 }
939 if (channel_state_.Get().input_external_media) {
940 DeRegisterExternalMediaProcessing(kRecordingPerChannel);
941 }
942 StopSend();
943 StopPlayout();
niklase@google.com470e71d2011-07-07 08:21:25 +0000944
kwiberg55b97fe2016-01-28 05:22:45 -0800945 {
946 rtc::CritScope cs(&_fileCritSect);
kwiberg5a25d952016-08-17 07:31:12 -0700947 if (input_file_player_) {
948 input_file_player_->RegisterModuleFileCallback(NULL);
949 input_file_player_->StopPlayingFile();
niklase@google.com470e71d2011-07-07 08:21:25 +0000950 }
kwiberg5a25d952016-08-17 07:31:12 -0700951 if (output_file_player_) {
952 output_file_player_->RegisterModuleFileCallback(NULL);
953 output_file_player_->StopPlayingFile();
kwiberg55b97fe2016-01-28 05:22:45 -0800954 }
kwiberg5a25d952016-08-17 07:31:12 -0700955 if (output_file_recorder_) {
956 output_file_recorder_->RegisterModuleFileCallback(NULL);
957 output_file_recorder_->StopRecording();
kwiberg55b97fe2016-01-28 05:22:45 -0800958 }
959 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000960
kwiberg55b97fe2016-01-28 05:22:45 -0800961 // The order to safely shutdown modules in a channel is:
962 // 1. De-register callbacks in modules
963 // 2. De-register modules in process thread
964 // 3. Destroy modules
965 if (audio_coding_->RegisterTransportCallback(NULL) == -1) {
966 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
967 "~Channel() failed to de-register transport callback"
968 " (Audio coding module)");
969 }
970 if (audio_coding_->RegisterVADCallback(NULL) == -1) {
971 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
972 "~Channel() failed to de-register VAD callback"
973 " (Audio coding module)");
974 }
975 // De-register modules in process thread
976 _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());
tommi@webrtc.org3985f012015-02-27 13:36:34 +0000977
kwiberg55b97fe2016-01-28 05:22:45 -0800978 // End of modules shutdown
niklase@google.com470e71d2011-07-07 08:21:25 +0000979}
980
kwiberg55b97fe2016-01-28 05:22:45 -0800981int32_t Channel::Init() {
982 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
983 "Channel::Init()");
niklase@google.com470e71d2011-07-07 08:21:25 +0000984
kwiberg55b97fe2016-01-28 05:22:45 -0800985 channel_state_.Reset();
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +0000986
kwiberg55b97fe2016-01-28 05:22:45 -0800987 // --- Initial sanity
niklase@google.com470e71d2011-07-07 08:21:25 +0000988
kwiberg55b97fe2016-01-28 05:22:45 -0800989 if ((_engineStatisticsPtr == NULL) || (_moduleProcessThreadPtr == NULL)) {
990 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
991 "Channel::Init() must call SetEngineInformation() first");
992 return -1;
993 }
994
995 // --- Add modules to process thread (for periodic schedulation)
996
997 _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get());
998
999 // --- ACM initialization
1000
1001 if (audio_coding_->InitializeReceiver() == -1) {
1002 _engineStatisticsPtr->SetLastError(
1003 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1004 "Channel::Init() unable to initialize the ACM - 1");
1005 return -1;
1006 }
1007
1008 // --- RTP/RTCP module initialization
1009
1010 // Ensure that RTCP is enabled by default for the created channel.
1011 // Note that, the module will keep generating RTCP until it is explicitly
1012 // disabled by the user.
1013 // After StopListen (when no sockets exists), RTCP packets will no longer
1014 // be transmitted since the Transport object will then be invalid.
danilchap799a9d02016-09-22 03:36:27 -07001015 telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
kwiberg55b97fe2016-01-28 05:22:45 -08001016 // RTCP is enabled by default.
1017 _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
1018 // --- Register all permanent callbacks
1019 const bool fail = (audio_coding_->RegisterTransportCallback(this) == -1) ||
1020 (audio_coding_->RegisterVADCallback(this) == -1);
1021
1022 if (fail) {
1023 _engineStatisticsPtr->SetLastError(
1024 VE_CANNOT_INIT_CHANNEL, kTraceError,
1025 "Channel::Init() callbacks not registered");
1026 return -1;
1027 }
1028
1029 // --- Register all supported codecs to the receiving side of the
1030 // RTP/RTCP module
1031
1032 CodecInst codec;
1033 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
1034
1035 for (int idx = 0; idx < nSupportedCodecs; idx++) {
1036 // Open up the RTP/RTCP receiver for all supported codecs
1037 if ((audio_coding_->Codec(idx, &codec) == -1) ||
magjed56124bd2016-11-24 09:34:46 -08001038 (rtp_receiver_->RegisterReceivePayload(codec) == -1)) {
kwiberg55b97fe2016-01-28 05:22:45 -08001039 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
1040 "Channel::Init() unable to register %s "
1041 "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
1042 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1043 codec.rate);
1044 } else {
1045 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1046 "Channel::Init() %s (%d/%d/%" PRIuS
1047 "/%d) has been "
1048 "added to the RTP/RTCP receiver",
1049 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1050 codec.rate);
niklase@google.com470e71d2011-07-07 08:21:25 +00001051 }
1052
kwiberg55b97fe2016-01-28 05:22:45 -08001053 // Ensure that PCMU is used as default codec on the sending side
1054 if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1)) {
1055 SetSendCodec(codec);
niklase@google.com470e71d2011-07-07 08:21:25 +00001056 }
1057
kwiberg55b97fe2016-01-28 05:22:45 -08001058 // Register default PT for outband 'telephone-event'
1059 if (!STR_CASE_CMP(codec.plname, "telephone-event")) {
kwibergc8d071e2016-04-06 12:22:38 -07001060 if (_rtpRtcpModule->RegisterSendPayload(codec) == -1 ||
kwibergda2bf4e2016-10-24 13:47:09 -07001061 !audio_coding_->RegisterReceiveCodec(codec.pltype,
1062 CodecInstToSdp(codec))) {
kwiberg55b97fe2016-01-28 05:22:45 -08001063 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
1064 "Channel::Init() failed to register outband "
1065 "'telephone-event' (%d/%d) correctly",
1066 codec.pltype, codec.plfreq);
1067 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001068 }
1069
kwiberg55b97fe2016-01-28 05:22:45 -08001070 if (!STR_CASE_CMP(codec.plname, "CN")) {
kwibergc8d071e2016-04-06 12:22:38 -07001071 if (!codec_manager_.RegisterEncoder(codec) ||
1072 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get()) ||
kwibergda2bf4e2016-10-24 13:47:09 -07001073 !audio_coding_->RegisterReceiveCodec(codec.pltype,
1074 CodecInstToSdp(codec)) ||
kwibergc8d071e2016-04-06 12:22:38 -07001075 _rtpRtcpModule->RegisterSendPayload(codec) == -1) {
kwiberg55b97fe2016-01-28 05:22:45 -08001076 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
1077 "Channel::Init() failed to register CN (%d/%d) "
1078 "correctly - 1",
1079 codec.pltype, codec.plfreq);
1080 }
1081 }
kwiberg55b97fe2016-01-28 05:22:45 -08001082 }
pwestin@webrtc.org684f0572013-03-13 23:20:57 +00001083
kwiberg55b97fe2016-01-28 05:22:45 -08001084 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001085}
1086
kwiberg55b97fe2016-01-28 05:22:45 -08001087int32_t Channel::SetEngineInformation(Statistics& engineStatistics,
1088 OutputMixer& outputMixer,
1089 voe::TransmitMixer& transmitMixer,
1090 ProcessThread& moduleProcessThread,
1091 AudioDeviceModule& audioDeviceModule,
1092 VoiceEngineObserver* voiceEngineObserver,
1093 rtc::CriticalSection* callbackCritSect) {
1094 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1095 "Channel::SetEngineInformation()");
1096 _engineStatisticsPtr = &engineStatistics;
1097 _outputMixerPtr = &outputMixer;
1098 _transmitMixerPtr = &transmitMixer,
1099 _moduleProcessThreadPtr = &moduleProcessThread;
1100 _audioDeviceModulePtr = &audioDeviceModule;
1101 _voiceEngineObserverPtr = voiceEngineObserver;
1102 _callbackCritSectPtr = callbackCritSect;
1103 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001104}
1105
kwiberg55b97fe2016-01-28 05:22:45 -08001106int32_t Channel::UpdateLocalTimeStamp() {
1107 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
1108 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001109}
1110
kwibergb7f89d62016-02-17 10:04:18 -08001111void Channel::SetSink(std::unique_ptr<AudioSinkInterface> sink) {
tommi31fc21f2016-01-21 10:37:37 -08001112 rtc::CritScope cs(&_callbackCritSect);
deadbeef2d110be2016-01-13 12:00:26 -08001113 audio_sink_ = std::move(sink);
Tommif888bb52015-12-12 01:37:01 +01001114}
1115
ossu29b1a8d2016-06-13 07:34:51 -07001116const rtc::scoped_refptr<AudioDecoderFactory>&
1117Channel::GetAudioDecoderFactory() const {
1118 return decoder_factory_;
1119}
1120
kwiberg55b97fe2016-01-28 05:22:45 -08001121int32_t Channel::StartPlayout() {
1122 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1123 "Channel::StartPlayout()");
1124 if (channel_state_.Get().playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001125 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001126 }
1127
1128 if (!_externalMixing) {
1129 // Add participant as candidates for mixing.
1130 if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
1131 _engineStatisticsPtr->SetLastError(
1132 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1133 "StartPlayout() failed to add participant to mixer");
1134 return -1;
1135 }
1136 }
1137
1138 channel_state_.SetPlaying(true);
1139 if (RegisterFilePlayingToMixer() != 0)
1140 return -1;
1141
1142 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001143}
1144
kwiberg55b97fe2016-01-28 05:22:45 -08001145int32_t Channel::StopPlayout() {
1146 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1147 "Channel::StopPlayout()");
1148 if (!channel_state_.Get().playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001149 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001150 }
1151
1152 if (!_externalMixing) {
1153 // Remove participant as candidates for mixing
1154 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
1155 _engineStatisticsPtr->SetLastError(
1156 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1157 "StopPlayout() failed to remove participant from mixer");
1158 return -1;
1159 }
1160 }
1161
1162 channel_state_.SetPlaying(false);
1163 _outputAudioLevel.Clear();
1164
1165 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001166}
1167
kwiberg55b97fe2016-01-28 05:22:45 -08001168int32_t Channel::StartSend() {
1169 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1170 "Channel::StartSend()");
1171 // Resume the previous sequence number which was reset by StopSend().
1172 // This needs to be done before |sending| is set to true.
1173 if (send_sequence_number_)
1174 SetInitSequenceNumber(send_sequence_number_);
xians@webrtc.org09e8c472013-07-31 16:30:19 +00001175
kwiberg55b97fe2016-01-28 05:22:45 -08001176 if (channel_state_.Get().sending) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001177 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001178 }
1179 channel_state_.SetSending(true);
niklase@google.com470e71d2011-07-07 08:21:25 +00001180
Peter Boström3dd5d1d2016-02-25 16:56:48 +01001181 _rtpRtcpModule->SetSendingMediaStatus(true);
kwiberg55b97fe2016-01-28 05:22:45 -08001182 if (_rtpRtcpModule->SetSendingStatus(true) != 0) {
1183 _engineStatisticsPtr->SetLastError(
1184 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1185 "StartSend() RTP/RTCP failed to start sending");
Peter Boström3dd5d1d2016-02-25 16:56:48 +01001186 _rtpRtcpModule->SetSendingMediaStatus(false);
kwiberg55b97fe2016-01-28 05:22:45 -08001187 rtc::CritScope cs(&_callbackCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001188 channel_state_.SetSending(false);
kwiberg55b97fe2016-01-28 05:22:45 -08001189 return -1;
1190 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001191
kwiberg55b97fe2016-01-28 05:22:45 -08001192 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001193}
1194
kwiberg55b97fe2016-01-28 05:22:45 -08001195int32_t Channel::StopSend() {
1196 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1197 "Channel::StopSend()");
1198 if (!channel_state_.Get().sending) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001199 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001200 }
1201 channel_state_.SetSending(false);
1202
1203 // Store the sequence number to be able to pick up the same sequence for
1204 // the next StartSend(). This is needed for restarting device, otherwise
1205 // it might cause libSRTP to complain about packets being replayed.
1206 // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1207 // CL is landed. See issue
1208 // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1209 send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1210
1211 // Reset sending SSRC and sequence number and triggers direct transmission
1212 // of RTCP BYE
1213 if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
1214 _engineStatisticsPtr->SetLastError(
1215 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1216 "StartSend() RTP/RTCP failed to stop sending");
1217 }
Peter Boström3dd5d1d2016-02-25 16:56:48 +01001218 _rtpRtcpModule->SetSendingMediaStatus(false);
kwiberg55b97fe2016-01-28 05:22:45 -08001219
1220 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001221}
1222
solenberge566ac72016-10-31 12:52:33 -07001223void Channel::ResetDiscardedPacketCount() {
kwiberg55b97fe2016-01-28 05:22:45 -08001224 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
solenberge566ac72016-10-31 12:52:33 -07001225 "Channel::ResetDiscardedPacketCount()");
kwiberg55b97fe2016-01-28 05:22:45 -08001226 _numberOfDiscardedPackets = 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001227}
1228
kwiberg55b97fe2016-01-28 05:22:45 -08001229int32_t Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
1230 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1231 "Channel::RegisterVoiceEngineObserver()");
1232 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001233
kwiberg55b97fe2016-01-28 05:22:45 -08001234 if (_voiceEngineObserverPtr) {
1235 _engineStatisticsPtr->SetLastError(
1236 VE_INVALID_OPERATION, kTraceError,
1237 "RegisterVoiceEngineObserver() observer already enabled");
1238 return -1;
1239 }
1240 _voiceEngineObserverPtr = &observer;
1241 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001242}
1243
kwiberg55b97fe2016-01-28 05:22:45 -08001244int32_t Channel::DeRegisterVoiceEngineObserver() {
1245 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1246 "Channel::DeRegisterVoiceEngineObserver()");
1247 rtc::CritScope cs(&_callbackCritSect);
1248
1249 if (!_voiceEngineObserverPtr) {
1250 _engineStatisticsPtr->SetLastError(
1251 VE_INVALID_OPERATION, kTraceWarning,
1252 "DeRegisterVoiceEngineObserver() observer already disabled");
1253 return 0;
1254 }
1255 _voiceEngineObserverPtr = NULL;
1256 return 0;
1257}
1258
1259int32_t Channel::GetSendCodec(CodecInst& codec) {
kwibergc8d071e2016-04-06 12:22:38 -07001260 auto send_codec = codec_manager_.GetCodecInst();
kwiberg1fd4a4a2015-11-03 11:20:50 -08001261 if (send_codec) {
1262 codec = *send_codec;
1263 return 0;
1264 }
1265 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001266}
1267
kwiberg55b97fe2016-01-28 05:22:45 -08001268int32_t Channel::GetRecCodec(CodecInst& codec) {
1269 return (audio_coding_->ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001270}
1271
kwiberg55b97fe2016-01-28 05:22:45 -08001272int32_t Channel::SetSendCodec(const CodecInst& codec) {
1273 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1274 "Channel::SetSendCodec()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001275
kwibergc8d071e2016-04-06 12:22:38 -07001276 if (!codec_manager_.RegisterEncoder(codec) ||
1277 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001278 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1279 "SetSendCodec() failed to register codec to ACM");
1280 return -1;
1281 }
1282
1283 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1284 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1285 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1286 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1287 "SetSendCodec() failed to register codec to"
1288 " RTP/RTCP module");
1289 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001290 }
kwiberg55b97fe2016-01-28 05:22:45 -08001291 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001292
kwiberg55b97fe2016-01-28 05:22:45 -08001293 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0) {
1294 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1295 "SetSendCodec() failed to set audio packet size");
1296 return -1;
1297 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001298
kwiberg55b97fe2016-01-28 05:22:45 -08001299 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001300}
1301
Ivo Creusenadf89b72015-04-29 16:03:33 +02001302void Channel::SetBitRate(int bitrate_bps) {
1303 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1304 "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps);
minyue7e304322016-10-12 05:00:55 -07001305 audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
1306 if (*encoder)
1307 (*encoder)->OnReceivedTargetAudioBitrate(bitrate_bps);
1308 });
Erik Språng737336d2016-07-29 12:59:36 +02001309 retransmission_rate_limiter_->SetMaxRate(bitrate_bps);
Ivo Creusenadf89b72015-04-29 16:03:33 +02001310}
1311
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001312void Channel::OnIncomingFractionLoss(int fraction_lost) {
minyue7e304322016-10-12 05:00:55 -07001313 audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
1314 if (*encoder)
1315 (*encoder)->OnReceivedUplinkPacketLossFraction(fraction_lost / 255.0f);
1316 });
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001317}
1318
kwiberg55b97fe2016-01-28 05:22:45 -08001319int32_t Channel::SetVADStatus(bool enableVAD,
1320 ACMVADMode mode,
1321 bool disableDTX) {
1322 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1323 "Channel::SetVADStatus(mode=%d)", mode);
kwibergc8d071e2016-04-06 12:22:38 -07001324 RTC_DCHECK(!(disableDTX && enableVAD)); // disableDTX mode is deprecated.
1325 if (!codec_manager_.SetVAD(enableVAD, mode) ||
1326 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001327 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1328 kTraceError,
1329 "SetVADStatus() failed to set VAD");
1330 return -1;
1331 }
1332 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001333}
1334
kwiberg55b97fe2016-01-28 05:22:45 -08001335int32_t Channel::GetVADStatus(bool& enabledVAD,
1336 ACMVADMode& mode,
1337 bool& disabledDTX) {
kwibergc8d071e2016-04-06 12:22:38 -07001338 const auto* params = codec_manager_.GetStackParams();
1339 enabledVAD = params->use_cng;
1340 mode = params->vad_mode;
1341 disabledDTX = !params->use_cng;
kwiberg55b97fe2016-01-28 05:22:45 -08001342 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001343}
1344
// Sets (or, when codec.pltype == -1, clears) the receive payload type mapping
// for |codec| in both the RTP receiver and the ACM. Not allowed while the
// channel is playing. Returns 0 on success, -1 on failure (error recorded via
// _engineStatisticsPtr).
int32_t Channel::SetRecPayloadType(const CodecInst& codec) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRecPayloadType()");

  if (channel_state_.Get().playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "SetRecPayloadType() unable to set PT while playing");
    return -1;
  }

  if (codec.pltype == -1) {
    // De-register the selected codec (RTP/RTCP module and ACM)

    int8_t pltype(-1);
    CodecInst rxCodec = codec;

    // Get payload type for the given codec
    // NOTE(review): the return value of ReceivePayloadType is ignored here;
    // on failure pltype presumably stays -1 and the de-registration below
    // fails — confirm this is the intended error path.
    rtp_payload_registry_->ReceivePayloadType(rxCodec, &pltype);
    rxCodec.pltype = pltype;

    if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module deregistration "
          "failed");
      return -1;
    }
    // RTP side is already de-registered at this point; if the ACM side fails
    // the two modules are left inconsistent.
    if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM deregistration failed - 1");
      return -1;
    }
    return 0;
  }

  if (rtp_receiver_->RegisterReceivePayload(codec) != 0) {
    // First attempt to register failed => de-register and try again
    // TODO(kwiberg): Retrying is probably not necessary, since
    // AcmReceiver::AddCodec also retries.
    rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
    if (rtp_receiver_->RegisterReceivePayload(codec) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module registration failed");
      return -1;
    }
  }
  // Same unregister-then-retry pattern for the ACM side.
  if (!audio_coding_->RegisterReceiveCodec(codec.pltype,
                                           CodecInstToSdp(codec))) {
    audio_coding_->UnregisterReceiveCodec(codec.pltype);
    if (!audio_coding_->RegisterReceiveCodec(codec.pltype,
                                             CodecInstToSdp(codec))) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM registration failed - 1");
      return -1;
    }
  }
  return 0;
}
1407
kwiberg55b97fe2016-01-28 05:22:45 -08001408int32_t Channel::GetRecPayloadType(CodecInst& codec) {
1409 int8_t payloadType(-1);
magjed56124bd2016-11-24 09:34:46 -08001410 if (rtp_payload_registry_->ReceivePayloadType(codec, &payloadType) != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001411 _engineStatisticsPtr->SetLastError(
1412 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1413 "GetRecPayloadType() failed to retrieve RX payload type");
1414 return -1;
1415 }
1416 codec.pltype = payloadType;
1417 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001418}
1419
// Registers a comfort-noise (CN) send codec with the given dynamic payload
// type, for the 16 kHz or 32 kHz band. Returns 0 on success, -1 on failure.
int32_t Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendCNPayloadType()");

  CodecInst codec;
  int32_t samplingFreqHz(-1);
  const size_t kMono = 1;
  // Only 16 kHz and 32 kHz are mapped; any other frequency leaves
  // samplingFreqHz at -1 and the Codec() lookup below fails.
  if (frequency == kFreq32000Hz)
    samplingFreqHz = 32000;
  else if (frequency == kFreq16000Hz)
    samplingFreqHz = 16000;

  if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSendCNPayloadType() failed to retrieve default CN codec "
        "settings");
    return -1;
  }

  // Modify the payload type (must be set to dynamic range)
  codec.pltype = type;

  // Register with the ACM first; only then with the RTP/RTCP module.
  if (!codec_manager_.RegisterEncoder(codec) ||
      !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSendCNPayloadType() failed to register CN to ACM");
    return -1;
  }

  // On first failure, de-register the payload type and retry once.
  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
          "module");
      return -1;
    }
  }
  return 0;
}
1463
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001464int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001465 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001466 "Channel::SetOpusMaxPlaybackRate()");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001467
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001468 if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001469 _engineStatisticsPtr->SetLastError(
1470 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001471 "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001472 return -1;
1473 }
1474 return 0;
1475}
1476
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001477int Channel::SetOpusDtx(bool enable_dtx) {
1478 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1479 "Channel::SetOpusDtx(%d)", enable_dtx);
Minyue Li092041c2015-05-11 12:19:35 +02001480 int ret = enable_dtx ? audio_coding_->EnableOpusDtx()
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001481 : audio_coding_->DisableOpusDtx();
1482 if (ret != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001483 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1484 kTraceError, "SetOpusDtx() failed");
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001485 return -1;
1486 }
1487 return 0;
1488}
1489
ivoc85228d62016-07-27 04:53:47 -07001490int Channel::GetOpusDtx(bool* enabled) {
1491 int success = -1;
1492 audio_coding_->QueryEncoder([&](AudioEncoder const* encoder) {
1493 if (encoder) {
1494 *enabled = encoder->GetDtx();
1495 success = 0;
1496 }
1497 });
1498 return success;
1499}
1500
minyue7e304322016-10-12 05:00:55 -07001501bool Channel::EnableAudioNetworkAdaptor(const std::string& config_string) {
1502 bool success = false;
1503 audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
1504 if (*encoder) {
1505 success = (*encoder)->EnableAudioNetworkAdaptor(
1506 config_string, Clock::GetRealTimeClock());
1507 }
1508 });
1509 return success;
1510}
1511
1512void Channel::DisableAudioNetworkAdaptor() {
1513 audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
1514 if (*encoder)
1515 (*encoder)->DisableAudioNetworkAdaptor();
1516 });
1517}
1518
1519void Channel::SetReceiverFrameLengthRange(int min_frame_length_ms,
1520 int max_frame_length_ms) {
1521 audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
1522 if (*encoder) {
1523 (*encoder)->SetReceiverFrameLengthRange(min_frame_length_ms,
1524 max_frame_length_ms);
1525 }
1526 });
1527}
1528
mflodman3d7db262016-04-29 00:57:13 -07001529int32_t Channel::RegisterExternalTransport(Transport* transport) {
kwiberg55b97fe2016-01-28 05:22:45 -08001530 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00001531 "Channel::RegisterExternalTransport()");
1532
kwiberg55b97fe2016-01-28 05:22:45 -08001533 rtc::CritScope cs(&_callbackCritSect);
kwiberg55b97fe2016-01-28 05:22:45 -08001534 if (_externalTransport) {
1535 _engineStatisticsPtr->SetLastError(
1536 VE_INVALID_OPERATION, kTraceError,
1537 "RegisterExternalTransport() external transport already enabled");
1538 return -1;
1539 }
1540 _externalTransport = true;
mflodman3d7db262016-04-29 00:57:13 -07001541 _transportPtr = transport;
kwiberg55b97fe2016-01-28 05:22:45 -08001542 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001543}
1544
kwiberg55b97fe2016-01-28 05:22:45 -08001545int32_t Channel::DeRegisterExternalTransport() {
1546 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1547 "Channel::DeRegisterExternalTransport()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001548
kwiberg55b97fe2016-01-28 05:22:45 -08001549 rtc::CritScope cs(&_callbackCritSect);
mflodman3d7db262016-04-29 00:57:13 -07001550 if (_transportPtr) {
1551 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1552 "DeRegisterExternalTransport() all transport is disabled");
1553 } else {
kwiberg55b97fe2016-01-28 05:22:45 -08001554 _engineStatisticsPtr->SetLastError(
1555 VE_INVALID_OPERATION, kTraceWarning,
1556 "DeRegisterExternalTransport() external transport already "
1557 "disabled");
kwiberg55b97fe2016-01-28 05:22:45 -08001558 }
1559 _externalTransport = false;
1560 _transportPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08001561 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001562}
1563
// Entry point for an incoming RTP packet. Parses the header, updates receive
// statistics, and hands the packet on to ReceivePacket(). Returns 0 on
// success, -1 on a parse/lookup/delivery failure.
// NOTE(review): |packet_time| is not used in this function body.
int32_t Channel::ReceivedRTPPacket(const uint8_t* received_packet,
                                   size_t length,
                                   const PacketTime& packet_time) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ReceivedRTPPacket()");

  // Store playout timestamp for the received RTP packet
  UpdatePlayoutTimestamp(false);

  RTPHeader header;
  if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Incoming packet: invalid RTP header");
    return -1;
  }
  // Resolve the clock rate for this payload type; a negative value means the
  // payload type is unknown and the packet is dropped.
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return -1;
  // Order matters: in_order must be computed before IncomingPacket() updates
  // the per-SSRC statistics it is derived from.
  bool in_order = IsPacketInOrder(header);
  rtp_receive_statistics_->IncomingPacket(
      header, length, IsPacketRetransmitted(header, in_order));
  rtp_payload_registry_->SetIncomingPayloadType(header);

  return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
}
1590
1591bool Channel::ReceivePacket(const uint8_t* packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001592 size_t packet_length,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001593 const RTPHeader& header,
1594 bool in_order) {
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001595 if (rtp_payload_registry_->IsRtx(header)) {
1596 return HandleRtxPacket(packet, packet_length, header);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001597 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001598 const uint8_t* payload = packet + header.headerLength;
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001599 assert(packet_length >= header.headerLength);
1600 size_t payload_length = packet_length - header.headerLength;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001601 PayloadUnion payload_specific;
1602 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001603 &payload_specific)) {
1604 return false;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001605 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001606 return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
1607 payload_specific, in_order);
1608}
1609
// Recovers the original media packet from an RTX retransmission packet and
// re-injects it via OnRecoveredPacket(). Returns true when the recovered
// packet was accepted.
bool Channel::HandleRtxPacket(const uint8_t* packet,
                              size_t packet_length,
                              const RTPHeader& header) {
  if (!rtp_payload_registry_->IsRtx(header))
    return false;

  // Remove the RTX header and parse the original RTP header.
  if (packet_length < header.headerLength)
    return false;
  if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
    return false;
  // Re-entrancy guard: restored_packet_ is a single shared buffer, so a
  // recovered packet that itself looks like RTX must not recurse into it.
  if (restored_packet_in_use_) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Multiple RTX headers detected, dropping packet");
    return false;
  }
  // RestoreOriginalPacket rewrites packet_length in place to the length of
  // the de-RTX'd packet.
  if (!rtp_payload_registry_->RestoreOriginalPacket(
          restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
          header)) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Incoming RTX packet: invalid RTP header");
    return false;
  }
  // Mark the buffer busy for the duration of the recursive delivery.
  restored_packet_in_use_ = true;
  bool ret = OnRecoveredPacket(restored_packet_, packet_length);
  restored_packet_in_use_ = false;
  return ret;
}
1638
1639bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1640 StreamStatistician* statistician =
1641 rtp_receive_statistics_->GetStatistician(header.ssrc);
1642 if (!statistician)
1643 return false;
1644 return statistician->IsPacketInOrder(header.sequenceNumber);
niklase@google.com470e71d2011-07-07 08:21:25 +00001645}
1646
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001647bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1648 bool in_order) const {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001649 // Retransmissions are handled separately if RTX is enabled.
1650 if (rtp_payload_registry_->RtxEnabled())
1651 return false;
1652 StreamStatistician* statistician =
1653 rtp_receive_statistics_->GetStatistician(header.ssrc);
1654 if (!statistician)
1655 return false;
1656 // Check if this is a retransmission.
pkasting@chromium.org16825b12015-01-12 21:51:21 +00001657 int64_t min_rtt = 0;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001658 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
kwiberg55b97fe2016-01-28 05:22:45 -08001659 return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001660}
1661
// Entry point for an incoming RTCP packet. Feeds it to the RTP/RTCP module,
// then uses the resulting RTT to update the NACK retransmission window, the
// encoder's RTT estimate, and the NTP clock estimator. Always returns 0.
int32_t Channel::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ReceivedRTCPPacket()");
  // Store playout timestamp for the received RTCP packet
  UpdatePlayoutTimestamp(true);

  // Deliver RTCP packet to RTP/RTCP module for parsing
  // A malformed packet only logs a warning; processing continues.
  if (_rtpRtcpModule->IncomingRtcpPacket(data, length) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
        "Channel::IncomingRTPPacket() RTCP packet is invalid");
  }

  int64_t rtt = GetRTT(true);
  if (rtt == 0) {
    // Waiting for valid RTT.
    return 0;
  }

  // Clamp the NACK window to [kMinRetransmissionWindowMs,
  // kMaxRetransmissionWindowMs] before applying it to the rate limiter.
  int64_t nack_window_ms = rtt;
  if (nack_window_ms < kMinRetransmissionWindowMs) {
    nack_window_ms = kMinRetransmissionWindowMs;
  } else if (nack_window_ms > kMaxRetransmissionWindowMs) {
    nack_window_ms = kMaxRetransmissionWindowMs;
  }
  retransmission_rate_limiter_->SetWindowSize(nack_window_ms);

  // Invoke audio encoders OnReceivedRtt().
  audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
    if (*encoder)
      (*encoder)->OnReceivedRtt(rtt);
  });

  uint32_t ntp_secs = 0;
  uint32_t ntp_frac = 0;
  uint32_t rtp_timestamp = 0;
  if (0 !=
      _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
                                &rtp_timestamp)) {
    // Waiting for RTCP.
    return 0;
  }

  {
    // ts_stats_lock_ guards the NTP estimator.
    rtc::CritScope lock(&ts_stats_lock_);
    ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
  }
  return 0;
}
1711
// Starts playing a file (by name) into this channel's local output. Creates
// a FilePlayer under _fileCritSect, then registers the channel with the
// output mixer OUTSIDE the lock (see RegisterFilePlayingToMixer for the
// deadlock rationale). Returns 0 on success, -1 on failure.
int Channel::StartPlayingFileLocally(const char* fileName,
                                     bool loop,
                                     FileFormats format,
                                     int startPosition,
                                     float volumeScaling,
                                     int stopPosition,
                                     const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
               " format=%d, volumeScaling=%5.3f, startPosition=%d, "
               "stopPosition=%d)",
               fileName, loop, format, volumeScaling, startPosition,
               stopPosition);

  if (channel_state_.Get().output_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "StartPlayingFileLocally() is already playing");
    return -1;
  }

  {
    rtc::CritScope cs(&_fileCritSect);

    // Replace any previous player instance.
    if (output_file_player_) {
      output_file_player_->RegisterModuleFileCallback(NULL);
      output_file_player_.reset();
    }

    output_file_player_ = FilePlayer::CreateFilePlayer(
        _outputFilePlayerId, (const FileFormats)format);

    if (!output_file_player_) {
      _engineStatisticsPtr->SetLastError(
          VE_INVALID_ARGUMENT, kTraceError,
          "StartPlayingFileLocally() filePlayer format is not correct");
      return -1;
    }

    const uint32_t notificationTime(0);

    if (output_file_player_->StartPlayingFile(
            fileName, loop, startPosition, volumeScaling, notificationTime,
            stopPosition, (const CodecInst*)codecInst) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_BAD_FILE, kTraceError,
          "StartPlayingFile() failed to start file playout");
      output_file_player_->StopPlayingFile();
      output_file_player_.reset();
      return -1;
    }
    output_file_player_->RegisterModuleFileCallback(this);
    channel_state_.SetOutputFilePlaying(true);
  }
  // Mixer registration must happen after _fileCritSect is released.
  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}
1772
1773int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001774 FileFormats format,
1775 int startPosition,
1776 float volumeScaling,
1777 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001778 const CodecInst* codecInst) {
1779 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1780 "Channel::StartPlayingFileLocally(format=%d,"
1781 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1782 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001783
kwiberg55b97fe2016-01-28 05:22:45 -08001784 if (stream == NULL) {
1785 _engineStatisticsPtr->SetLastError(
1786 VE_BAD_FILE, kTraceError,
1787 "StartPlayingFileLocally() NULL as input stream");
1788 return -1;
1789 }
1790
1791 if (channel_state_.Get().output_file_playing) {
1792 _engineStatisticsPtr->SetLastError(
1793 VE_ALREADY_PLAYING, kTraceError,
1794 "StartPlayingFileLocally() is already playing");
1795 return -1;
1796 }
1797
1798 {
1799 rtc::CritScope cs(&_fileCritSect);
1800
1801 // Destroy the old instance
kwiberg5a25d952016-08-17 07:31:12 -07001802 if (output_file_player_) {
1803 output_file_player_->RegisterModuleFileCallback(NULL);
1804 output_file_player_.reset();
niklase@google.com470e71d2011-07-07 08:21:25 +00001805 }
1806
kwiberg55b97fe2016-01-28 05:22:45 -08001807 // Create the instance
kwiberg5b356f42016-09-08 04:32:33 -07001808 output_file_player_ = FilePlayer::CreateFilePlayer(
kwiberg55b97fe2016-01-28 05:22:45 -08001809 _outputFilePlayerId, (const FileFormats)format);
niklase@google.com470e71d2011-07-07 08:21:25 +00001810
kwiberg5a25d952016-08-17 07:31:12 -07001811 if (!output_file_player_) {
kwiberg55b97fe2016-01-28 05:22:45 -08001812 _engineStatisticsPtr->SetLastError(
1813 VE_INVALID_ARGUMENT, kTraceError,
1814 "StartPlayingFileLocally() filePlayer format isnot correct");
1815 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001816 }
1817
kwiberg55b97fe2016-01-28 05:22:45 -08001818 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001819
kwiberg4ec01d92016-08-22 08:43:54 -07001820 if (output_file_player_->StartPlayingFile(stream, startPosition,
kwiberg5a25d952016-08-17 07:31:12 -07001821 volumeScaling, notificationTime,
1822 stopPosition, codecInst) != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001823 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1824 "StartPlayingFile() failed to "
1825 "start file playout");
kwiberg5a25d952016-08-17 07:31:12 -07001826 output_file_player_->StopPlayingFile();
1827 output_file_player_.reset();
kwiberg55b97fe2016-01-28 05:22:45 -08001828 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001829 }
kwiberg5a25d952016-08-17 07:31:12 -07001830 output_file_player_->RegisterModuleFileCallback(this);
kwiberg55b97fe2016-01-28 05:22:45 -08001831 channel_state_.SetOutputFilePlaying(true);
1832 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001833
kwiberg55b97fe2016-01-28 05:22:45 -08001834 if (RegisterFilePlayingToMixer() != 0)
1835 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001836
kwiberg55b97fe2016-01-28 05:22:45 -08001837 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001838}
1839
// Stops local file playout and removes this channel from the output mixer.
// No-op (returns 0) when nothing is playing. The player is stopped and
// destroyed under _fileCritSect; the mixer call happens after the lock is
// released (see comment below). Returns 0 on success, -1 on failure.
int Channel::StopPlayingFileLocally() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayingFileLocally()");

  if (!channel_state_.Get().output_file_playing) {
    return 0;
  }

  {
    rtc::CritScope cs(&_fileCritSect);

    if (output_file_player_->StopPlayingFile() != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_STOP_RECORDING_FAILED, kTraceError,
          "StopPlayingFile() could not stop playing");
      return -1;
    }
    output_file_player_->RegisterModuleFileCallback(NULL);
    output_file_player_.reset();
    channel_state_.SetOutputFilePlaying(false);
  }
  // _fileCritSect cannot be taken while calling
  // SetAnonymousMixibilityStatus. Refer to comments in
  // StartPlayingFileLocally(const char* ...) for more details.
  if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
        "StopPlayingFile() failed to stop participant from playing as"
        "file in the mixer");
    return -1;
  }

  return 0;
}
1874
kwiberg55b97fe2016-01-28 05:22:45 -08001875int Channel::IsPlayingFileLocally() const {
1876 return channel_state_.Get().output_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001877}
1878
// Registers this channel as an anonymous participant in the output mixer so
// its file playout is heard. Deliberately called WITHOUT _fileCritSect held
// (see comment below). Returns 0 on success or when registration is not yet
// needed, -1 on failure (and rolls back the playing state).
int Channel::RegisterFilePlayingToMixer() {
  // Return success for not registering for file playing to mixer if:
  // 1. playing file before playout is started on that channel.
  // 2. starting playout without file playing on that channel.
  if (!channel_state_.Get().playing ||
      !channel_state_.Get().output_file_playing) {
    return 0;
  }

  // |_fileCritSect| cannot be taken while calling
  // SetAnonymousMixabilityStatus() since as soon as the participant is added
  // frames can be pulled by the mixer. Since the frames are generated from
  // the file, _fileCritSect will be taken. This would result in a deadlock.
  if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) {
    // Roll back: state flag first, then tear down the player under the lock.
    channel_state_.SetOutputFilePlaying(false);
    rtc::CritScope cs(&_fileCritSect);
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
        "StartPlayingFile() failed to add participant as file to mixer");
    output_file_player_->StopPlayingFile();
    output_file_player_.reset();
    return -1;
  }

  return 0;
}
1905
niklase@google.com470e71d2011-07-07 08:21:25 +00001906int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001907 bool loop,
1908 FileFormats format,
1909 int startPosition,
1910 float volumeScaling,
1911 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001912 const CodecInst* codecInst) {
1913 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1914 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
1915 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
1916 "stopPosition=%d)",
1917 fileName, loop, format, volumeScaling, startPosition,
1918 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001919
kwiberg55b97fe2016-01-28 05:22:45 -08001920 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001921
kwiberg55b97fe2016-01-28 05:22:45 -08001922 if (channel_state_.Get().input_file_playing) {
1923 _engineStatisticsPtr->SetLastError(
1924 VE_ALREADY_PLAYING, kTraceWarning,
1925 "StartPlayingFileAsMicrophone() filePlayer is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00001926 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001927 }
1928
1929 // Destroy the old instance
kwiberg5a25d952016-08-17 07:31:12 -07001930 if (input_file_player_) {
1931 input_file_player_->RegisterModuleFileCallback(NULL);
1932 input_file_player_.reset();
kwiberg55b97fe2016-01-28 05:22:45 -08001933 }
1934
1935 // Create the instance
kwiberg5b356f42016-09-08 04:32:33 -07001936 input_file_player_ = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
kwiberg5a25d952016-08-17 07:31:12 -07001937 (const FileFormats)format);
kwiberg55b97fe2016-01-28 05:22:45 -08001938
kwiberg5a25d952016-08-17 07:31:12 -07001939 if (!input_file_player_) {
kwiberg55b97fe2016-01-28 05:22:45 -08001940 _engineStatisticsPtr->SetLastError(
1941 VE_INVALID_ARGUMENT, kTraceError,
1942 "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
1943 return -1;
1944 }
1945
1946 const uint32_t notificationTime(0);
1947
kwiberg5a25d952016-08-17 07:31:12 -07001948 if (input_file_player_->StartPlayingFile(
kwiberg55b97fe2016-01-28 05:22:45 -08001949 fileName, loop, startPosition, volumeScaling, notificationTime,
1950 stopPosition, (const CodecInst*)codecInst) != 0) {
1951 _engineStatisticsPtr->SetLastError(
1952 VE_BAD_FILE, kTraceError,
1953 "StartPlayingFile() failed to start file playout");
kwiberg5a25d952016-08-17 07:31:12 -07001954 input_file_player_->StopPlayingFile();
1955 input_file_player_.reset();
kwiberg55b97fe2016-01-28 05:22:45 -08001956 return -1;
1957 }
kwiberg5a25d952016-08-17 07:31:12 -07001958 input_file_player_->RegisterModuleFileCallback(this);
kwiberg55b97fe2016-01-28 05:22:45 -08001959 channel_state_.SetInputFilePlaying(true);
1960
1961 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001962}
1963
// Starts playing audio from |stream| into the capture ("microphone") path.
// Returns 0 on success (also when a file is already playing, which is treated
// as success after recording a warning), -1 on failure with the reason stored
// via |_engineStatisticsPtr|.
int Channel::StartPlayingFileAsMicrophone(InStream* stream,
                                          FileFormats format,
                                          int startPosition,
                                          float volumeScaling,
                                          int stopPosition,
                                          const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileAsMicrophone(format=%d, "
               "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
               format, volumeScaling, startPosition, stopPosition);

  // Reject a NULL stream before touching any player state.
  if (stream == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFileAsMicrophone NULL as input stream");
    return -1;
  }

  // input_file_player_ and the input-file-playing state are guarded by
  // _fileCritSect for the remainder of this function.
  rtc::CritScope cs(&_fileCritSect);

  if (channel_state_.Get().input_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceWarning,
        "StartPlayingFileAsMicrophone() is playing");
    return 0;
  }

  // Destroy the old instance
  if (input_file_player_) {
    input_file_player_->RegisterModuleFileCallback(NULL);
    input_file_player_.reset();
  }

  // Create the instance
  input_file_player_ = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
                                                    (const FileFormats)format);

  // A null player here indicates the requested format was rejected by the
  // factory (see error text below).
  if (!input_file_player_) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartPlayingInputFile() filePlayer format isnot correct");
    return -1;
  }

  const uint32_t notificationTime(0);  // Playout notifications not requested.

  if (input_file_player_->StartPlayingFile(stream, startPosition, volumeScaling,
                                           notificationTime, stopPosition,
                                           codecInst) != 0) {
    _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                       "StartPlayingFile() failed to start "
                                       "file playout");
    // Roll back: stop and release the partially-started player so the
    // channel is left in the "not playing" state.
    input_file_player_->StopPlayingFile();
    input_file_player_.reset();
    return -1;
  }

  // Route file-module callbacks (e.g. end-of-stream) to this channel.
  input_file_player_->RegisterModuleFileCallback(this);
  channel_state_.SetInputFilePlaying(true);

  return 0;
}
2026
kwiberg55b97fe2016-01-28 05:22:45 -08002027int Channel::StopPlayingFileAsMicrophone() {
2028 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2029 "Channel::StopPlayingFileAsMicrophone()");
2030
2031 rtc::CritScope cs(&_fileCritSect);
2032
2033 if (!channel_state_.Get().input_file_playing) {
2034 return 0;
2035 }
2036
kwiberg5a25d952016-08-17 07:31:12 -07002037 if (input_file_player_->StopPlayingFile() != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08002038 _engineStatisticsPtr->SetLastError(
2039 VE_STOP_RECORDING_FAILED, kTraceError,
2040 "StopPlayingFile() could not stop playing");
2041 return -1;
2042 }
kwiberg5a25d952016-08-17 07:31:12 -07002043 input_file_player_->RegisterModuleFileCallback(NULL);
2044 input_file_player_.reset();
kwiberg55b97fe2016-01-28 05:22:45 -08002045 channel_state_.SetInputFilePlaying(false);
2046
2047 return 0;
2048}
2049
2050int Channel::IsPlayingFileAsMicrophone() const {
2051 return channel_state_.Get().input_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00002052}
2053
// Starts recording the playout (far-end) audio to |fileName|. The file
// container is chosen from |codecInst|: NULL -> 16 kHz L16 PCM file,
// L16/PCMU/PCMA -> WAV, anything else -> compressed file. Returns 0 on
// success (including when already recording), -1 on failure.
int Channel::StartRecordingPlayout(const char* fileName,
                                   const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartRecordingPlayout(fileName=%s)", fileName);

  // NOTE(review): _outputFileRecording is checked before _fileCritSect is
  // taken below — confirm callers serialize start/stop on one thread.
  if (_outputFileRecording) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
                 "StartRecordingPlayout() is already recording");
    return 0;
  }

  FileFormats format;
  const uint32_t notificationTime(0);  // Not supported in VoE
  // Fallback codec used when the caller does not supply one.
  CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};

  // This overload accepts mono or stereo (1-2 channels).
  if ((codecInst != NULL) &&
      ((codecInst->channels < 1) || (codecInst->channels > 2))) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_ARGUMENT, kTraceError,
        "StartRecordingPlayout() invalid compression");
    return (-1);
  }
  if (codecInst == NULL) {
    format = kFileFormatPcm16kHzFile;
    codecInst = &dummyCodec;
  } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
    format = kFileFormatWavFile;
  } else {
    format = kFileFormatCompressedFile;
  }

  // output_file_recorder_ is guarded by _fileCritSect from here on.
  rtc::CritScope cs(&_fileCritSect);

  // Destroy the old instance
  if (output_file_recorder_) {
    output_file_recorder_->RegisterModuleFileCallback(NULL);
    output_file_recorder_.reset();
  }

  output_file_recorder_ = FileRecorder::CreateFileRecorder(
      _outputFileRecorderId, (const FileFormats)format);
  if (!output_file_recorder_) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartRecordingPlayout() fileRecorder format isnot correct");
    return -1;
  }

  if (output_file_recorder_->StartRecordingAudioFile(
          fileName, (const CodecInst&)*codecInst, notificationTime) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartRecordingAudioFile() failed to start file recording");
    // Roll back so the channel is left in the "not recording" state.
    output_file_recorder_->StopRecording();
    output_file_recorder_.reset();
    return -1;
  }
  // Route file-module callbacks to this channel.
  output_file_recorder_->RegisterModuleFileCallback(this);
  _outputFileRecording = true;

  return 0;
}
2118
// Starts recording the playout (far-end) audio to |stream|. Container
// selection mirrors the file-name overload: NULL codec -> 16 kHz L16 PCM,
// L16/PCMU/PCMA -> WAV, else compressed. Returns 0 on success (including
// when already recording), -1 on failure.
int Channel::StartRecordingPlayout(OutStream* stream,
                                   const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartRecordingPlayout()");

  // NOTE(review): _outputFileRecording is checked before _fileCritSect is
  // taken below — confirm callers serialize start/stop on one thread.
  if (_outputFileRecording) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
                 "StartRecordingPlayout() is already recording");
    return 0;
  }

  FileFormats format;
  const uint32_t notificationTime(0);  // Not supported in VoE
  // Fallback codec used when the caller does not supply one.
  CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};

  // NOTE(review): this overload rejects anything but mono, while the
  // file-name overload accepts 1-2 channels — confirm the asymmetry is
  // intentional.
  if (codecInst != NULL && codecInst->channels != 1) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_ARGUMENT, kTraceError,
        "StartRecordingPlayout() invalid compression");
    return (-1);
  }
  if (codecInst == NULL) {
    format = kFileFormatPcm16kHzFile;
    codecInst = &dummyCodec;
  } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
    format = kFileFormatWavFile;
  } else {
    format = kFileFormatCompressedFile;
  }

  // output_file_recorder_ is guarded by _fileCritSect from here on.
  rtc::CritScope cs(&_fileCritSect);

  // Destroy the old instance
  if (output_file_recorder_) {
    output_file_recorder_->RegisterModuleFileCallback(NULL);
    output_file_recorder_.reset();
  }

  output_file_recorder_ = FileRecorder::CreateFileRecorder(
      _outputFileRecorderId, (const FileFormats)format);
  if (!output_file_recorder_) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartRecordingPlayout() fileRecorder format isnot correct");
    return -1;
  }

  if (output_file_recorder_->StartRecordingAudioFile(stream, *codecInst,
                                                     notificationTime) != 0) {
    _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                       "StartRecordingPlayout() failed to "
                                       "start file recording");
    // Roll back so the channel is left in the "not recording" state.
    output_file_recorder_->StopRecording();
    output_file_recorder_.reset();
    return -1;
  }

  // Route file-module callbacks to this channel.
  output_file_recorder_->RegisterModuleFileCallback(this);
  _outputFileRecording = true;

  return 0;
}
2183
// Stops the playout file recording. Unlike StopPlayingFileAsMicrophone(),
// calling this while not recording is reported as an error (-1).
int Channel::StopRecordingPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
               "Channel::StopRecordingPlayout()");

  // NOTE(review): _outputFileRecording is read before _fileCritSect is
  // acquired below — confirm start/stop are serialized by the caller.
  if (!_outputFileRecording) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
                 "StopRecordingPlayout() isnot recording");
    return -1;
  }

  rtc::CritScope cs(&_fileCritSect);

  if (output_file_recorder_->StopRecording() != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_STOP_RECORDING_FAILED, kTraceError,
        "StopRecording() could not stop recording");
    return (-1);
  }
  // Detach the callback, release the recorder, and clear the flag.
  output_file_recorder_->RegisterModuleFileCallback(NULL);
  output_file_recorder_.reset();
  _outputFileRecording = false;

  return 0;
}
2208
kwiberg55b97fe2016-01-28 05:22:45 -08002209void Channel::SetMixWithMicStatus(bool mix) {
2210 rtc::CritScope cs(&_fileCritSect);
2211 _mixFileWithMicrophone = mix;
niklase@google.com470e71d2011-07-07 08:21:25 +00002212}
2213
kwiberg55b97fe2016-01-28 05:22:45 -08002214int Channel::GetSpeechOutputLevel(uint32_t& level) const {
2215 int8_t currentLevel = _outputAudioLevel.Level();
2216 level = static_cast<int32_t>(currentLevel);
2217 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002218}
2219
kwiberg55b97fe2016-01-28 05:22:45 -08002220int Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const {
2221 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2222 level = static_cast<int32_t>(currentLevel);
2223 return 0;
2224}
2225
solenberg1c2af8e2016-03-24 10:36:00 -07002226int Channel::SetInputMute(bool enable) {
kwiberg55b97fe2016-01-28 05:22:45 -08002227 rtc::CritScope cs(&volume_settings_critsect_);
2228 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002229 "Channel::SetMute(enable=%d)", enable);
solenberg1c2af8e2016-03-24 10:36:00 -07002230 input_mute_ = enable;
kwiberg55b97fe2016-01-28 05:22:45 -08002231 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002232}
2233
solenberg1c2af8e2016-03-24 10:36:00 -07002234bool Channel::InputMute() const {
kwiberg55b97fe2016-01-28 05:22:45 -08002235 rtc::CritScope cs(&volume_settings_critsect_);
solenberg1c2af8e2016-03-24 10:36:00 -07002236 return input_mute_;
niklase@google.com470e71d2011-07-07 08:21:25 +00002237}
2238
kwiberg55b97fe2016-01-28 05:22:45 -08002239int Channel::SetOutputVolumePan(float left, float right) {
2240 rtc::CritScope cs(&volume_settings_critsect_);
2241 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002242 "Channel::SetOutputVolumePan()");
kwiberg55b97fe2016-01-28 05:22:45 -08002243 _panLeft = left;
2244 _panRight = right;
2245 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002246}
2247
kwiberg55b97fe2016-01-28 05:22:45 -08002248int Channel::GetOutputVolumePan(float& left, float& right) const {
2249 rtc::CritScope cs(&volume_settings_critsect_);
2250 left = _panLeft;
2251 right = _panRight;
2252 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002253}
2254
kwiberg55b97fe2016-01-28 05:22:45 -08002255int Channel::SetChannelOutputVolumeScaling(float scaling) {
2256 rtc::CritScope cs(&volume_settings_critsect_);
2257 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002258 "Channel::SetChannelOutputVolumeScaling()");
kwiberg55b97fe2016-01-28 05:22:45 -08002259 _outputGain = scaling;
2260 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002261}
2262
kwiberg55b97fe2016-01-28 05:22:45 -08002263int Channel::GetChannelOutputVolumeScaling(float& scaling) const {
2264 rtc::CritScope cs(&volume_settings_critsect_);
2265 scaling = _outputGain;
2266 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002267}
2268
solenberg8842c3e2016-03-11 03:06:41 -08002269int Channel::SendTelephoneEventOutband(int event, int duration_ms) {
kwiberg55b97fe2016-01-28 05:22:45 -08002270 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
solenberg8842c3e2016-03-11 03:06:41 -08002271 "Channel::SendTelephoneEventOutband(...)");
2272 RTC_DCHECK_LE(0, event);
2273 RTC_DCHECK_GE(255, event);
2274 RTC_DCHECK_LE(0, duration_ms);
2275 RTC_DCHECK_GE(65535, duration_ms);
kwiberg55b97fe2016-01-28 05:22:45 -08002276 if (!Sending()) {
2277 return -1;
2278 }
solenberg8842c3e2016-03-11 03:06:41 -08002279 if (_rtpRtcpModule->SendTelephoneEventOutband(
2280 event, duration_ms, kTelephoneEventAttenuationdB) != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08002281 _engineStatisticsPtr->SetLastError(
2282 VE_SEND_DTMF_FAILED, kTraceWarning,
2283 "SendTelephoneEventOutband() failed to send event");
2284 return -1;
2285 }
2286 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002287}
2288
solenbergffbbcac2016-11-17 05:25:37 -08002289int Channel::SetSendTelephoneEventPayloadType(int payload_type,
2290 int payload_frequency) {
kwiberg55b97fe2016-01-28 05:22:45 -08002291 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002292 "Channel::SetSendTelephoneEventPayloadType()");
solenberg31642aa2016-03-14 08:00:37 -07002293 RTC_DCHECK_LE(0, payload_type);
2294 RTC_DCHECK_GE(127, payload_type);
2295 CodecInst codec = {0};
solenberg31642aa2016-03-14 08:00:37 -07002296 codec.pltype = payload_type;
solenbergffbbcac2016-11-17 05:25:37 -08002297 codec.plfreq = payload_frequency;
kwiberg55b97fe2016-01-28 05:22:45 -08002298 memcpy(codec.plname, "telephone-event", 16);
2299 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2300 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
2301 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2302 _engineStatisticsPtr->SetLastError(
2303 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2304 "SetSendTelephoneEventPayloadType() failed to register send"
2305 "payload type");
2306 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002307 }
kwiberg55b97fe2016-01-28 05:22:45 -08002308 }
kwiberg55b97fe2016-01-28 05:22:45 -08002309 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002310}
2311
kwiberg55b97fe2016-01-28 05:22:45 -08002312int Channel::VoiceActivityIndicator(int& activity) {
2313 activity = _sendFrameType;
2314 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002315}
2316
kwiberg55b97fe2016-01-28 05:22:45 -08002317int Channel::SetLocalSSRC(unsigned int ssrc) {
2318 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2319 "Channel::SetLocalSSRC()");
2320 if (channel_state_.Get().sending) {
2321 _engineStatisticsPtr->SetLastError(VE_ALREADY_SENDING, kTraceError,
2322 "SetLocalSSRC() already sending");
2323 return -1;
2324 }
2325 _rtpRtcpModule->SetSSRC(ssrc);
2326 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002327}
2328
kwiberg55b97fe2016-01-28 05:22:45 -08002329int Channel::GetLocalSSRC(unsigned int& ssrc) {
2330 ssrc = _rtpRtcpModule->SSRC();
2331 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002332}
2333
kwiberg55b97fe2016-01-28 05:22:45 -08002334int Channel::GetRemoteSSRC(unsigned int& ssrc) {
2335 ssrc = rtp_receiver_->SSRC();
2336 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002337}
2338
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002339int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002340 _includeAudioLevelIndication = enable;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002341 return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
niklase@google.com470e71d2011-07-07 08:21:25 +00002342}
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002343
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002344int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
2345 unsigned char id) {
kwiberg55b97fe2016-01-28 05:22:45 -08002346 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
2347 if (enable &&
2348 !rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
2349 id)) {
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002350 return -1;
2351 }
2352 return 0;
2353}
2354
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002355void Channel::EnableSendTransportSequenceNumber(int id) {
2356 int ret =
2357 SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
2358 RTC_DCHECK_EQ(0, ret);
2359}
2360
stefan3313ec92016-01-21 06:32:43 -08002361void Channel::EnableReceiveTransportSequenceNumber(int id) {
2362 rtp_header_parser_->DeregisterRtpHeaderExtension(
2363 kRtpExtensionTransportSequenceNumber);
2364 bool ret = rtp_header_parser_->RegisterRtpHeaderExtension(
2365 kRtpExtensionTransportSequenceNumber, id);
2366 RTC_DCHECK(ret);
2367}
2368
// Wires this channel's RTP sender into the congestion-control machinery:
// feedback observer, sequence-number allocator and packet sender proxies
// are pointed at the supplied objects, packet storage is enabled, and the
// RTP module is added to |packet_router|. Must not be called while a
// packet router is already installed (see DCHECK).
void Channel::RegisterSenderCongestionControlObjects(
    RtpPacketSender* rtp_packet_sender,
    TransportFeedbackObserver* transport_feedback_observer,
    PacketRouter* packet_router) {
  RTC_DCHECK(rtp_packet_sender);
  RTC_DCHECK(transport_feedback_observer);
  RTC_DCHECK(packet_router && !packet_router_);
  feedback_observer_proxy_->SetTransportFeedbackObserver(
      transport_feedback_observer);
  seq_num_allocator_proxy_->SetSequenceNumberAllocator(packet_router);
  rtp_packet_sender_proxy_->SetPacketSender(rtp_packet_sender);
  // Store up to 600 packets so they can be resent/paced by the sender.
  _rtpRtcpModule->SetStorePacketsStatus(true, 600);
  packet_router->AddRtpModule(_rtpRtcpModule.get());
  packet_router_ = packet_router;
}
2384
stefanbba9dec2016-02-01 04:39:55 -08002385void Channel::RegisterReceiverCongestionControlObjects(
2386 PacketRouter* packet_router) {
2387 RTC_DCHECK(packet_router && !packet_router_);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002388 packet_router->AddRtpModule(_rtpRtcpModule.get());
stefanbba9dec2016-02-01 04:39:55 -08002389 packet_router_ = packet_router;
2390}
2391
// Undoes Register{Sender,Receiver}CongestionControlObjects(): disables
// packet storage, clears all proxy targets, and removes the RTP module
// from the packet router. Requires a router to be installed (see DCHECK).
void Channel::ResetCongestionControlObjects() {
  RTC_DCHECK(packet_router_);
  _rtpRtcpModule->SetStorePacketsStatus(false, 600);
  feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
  seq_num_allocator_proxy_->SetSequenceNumberAllocator(nullptr);
  packet_router_->RemoveRtpModule(_rtpRtcpModule.get());
  packet_router_ = nullptr;
  rtp_packet_sender_proxy_->SetPacketSender(nullptr);
}
2401
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002402void Channel::SetRTCPStatus(bool enable) {
2403 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2404 "Channel::SetRTCPStatus()");
pbosda903ea2015-10-02 02:36:56 -07002405 _rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff);
niklase@google.com470e71d2011-07-07 08:21:25 +00002406}
2407
kwiberg55b97fe2016-01-28 05:22:45 -08002408int Channel::GetRTCPStatus(bool& enabled) {
pbosda903ea2015-10-02 02:36:56 -07002409 RtcpMode method = _rtpRtcpModule->RTCP();
2410 enabled = (method != RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002411 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002412}
2413
kwiberg55b97fe2016-01-28 05:22:45 -08002414int Channel::SetRTCP_CNAME(const char cName[256]) {
2415 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2416 "Channel::SetRTCP_CNAME()");
2417 if (_rtpRtcpModule->SetCNAME(cName) != 0) {
2418 _engineStatisticsPtr->SetLastError(
2419 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2420 "SetRTCP_CNAME() failed to set RTCP CNAME");
2421 return -1;
2422 }
2423 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002424}
2425
kwiberg55b97fe2016-01-28 05:22:45 -08002426int Channel::GetRemoteRTCP_CNAME(char cName[256]) {
2427 if (cName == NULL) {
2428 _engineStatisticsPtr->SetLastError(
2429 VE_INVALID_ARGUMENT, kTraceError,
2430 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
2431 return -1;
2432 }
2433 char cname[RTCP_CNAME_SIZE];
2434 const uint32_t remoteSSRC = rtp_receiver_->SSRC();
2435 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) {
2436 _engineStatisticsPtr->SetLastError(
2437 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
2438 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
2439 return -1;
2440 }
2441 strcpy(cName, cname);
2442 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002443}
2444
// Collects statistics about the remote side from received RTCP:
// NTP/RTP timestamps from the last Sender Report, the locally tracked
// RTCP playout timestamp, and (optionally, when the pointers are
// non-NULL) jitter and fraction-lost from the matching report block.
// Returns 0 on success; -1 when no sender info or (if requested) no
// report blocks have been received yet.
int Channel::GetRemoteRTCPData(unsigned int& NTPHigh,
                               unsigned int& NTPLow,
                               unsigned int& timestamp,
                               unsigned int& playoutTimestamp,
                               unsigned int* jitter,
                               unsigned short* fractionLost) {
  // --- Information from sender info in received Sender Reports

  RTCPSenderInfo senderInfo;
  if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "GetRemoteRTCPData() failed to retrieve sender info for remote "
        "side");
    return -1;
  }

  // We only utilize 12 out of 20 bytes in the sender info (ignores packet
  // and octet count)
  NTPHigh = senderInfo.NTPseconds;
  NTPLow = senderInfo.NTPfraction;
  timestamp = senderInfo.RTPtimeStamp;

  // --- Locally derived information

  // This value is updated on each incoming RTCP packet (0 when no packet
  // has been received)
  playoutTimestamp = playout_timestamp_rtcp_;

  if (NULL != jitter || NULL != fractionLost) {
    // Get all RTCP receiver report blocks that have been received on this
    // channel. If we receive RTP packets from a remote source we know the
    // remote SSRC and use the report block from him.
    // Otherwise use the first report block.
    std::vector<RTCPReportBlock> remote_stats;
    if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
        remote_stats.empty()) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "GetRemoteRTCPData() failed to measure statistics due"
                   " to lack of received RTP and/or RTCP packets");
      return -1;
    }

    // Prefer the report block whose SSRC matches the tracked remote SSRC.
    uint32_t remoteSSRC = rtp_receiver_->SSRC();
    std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
    for (; it != remote_stats.end(); ++it) {
      if (it->remoteSSRC == remoteSSRC)
        break;
    }

    if (it == remote_stats.end()) {
      // If we have not received any RTCP packets from this SSRC it probably
      // means that we have not received any RTP packets.
      // Use the first received report block instead.
      it = remote_stats.begin();
      remoteSSRC = it->remoteSSRC;
    }

    if (jitter) {
      *jitter = it->jitter;
    }

    if (fractionLost) {
      *fractionLost = it->fractionLost;
    }
  }
  return 0;
}
2513
// Schedules an application-defined (APP) RTCP packet for transmission.
// Preconditions enforced below: the channel must be sending, |data| must
// be non-NULL, |dataLengthInBytes| must be a multiple of 4 (32-bit
// alignment of the APP payload), and RTCP must be enabled.
int Channel::SendApplicationDefinedRTCPPacket(
    unsigned char subType,
    unsigned int name,
    const char* data,
    unsigned short dataLengthInBytes) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendApplicationDefinedRTCPPacket()");
  if (!channel_state_.Get().sending) {
    _engineStatisticsPtr->SetLastError(
        VE_NOT_SENDING, kTraceError,
        "SendApplicationDefinedRTCPPacket() not sending");
    return -1;
  }
  if (NULL == data) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "SendApplicationDefinedRTCPPacket() invalid data value");
    return -1;
  }
  // APP payload must be a whole number of 32-bit words.
  if (dataLengthInBytes % 4 != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "SendApplicationDefinedRTCPPacket() invalid length value");
    return -1;
  }
  RtcpMode status = _rtpRtcpModule->RTCP();
  if (status == RtcpMode::kOff) {
    _engineStatisticsPtr->SetLastError(
        VE_RTCP_ERROR, kTraceError,
        "SendApplicationDefinedRTCPPacket() RTCP is disabled");
    return -1;
  }

  // Create and schedule the RTCP APP packet for transmission
  if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
          subType, name, (const unsigned char*)data, dataLengthInBytes) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_SEND_ERROR, kTraceError,
        "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
    return -1;
  }
  return 0;
}
2557
// Reports receive-side jitter (average and max, in ms) and the number of
// discarded packets. Note: the jitter outputs are left untouched when the
// playout frequency is not positive.
int Channel::GetRTPStatistics(unsigned int& averageJitterMs,
                              unsigned int& maxJitterMs,
                              unsigned int& discardedPackets) {
  // The jitter statistics is updated for each received RTP packet and is
  // based on received packets.
  if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
    // If RTCP is off, there is no timed thread in the RTCP module regularly
    // generating new stats, trigger the update manually here instead.
    StreamStatistician* statistician =
        rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
    if (statistician) {
      // Don't use returned statistics, use data from proxy instead so that
      // max jitter can be fetched atomically.
      RtcpStatistics s;
      statistician->GetStatistics(&s, true);
    }
  }

  ChannelStatistics stats = statistics_proxy_->GetStats();
  const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
  if (playoutFrequency > 0) {
    // Scale RTP statistics given the current playout frequency
    maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
    averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
  }

  discardedPackets = _numberOfDiscardedPackets;

  return 0;
}
2588
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002589int Channel::GetRemoteRTCPReportBlocks(
2590 std::vector<ReportBlock>* report_blocks) {
2591 if (report_blocks == NULL) {
kwiberg55b97fe2016-01-28 05:22:45 -08002592 _engineStatisticsPtr->SetLastError(
2593 VE_INVALID_ARGUMENT, kTraceError,
2594 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002595 return -1;
2596 }
2597
2598 // Get the report blocks from the latest received RTCP Sender or Receiver
2599 // Report. Each element in the vector contains the sender's SSRC and a
2600 // report block according to RFC 3550.
2601 std::vector<RTCPReportBlock> rtcp_report_blocks;
2602 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002603 return -1;
2604 }
2605
2606 if (rtcp_report_blocks.empty())
2607 return 0;
2608
2609 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
2610 for (; it != rtcp_report_blocks.end(); ++it) {
2611 ReportBlock report_block;
2612 report_block.sender_SSRC = it->remoteSSRC;
2613 report_block.source_SSRC = it->sourceSSRC;
2614 report_block.fraction_lost = it->fractionLost;
2615 report_block.cumulative_num_packets_lost = it->cumulativeLost;
2616 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
2617 report_block.interarrival_jitter = it->jitter;
2618 report_block.last_SR_timestamp = it->lastSR;
2619 report_block.delay_since_last_SR = it->delaySinceLastSR;
2620 report_blocks->push_back(report_block);
2621 }
2622 return 0;
2623}
2624
// Fills |stats| with combined call statistics: receive-side RTCP stats
// (loss/jitter), round-trip time, RTP data counters in both directions,
// and the capture-start NTP timestamp. Always returns 0; missing pieces
// simply leave zero/default values (a warning is traced for the sender
// counters).
int Channel::GetRTPStatistics(CallStatistics& stats) {
  // --- RtcpStatistics

  // The jitter statistics is updated for each received RTP packet and is
  // based on received packets.
  RtcpStatistics statistics;
  StreamStatistician* statistician =
      rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
  if (statistician) {
    // When RTCP is off, request a fresh statistics update (second arg).
    statistician->GetStatistics(&statistics,
                                _rtpRtcpModule->RTCP() == RtcpMode::kOff);
  }

  stats.fractionLost = statistics.fraction_lost;
  stats.cumulativeLost = statistics.cumulative_lost;
  stats.extendedMax = statistics.extended_max_sequence_number;
  stats.jitterSamples = statistics.jitter;

  // --- RTT
  stats.rttMs = GetRTT(true);

  // --- Data counters

  size_t bytesSent(0);
  uint32_t packetsSent(0);
  size_t bytesReceived(0);
  uint32_t packetsReceived(0);

  if (statistician) {
    statistician->GetDataCounters(&bytesReceived, &packetsReceived);
  }

  if (_rtpRtcpModule->DataCountersRTP(&bytesSent, &packetsSent) != 0) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
                 " output will not be complete");
  }

  stats.bytesSent = bytesSent;
  stats.packetsSent = packetsSent;
  stats.bytesReceived = bytesReceived;
  stats.packetsReceived = packetsReceived;

  // --- Timestamps
  {
    // capture_start_ntp_time_ms_ is guarded by ts_stats_lock_.
    rtc::CritScope lock(&ts_stats_lock_);
    stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
  }
  return 0;
}
2675
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002676int Channel::SetCodecFECStatus(bool enable) {
2677 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2678 "Channel::SetCodecFECStatus()");
2679
kwibergc8d071e2016-04-06 12:22:38 -07002680 if (!codec_manager_.SetCodecFEC(enable) ||
2681 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002682 _engineStatisticsPtr->SetLastError(
2683 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2684 "SetCodecFECStatus() failed to set FEC state");
2685 return -1;
2686 }
2687 return 0;
2688}
2689
2690bool Channel::GetCodecFECStatus() {
kwibergc8d071e2016-04-06 12:22:38 -07002691 return codec_manager_.GetStackParams()->use_codec_fec;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002692}
2693
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002694void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
2695 // None of these functions can fail.
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002696 // If pacing is enabled we always store packets.
2697 if (!pacing_enabled_)
2698 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002699 rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002700 if (enable)
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002701 audio_coding_->EnableNack(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002702 else
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002703 audio_coding_->DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002704}
2705
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002706// Called when we are missing one or more packets.
2707int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002708 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
2709}
2710
kwiberg55b97fe2016-01-28 05:22:45 -08002711uint32_t Channel::Demultiplex(const AudioFrame& audioFrame) {
2712 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2713 "Channel::Demultiplex()");
2714 _audioFrame.CopyFrom(audioFrame);
2715 _audioFrame.id_ = _channelId;
2716 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002717}
2718
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002719void Channel::Demultiplex(const int16_t* audio_data,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00002720 int sample_rate,
Peter Kastingdce40cf2015-08-24 14:52:23 -07002721 size_t number_of_frames,
Peter Kasting69558702016-01-12 16:26:35 -08002722 size_t number_of_channels) {
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002723 CodecInst codec;
2724 GetSendCodec(codec);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002725
Alejandro Luebscdfe20b2015-09-23 12:49:12 -07002726 // Never upsample or upmix the capture signal here. This should be done at the
2727 // end of the send chain.
2728 _audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate);
2729 _audioFrame.num_channels_ = std::min(number_of_channels, codec.channels);
2730 RemixAndResample(audio_data, number_of_frames, number_of_channels,
2731 sample_rate, &input_resampler_, &_audioFrame);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002732}
2733
kwiberg55b97fe2016-01-28 05:22:45 -08002734uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
2735 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2736 "Channel::PrepareEncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002737
kwiberg55b97fe2016-01-28 05:22:45 -08002738 if (_audioFrame.samples_per_channel_ == 0) {
2739 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2740 "Channel::PrepareEncodeAndSend() invalid audio frame");
2741 return 0xFFFFFFFF;
2742 }
2743
2744 if (channel_state_.Get().input_file_playing) {
2745 MixOrReplaceAudioWithFile(mixingFrequency);
2746 }
2747
solenberg1c2af8e2016-03-24 10:36:00 -07002748 bool is_muted = InputMute(); // Cache locally as InputMute() takes a lock.
2749 AudioFrameOperations::Mute(&_audioFrame, previous_frame_muted_, is_muted);
kwiberg55b97fe2016-01-28 05:22:45 -08002750
2751 if (channel_state_.Get().input_external_media) {
2752 rtc::CritScope cs(&_callbackCritSect);
2753 const bool isStereo = (_audioFrame.num_channels_ == 2);
2754 if (_inputExternalMediaCallbackPtr) {
2755 _inputExternalMediaCallbackPtr->Process(
2756 _channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
2757 _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
2758 isStereo);
niklase@google.com470e71d2011-07-07 08:21:25 +00002759 }
kwiberg55b97fe2016-01-28 05:22:45 -08002760 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002761
kwiberg55b97fe2016-01-28 05:22:45 -08002762 if (_includeAudioLevelIndication) {
2763 size_t length =
2764 _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
Tommi60c4e0a2016-05-26 21:35:27 +02002765 RTC_CHECK_LE(length, sizeof(_audioFrame.data_));
solenberg1c2af8e2016-03-24 10:36:00 -07002766 if (is_muted && previous_frame_muted_) {
kwiberg55b97fe2016-01-28 05:22:45 -08002767 rms_level_.ProcessMuted(length);
2768 } else {
2769 rms_level_.Process(_audioFrame.data_, length);
niklase@google.com470e71d2011-07-07 08:21:25 +00002770 }
kwiberg55b97fe2016-01-28 05:22:45 -08002771 }
solenberg1c2af8e2016-03-24 10:36:00 -07002772 previous_frame_muted_ = is_muted;
niklase@google.com470e71d2011-07-07 08:21:25 +00002773
kwiberg55b97fe2016-01-28 05:22:45 -08002774 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002775}
2776
kwiberg55b97fe2016-01-28 05:22:45 -08002777uint32_t Channel::EncodeAndSend() {
2778 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2779 "Channel::EncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002780
kwiberg55b97fe2016-01-28 05:22:45 -08002781 assert(_audioFrame.num_channels_ <= 2);
2782 if (_audioFrame.samples_per_channel_ == 0) {
2783 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2784 "Channel::EncodeAndSend() invalid audio frame");
2785 return 0xFFFFFFFF;
2786 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002787
kwiberg55b97fe2016-01-28 05:22:45 -08002788 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00002789
kwiberg55b97fe2016-01-28 05:22:45 -08002790 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
niklase@google.com470e71d2011-07-07 08:21:25 +00002791
kwiberg55b97fe2016-01-28 05:22:45 -08002792 // The ACM resamples internally.
2793 _audioFrame.timestamp_ = _timeStamp;
2794 // This call will trigger AudioPacketizationCallback::SendData if encoding
2795 // is done and payload is ready for packetization and transmission.
2796 // Otherwise, it will return without invoking the callback.
2797 if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) {
2798 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
2799 "Channel::EncodeAndSend() ACM encoding failed");
2800 return 0xFFFFFFFF;
2801 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002802
kwiberg55b97fe2016-01-28 05:22:45 -08002803 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
2804 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002805}
2806
solenberg7602aab2016-11-14 11:30:07 -08002807void Channel::set_associate_send_channel(const ChannelOwner& channel) {
2808 RTC_DCHECK(!channel.channel() ||
2809 channel.channel()->ChannelId() != _channelId);
2810 rtc::CritScope lock(&assoc_send_channel_lock_);
2811 associate_send_channel_ = channel;
2812}
2813
Minyue2013aec2015-05-13 14:14:42 +02002814void Channel::DisassociateSendChannel(int channel_id) {
tommi31fc21f2016-01-21 10:37:37 -08002815 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02002816 Channel* channel = associate_send_channel_.channel();
2817 if (channel && channel->ChannelId() == channel_id) {
2818 // If this channel is associated with a send channel of the specified
2819 // Channel ID, disassociate with it.
2820 ChannelOwner ref(NULL);
2821 associate_send_channel_ = ref;
2822 }
2823}
2824
ivoc14d5dbe2016-07-04 07:06:55 -07002825void Channel::SetRtcEventLog(RtcEventLog* event_log) {
2826 event_log_proxy_->SetEventLog(event_log);
2827}
2828
michaelt79e05882016-11-08 02:50:09 -08002829void Channel::SetTransportOverhead(int transport_overhead_per_packet) {
2830 _rtpRtcpModule->SetTransportOverhead(transport_overhead_per_packet);
2831}
2832
kwiberg55b97fe2016-01-28 05:22:45 -08002833int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
2834 VoEMediaProcess& processObject) {
2835 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2836 "Channel::RegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002837
kwiberg55b97fe2016-01-28 05:22:45 -08002838 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002839
kwiberg55b97fe2016-01-28 05:22:45 -08002840 if (kPlaybackPerChannel == type) {
2841 if (_outputExternalMediaCallbackPtr) {
2842 _engineStatisticsPtr->SetLastError(
2843 VE_INVALID_OPERATION, kTraceError,
2844 "Channel::RegisterExternalMediaProcessing() "
2845 "output external media already enabled");
2846 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002847 }
kwiberg55b97fe2016-01-28 05:22:45 -08002848 _outputExternalMediaCallbackPtr = &processObject;
2849 _outputExternalMedia = true;
2850 } else if (kRecordingPerChannel == type) {
2851 if (_inputExternalMediaCallbackPtr) {
2852 _engineStatisticsPtr->SetLastError(
2853 VE_INVALID_OPERATION, kTraceError,
2854 "Channel::RegisterExternalMediaProcessing() "
2855 "output external media already enabled");
2856 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002857 }
kwiberg55b97fe2016-01-28 05:22:45 -08002858 _inputExternalMediaCallbackPtr = &processObject;
2859 channel_state_.SetInputExternalMedia(true);
2860 }
2861 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002862}
2863
kwiberg55b97fe2016-01-28 05:22:45 -08002864int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
2865 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2866 "Channel::DeRegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002867
kwiberg55b97fe2016-01-28 05:22:45 -08002868 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002869
kwiberg55b97fe2016-01-28 05:22:45 -08002870 if (kPlaybackPerChannel == type) {
2871 if (!_outputExternalMediaCallbackPtr) {
2872 _engineStatisticsPtr->SetLastError(
2873 VE_INVALID_OPERATION, kTraceWarning,
2874 "Channel::DeRegisterExternalMediaProcessing() "
2875 "output external media already disabled");
2876 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002877 }
kwiberg55b97fe2016-01-28 05:22:45 -08002878 _outputExternalMedia = false;
2879 _outputExternalMediaCallbackPtr = NULL;
2880 } else if (kRecordingPerChannel == type) {
2881 if (!_inputExternalMediaCallbackPtr) {
2882 _engineStatisticsPtr->SetLastError(
2883 VE_INVALID_OPERATION, kTraceWarning,
2884 "Channel::DeRegisterExternalMediaProcessing() "
2885 "input external media already disabled");
2886 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002887 }
kwiberg55b97fe2016-01-28 05:22:45 -08002888 channel_state_.SetInputExternalMedia(false);
2889 _inputExternalMediaCallbackPtr = NULL;
2890 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002891
kwiberg55b97fe2016-01-28 05:22:45 -08002892 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002893}
2894
roosa@google.com1b60ceb2012-12-12 23:00:29 +00002895int Channel::SetExternalMixing(bool enabled) {
kwiberg55b97fe2016-01-28 05:22:45 -08002896 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2897 "Channel::SetExternalMixing(enabled=%d)", enabled);
roosa@google.com1b60ceb2012-12-12 23:00:29 +00002898
kwiberg55b97fe2016-01-28 05:22:45 -08002899 if (channel_state_.Get().playing) {
2900 _engineStatisticsPtr->SetLastError(
2901 VE_INVALID_OPERATION, kTraceError,
2902 "Channel::SetExternalMixing() "
2903 "external mixing cannot be changed while playing.");
2904 return -1;
2905 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00002906
kwiberg55b97fe2016-01-28 05:22:45 -08002907 _externalMixing = enabled;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00002908
kwiberg55b97fe2016-01-28 05:22:45 -08002909 return 0;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00002910}
2911
kwiberg55b97fe2016-01-28 05:22:45 -08002912int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
2913 return audio_coding_->GetNetworkStatistics(&stats);
niklase@google.com470e71d2011-07-07 08:21:25 +00002914}
2915
wu@webrtc.org24301a62013-12-13 19:17:43 +00002916void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
2917 audio_coding_->GetDecodingCallStatistics(stats);
2918}
2919
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002920bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
2921 int* playout_buffer_delay_ms) const {
tommi31fc21f2016-01-21 10:37:37 -08002922 rtc::CritScope lock(&video_sync_lock_);
henrik.lundinb3f1c5d2016-08-22 15:39:53 -07002923 *jitter_buffer_delay_ms = audio_coding_->FilteredCurrentDelayMs();
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002924 *playout_buffer_delay_ms = playout_delay_ms_;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002925 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002926}
2927
solenberg358057b2015-11-27 10:46:42 -08002928uint32_t Channel::GetDelayEstimate() const {
2929 int jitter_buffer_delay_ms = 0;
2930 int playout_buffer_delay_ms = 0;
2931 GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
2932 return jitter_buffer_delay_ms + playout_buffer_delay_ms;
2933}
2934
deadbeef74375882015-08-13 12:09:10 -07002935int Channel::LeastRequiredDelayMs() const {
2936 return audio_coding_->LeastRequiredDelayMs();
2937}
2938
kwiberg55b97fe2016-01-28 05:22:45 -08002939int Channel::SetMinimumPlayoutDelay(int delayMs) {
2940 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2941 "Channel::SetMinimumPlayoutDelay()");
2942 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
2943 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {
2944 _engineStatisticsPtr->SetLastError(
2945 VE_INVALID_ARGUMENT, kTraceError,
2946 "SetMinimumPlayoutDelay() invalid min delay");
2947 return -1;
2948 }
2949 if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) {
2950 _engineStatisticsPtr->SetLastError(
2951 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2952 "SetMinimumPlayoutDelay() failed to set min playout delay");
2953 return -1;
2954 }
2955 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002956}
2957
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002958int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
deadbeef74375882015-08-13 12:09:10 -07002959 uint32_t playout_timestamp_rtp = 0;
2960 {
tommi31fc21f2016-01-21 10:37:37 -08002961 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07002962 playout_timestamp_rtp = playout_timestamp_rtp_;
2963 }
kwiberg55b97fe2016-01-28 05:22:45 -08002964 if (playout_timestamp_rtp == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002965 _engineStatisticsPtr->SetLastError(
skvlad4c0536b2016-07-07 13:06:26 -07002966 VE_CANNOT_RETRIEVE_VALUE, kTraceStateInfo,
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002967 "GetPlayoutTimestamp() failed to retrieve timestamp");
2968 return -1;
2969 }
deadbeef74375882015-08-13 12:09:10 -07002970 timestamp = playout_timestamp_rtp;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002971 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002972}
2973
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002974int Channel::SetInitTimestamp(unsigned int timestamp) {
2975 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002976 "Channel::SetInitTimestamp()");
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002977 if (channel_state_.Get().sending) {
2978 _engineStatisticsPtr->SetLastError(VE_SENDING, kTraceError,
2979 "SetInitTimestamp() already sending");
2980 return -1;
2981 }
2982 _rtpRtcpModule->SetStartTimestamp(timestamp);
2983 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002984}
2985
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002986int Channel::SetInitSequenceNumber(short sequenceNumber) {
2987 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2988 "Channel::SetInitSequenceNumber()");
2989 if (channel_state_.Get().sending) {
2990 _engineStatisticsPtr->SetLastError(
2991 VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending");
2992 return -1;
2993 }
2994 _rtpRtcpModule->SetSequenceNumber(sequenceNumber);
2995 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002996}
2997
kwiberg55b97fe2016-01-28 05:22:45 -08002998int Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule,
2999 RtpReceiver** rtp_receiver) const {
3000 *rtpRtcpModule = _rtpRtcpModule.get();
3001 *rtp_receiver = rtp_receiver_.get();
3002 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003003}
3004
// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
// a shared helper.
// Reads 10 ms of audio from the input file player and either mixes it with,
// or replaces, the channel's capture frame (_audioFrame), depending on
// _mixFileWithMicrophone. Returns 0 on success or end-of-file, -1 on error.
int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
  // 640 samples holds 10 ms of mono audio at up to 64 kHz — presumably the
  // maximum rate the file player delivers here; TODO confirm.
  std::unique_ptr<int16_t[]> fileBuffer(new int16_t[640]);
  size_t fileSamples(0);

  {
    // The file player is only accessed under _fileCritSect; the scope ends
    // before the frame is modified below.
    rtc::CritScope cs(&_fileCritSect);

    if (!input_file_player_) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() fileplayer"
                   " doesnt exist");
      return -1;
    }

    if (input_file_player_->Get10msAudioFromFile(fileBuffer.get(), &fileSamples,
                                                 mixingFrequency) == -1) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() file mixing "
                   "failed");
      return -1;
    }
    // Zero samples indicates the file has ended; leave the frame untouched.
    if (fileSamples == 0) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() file is ended");
      return 0;
    }
  }

  // The capture frame and the file chunk must cover the same 10 ms span.
  assert(_audioFrame.samples_per_channel_ == fileSamples);

  if (_mixFileWithMicrophone) {
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
               1, fileSamples);
  } else {
    // Replace ACM audio with file.
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    _audioFrame.UpdateFrame(
        _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency,
        AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1);
  }
  return 0;
}
3052
// Mixes 10 ms of audio from the output file player into |audioFrame| (the
// playout path). Returns 0 on success, -1 if the player is missing, the read
// fails, or the sample counts do not match.
int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
  assert(mixingFrequency <= 48000);

  // 960 samples is enough for 10 ms at 48 kHz in stereo, or up to 96 kHz
  // mono — presumably sized for the worst case accepted above; TODO confirm.
  std::unique_ptr<int16_t[]> fileBuffer(new int16_t[960]);
  size_t fileSamples(0);

  {
    // The file player is only touched while holding _fileCritSect.
    rtc::CritScope cs(&_fileCritSect);

    if (!output_file_player_) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixAudioWithFile() file mixing failed");
      return -1;
    }

    // We should get the frequency we ask for.
    if (output_file_player_->Get10msAudioFromFile(
            fileBuffer.get(), &fileSamples, mixingFrequency) == -1) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixAudioWithFile() file mixing failed");
      return -1;
    }
  }

  if (audioFrame.samples_per_channel_ == fileSamples) {
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
               fileSamples);
  } else {
    // Mismatched durations cannot be mixed; report and fail.
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS
                 ") != "
                 "fileSamples(%" PRIuS ")",
                 audioFrame.samples_per_channel_, fileSamples);
    return -1;
  }

  return 0;
}
3093
deadbeef74375882015-08-13 12:09:10 -07003094void Channel::UpdatePlayoutTimestamp(bool rtcp) {
henrik.lundin96bd5022016-04-06 04:13:56 -07003095 jitter_buffer_playout_timestamp_ = audio_coding_->PlayoutTimestamp();
deadbeef74375882015-08-13 12:09:10 -07003096
henrik.lundin96bd5022016-04-06 04:13:56 -07003097 if (!jitter_buffer_playout_timestamp_) {
3098 // This can happen if this channel has not received any RTP packets. In
3099 // this case, NetEq is not capable of computing a playout timestamp.
deadbeef74375882015-08-13 12:09:10 -07003100 return;
3101 }
3102
3103 uint16_t delay_ms = 0;
3104 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
kwiberg55b97fe2016-01-28 05:22:45 -08003105 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
deadbeef74375882015-08-13 12:09:10 -07003106 "Channel::UpdatePlayoutTimestamp() failed to read playout"
3107 " delay from the ADM");
3108 _engineStatisticsPtr->SetLastError(
3109 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3110 "UpdatePlayoutTimestamp() failed to retrieve playout delay");
3111 return;
3112 }
3113
henrik.lundin96bd5022016-04-06 04:13:56 -07003114 RTC_DCHECK(jitter_buffer_playout_timestamp_);
3115 uint32_t playout_timestamp = *jitter_buffer_playout_timestamp_;
deadbeef74375882015-08-13 12:09:10 -07003116
3117 // Remove the playout delay.
ossue280cde2016-10-12 11:04:10 -07003118 playout_timestamp -= (delay_ms * (GetRtpTimestampRateHz() / 1000));
deadbeef74375882015-08-13 12:09:10 -07003119
kwiberg55b97fe2016-01-28 05:22:45 -08003120 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
deadbeef74375882015-08-13 12:09:10 -07003121 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
henrik.lundin96bd5022016-04-06 04:13:56 -07003122 playout_timestamp);
deadbeef74375882015-08-13 12:09:10 -07003123
3124 {
tommi31fc21f2016-01-21 10:37:37 -08003125 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003126 if (rtcp) {
henrik.lundin96bd5022016-04-06 04:13:56 -07003127 playout_timestamp_rtcp_ = playout_timestamp;
deadbeef74375882015-08-13 12:09:10 -07003128 } else {
henrik.lundin96bd5022016-04-06 04:13:56 -07003129 playout_timestamp_rtp_ = playout_timestamp;
deadbeef74375882015-08-13 12:09:10 -07003130 }
3131 playout_delay_ms_ = delay_ms;
3132 }
3133}
3134
kwiberg55b97fe2016-01-28 05:22:45 -08003135void Channel::RegisterReceiveCodecsToRTPModule() {
3136 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3137 "Channel::RegisterReceiveCodecsToRTPModule()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003138
kwiberg55b97fe2016-01-28 05:22:45 -08003139 CodecInst codec;
3140 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00003141
kwiberg55b97fe2016-01-28 05:22:45 -08003142 for (int idx = 0; idx < nSupportedCodecs; idx++) {
3143 // Open up the RTP/RTCP receiver for all supported codecs
3144 if ((audio_coding_->Codec(idx, &codec) == -1) ||
magjed56124bd2016-11-24 09:34:46 -08003145 (rtp_receiver_->RegisterReceivePayload(codec) == -1)) {
kwiberg55b97fe2016-01-28 05:22:45 -08003146 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3147 "Channel::RegisterReceiveCodecsToRTPModule() unable"
3148 " to register %s (%d/%d/%" PRIuS
3149 "/%d) to RTP/RTCP "
3150 "receiver",
3151 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3152 codec.rate);
3153 } else {
3154 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3155 "Channel::RegisterReceiveCodecsToRTPModule() %s "
3156 "(%d/%d/%" PRIuS
3157 "/%d) has been added to the RTP/RTCP "
3158 "receiver",
3159 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3160 codec.rate);
niklase@google.com470e71d2011-07-07 08:21:25 +00003161 }
kwiberg55b97fe2016-01-28 05:22:45 -08003162 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003163}
3164
kwiberg55b97fe2016-01-28 05:22:45 -08003165int Channel::SetSendRtpHeaderExtension(bool enable,
3166 RTPExtensionType type,
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00003167 unsigned char id) {
3168 int error = 0;
3169 _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
3170 if (enable) {
3171 error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
3172 }
3173 return error;
3174}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00003175
ossue280cde2016-10-12 11:04:10 -07003176int Channel::GetRtpTimestampRateHz() const {
3177 const auto format = audio_coding_->ReceiveFormat();
3178 // Default to the playout frequency if we've not gotten any packets yet.
3179 // TODO(ossu): Zero clockrate can only happen if we've added an external
3180 // decoder for a format we don't support internally. Remove once that way of
3181 // adding decoders is gone!
3182 return (format && format->clockrate_hz != 0)
3183 ? format->clockrate_hz
3184 : audio_coding_->PlayoutFrequency();
wu@webrtc.org94454b72014-06-05 20:34:08 +00003185}
3186
// Returns the most recent round-trip time in milliseconds, or 0 when RTCP is
// off or no RTT is available. When no report blocks exist and
// |allow_associate_channel| is true, falls back to the associated send
// channel's RTT (non-recursively).
int64_t Channel::GetRTT(bool allow_associate_channel) const {
  RtcpMode method = _rtpRtcpModule->RTCP();
  if (method == RtcpMode::kOff) {
    return 0;
  }
  std::vector<RTCPReportBlock> report_blocks;
  _rtpRtcpModule->RemoteRTCPStat(&report_blocks);

  int64_t rtt = 0;
  if (report_blocks.empty()) {
    if (allow_associate_channel) {
      rtc::CritScope lock(&assoc_send_channel_lock_);
      Channel* channel = associate_send_channel_.channel();
      // Tries to get RTT from an associated channel. This is important for
      // receive-only channels.
      if (channel) {
        // To prevent infinite recursion and deadlock, calling GetRTT of
        // associate channel should always use "false" for argument:
        // |allow_associate_channel|.
        rtt = channel->GetRTT(false);
      }
    }
    return rtt;
  }

  // Prefer the report block matching the remote SSRC we receive from.
  uint32_t remoteSSRC = rtp_receiver_->SSRC();
  std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
  for (; it != report_blocks.end(); ++it) {
    if (it->remoteSSRC == remoteSSRC)
      break;
  }
  if (it == report_blocks.end()) {
    // We have not received packets with SSRC matching the report blocks.
    // To calculate RTT we try with the SSRC of the first report block.
    // This is very important for send-only channels where we don't know
    // the SSRC of the other end.
    remoteSSRC = report_blocks[0].remoteSSRC;
  }

  // Only the instantaneous RTT is returned; avg/min/max are required by the
  // RTT() interface but discarded here.
  int64_t avg_rtt = 0;
  int64_t max_rtt = 0;
  int64_t min_rtt = 0;
  if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
      0) {
    return 0;
  }
  return rtt;
}
3235
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00003236} // namespace voe
3237} // namespace webrtc