blob: 4579cdbe8d33b605372b9bd1aace699225784884 [file] [log] [blame]
niklase@google.com470e71d2011-07-07 08:21:25 +00001/*
henrika@webrtc.org2919e952012-01-31 08:45:03 +00002 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
niklase@google.com470e71d2011-07-07 08:21:25 +00003 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000011#include "webrtc/voice_engine/channel.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000012
Henrik Lundin64dad832015-05-11 12:44:23 +020013#include <algorithm>
Tommif888bb52015-12-12 01:37:01 +010014#include <utility>
Henrik Lundin64dad832015-05-11 12:44:23 +020015
henrik.lundin50499422016-11-29 04:26:24 -080016#include "webrtc/base/array_view.h"
Ivo Creusenae856f22015-09-17 16:30:16 +020017#include "webrtc/base/checks.h"
tommi31fc21f2016-01-21 10:37:37 -080018#include "webrtc/base/criticalsection.h"
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +000019#include "webrtc/base/format_macros.h"
pbosad856222015-11-27 09:48:36 -080020#include "webrtc/base/logging.h"
Erik Språng737336d2016-07-29 12:59:36 +020021#include "webrtc/base/rate_limiter.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010022#include "webrtc/base/thread_checker.h"
wu@webrtc.org94454b72014-06-05 20:34:08 +000023#include "webrtc/base/timeutils.h"
Henrik Lundin64dad832015-05-11 12:44:23 +020024#include "webrtc/config.h"
skvladcc91d282016-10-03 18:31:22 -070025#include "webrtc/logging/rtc_event_log/rtc_event_log.h"
kwibergda2bf4e2016-10-24 13:47:09 -070026#include "webrtc/modules/audio_coding/codecs/audio_format_conversion.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000027#include "webrtc/modules/audio_device/include/audio_device.h"
28#include "webrtc/modules/audio_processing/include/audio_processing.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010029#include "webrtc/modules/include/module_common_types.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010030#include "webrtc/modules/pacing/packet_router.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010031#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
32#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
33#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
wu@webrtc.org822fbd82013-08-15 23:38:54 +000034#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010035#include "webrtc/modules/utility/include/audio_frame_operations.h"
36#include "webrtc/modules/utility/include/process_thread.h"
Henrik Kjellander98f53512015-10-28 18:17:40 +010037#include "webrtc/system_wrappers/include/trace.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000038#include "webrtc/voice_engine/include/voe_external_media.h"
39#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
40#include "webrtc/voice_engine/output_mixer.h"
41#include "webrtc/voice_engine/statistics.h"
42#include "webrtc/voice_engine/transmit_mixer.h"
43#include "webrtc/voice_engine/utility.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000044
andrew@webrtc.org50419b02012-11-14 19:07:54 +000045namespace webrtc {
46namespace voe {
niklase@google.com470e71d2011-07-07 08:21:25 +000047
namespace {

// Bounds for the retransmission (NACK) history window, in milliseconds.
// The effective window is derived from the current RTT and clamped to
// [kMinRetransmissionWindowMs, kMaxRetransmissionWindowMs].
constexpr int64_t kMaxRetransmissionWindowMs = 1000;
constexpr int64_t kMinRetransmissionWindowMs = 30;

}  // namespace

// Attenuation applied when mixing outgoing telephone-event (DTMF) tones, in
// dB. Made constexpr for consistency with the constants above.
constexpr int kTelephoneEventAttenuationdB = 10;
56
// Thread-safe forwarding proxy for an RtcEventLog. The Channel hands this
// proxy to the modules it owns; the real event log can be attached or
// detached at any time via SetEventLog(). While no log is attached
// (event_log_ == nullptr) all Log*() calls are silently dropped.
// Lifecycle control (start/stop) must be performed on the underlying log,
// never through the proxy — those overrides are RTC_NOTREACHED().
class RtcEventLogProxy final : public webrtc::RtcEventLog {
 public:
  RtcEventLogProxy() : event_log_(nullptr) {}

  // Lifecycle methods are intentionally unsupported on the proxy.
  bool StartLogging(const std::string& file_name,
                    int64_t max_size_bytes) override {
    RTC_NOTREACHED();
    return false;
  }

  bool StartLogging(rtc::PlatformFile log_file,
                    int64_t max_size_bytes) override {
    RTC_NOTREACHED();
    return false;
  }

  void StopLogging() override { RTC_NOTREACHED(); }

  // Each Log*() method below takes the lock, checks for an attached log and
  // forwards the call; they are no-ops when no log is attached.
  void LogVideoReceiveStreamConfig(
      const webrtc::VideoReceiveStream::Config& config) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogVideoReceiveStreamConfig(config);
    }
  }

  void LogVideoSendStreamConfig(
      const webrtc::VideoSendStream::Config& config) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogVideoSendStreamConfig(config);
    }
  }

  void LogAudioReceiveStreamConfig(
      const webrtc::AudioReceiveStream::Config& config) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogAudioReceiveStreamConfig(config);
    }
  }

  void LogAudioSendStreamConfig(
      const webrtc::AudioSendStream::Config& config) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogAudioSendStreamConfig(config);
    }
  }

  void LogRtpHeader(webrtc::PacketDirection direction,
                    webrtc::MediaType media_type,
                    const uint8_t* header,
                    size_t packet_length) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogRtpHeader(direction, media_type, header, packet_length);
    }
  }

  void LogRtcpPacket(webrtc::PacketDirection direction,
                     webrtc::MediaType media_type,
                     const uint8_t* packet,
                     size_t length) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogRtcpPacket(direction, media_type, packet, length);
    }
  }

  void LogAudioPlayout(uint32_t ssrc) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogAudioPlayout(ssrc);
    }
  }

  void LogBwePacketLossEvent(int32_t bitrate,
                             uint8_t fraction_loss,
                             int32_t total_packets) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogBwePacketLossEvent(bitrate, fraction_loss, total_packets);
    }
  }

  // Attaches (or detaches, with nullptr) the underlying event log. May be
  // called from a different thread than the Log*() calls; the lock makes the
  // swap safe.
  void SetEventLog(RtcEventLog* event_log) {
    rtc::CritScope lock(&crit_);
    event_log_ = event_log;
  }

 private:
  rtc::CriticalSection crit_;
  RtcEventLog* event_log_ GUARDED_BY(crit_);  // Not owned.
  RTC_DISALLOW_COPY_AND_ASSIGN(RtcEventLogProxy);
};
153
// Forwarding proxy for a TransportFeedbackObserver that can be attached and
// detached at runtime. Calls arrive on two distinct threads: AddPacket() on
// the pacer thread and OnTransportFeedback() on the network thread; both
// checkers are detached in the constructor so the first call on each path
// binds the checker to that thread.
class TransportFeedbackProxy : public TransportFeedbackObserver {
 public:
  TransportFeedbackProxy() : feedback_observer_(nullptr) {
    // Detach so the thread checkers bind to whichever threads actually make
    // the first AddPacket()/OnTransportFeedback() calls.
    pacer_thread_.DetachFromThread();
    network_thread_.DetachFromThread();
  }

  // Attaches (or detaches, with nullptr) the real observer. Must be called
  // on the construction thread.
  void SetTransportFeedbackObserver(
      TransportFeedbackObserver* feedback_observer) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    feedback_observer_ = feedback_observer;
  }

  // Implements TransportFeedbackObserver.
  // Called on the pacer thread; no-op while no observer is attached.
  void AddPacket(uint16_t sequence_number,
                 size_t length,
                 int probe_cluster_id) override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->AddPacket(sequence_number, length, probe_cluster_id);
  }
  // Called on the network thread; no-op while no observer is attached.
  void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
    RTC_DCHECK(network_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->OnTransportFeedback(feedback);
  }
  // Not supported through the proxy.
  std::vector<PacketInfo> GetTransportFeedbackVector() const override {
    RTC_NOTREACHED();
    return std::vector<PacketInfo>();
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  rtc::ThreadChecker network_thread_;
  TransportFeedbackObserver* feedback_observer_ GUARDED_BY(&crit_);  // Not owned.
};
195
196class TransportSequenceNumberProxy : public TransportSequenceNumberAllocator {
197 public:
198 TransportSequenceNumberProxy() : seq_num_allocator_(nullptr) {
199 pacer_thread_.DetachFromThread();
200 }
201
202 void SetSequenceNumberAllocator(
203 TransportSequenceNumberAllocator* seq_num_allocator) {
204 RTC_DCHECK(thread_checker_.CalledOnValidThread());
205 rtc::CritScope lock(&crit_);
206 seq_num_allocator_ = seq_num_allocator;
207 }
208
209 // Implements TransportSequenceNumberAllocator.
210 uint16_t AllocateSequenceNumber() override {
211 RTC_DCHECK(pacer_thread_.CalledOnValidThread());
212 rtc::CritScope lock(&crit_);
213 if (!seq_num_allocator_)
214 return 0;
215 return seq_num_allocator_->AllocateSequenceNumber();
216 }
217
218 private:
219 rtc::CriticalSection crit_;
220 rtc::ThreadChecker thread_checker_;
221 rtc::ThreadChecker pacer_thread_;
222 TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
223};
224
225class RtpPacketSenderProxy : public RtpPacketSender {
226 public:
kwiberg55b97fe2016-01-28 05:22:45 -0800227 RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {}
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100228
229 void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
230 RTC_DCHECK(thread_checker_.CalledOnValidThread());
231 rtc::CritScope lock(&crit_);
232 rtp_packet_sender_ = rtp_packet_sender;
233 }
234
235 // Implements RtpPacketSender.
236 void InsertPacket(Priority priority,
237 uint32_t ssrc,
238 uint16_t sequence_number,
239 int64_t capture_time_ms,
240 size_t bytes,
241 bool retransmission) override {
242 rtc::CritScope lock(&crit_);
243 if (rtp_packet_sender_) {
244 rtp_packet_sender_->InsertPacket(priority, ssrc, sequence_number,
245 capture_time_ms, bytes, retransmission);
246 }
247 }
248
249 private:
250 rtc::ThreadChecker thread_checker_;
251 rtc::CriticalSection crit_;
252 RtpPacketSender* rtp_packet_sender_ GUARDED_BY(&crit_);
253};
254
// Extend the default RTCP statistics struct with max_jitter, defined as the
// maximum jitter value seen in an RTCP report block.
// NOTE(review): this struct both derives from RtcpStatistics and contains an
// |rtcp| member of the same type; only the member appears to be used by
// StatisticsProxy below. Consider dropping the inheritance — verify callers
// first.
struct ChannelStatistics : public RtcpStatistics {
  ChannelStatistics() : rtcp(), max_jitter(0) {}

  // Most recently received report-block statistics.
  RtcpStatistics rtcp;
  // Largest jitter value observed in any report block so far.
  uint32_t max_jitter;
};
263
264// Statistics callback, called at each generation of a new RTCP report block.
265class StatisticsProxy : public RtcpStatisticsCallback {
266 public:
tommi31fc21f2016-01-21 10:37:37 -0800267 StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {}
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000268 virtual ~StatisticsProxy() {}
269
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000270 void StatisticsUpdated(const RtcpStatistics& statistics,
271 uint32_t ssrc) override {
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000272 if (ssrc != ssrc_)
273 return;
274
tommi31fc21f2016-01-21 10:37:37 -0800275 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000276 stats_.rtcp = statistics;
277 if (statistics.jitter > stats_.max_jitter) {
278 stats_.max_jitter = statistics.jitter;
279 }
280 }
281
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000282 void CNameChanged(const char* cname, uint32_t ssrc) override {}
pbos@webrtc.orgce4e9a32014-12-18 13:50:16 +0000283
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000284 ChannelStatistics GetStats() {
tommi31fc21f2016-01-21 10:37:37 -0800285 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000286 return stats_;
287 }
288
289 private:
290 // StatisticsUpdated calls are triggered from threads in the RTP module,
291 // while GetStats calls can be triggered from the public voice engine API,
292 // hence synchronization is needed.
tommi31fc21f2016-01-21 10:37:37 -0800293 rtc::CriticalSection stats_lock_;
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000294 const uint32_t ssrc_;
295 ChannelStatistics stats_;
296};
297
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000298class VoERtcpObserver : public RtcpBandwidthObserver {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000299 public:
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000300 explicit VoERtcpObserver(Channel* owner) : owner_(owner) {}
301 virtual ~VoERtcpObserver() {}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000302
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000303 void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
304 // Not used for Voice Engine.
305 }
306
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000307 void OnReceivedRtcpReceiverReport(const ReportBlockList& report_blocks,
308 int64_t rtt,
309 int64_t now_ms) override {
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000310 // TODO(mflodman): Do we need to aggregate reports here or can we jut send
311 // what we get? I.e. do we ever get multiple reports bundled into one RTCP
312 // report for VoiceEngine?
313 if (report_blocks.empty())
314 return;
315
316 int fraction_lost_aggregate = 0;
317 int total_number_of_packets = 0;
318
319 // If receiving multiple report blocks, calculate the weighted average based
320 // on the number of packets a report refers to.
321 for (ReportBlockList::const_iterator block_it = report_blocks.begin();
322 block_it != report_blocks.end(); ++block_it) {
323 // Find the previous extended high sequence number for this remote SSRC,
324 // to calculate the number of RTP packets this report refers to. Ignore if
325 // we haven't seen this SSRC before.
326 std::map<uint32_t, uint32_t>::iterator seq_num_it =
327 extended_max_sequence_number_.find(block_it->sourceSSRC);
328 int number_of_packets = 0;
329 if (seq_num_it != extended_max_sequence_number_.end()) {
330 number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second;
331 }
332 fraction_lost_aggregate += number_of_packets * block_it->fractionLost;
333 total_number_of_packets += number_of_packets;
334
335 extended_max_sequence_number_[block_it->sourceSSRC] =
336 block_it->extendedHighSeqNum;
337 }
338 int weighted_fraction_lost = 0;
339 if (total_number_of_packets > 0) {
kwiberg55b97fe2016-01-28 05:22:45 -0800340 weighted_fraction_lost =
341 (fraction_lost_aggregate + total_number_of_packets / 2) /
342 total_number_of_packets;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000343 }
344 owner_->OnIncomingFractionLoss(weighted_fraction_lost);
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000345 }
346
347 private:
348 Channel* owner_;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000349 // Maps remote side ssrc to extended highest sequence number received.
350 std::map<uint32_t, uint32_t> extended_max_sequence_number_;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000351};
352
kwiberg55b97fe2016-01-28 05:22:45 -0800353int32_t Channel::SendData(FrameType frameType,
354 uint8_t payloadType,
355 uint32_t timeStamp,
356 const uint8_t* payloadData,
357 size_t payloadSize,
358 const RTPFragmentationHeader* fragmentation) {
359 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
360 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
361 " payloadSize=%" PRIuS ", fragmentation=0x%x)",
362 frameType, payloadType, timeStamp, payloadSize, fragmentation);
niklase@google.com470e71d2011-07-07 08:21:25 +0000363
kwiberg55b97fe2016-01-28 05:22:45 -0800364 if (_includeAudioLevelIndication) {
365 // Store current audio level in the RTP/RTCP module.
366 // The level will be used in combination with voice-activity state
367 // (frameType) to add an RTP header extension
henrik.lundin50499422016-11-29 04:26:24 -0800368 _rtpRtcpModule->SetAudioLevel(rms_level_.Average());
kwiberg55b97fe2016-01-28 05:22:45 -0800369 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000370
kwiberg55b97fe2016-01-28 05:22:45 -0800371 // Push data from ACM to RTP/RTCP-module to deliver audio frame for
372 // packetization.
373 // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
Sergey Ulanov525df3f2016-08-02 17:46:41 -0700374 if (!_rtpRtcpModule->SendOutgoingData(
kwiberg55b97fe2016-01-28 05:22:45 -0800375 (FrameType&)frameType, payloadType, timeStamp,
376 // Leaving the time when this frame was
377 // received from the capture device as
378 // undefined for voice for now.
Sergey Ulanov525df3f2016-08-02 17:46:41 -0700379 -1, payloadData, payloadSize, fragmentation, nullptr, nullptr)) {
kwiberg55b97fe2016-01-28 05:22:45 -0800380 _engineStatisticsPtr->SetLastError(
381 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
382 "Channel::SendData() failed to send data to RTP/RTCP module");
383 return -1;
384 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000385
kwiberg55b97fe2016-01-28 05:22:45 -0800386 _lastLocalTimeStamp = timeStamp;
387 _lastPayloadType = payloadType;
niklase@google.com470e71d2011-07-07 08:21:25 +0000388
kwiberg55b97fe2016-01-28 05:22:45 -0800389 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000390}
391
kwiberg55b97fe2016-01-28 05:22:45 -0800392int32_t Channel::InFrameType(FrameType frame_type) {
393 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
394 "Channel::InFrameType(frame_type=%d)", frame_type);
niklase@google.com470e71d2011-07-07 08:21:25 +0000395
kwiberg55b97fe2016-01-28 05:22:45 -0800396 rtc::CritScope cs(&_callbackCritSect);
397 _sendFrameType = (frame_type == kAudioFrameSpeech);
398 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000399}
400
stefan1d8a5062015-10-02 03:39:33 -0700401bool Channel::SendRtp(const uint8_t* data,
402 size_t len,
403 const PacketOptions& options) {
kwiberg55b97fe2016-01-28 05:22:45 -0800404 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
405 "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len);
niklase@google.com470e71d2011-07-07 08:21:25 +0000406
kwiberg55b97fe2016-01-28 05:22:45 -0800407 rtc::CritScope cs(&_callbackCritSect);
wu@webrtc.orgfb648da2013-10-18 21:10:51 +0000408
kwiberg55b97fe2016-01-28 05:22:45 -0800409 if (_transportPtr == NULL) {
410 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
411 "Channel::SendPacket() failed to send RTP packet due to"
412 " invalid transport object");
413 return false;
414 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000415
kwiberg55b97fe2016-01-28 05:22:45 -0800416 uint8_t* bufferToSendPtr = (uint8_t*)data;
417 size_t bufferLength = len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000418
kwiberg55b97fe2016-01-28 05:22:45 -0800419 if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) {
420 std::string transport_name =
421 _externalTransport ? "external transport" : "WebRtc sockets";
422 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
423 "Channel::SendPacket() RTP transmission using %s failed",
424 transport_name.c_str());
425 return false;
426 }
427 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +0000428}
429
kwiberg55b97fe2016-01-28 05:22:45 -0800430bool Channel::SendRtcp(const uint8_t* data, size_t len) {
431 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
432 "Channel::SendRtcp(len=%" PRIuS ")", len);
niklase@google.com470e71d2011-07-07 08:21:25 +0000433
kwiberg55b97fe2016-01-28 05:22:45 -0800434 rtc::CritScope cs(&_callbackCritSect);
435 if (_transportPtr == NULL) {
436 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
437 "Channel::SendRtcp() failed to send RTCP packet"
438 " due to invalid transport object");
439 return false;
440 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000441
kwiberg55b97fe2016-01-28 05:22:45 -0800442 uint8_t* bufferToSendPtr = (uint8_t*)data;
443 size_t bufferLength = len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000444
kwiberg55b97fe2016-01-28 05:22:45 -0800445 int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength);
446 if (n < 0) {
447 std::string transport_name =
448 _externalTransport ? "external transport" : "WebRtc sockets";
449 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
450 "Channel::SendRtcp() transmission using %s failed",
451 transport_name.c_str());
452 return false;
453 }
454 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +0000455}
456
kwiberg55b97fe2016-01-28 05:22:45 -0800457void Channel::OnIncomingSSRCChanged(uint32_t ssrc) {
458 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
459 "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);
niklase@google.com470e71d2011-07-07 08:21:25 +0000460
kwiberg55b97fe2016-01-28 05:22:45 -0800461 // Update ssrc so that NTP for AV sync can be updated.
462 _rtpRtcpModule->SetRemoteSSRC(ssrc);
niklase@google.com470e71d2011-07-07 08:21:25 +0000463}
464
Peter Boströmac547a62015-09-17 23:03:57 +0200465void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) {
466 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
467 "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC,
468 added);
niklase@google.com470e71d2011-07-07 08:21:25 +0000469}
470
// RTP receiver callback: a payload type was seen that needs a decoder.
// Builds a CodecInst from the signaled parameters and registers it with the
// ACM. Returns 0 on success, -1 if the ACM rejects the codec.
int32_t Channel::OnInitializeDecoder(
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    size_t channels,
    uint32_t rate) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnInitializeDecoder(payloadType=%d, "
               "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
               payloadType, payloadName, frequency, channels, rate);

  // Zero-initialize both structs; this also guarantees plname is
  // null-terminated after the size-limited strncpy below.
  CodecInst receiveCodec = {0};
  CodecInst dummyCodec = {0};

  receiveCodec.pltype = payloadType;
  receiveCodec.plfreq = frequency;
  receiveCodec.channels = channels;
  receiveCodec.rate = rate;
  strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

  // The packet size is not signaled in-band; look up the ACM's default for
  // this codec and copy it.
  audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
  receiveCodec.pacsize = dummyCodec.pacsize;

  // Register the new codec to the ACM
  if (!audio_coding_->RegisterReceiveCodec(receiveCodec.pltype,
                                           CodecInstToSdp(receiveCodec))) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnInitializeDecoder() invalid codec ("
                 "pt=%d, name=%s) received - 1",
                 payloadType, payloadName);
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
    return -1;
  }

  return 0;
}
507
kwiberg55b97fe2016-01-28 05:22:45 -0800508int32_t Channel::OnReceivedPayloadData(const uint8_t* payloadData,
509 size_t payloadSize,
510 const WebRtcRTPHeader* rtpHeader) {
511 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
512 "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS
513 ","
514 " payloadType=%u, audioChannel=%" PRIuS ")",
515 payloadSize, rtpHeader->header.payloadType,
516 rtpHeader->type.Audio.channel);
niklase@google.com470e71d2011-07-07 08:21:25 +0000517
kwiberg55b97fe2016-01-28 05:22:45 -0800518 if (!channel_state_.Get().playing) {
519 // Avoid inserting into NetEQ when we are not playing. Count the
520 // packet as discarded.
521 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
522 "received packet is discarded since playing is not"
523 " activated");
524 _numberOfDiscardedPackets++;
niklase@google.com470e71d2011-07-07 08:21:25 +0000525 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -0800526 }
527
528 // Push the incoming payload (parsed and ready for decoding) into the ACM
529 if (audio_coding_->IncomingPacket(payloadData, payloadSize, *rtpHeader) !=
530 0) {
531 _engineStatisticsPtr->SetLastError(
532 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
533 "Channel::OnReceivedPayloadData() unable to push data to the ACM");
534 return -1;
535 }
536
kwiberg55b97fe2016-01-28 05:22:45 -0800537 int64_t round_trip_time = 0;
538 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, NULL, NULL,
539 NULL);
540
541 std::vector<uint16_t> nack_list = audio_coding_->GetNackList(round_trip_time);
542 if (!nack_list.empty()) {
543 // Can't use nack_list.data() since it's not supported by all
544 // compilers.
545 ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
546 }
547 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000548}
549
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +0000550bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +0000551 size_t rtp_packet_length) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +0000552 RTPHeader header;
553 if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
554 WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
555 "IncomingPacket invalid RTP header");
556 return false;
557 }
558 header.payload_type_frequency =
559 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
560 if (header.payload_type_frequency < 0)
561 return false;
562 return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
563}
564
// Mixer callback: produces the next 10 ms of playout audio for this channel.
// Pulls decoded PCM from the ACM, then applies (in order) the audio sink
// callback, output gain, panning, file mixing, external media processing,
// file recording, level measurement and timestamp/NTP bookkeeping.
// Returns kError if decoding failed, kMuted if the frame is silent (and was
// not un-muted by file mixing), otherwise kNormal.
MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
    int32_t id,
    AudioFrame* audioFrame) {
  // Log the playout event against our local SSRC.
  unsigned int ssrc;
  RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
  event_log_proxy_->LogAudioPlayout(ssrc);
  // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
  bool muted;
  if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame,
                                     &muted) == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
    // In all likelihood, the audio in this frame is garbage. We return an
    // error so that the audio mixer module doesn't add it to the mix. As
    // a result, it won't be played out and the actions skipped here are
    // irrelevant.
    return MixerParticipant::AudioFrameInfo::kError;
  }

  if (muted) {
    // TODO(henrik.lundin): We should be able to do better than this. But we
    // will have to go through all the cases below where the audio samples may
    // be used, and handle the muted case in some way.
    audioFrame->Mute();
  }

  // Convert module ID to internal VoE channel ID
  audioFrame->id_ = VoEChannelId(audioFrame->id_);
  // Store speech type for dead-or-alive detection
  _outputSpeechType = audioFrame->speech_type_;

  // Snapshot the channel state once; used below for file-playout check.
  ChannelState::State state = channel_state_.Get();

  {
    // Pass the audio buffers to an optional sink callback, before applying
    // scaling/panning, as that applies to the mix operation.
    // External recipients of the audio (e.g. via AudioTrack), will do their
    // own mixing/dynamic processing.
    rtc::CritScope cs(&_callbackCritSect);
    if (audio_sink_) {
      AudioSinkInterface::Data data(
          &audioFrame->data_[0], audioFrame->samples_per_channel_,
          audioFrame->sample_rate_hz_, audioFrame->num_channels_,
          audioFrame->timestamp_);
      audio_sink_->OnData(data);
    }
  }

  // Copy the volume settings under their own lock so the (potentially slow)
  // frame operations below run without holding it.
  float output_gain = 1.0f;
  float left_pan = 1.0f;
  float right_pan = 1.0f;
  {
    rtc::CritScope cs(&volume_settings_critsect_);
    output_gain = _outputGain;
    left_pan = _panLeft;
    right_pan = _panRight;
  }

  // Output volume scaling
  // Skip scaling when the gain is within +/-1% of unity.
  if (output_gain < 0.99f || output_gain > 1.01f) {
    AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
  }

  // Scale left and/or right channel(s) if stereo and master balance is
  // active

  if (left_pan != 1.0f || right_pan != 1.0f) {
    if (audioFrame->num_channels_ == 1) {
      // Emulate stereo mode since panning is active.
      // The mono signal is copied to both left and right channels here.
      AudioFrameOperations::MonoToStereo(audioFrame);
    }
    // For true stereo mode (when we are receiving a stereo signal), no
    // action is needed.

    // Do the panning operation (the audio frame contains stereo at this
    // stage)
    AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
  }

  // Mix decoded PCM output with file if file mixing is enabled
  if (state.output_file_playing) {
    MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
    muted = false;  // We may have added non-zero samples.
  }

  // External media
  if (_outputExternalMedia) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (audioFrame->num_channels_ == 2);
    if (_outputExternalMediaCallbackPtr) {
      _outputExternalMediaCallbackPtr->Process(
          _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
          audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
          isStereo);
    }
  }

  // Record playout if enabled
  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFileRecording && output_file_recorder_) {
      output_file_recorder_->RecordAudioToFile(*audioFrame);
    }
  }

  // Measure audio level (0-9)
  // TODO(henrik.lundin) Use the |muted| information here too.
  _outputAudioLevel.ComputeLevel(*audioFrame);

  if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
    // The first frame with a valid rtp timestamp.
    capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
  }

  if (capture_start_rtp_time_stamp_ >= 0) {
    // audioFrame.timestamp_ should be valid from now on.

    // Compute elapsed time.
    int64_t unwrap_timestamp =
        rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
    audioFrame->elapsed_time_ms_ =
        (unwrap_timestamp - capture_start_rtp_time_stamp_) /
        (GetRtpTimestampRateHz() / 1000);

    {
      rtc::CritScope lock(&ts_stats_lock_);
      // Compute ntp time.
      audioFrame->ntp_time_ms_ =
          ntp_estimator_.Estimate(audioFrame->timestamp_);
      // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
      if (audioFrame->ntp_time_ms_ > 0) {
        // Compute |capture_start_ntp_time_ms_| so that
        // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
        capture_start_ntp_time_ms_ =
            audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
      }
    }
  }

  return muted ? MixerParticipant::AudioFrameInfo::kMuted
               : MixerParticipant::AudioFrameInfo::kNormal;
}
709
aleloi6c278492016-10-20 14:24:39 -0700710AudioMixer::Source::AudioFrameInfo Channel::GetAudioFrameWithInfo(
711 int sample_rate_hz,
712 AudioFrame* audio_frame) {
713 audio_frame->sample_rate_hz_ = sample_rate_hz;
aleloiaed581a2016-10-20 06:32:39 -0700714
aleloi6c278492016-10-20 14:24:39 -0700715 const auto frame_info = GetAudioFrameWithMuted(-1, audio_frame);
aleloiaed581a2016-10-20 06:32:39 -0700716
717 using FrameInfo = AudioMixer::Source::AudioFrameInfo;
718 FrameInfo new_audio_frame_info = FrameInfo::kError;
719 switch (frame_info) {
720 case MixerParticipant::AudioFrameInfo::kNormal:
721 new_audio_frame_info = FrameInfo::kNormal;
722 break;
723 case MixerParticipant::AudioFrameInfo::kMuted:
724 new_audio_frame_info = FrameInfo::kMuted;
725 break;
726 case MixerParticipant::AudioFrameInfo::kError:
727 new_audio_frame_info = FrameInfo::kError;
728 break;
729 }
aleloi6c278492016-10-20 14:24:39 -0700730 return new_audio_frame_info;
aleloiaed581a2016-10-20 06:32:39 -0700731}
732
kwiberg55b97fe2016-01-28 05:22:45 -0800733int32_t Channel::NeededFrequency(int32_t id) const {
734 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
735 "Channel::NeededFrequency(id=%d)", id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000736
kwiberg55b97fe2016-01-28 05:22:45 -0800737 int highestNeeded = 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000738
kwiberg55b97fe2016-01-28 05:22:45 -0800739 // Determine highest needed receive frequency
740 int32_t receiveFrequency = audio_coding_->ReceiveFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +0000741
kwiberg55b97fe2016-01-28 05:22:45 -0800742 // Return the bigger of playout and receive frequency in the ACM.
743 if (audio_coding_->PlayoutFrequency() > receiveFrequency) {
744 highestNeeded = audio_coding_->PlayoutFrequency();
745 } else {
746 highestNeeded = receiveFrequency;
747 }
748
749 // Special case, if we're playing a file on the playout side
750 // we take that frequency into consideration as well
751 // This is not needed on sending side, since the codec will
752 // limit the spectrum anyway.
753 if (channel_state_.Get().output_file_playing) {
754 rtc::CritScope cs(&_fileCritSect);
kwiberg5a25d952016-08-17 07:31:12 -0700755 if (output_file_player_) {
756 if (output_file_player_->Frequency() > highestNeeded) {
757 highestNeeded = output_file_player_->Frequency();
kwiberg55b97fe2016-01-28 05:22:45 -0800758 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000759 }
kwiberg55b97fe2016-01-28 05:22:45 -0800760 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000761
kwiberg55b97fe2016-01-28 05:22:45 -0800762 return (highestNeeded);
niklase@google.com470e71d2011-07-07 08:21:25 +0000763}
764
ossu5f7cfa52016-05-30 08:11:28 -0700765int32_t Channel::CreateChannel(
766 Channel*& channel,
767 int32_t channelId,
768 uint32_t instanceId,
solenberg88499ec2016-09-07 07:34:41 -0700769 const VoEBase::ChannelConfig& config) {
kwiberg55b97fe2016-01-28 05:22:45 -0800770 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
771 "Channel::CreateChannel(channelId=%d, instanceId=%d)", channelId,
772 instanceId);
niklase@google.com470e71d2011-07-07 08:21:25 +0000773
solenberg88499ec2016-09-07 07:34:41 -0700774 channel = new Channel(channelId, instanceId, config);
kwiberg55b97fe2016-01-28 05:22:45 -0800775 if (channel == NULL) {
776 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
777 "Channel::CreateChannel() unable to allocate memory for"
778 " channel");
779 return -1;
780 }
781 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000782}
783
kwiberg55b97fe2016-01-28 05:22:45 -0800784void Channel::PlayNotification(int32_t id, uint32_t durationMs) {
785 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
786 "Channel::PlayNotification(id=%d, durationMs=%d)", id,
787 durationMs);
niklase@google.com470e71d2011-07-07 08:21:25 +0000788
kwiberg55b97fe2016-01-28 05:22:45 -0800789 // Not implement yet
niklase@google.com470e71d2011-07-07 08:21:25 +0000790}
791
kwiberg55b97fe2016-01-28 05:22:45 -0800792void Channel::RecordNotification(int32_t id, uint32_t durationMs) {
793 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
794 "Channel::RecordNotification(id=%d, durationMs=%d)", id,
795 durationMs);
niklase@google.com470e71d2011-07-07 08:21:25 +0000796
kwiberg55b97fe2016-01-28 05:22:45 -0800797 // Not implement yet
niklase@google.com470e71d2011-07-07 08:21:25 +0000798}
799
kwiberg55b97fe2016-01-28 05:22:45 -0800800void Channel::PlayFileEnded(int32_t id) {
801 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
802 "Channel::PlayFileEnded(id=%d)", id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000803
kwiberg55b97fe2016-01-28 05:22:45 -0800804 if (id == _inputFilePlayerId) {
805 channel_state_.SetInputFilePlaying(false);
806 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
807 "Channel::PlayFileEnded() => input file player module is"
niklase@google.com470e71d2011-07-07 08:21:25 +0000808 " shutdown");
kwiberg55b97fe2016-01-28 05:22:45 -0800809 } else if (id == _outputFilePlayerId) {
810 channel_state_.SetOutputFilePlaying(false);
811 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
812 "Channel::PlayFileEnded() => output file player module is"
813 " shutdown");
814 }
815}
816
// Callback fired when the output file recorder stops. |id| must be this
// channel's recorder id; clears the recording flag under the file lock.
void Channel::RecordFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded(id=%d)", id);

  assert(id == _outputFileRecorderId);

  // |_outputFileRecording| is also read on the playout path (see
  // GetAudioFrameWithMuted), so take the file lock before clearing it.
  rtc::CritScope cs(&_fileCritSect);

  _outputFileRecording = false;
  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded() => output file recorder module is"
               " shutdown");
}
830
// Constructs a channel and wires up its media stack:
// - creates the ACM (with NetEq muted-state support enabled),
// - creates the RTP/RTCP module, hooking in pacing/feedback proxies when
//   config.enable_voice_pacing is set, and
// - registers a statistics proxy for RTCP statistics callbacks.
// Engine-level collaborators are injected later via SetEngineInformation().
Channel::Channel(int32_t channelId,
                 uint32_t instanceId,
                 const VoEBase::ChannelConfig& config)
    : _instanceId(instanceId),
      _channelId(channelId),
      event_log_proxy_(new RtcEventLogProxy()),
      rtp_header_parser_(RtpHeaderParser::Create()),
      rtp_payload_registry_(new RTPPayloadRegistry()),
      rtp_receive_statistics_(
          ReceiveStatistics::Create(Clock::GetRealTimeClock())),
      rtp_receiver_(
          RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
                                           this,
                                           this,
                                           rtp_payload_registry_.get())),
      telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
      _outputAudioLevel(),
      _externalTransport(false),
      // Avoid conflict with other channels by adding 1024 - 1026,
      // won't use as much as 1024 channels.
      _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
      _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
      _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
      _outputFileRecording(false),
      _outputExternalMedia(false),
      _inputExternalMediaCallbackPtr(NULL),
      _outputExternalMediaCallbackPtr(NULL),
      _timeStamp(0),  // This is just an offset, RTP module will add it's own
                      // random offset
      ntp_estimator_(Clock::GetRealTimeClock()),
      playout_timestamp_rtp_(0),
      playout_timestamp_rtcp_(0),
      playout_delay_ms_(0),
      _numberOfDiscardedPackets(0),
      send_sequence_number_(0),
      rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
      // -1 marks "no frame with a valid RTP timestamp seen yet"; see
      // GetAudioFrameWithMuted.
      capture_start_rtp_time_stamp_(-1),
      capture_start_ntp_time_ms_(-1),
      // Engine-level pointers stay NULL until SetEngineInformation(); Init()
      // refuses to run before they are set.
      _engineStatisticsPtr(NULL),
      _outputMixerPtr(NULL),
      _transmitMixerPtr(NULL),
      _moduleProcessThreadPtr(NULL),
      _audioDeviceModulePtr(NULL),
      _voiceEngineObserverPtr(NULL),
      _callbackCritSectPtr(NULL),
      _transportPtr(NULL),
      _sendFrameType(0),
      _externalMixing(false),
      _mixFileWithMicrophone(false),
      input_mute_(false),
      previous_frame_muted_(false),
      _panLeft(1.0f),
      _panRight(1.0f),
      _outputGain(1.0f),
      _lastLocalTimeStamp(0),
      _lastPayloadType(0),
      _includeAudioLevelIndication(false),
      _outputSpeechType(AudioFrame::kNormalSpeech),
      restored_packet_in_use_(false),
      rtcp_observer_(new VoERtcpObserver(this)),
      associate_send_channel_(ChannelOwner(nullptr)),
      pacing_enabled_(config.enable_voice_pacing),
      feedback_observer_proxy_(new TransportFeedbackProxy()),
      seq_num_allocator_proxy_(new TransportSequenceNumberProxy()),
      rtp_packet_sender_proxy_(new RtpPacketSenderProxy()),
      retransmission_rate_limiter_(new RateLimiter(Clock::GetRealTimeClock(),
                                                   kMaxRetransmissionWindowMs)),
      decoder_factory_(config.acm_config.decoder_factory),
      // Bitrate smoother can be initialized with arbitrary time constant
      // (0 used here). The actual time constant will be set in SetBitRate.
      bitrate_smoother_(0, Clock::GetRealTimeClock()) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Channel() - ctor");
  // Create the ACM from the caller-supplied config, overriding the id and
  // enabling NetEq's muted-output state.
  AudioCodingModule::Config acm_config(config.acm_config);
  acm_config.id = VoEModuleId(instanceId, channelId);
  acm_config.neteq_config.enable_muted_state = true;
  audio_coding_.reset(AudioCodingModule::Create(acm_config));

  _outputAudioLevel.Clear();

  // Assemble and create the RTP/RTCP module. This channel acts as its
  // outgoing transport.
  RtpRtcp::Configuration configuration;
  configuration.audio = true;
  configuration.outgoing_transport = this;
  configuration.receive_statistics = rtp_receive_statistics_.get();
  configuration.bandwidth_callback = rtcp_observer_.get();
  if (pacing_enabled_) {
    // Voice pacing: route packets through the pacer and enable transport
    // sequence numbers / transport feedback.
    configuration.paced_sender = rtp_packet_sender_proxy_.get();
    configuration.transport_sequence_number_allocator =
        seq_num_allocator_proxy_.get();
    configuration.transport_feedback_callback = feedback_observer_proxy_.get();
  }
  configuration.event_log = &(*event_log_proxy_);
  configuration.retransmission_rate_limiter =
      retransmission_rate_limiter_.get();

  _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
  // Sending is enabled later by StartSend().
  _rtpRtcpModule->SetSendingMediaStatus(false);

  statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
      statistics_proxy_.get());
}
933
// Tears the channel down: stops external media processing, send and playout,
// shuts down any file players/recorder, then de-registers callbacks and
// removes the RTP/RTCP module from the process thread — in that order.
Channel::~Channel() {
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::~Channel() - dtor");

  if (_outputExternalMedia) {
    DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
  }
  if (channel_state_.Get().input_external_media) {
    DeRegisterExternalMediaProcessing(kRecordingPerChannel);
  }
  StopSend();
  StopPlayout();

  // Stop any file playback/recording under the file lock before the players
  // are destroyed with the channel.
  {
    rtc::CritScope cs(&_fileCritSect);
    if (input_file_player_) {
      input_file_player_->RegisterModuleFileCallback(NULL);
      input_file_player_->StopPlayingFile();
    }
    if (output_file_player_) {
      output_file_player_->RegisterModuleFileCallback(NULL);
      output_file_player_->StopPlayingFile();
    }
    if (output_file_recorder_) {
      output_file_recorder_->RegisterModuleFileCallback(NULL);
      output_file_recorder_->StopRecording();
    }
  }

  // The order to safely shutdown modules in a channel is:
  // 1. De-register callbacks in modules
  // 2. De-register modules in process thread
  // 3. Destroy modules
  if (audio_coding_->RegisterTransportCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register transport callback"
                 " (Audio coding module)");
  }
  if (audio_coding_->RegisterVADCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register VAD callback"
                 " (Audio coding module)");
  }
  // De-register modules in process thread
  _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());

  // End of modules shutdown
}
983
// One-time initialization, to be called after SetEngineInformation():
// registers the RTP/RTCP module with the process thread, initializes the ACM
// receiver, hooks up permanent callbacks and registers all supported codecs
// with the receiving side. Returns 0 on success, -1 on failure.
int32_t Channel::Init() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Init()");

  channel_state_.Reset();

  // --- Initial sanity

  if ((_engineStatisticsPtr == NULL) || (_moduleProcessThreadPtr == NULL)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::Init() must call SetEngineInformation() first");
    return -1;
  }

  // --- Add modules to process thread (for periodic schedulation)

  _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get());

  // --- ACM initialization

  if (audio_coding_->InitializeReceiver() == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "Channel::Init() unable to initialize the ACM - 1");
    return -1;
  }

  // --- RTP/RTCP module initialization

  // Ensure that RTCP is enabled by default for the created channel.
  // Note that, the module will keep generating RTCP until it is explicitly
  // disabled by the user.
  // After StopListen (when no sockets exists), RTCP packets will no longer
  // be transmitted since the Transport object will then be invalid.
  telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
  // RTCP is enabled by default.
  _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
  // --- Register all permanent callbacks
  const bool fail = (audio_coding_->RegisterTransportCallback(this) == -1) ||
                    (audio_coding_->RegisterVADCallback(this) == -1);

  if (fail) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_INIT_CHANNEL, kTraceError,
        "Channel::Init() callbacks not registered");
    return -1;
  }

  // --- Register all supported codecs to the receiving side of the
  // RTP/RTCP module

  CodecInst codec;
  const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

  for (int idx = 0; idx < nSupportedCodecs; idx++) {
    // Open up the RTP/RTCP receiver for all supported codecs.
    // Registration failures are logged but do not abort Init().
    if ((audio_coding_->Codec(idx, &codec) == -1) ||
        (rtp_receiver_->RegisterReceivePayload(codec) == -1)) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() unable to register %s "
                   "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() %s (%d/%d/%" PRIuS
                   "/%d) has been "
                   "added to the RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    }

    // Ensure that PCMU is used as default codec on the sending side
    if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1)) {
      SetSendCodec(codec);
    }

    // Register default PT for outband 'telephone-event'
    if (!STR_CASE_CMP(codec.plname, "telephone-event")) {
      if (_rtpRtcpModule->RegisterSendPayload(codec) == -1 ||
          !audio_coding_->RegisterReceiveCodec(codec.pltype,
                                               CodecInstToSdp(codec))) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register outband "
                     "'telephone-event' (%d/%d) correctly",
                     codec.pltype, codec.plfreq);
      }
    }

    // Comfort noise (CN) is registered on both send and receive sides.
    if (!STR_CASE_CMP(codec.plname, "CN")) {
      if (!codec_manager_.RegisterEncoder(codec) ||
          !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get()) ||
          !audio_coding_->RegisterReceiveCodec(codec.pltype,
                                               CodecInstToSdp(codec)) ||
          _rtpRtcpModule->RegisterSendPayload(codec) == -1) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register CN (%d/%d) "
                     "correctly - 1",
                     codec.pltype, codec.plfreq);
      }
    }
  }

  return 0;
}
1089
kwiberg55b97fe2016-01-28 05:22:45 -08001090int32_t Channel::SetEngineInformation(Statistics& engineStatistics,
1091 OutputMixer& outputMixer,
1092 voe::TransmitMixer& transmitMixer,
1093 ProcessThread& moduleProcessThread,
1094 AudioDeviceModule& audioDeviceModule,
1095 VoiceEngineObserver* voiceEngineObserver,
1096 rtc::CriticalSection* callbackCritSect) {
1097 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1098 "Channel::SetEngineInformation()");
1099 _engineStatisticsPtr = &engineStatistics;
1100 _outputMixerPtr = &outputMixer;
1101 _transmitMixerPtr = &transmitMixer,
1102 _moduleProcessThreadPtr = &moduleProcessThread;
1103 _audioDeviceModulePtr = &audioDeviceModule;
1104 _voiceEngineObserverPtr = voiceEngineObserver;
1105 _callbackCritSectPtr = callbackCritSect;
1106 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001107}
1108
kwiberg55b97fe2016-01-28 05:22:45 -08001109int32_t Channel::UpdateLocalTimeStamp() {
1110 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
1111 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001112}
1113
kwibergb7f89d62016-02-17 10:04:18 -08001114void Channel::SetSink(std::unique_ptr<AudioSinkInterface> sink) {
tommi31fc21f2016-01-21 10:37:37 -08001115 rtc::CritScope cs(&_callbackCritSect);
deadbeef2d110be2016-01-13 12:00:26 -08001116 audio_sink_ = std::move(sink);
Tommif888bb52015-12-12 01:37:01 +01001117}
1118
// Returns the decoder factory this channel was constructed with
// (ChannelConfig::acm_config.decoder_factory).
const rtc::scoped_refptr<AudioDecoderFactory>&
Channel::GetAudioDecoderFactory() const {
  return decoder_factory_;
}
1123
kwiberg55b97fe2016-01-28 05:22:45 -08001124int32_t Channel::StartPlayout() {
1125 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1126 "Channel::StartPlayout()");
1127 if (channel_state_.Get().playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001128 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001129 }
1130
1131 if (!_externalMixing) {
1132 // Add participant as candidates for mixing.
1133 if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
1134 _engineStatisticsPtr->SetLastError(
1135 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1136 "StartPlayout() failed to add participant to mixer");
1137 return -1;
1138 }
1139 }
1140
1141 channel_state_.SetPlaying(true);
1142 if (RegisterFilePlayingToMixer() != 0)
1143 return -1;
1144
1145 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001146}
1147
kwiberg55b97fe2016-01-28 05:22:45 -08001148int32_t Channel::StopPlayout() {
1149 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1150 "Channel::StopPlayout()");
1151 if (!channel_state_.Get().playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001152 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001153 }
1154
1155 if (!_externalMixing) {
1156 // Remove participant as candidates for mixing
1157 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
1158 _engineStatisticsPtr->SetLastError(
1159 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1160 "StopPlayout() failed to remove participant from mixer");
1161 return -1;
1162 }
1163 }
1164
1165 channel_state_.SetPlaying(false);
1166 _outputAudioLevel.Clear();
1167
1168 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001169}
1170
// Starts sending on this channel. Restores the sequence number saved by
// StopSend(), then enables sending in the RTP/RTCP module. On failure the
// sending state is rolled back. Idempotent; returns 0 on success, -1 on
// failure.
int32_t Channel::StartSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartSend()");
  // Resume the previous sequence number which was reset by StopSend().
  // This needs to be done before |sending| is set to true.
  if (send_sequence_number_)
    SetInitSequenceNumber(send_sequence_number_);

  if (channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(true);

  _rtpRtcpModule->SetSendingMediaStatus(true);
  if (_rtpRtcpModule->SetSendingStatus(true) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "StartSend() RTP/RTCP failed to start sending");
    _rtpRtcpModule->SetSendingMediaStatus(false);
    // Roll back the sending flag under the callback lock.
    rtc::CritScope cs(&_callbackCritSect);
    channel_state_.SetSending(false);
    return -1;
  }

  return 0;
}
1197
kwiberg55b97fe2016-01-28 05:22:45 -08001198int32_t Channel::StopSend() {
1199 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1200 "Channel::StopSend()");
1201 if (!channel_state_.Get().sending) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001202 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001203 }
1204 channel_state_.SetSending(false);
1205
1206 // Store the sequence number to be able to pick up the same sequence for
1207 // the next StartSend(). This is needed for restarting device, otherwise
1208 // it might cause libSRTP to complain about packets being replayed.
1209 // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1210 // CL is landed. See issue
1211 // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1212 send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1213
1214 // Reset sending SSRC and sequence number and triggers direct transmission
1215 // of RTCP BYE
1216 if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
1217 _engineStatisticsPtr->SetLastError(
1218 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1219 "StartSend() RTP/RTCP failed to stop sending");
1220 }
Peter Boström3dd5d1d2016-02-25 16:56:48 +01001221 _rtpRtcpModule->SetSendingMediaStatus(false);
kwiberg55b97fe2016-01-28 05:22:45 -08001222
1223 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001224}
1225
// Resets the running count of discarded RTP packets to zero.
void Channel::ResetDiscardedPacketCount() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ResetDiscardedPacketCount()");
  _numberOfDiscardedPackets = 0;
}
1231
kwiberg55b97fe2016-01-28 05:22:45 -08001232int32_t Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
1233 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1234 "Channel::RegisterVoiceEngineObserver()");
1235 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001236
kwiberg55b97fe2016-01-28 05:22:45 -08001237 if (_voiceEngineObserverPtr) {
1238 _engineStatisticsPtr->SetLastError(
1239 VE_INVALID_OPERATION, kTraceError,
1240 "RegisterVoiceEngineObserver() observer already enabled");
1241 return -1;
1242 }
1243 _voiceEngineObserverPtr = &observer;
1244 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001245}
1246
kwiberg55b97fe2016-01-28 05:22:45 -08001247int32_t Channel::DeRegisterVoiceEngineObserver() {
1248 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1249 "Channel::DeRegisterVoiceEngineObserver()");
1250 rtc::CritScope cs(&_callbackCritSect);
1251
1252 if (!_voiceEngineObserverPtr) {
1253 _engineStatisticsPtr->SetLastError(
1254 VE_INVALID_OPERATION, kTraceWarning,
1255 "DeRegisterVoiceEngineObserver() observer already disabled");
1256 return 0;
1257 }
1258 _voiceEngineObserverPtr = NULL;
1259 return 0;
1260}
1261
1262int32_t Channel::GetSendCodec(CodecInst& codec) {
kwibergc8d071e2016-04-06 12:22:38 -07001263 auto send_codec = codec_manager_.GetCodecInst();
kwiberg1fd4a4a2015-11-03 11:20:50 -08001264 if (send_codec) {
1265 codec = *send_codec;
1266 return 0;
1267 }
1268 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001269}
1270
kwiberg55b97fe2016-01-28 05:22:45 -08001271int32_t Channel::GetRecCodec(CodecInst& codec) {
1272 return (audio_coding_->ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001273}
1274
// Configures |codec| as the send codec in both the ACM (via the codec
// manager) and the RTP/RTCP module, and sets the audio packet size.
// Returns 0 on success, -1 on any failure.
int32_t Channel::SetSendCodec(const CodecInst& codec) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendCodec()");

  if (!codec_manager_.RegisterEncoder(codec) ||
      !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "SetSendCodec() failed to register codec to ACM");
    return -1;
  }

  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    // Registration can fail on the first attempt (presumably because the
    // payload type is already registered — TODO confirm); de-register the
    // payload type and retry once before giving up.
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                   "SetSendCodec() failed to register codec to"
                   " RTP/RTCP module");
      return -1;
    }
  }

  if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "SetSendCodec() failed to set audio packet size");
    return -1;
  }

  return 0;
}
1304
// Propagates a new target audio bitrate (bps) to the encoder and the
// retransmission rate limiter, and feeds a smoothed bandwidth estimate to
// the encoder (used by the audio network adaptor). |probing_interval_ms| is
// the BWE probing interval and sets the smoothing time constant.
void Channel::SetBitRate(int bitrate_bps, int64_t probing_interval_ms) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps);
  // Forward the raw target bitrate to the encoder (if one is configured).
  audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
    if (*encoder)
      (*encoder)->OnReceivedTargetAudioBitrate(bitrate_bps);
  });
  retransmission_rate_limiter_->SetMaxRate(bitrate_bps);

  // We give smoothed bitrate allocation to audio network adaptor as
  // the uplink bandwidth.
  // The probing spikes should not affect the bitrate smoother more than 25%.
  // To simplify the calculations we use a step response as input signal.
  // The step response of an exponential filter is
  // u(t) = 1 - e^(-t / time_constant).
  // In order to limit the affect of a BWE spike within 25% of its value before
  // the next probing, we would choose a time constant that fulfills
  // 1 - e^(-probing_interval_ms / time_constant) < 0.25
  // Then 4 * probing_interval_ms is a good choice.
  bitrate_smoother_.SetTimeConstantMs(probing_interval_ms * 4);
  bitrate_smoother_.AddSample(bitrate_bps);
  audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
    if (*encoder) {
      (*encoder)->OnReceivedUplinkBandwidth(
          static_cast<int>(*bitrate_smoother_.GetAverage()));
    }
  });
}
1333
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001334void Channel::OnIncomingFractionLoss(int fraction_lost) {
minyue7e304322016-10-12 05:00:55 -07001335 audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
1336 if (*encoder)
1337 (*encoder)->OnReceivedUplinkPacketLossFraction(fraction_lost / 255.0f);
1338 });
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001339}
1340
kwiberg55b97fe2016-01-28 05:22:45 -08001341int32_t Channel::SetVADStatus(bool enableVAD,
1342 ACMVADMode mode,
1343 bool disableDTX) {
1344 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1345 "Channel::SetVADStatus(mode=%d)", mode);
kwibergc8d071e2016-04-06 12:22:38 -07001346 RTC_DCHECK(!(disableDTX && enableVAD)); // disableDTX mode is deprecated.
1347 if (!codec_manager_.SetVAD(enableVAD, mode) ||
1348 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001349 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1350 kTraceError,
1351 "SetVADStatus() failed to set VAD");
1352 return -1;
1353 }
1354 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001355}
1356
kwiberg55b97fe2016-01-28 05:22:45 -08001357int32_t Channel::GetVADStatus(bool& enabledVAD,
1358 ACMVADMode& mode,
1359 bool& disabledDTX) {
kwibergc8d071e2016-04-06 12:22:38 -07001360 const auto* params = codec_manager_.GetStackParams();
1361 enabledVAD = params->use_cng;
1362 mode = params->vad_mode;
1363 disabledDTX = !params->use_cng;
kwiberg55b97fe2016-01-28 05:22:45 -08001364 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001365}
1366
// Registers (or, when codec.pltype == -1, deregisters) a receive payload
// type in both the RTP receiver and the ACM. Not allowed while the channel
// is playing. Returns 0 on success, -1 on failure (last error recorded in
// the engine statistics).
int32_t Channel::SetRecPayloadType(const CodecInst& codec) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRecPayloadType()");

  if (channel_state_.Get().playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "SetRecPayloadType() unable to set PT while playing");
    return -1;
  }

  if (codec.pltype == -1) {
    // De-register the selected codec (RTP/RTCP module and ACM)

    int8_t pltype(-1);
    CodecInst rxCodec = codec;

    // Get payload type for the given codec
    // NOTE(review): the return value of ReceivePayloadType is ignored here;
    // on failure |pltype| presumably stays -1 and the deregistration below
    // fails — confirm this is the intended error path.
    rtp_payload_registry_->ReceivePayloadType(rxCodec, &pltype);
    rxCodec.pltype = pltype;

    if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module deregistration "
          "failed");
      return -1;
    }
    if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM deregistration failed - 1");
      return -1;
    }
    return 0;
  }

  // Registration path: try once, and on failure deregister the payload type
  // and retry once before giving up (same pattern for both modules below).
  if (rtp_receiver_->RegisterReceivePayload(codec) != 0) {
    // First attempt to register failed => de-register and try again
    // TODO(kwiberg): Retrying is probably not necessary, since
    // AcmReceiver::AddCodec also retries.
    rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
    if (rtp_receiver_->RegisterReceivePayload(codec) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module registration failed");
      return -1;
    }
  }
  if (!audio_coding_->RegisterReceiveCodec(codec.pltype,
                                           CodecInstToSdp(codec))) {
    audio_coding_->UnregisterReceiveCodec(codec.pltype);
    if (!audio_coding_->RegisterReceiveCodec(codec.pltype,
                                             CodecInstToSdp(codec))) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM registration failed - 1");
      return -1;
    }
  }
  return 0;
}
1429
kwiberg55b97fe2016-01-28 05:22:45 -08001430int32_t Channel::GetRecPayloadType(CodecInst& codec) {
1431 int8_t payloadType(-1);
magjed56124bd2016-11-24 09:34:46 -08001432 if (rtp_payload_registry_->ReceivePayloadType(codec, &payloadType) != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001433 _engineStatisticsPtr->SetLastError(
1434 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1435 "GetRecPayloadType() failed to retrieve RX payload type");
1436 return -1;
1437 }
1438 codec.pltype = payloadType;
1439 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001440}
1441
// Registers a comfort-noise (CN) send codec with a caller-chosen (dynamic)
// payload type, for the given sampling frequency. Only 16 kHz and 32 kHz
// are mapped here; any other |frequency| leaves samplingFreqHz at -1.
// NOTE(review): what audio_coding_->Codec() does with samplingFreqHz == -1
// is not visible here — presumably it fails and we take the error path;
// confirm against the ACM implementation.
// Returns 0 on success, -1 on failure.
int32_t Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendCNPayloadType()");

  CodecInst codec;
  int32_t samplingFreqHz(-1);
  const size_t kMono = 1;  // CN is registered as a mono codec.
  if (frequency == kFreq32000Hz)
    samplingFreqHz = 32000;
  else if (frequency == kFreq16000Hz)
    samplingFreqHz = 16000;

  // Fetch the default CN codec settings for the chosen rate.
  if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSendCNPayloadType() failed to retrieve default CN codec "
        "settings");
    return -1;
  }

  // Modify the payload type (must be set to dynamic range)
  codec.pltype = type;

  // Register with the codec manager / ACM first...
  if (!codec_manager_.RegisterEncoder(codec) ||
      !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSendCNPayloadType() failed to register CN to ACM");
    return -1;
  }

  // ...then with the RTP/RTCP module; on failure, deregister and retry once.
  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
          "module");
      return -1;
    }
  }
  return 0;
}
1485
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001486int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001487 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001488 "Channel::SetOpusMaxPlaybackRate()");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001489
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001490 if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001491 _engineStatisticsPtr->SetLastError(
1492 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001493 "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001494 return -1;
1495 }
1496 return 0;
1497}
1498
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001499int Channel::SetOpusDtx(bool enable_dtx) {
1500 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1501 "Channel::SetOpusDtx(%d)", enable_dtx);
Minyue Li092041c2015-05-11 12:19:35 +02001502 int ret = enable_dtx ? audio_coding_->EnableOpusDtx()
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001503 : audio_coding_->DisableOpusDtx();
1504 if (ret != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001505 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1506 kTraceError, "SetOpusDtx() failed");
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001507 return -1;
1508 }
1509 return 0;
1510}
1511
ivoc85228d62016-07-27 04:53:47 -07001512int Channel::GetOpusDtx(bool* enabled) {
1513 int success = -1;
1514 audio_coding_->QueryEncoder([&](AudioEncoder const* encoder) {
1515 if (encoder) {
1516 *enabled = encoder->GetDtx();
1517 success = 0;
1518 }
1519 });
1520 return success;
1521}
1522
minyue7e304322016-10-12 05:00:55 -07001523bool Channel::EnableAudioNetworkAdaptor(const std::string& config_string) {
1524 bool success = false;
1525 audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
1526 if (*encoder) {
1527 success = (*encoder)->EnableAudioNetworkAdaptor(
1528 config_string, Clock::GetRealTimeClock());
1529 }
1530 });
1531 return success;
1532}
1533
1534void Channel::DisableAudioNetworkAdaptor() {
1535 audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
1536 if (*encoder)
1537 (*encoder)->DisableAudioNetworkAdaptor();
1538 });
1539}
1540
1541void Channel::SetReceiverFrameLengthRange(int min_frame_length_ms,
1542 int max_frame_length_ms) {
1543 audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
1544 if (*encoder) {
1545 (*encoder)->SetReceiverFrameLengthRange(min_frame_length_ms,
1546 max_frame_length_ms);
1547 }
1548 });
1549}
1550
mflodman3d7db262016-04-29 00:57:13 -07001551int32_t Channel::RegisterExternalTransport(Transport* transport) {
kwiberg55b97fe2016-01-28 05:22:45 -08001552 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00001553 "Channel::RegisterExternalTransport()");
1554
kwiberg55b97fe2016-01-28 05:22:45 -08001555 rtc::CritScope cs(&_callbackCritSect);
kwiberg55b97fe2016-01-28 05:22:45 -08001556 if (_externalTransport) {
1557 _engineStatisticsPtr->SetLastError(
1558 VE_INVALID_OPERATION, kTraceError,
1559 "RegisterExternalTransport() external transport already enabled");
1560 return -1;
1561 }
1562 _externalTransport = true;
mflodman3d7db262016-04-29 00:57:13 -07001563 _transportPtr = transport;
kwiberg55b97fe2016-01-28 05:22:45 -08001564 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001565}
1566
kwiberg55b97fe2016-01-28 05:22:45 -08001567int32_t Channel::DeRegisterExternalTransport() {
1568 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1569 "Channel::DeRegisterExternalTransport()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001570
kwiberg55b97fe2016-01-28 05:22:45 -08001571 rtc::CritScope cs(&_callbackCritSect);
mflodman3d7db262016-04-29 00:57:13 -07001572 if (_transportPtr) {
1573 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1574 "DeRegisterExternalTransport() all transport is disabled");
1575 } else {
kwiberg55b97fe2016-01-28 05:22:45 -08001576 _engineStatisticsPtr->SetLastError(
1577 VE_INVALID_OPERATION, kTraceWarning,
1578 "DeRegisterExternalTransport() external transport already "
1579 "disabled");
kwiberg55b97fe2016-01-28 05:22:45 -08001580 }
1581 _externalTransport = false;
1582 _transportPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08001583 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001584}
1585
// Entry point for an incoming RTP packet. Parses the header, updates
// receive statistics (classifying retransmits), records the incoming
// payload type, and hands the packet on to ReceivePacket().
// Returns 0 on success, -1 on parse/dispatch failure.
// |packet_time| is accepted but unused in this implementation.
int32_t Channel::ReceivedRTPPacket(const uint8_t* received_packet,
                                   size_t length,
                                   const PacketTime& packet_time) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ReceivedRTPPacket()");

  // Store playout timestamp for the received RTP packet
  UpdatePlayoutTimestamp(false);

  RTPHeader header;
  if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Incoming packet: invalid RTP header");
    return -1;
  }
  // A negative frequency means the payload type is unknown; drop the packet.
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return -1;
  // Order matters: in-order status must be computed before the statistics
  // are updated with this packet, and retransmit classification uses it.
  bool in_order = IsPacketInOrder(header);
  rtp_receive_statistics_->IncomingPacket(
      header, length, IsPacketRetransmitted(header, in_order));
  rtp_payload_registry_->SetIncomingPayloadType(header);

  return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
}
1612
// Routes a parsed RTP packet: RTX packets go to HandleRtxPacket(); media
// packets have their payload sliced off after the header and are delivered
// to the RTP receiver. Returns true when the packet was accepted.
bool Channel::ReceivePacket(const uint8_t* packet,
                            size_t packet_length,
                            const RTPHeader& header,
                            bool in_order) {
  if (rtp_payload_registry_->IsRtx(header)) {
    return HandleRtxPacket(packet, packet_length, header);
  }
  // Payload starts right after the RTP header; the parser has already
  // validated the header, so headerLength <= packet_length must hold.
  const uint8_t* payload = packet + header.headerLength;
  assert(packet_length >= header.headerLength);
  size_t payload_length = packet_length - header.headerLength;
  PayloadUnion payload_specific;
  if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
                                                  &payload_specific)) {
    return false;  // Unknown payload type.
  }
  return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
                                          payload_specific, in_order);
}
1631
// Unwraps an RTX (RFC 4588) retransmission packet: restores the original
// RTP packet into |restored_packet_| and re-injects it via
// OnRecoveredPacket(). Returns true if the restored packet was accepted.
// |restored_packet_in_use_| guards against re-entrant RTX unwrapping
// (an RTX packet whose restored payload again parses as RTX).
bool Channel::HandleRtxPacket(const uint8_t* packet,
                              size_t packet_length,
                              const RTPHeader& header) {
  if (!rtp_payload_registry_->IsRtx(header))
    return false;

  // Remove the RTX header and parse the original RTP header.
  if (packet_length < header.headerLength)
    return false;
  if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
    return false;
  if (restored_packet_in_use_) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Multiple RTX headers detected, dropping packet");
    return false;
  }
  // RestoreOriginalPacket rewrites |packet_length| to the restored size.
  if (!rtp_payload_registry_->RestoreOriginalPacket(
          restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
          header)) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Incoming RTX packet: invalid RTP header");
    return false;
  }
  // Flag the shared buffer as busy for the duration of the recursive
  // delivery, then release it.
  restored_packet_in_use_ = true;
  bool ret = OnRecoveredPacket(restored_packet_, packet_length);
  restored_packet_in_use_ = false;
  return ret;
}
1660
1661bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1662 StreamStatistician* statistician =
1663 rtp_receive_statistics_->GetStatistician(header.ssrc);
1664 if (!statistician)
1665 return false;
1666 return statistician->IsPacketInOrder(header.sequenceNumber);
niklase@google.com470e71d2011-07-07 08:21:25 +00001667}
1668
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001669bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1670 bool in_order) const {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001671 // Retransmissions are handled separately if RTX is enabled.
1672 if (rtp_payload_registry_->RtxEnabled())
1673 return false;
1674 StreamStatistician* statistician =
1675 rtp_receive_statistics_->GetStatistician(header.ssrc);
1676 if (!statistician)
1677 return false;
1678 // Check if this is a retransmission.
pkasting@chromium.org16825b12015-01-12 21:51:21 +00001679 int64_t min_rtt = 0;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001680 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
kwiberg55b97fe2016-01-28 05:22:45 -08001681 return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001682}
1683
// Entry point for an incoming RTCP packet. Feeds it to the RTP/RTCP module,
// then — once a valid RTT is available — updates the NACK retransmission
// window, notifies the encoder of the RTT, and (when remote NTP timing is
// available) updates the NTP timestamp estimator. Returns 0 in all cases
// except none: even an invalid RTCP packet only records a warning.
int32_t Channel::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ReceivedRTCPPacket()");
  // Store playout timestamp for the received RTCP packet
  UpdatePlayoutTimestamp(true);

  // Deliver RTCP packet to RTP/RTCP module for parsing
  if (_rtpRtcpModule->IncomingRtcpPacket(data, length) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
        "Channel::IncomingRTPPacket() RTCP packet is invalid");
  }

  int64_t rtt = GetRTT(true);
  if (rtt == 0) {
    // Waiting for valid RTT.
    return 0;
  }

  // Clamp the NACK window to [kMinRetransmissionWindowMs,
  // kMaxRetransmissionWindowMs] around the measured RTT.
  int64_t nack_window_ms = rtt;
  if (nack_window_ms < kMinRetransmissionWindowMs) {
    nack_window_ms = kMinRetransmissionWindowMs;
  } else if (nack_window_ms > kMaxRetransmissionWindowMs) {
    nack_window_ms = kMaxRetransmissionWindowMs;
  }
  retransmission_rate_limiter_->SetWindowSize(nack_window_ms);

  // Invoke audio encoders OnReceivedRtt().
  audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
    if (*encoder)
      (*encoder)->OnReceivedRtt(rtt);
  });

  uint32_t ntp_secs = 0;
  uint32_t ntp_frac = 0;
  uint32_t rtp_timestamp = 0;
  if (0 !=
      _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
                                &rtp_timestamp)) {
    // Waiting for RTCP.
    return 0;
  }

  {
    // The estimator is shared with other threads; update it under lock.
    rtc::CritScope lock(&ts_stats_lock_);
    ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
  }
  return 0;
}
1733
// Starts playing a file (by name) locally on this channel's output.
// Creates a fresh FilePlayer under _fileCritSect, starts playback, and then
// registers the channel with the output mixer *outside* the lock (see
// RegisterFilePlayingToMixer() for why holding _fileCritSect there would
// deadlock). Returns 0 on success, -1 on failure.
int Channel::StartPlayingFileLocally(const char* fileName,
                                     bool loop,
                                     FileFormats format,
                                     int startPosition,
                                     float volumeScaling,
                                     int stopPosition,
                                     const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
               " format=%d, volumeScaling=%5.3f, startPosition=%d, "
               "stopPosition=%d)",
               fileName, loop, format, volumeScaling, startPosition,
               stopPosition);

  if (channel_state_.Get().output_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "StartPlayingFileLocally() is already playing");
    return -1;
  }

  {
    rtc::CritScope cs(&_fileCritSect);

    // Tear down any previous player before creating a new one.
    if (output_file_player_) {
      output_file_player_->RegisterModuleFileCallback(NULL);
      output_file_player_.reset();
    }

    output_file_player_ = FilePlayer::CreateFilePlayer(
        _outputFilePlayerId, (const FileFormats)format);

    if (!output_file_player_) {
      _engineStatisticsPtr->SetLastError(
          VE_INVALID_ARGUMENT, kTraceError,
          "StartPlayingFileLocally() filePlayer format is not correct");
      return -1;
    }

    const uint32_t notificationTime(0);  // No periodic notifications.

    if (output_file_player_->StartPlayingFile(
            fileName, loop, startPosition, volumeScaling, notificationTime,
            stopPosition, (const CodecInst*)codecInst) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_BAD_FILE, kTraceError,
          "StartPlayingFile() failed to start file playout");
      output_file_player_->StopPlayingFile();
      output_file_player_.reset();
      return -1;
    }
    output_file_player_->RegisterModuleFileCallback(this);
    channel_state_.SetOutputFilePlaying(true);
  }

  // Must be called without _fileCritSect held (deadlock otherwise).
  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}
1794
1795int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001796 FileFormats format,
1797 int startPosition,
1798 float volumeScaling,
1799 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001800 const CodecInst* codecInst) {
1801 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1802 "Channel::StartPlayingFileLocally(format=%d,"
1803 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1804 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001805
kwiberg55b97fe2016-01-28 05:22:45 -08001806 if (stream == NULL) {
1807 _engineStatisticsPtr->SetLastError(
1808 VE_BAD_FILE, kTraceError,
1809 "StartPlayingFileLocally() NULL as input stream");
1810 return -1;
1811 }
1812
1813 if (channel_state_.Get().output_file_playing) {
1814 _engineStatisticsPtr->SetLastError(
1815 VE_ALREADY_PLAYING, kTraceError,
1816 "StartPlayingFileLocally() is already playing");
1817 return -1;
1818 }
1819
1820 {
1821 rtc::CritScope cs(&_fileCritSect);
1822
1823 // Destroy the old instance
kwiberg5a25d952016-08-17 07:31:12 -07001824 if (output_file_player_) {
1825 output_file_player_->RegisterModuleFileCallback(NULL);
1826 output_file_player_.reset();
niklase@google.com470e71d2011-07-07 08:21:25 +00001827 }
1828
kwiberg55b97fe2016-01-28 05:22:45 -08001829 // Create the instance
kwiberg5b356f42016-09-08 04:32:33 -07001830 output_file_player_ = FilePlayer::CreateFilePlayer(
kwiberg55b97fe2016-01-28 05:22:45 -08001831 _outputFilePlayerId, (const FileFormats)format);
niklase@google.com470e71d2011-07-07 08:21:25 +00001832
kwiberg5a25d952016-08-17 07:31:12 -07001833 if (!output_file_player_) {
kwiberg55b97fe2016-01-28 05:22:45 -08001834 _engineStatisticsPtr->SetLastError(
1835 VE_INVALID_ARGUMENT, kTraceError,
1836 "StartPlayingFileLocally() filePlayer format isnot correct");
1837 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001838 }
1839
kwiberg55b97fe2016-01-28 05:22:45 -08001840 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001841
kwiberg4ec01d92016-08-22 08:43:54 -07001842 if (output_file_player_->StartPlayingFile(stream, startPosition,
kwiberg5a25d952016-08-17 07:31:12 -07001843 volumeScaling, notificationTime,
1844 stopPosition, codecInst) != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001845 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1846 "StartPlayingFile() failed to "
1847 "start file playout");
kwiberg5a25d952016-08-17 07:31:12 -07001848 output_file_player_->StopPlayingFile();
1849 output_file_player_.reset();
kwiberg55b97fe2016-01-28 05:22:45 -08001850 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001851 }
kwiberg5a25d952016-08-17 07:31:12 -07001852 output_file_player_->RegisterModuleFileCallback(this);
kwiberg55b97fe2016-01-28 05:22:45 -08001853 channel_state_.SetOutputFilePlaying(true);
1854 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001855
kwiberg55b97fe2016-01-28 05:22:45 -08001856 if (RegisterFilePlayingToMixer() != 0)
1857 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001858
kwiberg55b97fe2016-01-28 05:22:45 -08001859 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001860}
1861
// Stops local file playout on this channel and removes it from the output
// mixer. Returns 0 if nothing was playing or on success, -1 on failure.
// Note: output_file_playing == true is assumed to imply a live
// output_file_player_ (it is dereferenced without a null check below).
int Channel::StopPlayingFileLocally() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayingFileLocally()");

  if (!channel_state_.Get().output_file_playing) {
    return 0;
  }

  {
    rtc::CritScope cs(&_fileCritSect);

    if (output_file_player_->StopPlayingFile() != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_STOP_RECORDING_FAILED, kTraceError,
          "StopPlayingFile() could not stop playing");
      return -1;
    }
    output_file_player_->RegisterModuleFileCallback(NULL);
    output_file_player_.reset();
    channel_state_.SetOutputFilePlaying(false);
  }
  // _fileCritSect cannot be taken while calling
  // SetAnonymousMixibilityStatus. Refer to comments in
  // StartPlayingFileLocally(const char* ...) for more details.
  if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
        "StopPlayingFile() failed to stop participant from playing as"
        "file in the mixer");
    return -1;
  }

  return 0;
}
1896
kwiberg55b97fe2016-01-28 05:22:45 -08001897int Channel::IsPlayingFileLocally() const {
1898 return channel_state_.Get().output_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001899}
1900
// Registers this channel with the output mixer as an anonymous participant
// so that file playout is mixed in. Called after file playout starts and
// after playout starts; it is a successful no-op unless both are active.
// Returns 0 on success/no-op, -1 on mixer failure (in which case the file
// player is torn down again).
int Channel::RegisterFilePlayingToMixer() {
  // Return success for not registering for file playing to mixer if:
  // 1. playing file before playout is started on that channel.
  // 2. starting playout without file playing on that channel.
  if (!channel_state_.Get().playing ||
      !channel_state_.Get().output_file_playing) {
    return 0;
  }

  // |_fileCritSect| cannot be taken while calling
  // SetAnonymousMixabilityStatus() since as soon as the participant is added
  // frames can be pulled by the mixer. Since the frames are generated from
  // the file, _fileCritSect will be taken. This would result in a deadlock.
  if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) {
    // Roll back: clear the playing flag, then tear down the player under
    // the file lock (safe now — the mixer registration failed).
    channel_state_.SetOutputFilePlaying(false);
    rtc::CritScope cs(&_fileCritSect);
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
        "StartPlayingFile() failed to add participant as file to mixer");
    output_file_player_->StopPlayingFile();
    output_file_player_.reset();
    return -1;
  }

  return 0;
}
1927
niklase@google.com470e71d2011-07-07 08:21:25 +00001928int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001929 bool loop,
1930 FileFormats format,
1931 int startPosition,
1932 float volumeScaling,
1933 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001934 const CodecInst* codecInst) {
1935 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1936 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
1937 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
1938 "stopPosition=%d)",
1939 fileName, loop, format, volumeScaling, startPosition,
1940 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001941
kwiberg55b97fe2016-01-28 05:22:45 -08001942 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001943
kwiberg55b97fe2016-01-28 05:22:45 -08001944 if (channel_state_.Get().input_file_playing) {
1945 _engineStatisticsPtr->SetLastError(
1946 VE_ALREADY_PLAYING, kTraceWarning,
1947 "StartPlayingFileAsMicrophone() filePlayer is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00001948 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001949 }
1950
1951 // Destroy the old instance
kwiberg5a25d952016-08-17 07:31:12 -07001952 if (input_file_player_) {
1953 input_file_player_->RegisterModuleFileCallback(NULL);
1954 input_file_player_.reset();
kwiberg55b97fe2016-01-28 05:22:45 -08001955 }
1956
1957 // Create the instance
kwiberg5b356f42016-09-08 04:32:33 -07001958 input_file_player_ = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
kwiberg5a25d952016-08-17 07:31:12 -07001959 (const FileFormats)format);
kwiberg55b97fe2016-01-28 05:22:45 -08001960
kwiberg5a25d952016-08-17 07:31:12 -07001961 if (!input_file_player_) {
kwiberg55b97fe2016-01-28 05:22:45 -08001962 _engineStatisticsPtr->SetLastError(
1963 VE_INVALID_ARGUMENT, kTraceError,
1964 "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
1965 return -1;
1966 }
1967
1968 const uint32_t notificationTime(0);
1969
kwiberg5a25d952016-08-17 07:31:12 -07001970 if (input_file_player_->StartPlayingFile(
kwiberg55b97fe2016-01-28 05:22:45 -08001971 fileName, loop, startPosition, volumeScaling, notificationTime,
1972 stopPosition, (const CodecInst*)codecInst) != 0) {
1973 _engineStatisticsPtr->SetLastError(
1974 VE_BAD_FILE, kTraceError,
1975 "StartPlayingFile() failed to start file playout");
kwiberg5a25d952016-08-17 07:31:12 -07001976 input_file_player_->StopPlayingFile();
1977 input_file_player_.reset();
kwiberg55b97fe2016-01-28 05:22:45 -08001978 return -1;
1979 }
kwiberg5a25d952016-08-17 07:31:12 -07001980 input_file_player_->RegisterModuleFileCallback(this);
kwiberg55b97fe2016-01-28 05:22:45 -08001981 channel_state_.SetInputFilePlaying(true);
1982
1983 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001984}
1985
// Starts playing audio from an already-opened input stream as the channel's
// microphone signal. Returns 0 on success or if a file is already playing
// (after recording a VE_ALREADY_PLAYING warning), -1 on failure.
//
// stream        - non-NULL input stream to read audio from.
// format        - file format to interpret the stream as.
// startPosition - position to start playback from (passed to the player).
// volumeScaling - scale factor applied by the file player.
// stopPosition  - position at which playback stops.
// codecInst     - codec description forwarded to the player; semantics are
//                 defined by FilePlayer::StartPlayingFile.
int Channel::StartPlayingFileAsMicrophone(InStream* stream,
                                          FileFormats format,
                                          int startPosition,
                                          float volumeScaling,
                                          int stopPosition,
                                          const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileAsMicrophone(format=%d, "
               "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
               format, volumeScaling, startPosition, stopPosition);

  // Reject a NULL stream before taking the file lock.
  if (stream == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFileAsMicrophone NULL as input stream");
    return -1;
  }

  // All file-player state below is guarded by _fileCritSect.
  rtc::CritScope cs(&_fileCritSect);

  // Already playing: not an error, but callers are warned via SetLastError.
  if (channel_state_.Get().input_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceWarning,
        "StartPlayingFileAsMicrophone() is playing");
    return 0;
  }

  // Destroy the old instance
  if (input_file_player_) {
    input_file_player_->RegisterModuleFileCallback(NULL);
    input_file_player_.reset();
  }

  // Create the instance
  input_file_player_ = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
                                                    (const FileFormats)format);

  if (!input_file_player_) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartPlayingInputFile() filePlayer format isnot correct");
    return -1;
  }

  // Notification callbacks from the player are disabled (interval 0).
  const uint32_t notificationTime(0);

  if (input_file_player_->StartPlayingFile(stream, startPosition, volumeScaling,
                                           notificationTime, stopPosition,
                                           codecInst) != 0) {
    _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                       "StartPlayingFile() failed to start "
                                       "file playout");
    // Roll back: stop and drop the half-initialized player.
    input_file_player_->StopPlayingFile();
    input_file_player_.reset();
    return -1;
  }

  // Only register the callback and flip the state flag once playback has
  // actually started, so failure leaves the channel unchanged.
  input_file_player_->RegisterModuleFileCallback(this);
  channel_state_.SetInputFilePlaying(true);

  return 0;
}
2048
// Stops file-as-microphone playback started by StartPlayingFileAsMicrophone.
// Returns 0 if nothing was playing or the player stopped cleanly, -1 if the
// player refused to stop (VE_STOP_RECORDING_FAILED is recorded — note the
// error code names "recording" even though this is playback).
int Channel::StopPlayingFileAsMicrophone() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayingFileAsMicrophone()");

  rtc::CritScope cs(&_fileCritSect);

  // Fast path: no file is playing, nothing to do.
  if (!channel_state_.Get().input_file_playing) {
    return 0;
  }

  // input_file_player_ is expected to be set whenever input_file_playing is
  // true (both are updated together under _fileCritSect).
  if (input_file_player_->StopPlayingFile() != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_STOP_RECORDING_FAILED, kTraceError,
        "StopPlayingFile() could not stop playing");
    return -1;
  }
  // Detach the callback before destroying the player, then clear the state
  // flag so the mixer stops pulling file audio.
  input_file_player_->RegisterModuleFileCallback(NULL);
  input_file_player_.reset();
  channel_state_.SetInputFilePlaying(false);

  return 0;
}
2071
2072int Channel::IsPlayingFileAsMicrophone() const {
2073 return channel_state_.Get().input_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00002074}
2075
// Starts recording the channel's playout audio to a file.
// Returns 0 on success or if already recording (warning traced), -1 on error.
//
// fileName  - destination path, forwarded to the file recorder.
// codecInst - optional codec selection; NULL means record as 16 kHz L16 PCM
//             using the dummy codec below. L16/PCMU/PCMA map to WAV output,
//             anything else to the "compressed file" format.
int Channel::StartRecordingPlayout(const char* fileName,
                                   const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartRecordingPlayout(fileName=%s)", fileName);

  // Idempotent: a second start while recording is a no-op with a warning.
  if (_outputFileRecording) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
                 "StartRecordingPlayout() is already recording");
    return 0;
  }

  FileFormats format;
  const uint32_t notificationTime(0);  // Not supported in VoE
  // Fallback codec used when the caller passes codecInst == NULL:
  // payload type 100, "L16", 16 kHz, 320-sample frames, mono, 320 kbps.
  CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};

  // Only mono and stereo are accepted here (1 or 2 channels).
  if ((codecInst != NULL) &&
      ((codecInst->channels < 1) || (codecInst->channels > 2))) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_ARGUMENT, kTraceError,
        "StartRecordingPlayout() invalid compression");
    return (-1);
  }
  if (codecInst == NULL) {
    format = kFileFormatPcm16kHzFile;
    codecInst = &dummyCodec;
  } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
    // Uncompressed / G.711 payloads are written as WAV.
    format = kFileFormatWavFile;
  } else {
    format = kFileFormatCompressedFile;
  }

  // Recorder state below is guarded by _fileCritSect.
  rtc::CritScope cs(&_fileCritSect);

  // Destroy the old instance
  if (output_file_recorder_) {
    output_file_recorder_->RegisterModuleFileCallback(NULL);
    output_file_recorder_.reset();
  }

  output_file_recorder_ = FileRecorder::CreateFileRecorder(
      _outputFileRecorderId, (const FileFormats)format);
  if (!output_file_recorder_) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartRecordingPlayout() fileRecorder format isnot correct");
    return -1;
  }

  if (output_file_recorder_->StartRecordingAudioFile(
          fileName, (const CodecInst&)*codecInst, notificationTime) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartRecordingAudioFile() failed to start file recording");
    // Roll back the half-initialized recorder.
    output_file_recorder_->StopRecording();
    output_file_recorder_.reset();
    return -1;
  }
  // Flag is flipped only after recording has actually started.
  output_file_recorder_->RegisterModuleFileCallback(this);
  _outputFileRecording = true;

  return 0;
}
2140
// Starts recording the channel's playout audio to a caller-supplied output
// stream. Mirrors the file-path overload above, except only mono
// (channels == 1) is accepted here.
// Returns 0 on success or if already recording (warning traced), -1 on error.
int Channel::StartRecordingPlayout(OutStream* stream,
                                   const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartRecordingPlayout()");

  // Idempotent: a second start while recording is a no-op with a warning.
  if (_outputFileRecording) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
                 "StartRecordingPlayout() is already recording");
    return 0;
  }

  FileFormats format;
  const uint32_t notificationTime(0);  // Not supported in VoE
  // Fallback codec used when codecInst == NULL (16 kHz mono L16 PCM).
  CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};

  // Unlike the fileName overload, the stream overload only accepts mono.
  if (codecInst != NULL && codecInst->channels != 1) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_ARGUMENT, kTraceError,
        "StartRecordingPlayout() invalid compression");
    return (-1);
  }
  if (codecInst == NULL) {
    format = kFileFormatPcm16kHzFile;
    codecInst = &dummyCodec;
  } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
    // Uncompressed / G.711 payloads are written as WAV.
    format = kFileFormatWavFile;
  } else {
    format = kFileFormatCompressedFile;
  }

  // Recorder state below is guarded by _fileCritSect.
  rtc::CritScope cs(&_fileCritSect);

  // Destroy the old instance
  if (output_file_recorder_) {
    output_file_recorder_->RegisterModuleFileCallback(NULL);
    output_file_recorder_.reset();
  }

  output_file_recorder_ = FileRecorder::CreateFileRecorder(
      _outputFileRecorderId, (const FileFormats)format);
  if (!output_file_recorder_) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartRecordingPlayout() fileRecorder format isnot correct");
    return -1;
  }

  if (output_file_recorder_->StartRecordingAudioFile(stream, *codecInst,
                                                     notificationTime) != 0) {
    _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                       "StartRecordingPlayout() failed to "
                                       "start file recording");
    // Roll back the half-initialized recorder.
    output_file_recorder_->StopRecording();
    output_file_recorder_.reset();
    return -1;
  }

  // Flag is flipped only after recording has actually started.
  output_file_recorder_->RegisterModuleFileCallback(this);
  _outputFileRecording = true;

  return 0;
}
2205
// Stops a playout recording started by either StartRecordingPlayout overload.
// Returns 0 on success, -1 if no recording is active or the recorder fails
// to stop.
int Channel::StopRecordingPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
               "Channel::StopRecordingPlayout()");

  // NOTE(review): _outputFileRecording is read here before _fileCritSect is
  // taken below, unlike the Start paths which also read it unlocked — this
  // mirrors the existing locking convention of this file.
  if (!_outputFileRecording) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
                 "StopRecordingPlayout() isnot recording");
    return -1;
  }

  rtc::CritScope cs(&_fileCritSect);

  // output_file_recorder_ is expected to be set whenever
  // _outputFileRecording is true (both are updated together).
  if (output_file_recorder_->StopRecording() != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_STOP_RECORDING_FAILED, kTraceError,
        "StopRecording() could not stop recording");
    return (-1);
  }
  // Detach the callback before destroying the recorder, then clear the flag.
  output_file_recorder_->RegisterModuleFileCallback(NULL);
  output_file_recorder_.reset();
  _outputFileRecording = false;

  return 0;
}
2230
kwiberg55b97fe2016-01-28 05:22:45 -08002231void Channel::SetMixWithMicStatus(bool mix) {
2232 rtc::CritScope cs(&_fileCritSect);
2233 _mixFileWithMicrophone = mix;
niklase@google.com470e71d2011-07-07 08:21:25 +00002234}
2235
kwiberg55b97fe2016-01-28 05:22:45 -08002236int Channel::GetSpeechOutputLevel(uint32_t& level) const {
2237 int8_t currentLevel = _outputAudioLevel.Level();
2238 level = static_cast<int32_t>(currentLevel);
2239 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002240}
2241
kwiberg55b97fe2016-01-28 05:22:45 -08002242int Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const {
2243 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2244 level = static_cast<int32_t>(currentLevel);
2245 return 0;
2246}
2247
solenberg1c2af8e2016-03-24 10:36:00 -07002248int Channel::SetInputMute(bool enable) {
kwiberg55b97fe2016-01-28 05:22:45 -08002249 rtc::CritScope cs(&volume_settings_critsect_);
2250 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002251 "Channel::SetMute(enable=%d)", enable);
solenberg1c2af8e2016-03-24 10:36:00 -07002252 input_mute_ = enable;
kwiberg55b97fe2016-01-28 05:22:45 -08002253 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002254}
2255
solenberg1c2af8e2016-03-24 10:36:00 -07002256bool Channel::InputMute() const {
kwiberg55b97fe2016-01-28 05:22:45 -08002257 rtc::CritScope cs(&volume_settings_critsect_);
solenberg1c2af8e2016-03-24 10:36:00 -07002258 return input_mute_;
niklase@google.com470e71d2011-07-07 08:21:25 +00002259}
2260
kwiberg55b97fe2016-01-28 05:22:45 -08002261int Channel::SetOutputVolumePan(float left, float right) {
2262 rtc::CritScope cs(&volume_settings_critsect_);
2263 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002264 "Channel::SetOutputVolumePan()");
kwiberg55b97fe2016-01-28 05:22:45 -08002265 _panLeft = left;
2266 _panRight = right;
2267 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002268}
2269
kwiberg55b97fe2016-01-28 05:22:45 -08002270int Channel::GetOutputVolumePan(float& left, float& right) const {
2271 rtc::CritScope cs(&volume_settings_critsect_);
2272 left = _panLeft;
2273 right = _panRight;
2274 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002275}
2276
kwiberg55b97fe2016-01-28 05:22:45 -08002277int Channel::SetChannelOutputVolumeScaling(float scaling) {
2278 rtc::CritScope cs(&volume_settings_critsect_);
2279 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002280 "Channel::SetChannelOutputVolumeScaling()");
kwiberg55b97fe2016-01-28 05:22:45 -08002281 _outputGain = scaling;
2282 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002283}
2284
kwiberg55b97fe2016-01-28 05:22:45 -08002285int Channel::GetChannelOutputVolumeScaling(float& scaling) const {
2286 rtc::CritScope cs(&volume_settings_critsect_);
2287 scaling = _outputGain;
2288 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002289}
2290
// Sends an out-of-band DTMF/telephone event via the RTP/RTCP module.
// Returns 0 on success, -1 if the channel is not sending or the module
// rejects the event.
//
// event       - event code; DCHECKed to [0, 255].
// duration_ms - event duration in ms; DCHECKed to [0, 65535].
int Channel::SendTelephoneEventOutband(int event, int duration_ms) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendTelephoneEventOutband(...)");
  // Range contracts are debug-only checks; release builds pass the values
  // through unchanged.
  RTC_DCHECK_LE(0, event);
  RTC_DCHECK_GE(255, event);
  RTC_DCHECK_LE(0, duration_ms);
  RTC_DCHECK_GE(65535, duration_ms);
  // Events can only be sent while the channel is actively sending; note
  // that this failure path does not set a last-error code.
  if (!Sending()) {
    return -1;
  }
  if (_rtpRtcpModule->SendTelephoneEventOutband(
          event, duration_ms, kTelephoneEventAttenuationdB) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_SEND_DTMF_FAILED, kTraceWarning,
        "SendTelephoneEventOutband() failed to send event");
    return -1;
  }
  return 0;
}
2310
// Registers the "telephone-event" send payload with the RTP/RTCP module.
// Returns 0 on success, -1 if registration fails even after deregistering a
// conflicting payload type.
//
// payload_type      - RTP payload type; DCHECKed to [0, 127].
// payload_frequency - clock rate recorded in the codec entry.
int Channel::SetSendTelephoneEventPayloadType(int payload_type,
                                              int payload_frequency) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendTelephoneEventPayloadType()");
  RTC_DCHECK_LE(0, payload_type);
  RTC_DCHECK_GE(127, payload_type);
  // Zero-initialize, then fill only the fields that matter for this codec.
  CodecInst codec = {0};
  codec.pltype = payload_type;
  codec.plfreq = payload_frequency;
  // 16 bytes = strlen("telephone-event") + trailing '\0'.
  memcpy(codec.plname, "telephone-event", 16);
  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    // First attempt can fail if the payload type is already registered with
    // different parameters; deregister it and retry once.
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetSendTelephoneEventPayloadType() failed to register send"
          "payload type");
      return -1;
    }
  }
  return 0;
}
2333
kwiberg55b97fe2016-01-28 05:22:45 -08002334int Channel::VoiceActivityIndicator(int& activity) {
2335 activity = _sendFrameType;
2336 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002337}
2338
kwiberg55b97fe2016-01-28 05:22:45 -08002339int Channel::SetLocalSSRC(unsigned int ssrc) {
2340 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2341 "Channel::SetLocalSSRC()");
2342 if (channel_state_.Get().sending) {
2343 _engineStatisticsPtr->SetLastError(VE_ALREADY_SENDING, kTraceError,
2344 "SetLocalSSRC() already sending");
2345 return -1;
2346 }
2347 _rtpRtcpModule->SetSSRC(ssrc);
2348 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002349}
2350
kwiberg55b97fe2016-01-28 05:22:45 -08002351int Channel::GetLocalSSRC(unsigned int& ssrc) {
2352 ssrc = _rtpRtcpModule->SSRC();
2353 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002354}
2355
kwiberg55b97fe2016-01-28 05:22:45 -08002356int Channel::GetRemoteSSRC(unsigned int& ssrc) {
2357 ssrc = rtp_receiver_->SSRC();
2358 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002359}
2360
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002361int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002362 _includeAudioLevelIndication = enable;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002363 return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
niklase@google.com470e71d2011-07-07 08:21:25 +00002364}
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002365
// Enables or disables parsing of the audio-level header extension on
// received packets. Returns 0 on success, -1 if registration fails.
//
// The extension is always deregistered first so that a changed |id| takes
// effect; it is then re-registered only when |enable| is true.
int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
                                                  unsigned char id) {
  rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
  if (enable &&
      !rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
                                                      id)) {
    return -1;
  }
  return 0;
}
2376
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002377void Channel::EnableSendTransportSequenceNumber(int id) {
2378 int ret =
2379 SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
2380 RTC_DCHECK_EQ(0, ret);
2381}
2382
stefan3313ec92016-01-21 06:32:43 -08002383void Channel::EnableReceiveTransportSequenceNumber(int id) {
2384 rtp_header_parser_->DeregisterRtpHeaderExtension(
2385 kRtpExtensionTransportSequenceNumber);
2386 bool ret = rtp_header_parser_->RegisterRtpHeaderExtension(
2387 kRtpExtensionTransportSequenceNumber, id);
2388 RTC_DCHECK(ret);
2389}
2390
// Wires this channel's send path into the congestion-control stack:
// feedback observer, sequence-number allocator and packet sender are routed
// through the channel's proxy objects, packet storage is enabled for
// retransmission, and the RTP module is added to the packet router.
// Must not be called while a packet router is already registered
// (DCHECK !packet_router_).
void Channel::RegisterSenderCongestionControlObjects(
    RtpPacketSender* rtp_packet_sender,
    TransportFeedbackObserver* transport_feedback_observer,
    PacketRouter* packet_router) {
  RTC_DCHECK(rtp_packet_sender);
  RTC_DCHECK(transport_feedback_observer);
  RTC_DCHECK(packet_router && !packet_router_);
  feedback_observer_proxy_->SetTransportFeedbackObserver(
      transport_feedback_observer);
  seq_num_allocator_proxy_->SetSequenceNumberAllocator(packet_router);
  rtp_packet_sender_proxy_->SetPacketSender(rtp_packet_sender);
  // Keep up to 600 sent packets so they can be resent on NACK/feedback.
  _rtpRtcpModule->SetStorePacketsStatus(true, 600);
  packet_router->AddRtpModule(_rtpRtcpModule.get());
  packet_router_ = packet_router;
}
2406
// Receive-side counterpart of RegisterSenderCongestionControlObjects: only
// attaches the RTP module to the packet router. Must not be called while a
// packet router is already registered (DCHECK !packet_router_).
void Channel::RegisterReceiverCongestionControlObjects(
    PacketRouter* packet_router) {
  RTC_DCHECK(packet_router && !packet_router_);
  packet_router->AddRtpModule(_rtpRtcpModule.get());
  packet_router_ = packet_router;
}
2413
// Undoes either Register*CongestionControlObjects call: disables packet
// storage, detaches the proxies, and removes the RTP module from the packet
// router. Requires that a packet router was previously registered
// (DCHECK packet_router_).
void Channel::ResetCongestionControlObjects() {
  RTC_DCHECK(packet_router_);
  _rtpRtcpModule->SetStorePacketsStatus(false, 600);
  feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
  seq_num_allocator_proxy_->SetSequenceNumberAllocator(nullptr);
  packet_router_->RemoveRtpModule(_rtpRtcpModule.get());
  packet_router_ = nullptr;
  rtp_packet_sender_proxy_->SetPacketSender(nullptr);
}
2423
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002424void Channel::SetRTCPStatus(bool enable) {
2425 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2426 "Channel::SetRTCPStatus()");
pbosda903ea2015-10-02 02:36:56 -07002427 _rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff);
niklase@google.com470e71d2011-07-07 08:21:25 +00002428}
2429
kwiberg55b97fe2016-01-28 05:22:45 -08002430int Channel::GetRTCPStatus(bool& enabled) {
pbosda903ea2015-10-02 02:36:56 -07002431 RtcpMode method = _rtpRtcpModule->RTCP();
2432 enabled = (method != RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002433 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002434}
2435
kwiberg55b97fe2016-01-28 05:22:45 -08002436int Channel::SetRTCP_CNAME(const char cName[256]) {
2437 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2438 "Channel::SetRTCP_CNAME()");
2439 if (_rtpRtcpModule->SetCNAME(cName) != 0) {
2440 _engineStatisticsPtr->SetLastError(
2441 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2442 "SetRTCP_CNAME() failed to set RTCP CNAME");
2443 return -1;
2444 }
2445 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002446}
2447
// Copies the remote side's RTCP CNAME into the caller-provided 256-byte
// buffer |cName|. Returns 0 on success, -1 on a NULL buffer or if no CNAME
// is known for the remote SSRC.
int Channel::GetRemoteRTCP_CNAME(char cName[256]) {
  if (cName == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
    return -1;
  }
  char cname[RTCP_CNAME_SIZE];
  const uint32_t remoteSSRC = rtp_receiver_->SSRC();
  if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_RETRIEVE_CNAME, kTraceError,
        "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
    return -1;
  }
  // NOTE(review): plain strcpy is safe only if RTCP_CNAME_SIZE <= 256 and
  // RemoteCNAME() always NUL-terminates — both presumably hold; confirm
  // against the RtpRtcp module before changing either constant.
  strcpy(cName, cname);
  return 0;
}
2466
// Collects remote RTCP information for this channel.
// Returns 0 on success, -1 if no sender info is available or (when jitter /
// fraction-lost are requested) no report blocks have been received.
//
// NTPHigh/NTPLow/timestamp   - from the last received RTCP Sender Report.
// playoutTimestamp           - locally derived playout timestamp, updated on
//                              each incoming RTCP packet (0 if none yet).
// jitter, fractionLost       - optional (may be NULL); filled from the
//                              matching receiver report block.
int Channel::GetRemoteRTCPData(unsigned int& NTPHigh,
                               unsigned int& NTPLow,
                               unsigned int& timestamp,
                               unsigned int& playoutTimestamp,
                               unsigned int* jitter,
                               unsigned short* fractionLost) {
  // --- Information from sender info in received Sender Reports

  RTCPSenderInfo senderInfo;
  if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "GetRemoteRTCPData() failed to retrieve sender info for remote "
        "side");
    return -1;
  }

  // We only utilize 12 out of 20 bytes in the sender info (ignores packet
  // and octet count)
  NTPHigh = senderInfo.NTPseconds;
  NTPLow = senderInfo.NTPfraction;
  timestamp = senderInfo.RTPtimeStamp;

  // --- Locally derived information

  // This value is updated on each incoming RTCP packet (0 when no packet
  // has been received)
  playoutTimestamp = playout_timestamp_rtcp_;

  // Jitter / fraction-lost are only computed when the caller asked for at
  // least one of them.
  if (NULL != jitter || NULL != fractionLost) {
    // Get all RTCP receiver report blocks that have been received on this
    // channel. If we receive RTP packets from a remote source we know the
    // remote SSRC and use the report block from him.
    // Otherwise use the first report block.
    std::vector<RTCPReportBlock> remote_stats;
    if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
        remote_stats.empty()) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "GetRemoteRTCPData() failed to measure statistics due"
                   " to lack of received RTP and/or RTCP packets");
      return -1;
    }

    // Prefer the report block whose sender matches the known remote SSRC.
    uint32_t remoteSSRC = rtp_receiver_->SSRC();
    std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
    for (; it != remote_stats.end(); ++it) {
      if (it->remoteSSRC == remoteSSRC)
        break;
    }

    if (it == remote_stats.end()) {
      // If we have not received any RTCP packets from this SSRC it probably
      // means that we have not received any RTP packets.
      // Use the first received report block instead.
      it = remote_stats.begin();
      remoteSSRC = it->remoteSSRC;
    }

    if (jitter) {
      *jitter = it->jitter;
    }

    if (fractionLost) {
      *fractionLost = it->fractionLost;
    }
  }
  return 0;
}
2535
// Schedules an application-defined (APP) RTCP packet for transmission.
// Returns 0 on success; -1 if the channel is not sending, |data| is NULL,
// the length is not a multiple of 4 bytes, RTCP is disabled, or the module
// rejects the packet. Each failure records a distinct last-error code.
//
// subType           - APP packet subtype field.
// name              - 4-byte APP packet name field.
// data              - payload; length must be a multiple of 4.
// dataLengthInBytes - payload length in bytes.
int Channel::SendApplicationDefinedRTCPPacket(
    unsigned char subType,
    unsigned int name,
    const char* data,
    unsigned short dataLengthInBytes) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendApplicationDefinedRTCPPacket()");
  if (!channel_state_.Get().sending) {
    _engineStatisticsPtr->SetLastError(
        VE_NOT_SENDING, kTraceError,
        "SendApplicationDefinedRTCPPacket() not sending");
    return -1;
  }
  if (NULL == data) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "SendApplicationDefinedRTCPPacket() invalid data value");
    return -1;
  }
  // APP payloads must be 32-bit aligned.
  if (dataLengthInBytes % 4 != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "SendApplicationDefinedRTCPPacket() invalid length value");
    return -1;
  }
  RtcpMode status = _rtpRtcpModule->RTCP();
  if (status == RtcpMode::kOff) {
    _engineStatisticsPtr->SetLastError(
        VE_RTCP_ERROR, kTraceError,
        "SendApplicationDefinedRTCPPacket() RTCP is disabled");
    return -1;
  }

  // Create and schedule the RTCP APP packet for transmission
  if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
          subType, name, (const unsigned char*)data, dataLengthInBytes) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_SEND_ERROR, kTraceError,
        "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
    return -1;
  }
  return 0;
}
2579
// Reports jitter statistics (scaled to milliseconds using the current
// playout frequency) and the number of discarded packets. Always returns 0.
// If the playout frequency is not positive, the jitter outputs are left
// unmodified.
int Channel::GetRTPStatistics(unsigned int& averageJitterMs,
                              unsigned int& maxJitterMs,
                              unsigned int& discardedPackets) {
  // The jitter statistics is updated for each received RTP packet and is
  // based on received packets.
  if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
    // If RTCP is off, there is no timed thread in the RTCP module regularly
    // generating new stats, trigger the update manually here instead.
    StreamStatistician* statistician =
        rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
    if (statistician) {
      // Don't use returned statistics, use data from proxy instead so that
      // max jitter can be fetched atomically.
      RtcpStatistics s;
      statistician->GetStatistics(&s, true);
    }
  }

  ChannelStatistics stats = statistics_proxy_->GetStats();
  const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
  if (playoutFrequency > 0) {
    // Scale RTP statistics given the current playout frequency
    maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
    averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
  }

  discardedPackets = _numberOfDiscardedPackets;

  return 0;
}
2610
// Copies all report blocks from the latest received RTCP Sender/Receiver
// Report into |report_blocks| (appending one ReportBlock per RTCP block).
// Returns 0 on success (including the no-blocks-yet case), -1 on a NULL
// output pointer or if the RTP/RTCP module query fails.
int Channel::GetRemoteRTCPReportBlocks(
    std::vector<ReportBlock>* report_blocks) {
  if (report_blocks == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "GetRemoteRTCPReportBlock()s invalid report_blocks.");
    return -1;
  }

  // Get the report blocks from the latest received RTCP Sender or Receiver
  // Report. Each element in the vector contains the sender's SSRC and a
  // report block according to RFC 3550.
  std::vector<RTCPReportBlock> rtcp_report_blocks;
  if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
    return -1;
  }

  // No reports received yet is not an error; the output is simply left empty.
  if (rtcp_report_blocks.empty())
    return 0;

  // Translate each internal RTCPReportBlock into the public ReportBlock
  // representation, field by field.
  std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
  for (; it != rtcp_report_blocks.end(); ++it) {
    ReportBlock report_block;
    report_block.sender_SSRC = it->remoteSSRC;
    report_block.source_SSRC = it->sourceSSRC;
    report_block.fraction_lost = it->fractionLost;
    report_block.cumulative_num_packets_lost = it->cumulativeLost;
    report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
    report_block.interarrival_jitter = it->jitter;
    report_block.last_SR_timestamp = it->lastSR;
    report_block.delay_since_last_SR = it->delaySinceLastSR;
    report_blocks->push_back(report_block);
  }
  return 0;
}
2646
// Fills a CallStatistics struct with receive-side RTCP statistics, RTT,
// RTP data counters, and the capture-start NTP timestamp. Always returns 0;
// a failed data-counter query only degrades the output (warning traced).
int Channel::GetRTPStatistics(CallStatistics& stats) {
  // --- RtcpStatistics

  // The jitter statistics is updated for each received RTP packet and is
  // based on received packets.
  RtcpStatistics statistics;
  StreamStatistician* statistician =
      rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
  if (statistician) {
    // With RTCP off there is no periodic update thread, so ask the
    // statistician to recompute (second argument true) in that case.
    statistician->GetStatistics(&statistics,
                                _rtpRtcpModule->RTCP() == RtcpMode::kOff);
  }

  // If no statistician exists yet, |statistics| stays default-initialized.
  stats.fractionLost = statistics.fraction_lost;
  stats.cumulativeLost = statistics.cumulative_lost;
  stats.extendedMax = statistics.extended_max_sequence_number;
  stats.jitterSamples = statistics.jitter;

  // --- RTT
  stats.rttMs = GetRTT(true);

  // --- Data counters

  size_t bytesSent(0);
  uint32_t packetsSent(0);
  size_t bytesReceived(0);
  uint32_t packetsReceived(0);

  if (statistician) {
    statistician->GetDataCounters(&bytesReceived, &packetsReceived);
  }

  if (_rtpRtcpModule->DataCountersRTP(&bytesSent, &packetsSent) != 0) {
    // Non-fatal: zeros are reported for the send counters.
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
                 " output will not be complete");
  }

  stats.bytesSent = bytesSent;
  stats.packetsSent = packetsSent;
  stats.bytesReceived = bytesReceived;
  stats.packetsReceived = packetsReceived;

  // --- Timestamps
  {
    // capture_start_ntp_time_ms_ is guarded by ts_stats_lock_.
    rtc::CritScope lock(&ts_stats_lock_);
    stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
  }
  return 0;
}
2697
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002698int Channel::SetCodecFECStatus(bool enable) {
2699 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2700 "Channel::SetCodecFECStatus()");
2701
kwibergc8d071e2016-04-06 12:22:38 -07002702 if (!codec_manager_.SetCodecFEC(enable) ||
2703 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002704 _engineStatisticsPtr->SetLastError(
2705 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2706 "SetCodecFECStatus() failed to set FEC state");
2707 return -1;
2708 }
2709 return 0;
2710}
2711
2712bool Channel::GetCodecFECStatus() {
kwibergc8d071e2016-04-06 12:22:38 -07002713 return codec_manager_.GetStackParams()->use_codec_fec;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002714}
2715
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002716void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
2717 // None of these functions can fail.
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002718 // If pacing is enabled we always store packets.
2719 if (!pacing_enabled_)
2720 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002721 rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002722 if (enable)
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002723 audio_coding_->EnableNack(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002724 else
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002725 audio_coding_->DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002726}
2727
// Called when we are missing one or more packets.
int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
  // Forward the list of missing sequence numbers to the RTP/RTCP module,
  // which issues the NACK; returns the module's result code.
  return _rtpRtcpModule->SendNACK(sequence_numbers, length);
}
2732
uint32_t Channel::Demultiplex(const AudioFrame& audioFrame) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Demultiplex()");
  // Copy the shared capture frame into this channel's private frame, then
  // tag it with the channel id (the tag must follow the copy, since CopyFrom
  // overwrites id_). Always returns 0.
  _audioFrame.CopyFrom(audioFrame);
  _audioFrame.id_ = _channelId;
  return 0;
}
2740
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002741void Channel::Demultiplex(const int16_t* audio_data,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00002742 int sample_rate,
Peter Kastingdce40cf2015-08-24 14:52:23 -07002743 size_t number_of_frames,
Peter Kasting69558702016-01-12 16:26:35 -08002744 size_t number_of_channels) {
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002745 CodecInst codec;
2746 GetSendCodec(codec);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002747
Alejandro Luebscdfe20b2015-09-23 12:49:12 -07002748 // Never upsample or upmix the capture signal here. This should be done at the
2749 // end of the send chain.
2750 _audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate);
2751 _audioFrame.num_channels_ = std::min(number_of_channels, codec.channels);
2752 RemixAndResample(audio_data, number_of_frames, number_of_channels,
2753 sample_rate, &input_resampler_, &_audioFrame);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002754}
2755
uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PrepareEncodeAndSend()");

  // An empty frame cannot be processed; 0xFFFFFFFF is the legacy error value
  // returned by this pipeline stage.
  if (_audioFrame.samples_per_channel_ == 0) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PrepareEncodeAndSend() invalid audio frame");
    return 0xFFFFFFFF;
  }

  // Optionally mix in (or replace with) file audio before muting.
  if (channel_state_.Get().input_file_playing) {
    MixOrReplaceAudioWithFile(mixingFrequency);
  }

  bool is_muted = InputMute();  // Cache locally as InputMute() takes a lock.
  // Mute uses both the previous and current mute state (presumably to ramp
  // in/out across the transition frame -- see AudioFrameOperations).
  AudioFrameOperations::Mute(&_audioFrame, previous_frame_muted_, is_muted);

  // Hand the frame to a registered external-media callback, if any. The
  // callback pointer is read and invoked under _callbackCritSect.
  if (channel_state_.Get().input_external_media) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (_audioFrame.num_channels_ == 2);
    if (_inputExternalMediaCallbackPtr) {
      _inputExternalMediaCallbackPtr->Process(
          _channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
          _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
          isStereo);
    }
  }

  // Feed the audio-level (RMS) measurement used for the RTP audio-level
  // header extension. A frame is treated as silent only when both this and
  // the previous frame were muted (the transition frame still carries audio).
  if (_includeAudioLevelIndication) {
    size_t length =
        _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
    RTC_CHECK_LE(length, sizeof(_audioFrame.data_));
    if (is_muted && previous_frame_muted_) {
      rms_level_.AnalyzeMuted(length);
    } else {
      rms_level_.Analyze(
          rtc::ArrayView<const int16_t>(_audioFrame.data_, length));
    }
  }
  previous_frame_muted_ = is_muted;

  return 0;
}
2799
kwiberg55b97fe2016-01-28 05:22:45 -08002800uint32_t Channel::EncodeAndSend() {
2801 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2802 "Channel::EncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002803
kwiberg55b97fe2016-01-28 05:22:45 -08002804 assert(_audioFrame.num_channels_ <= 2);
2805 if (_audioFrame.samples_per_channel_ == 0) {
2806 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2807 "Channel::EncodeAndSend() invalid audio frame");
2808 return 0xFFFFFFFF;
2809 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002810
kwiberg55b97fe2016-01-28 05:22:45 -08002811 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00002812
kwiberg55b97fe2016-01-28 05:22:45 -08002813 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
niklase@google.com470e71d2011-07-07 08:21:25 +00002814
kwiberg55b97fe2016-01-28 05:22:45 -08002815 // The ACM resamples internally.
2816 _audioFrame.timestamp_ = _timeStamp;
2817 // This call will trigger AudioPacketizationCallback::SendData if encoding
2818 // is done and payload is ready for packetization and transmission.
2819 // Otherwise, it will return without invoking the callback.
2820 if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) {
2821 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
2822 "Channel::EncodeAndSend() ACM encoding failed");
2823 return 0xFFFFFFFF;
2824 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002825
kwiberg55b97fe2016-01-28 05:22:45 -08002826 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
2827 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002828}
2829
void Channel::set_associate_send_channel(const ChannelOwner& channel) {
  // A channel must never be associated with itself.
  RTC_DCHECK(!channel.channel() ||
             channel.channel()->ChannelId() != _channelId);
  // The association is read by GetRTT() on other threads, so write it under
  // the dedicated lock.
  rtc::CritScope lock(&assoc_send_channel_lock_);
  associate_send_channel_ = channel;
}
2836
Minyue2013aec2015-05-13 14:14:42 +02002837void Channel::DisassociateSendChannel(int channel_id) {
tommi31fc21f2016-01-21 10:37:37 -08002838 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02002839 Channel* channel = associate_send_channel_.channel();
2840 if (channel && channel->ChannelId() == channel_id) {
2841 // If this channel is associated with a send channel of the specified
2842 // Channel ID, disassociate with it.
2843 ChannelOwner ref(NULL);
2844 associate_send_channel_ = ref;
2845 }
2846}
2847
void Channel::SetRtcEventLog(RtcEventLog* event_log) {
  // Point the channel's event-log proxy at |event_log| (a null pointer
  // presumably disables logging -- confirm in RtcEventLogProxy).
  event_log_proxy_->SetEventLog(event_log);
}
2851
void Channel::SetTransportOverhead(int transport_overhead_per_packet) {
  // Forward the per-packet transport overhead (bytes) to the RTP/RTCP module.
  _rtpRtcpModule->SetTransportOverhead(transport_overhead_per_packet);
}
2855
kwiberg55b97fe2016-01-28 05:22:45 -08002856int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
2857 VoEMediaProcess& processObject) {
2858 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2859 "Channel::RegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002860
kwiberg55b97fe2016-01-28 05:22:45 -08002861 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002862
kwiberg55b97fe2016-01-28 05:22:45 -08002863 if (kPlaybackPerChannel == type) {
2864 if (_outputExternalMediaCallbackPtr) {
2865 _engineStatisticsPtr->SetLastError(
2866 VE_INVALID_OPERATION, kTraceError,
2867 "Channel::RegisterExternalMediaProcessing() "
2868 "output external media already enabled");
2869 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002870 }
kwiberg55b97fe2016-01-28 05:22:45 -08002871 _outputExternalMediaCallbackPtr = &processObject;
2872 _outputExternalMedia = true;
2873 } else if (kRecordingPerChannel == type) {
2874 if (_inputExternalMediaCallbackPtr) {
2875 _engineStatisticsPtr->SetLastError(
2876 VE_INVALID_OPERATION, kTraceError,
2877 "Channel::RegisterExternalMediaProcessing() "
2878 "output external media already enabled");
2879 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002880 }
kwiberg55b97fe2016-01-28 05:22:45 -08002881 _inputExternalMediaCallbackPtr = &processObject;
2882 channel_state_.SetInputExternalMedia(true);
2883 }
2884 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002885}
2886
kwiberg55b97fe2016-01-28 05:22:45 -08002887int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
2888 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2889 "Channel::DeRegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002890
kwiberg55b97fe2016-01-28 05:22:45 -08002891 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002892
kwiberg55b97fe2016-01-28 05:22:45 -08002893 if (kPlaybackPerChannel == type) {
2894 if (!_outputExternalMediaCallbackPtr) {
2895 _engineStatisticsPtr->SetLastError(
2896 VE_INVALID_OPERATION, kTraceWarning,
2897 "Channel::DeRegisterExternalMediaProcessing() "
2898 "output external media already disabled");
2899 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002900 }
kwiberg55b97fe2016-01-28 05:22:45 -08002901 _outputExternalMedia = false;
2902 _outputExternalMediaCallbackPtr = NULL;
2903 } else if (kRecordingPerChannel == type) {
2904 if (!_inputExternalMediaCallbackPtr) {
2905 _engineStatisticsPtr->SetLastError(
2906 VE_INVALID_OPERATION, kTraceWarning,
2907 "Channel::DeRegisterExternalMediaProcessing() "
2908 "input external media already disabled");
2909 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002910 }
kwiberg55b97fe2016-01-28 05:22:45 -08002911 channel_state_.SetInputExternalMedia(false);
2912 _inputExternalMediaCallbackPtr = NULL;
2913 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002914
kwiberg55b97fe2016-01-28 05:22:45 -08002915 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002916}
2917
roosa@google.com1b60ceb2012-12-12 23:00:29 +00002918int Channel::SetExternalMixing(bool enabled) {
kwiberg55b97fe2016-01-28 05:22:45 -08002919 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2920 "Channel::SetExternalMixing(enabled=%d)", enabled);
roosa@google.com1b60ceb2012-12-12 23:00:29 +00002921
kwiberg55b97fe2016-01-28 05:22:45 -08002922 if (channel_state_.Get().playing) {
2923 _engineStatisticsPtr->SetLastError(
2924 VE_INVALID_OPERATION, kTraceError,
2925 "Channel::SetExternalMixing() "
2926 "external mixing cannot be changed while playing.");
2927 return -1;
2928 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00002929
kwiberg55b97fe2016-01-28 05:22:45 -08002930 _externalMixing = enabled;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00002931
kwiberg55b97fe2016-01-28 05:22:45 -08002932 return 0;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00002933}
2934
int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
  // Delegate to the ACM (NetEq) and return its result code.
  return audio_coding_->GetNetworkStatistics(&stats);
}
2938
void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
  // Delegate to the ACM; |stats| is filled in by the callee.
  audio_coding_->GetDecodingCallStatistics(stats);
}
2942
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002943bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
2944 int* playout_buffer_delay_ms) const {
tommi31fc21f2016-01-21 10:37:37 -08002945 rtc::CritScope lock(&video_sync_lock_);
henrik.lundinb3f1c5d2016-08-22 15:39:53 -07002946 *jitter_buffer_delay_ms = audio_coding_->FilteredCurrentDelayMs();
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002947 *playout_buffer_delay_ms = playout_delay_ms_;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002948 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002949}
2950
solenberg358057b2015-11-27 10:46:42 -08002951uint32_t Channel::GetDelayEstimate() const {
2952 int jitter_buffer_delay_ms = 0;
2953 int playout_buffer_delay_ms = 0;
2954 GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
2955 return jitter_buffer_delay_ms + playout_buffer_delay_ms;
2956}
2957
int Channel::LeastRequiredDelayMs() const {
  // Delegate to the ACM (NetEq) and return its value unchanged.
  return audio_coding_->LeastRequiredDelayMs();
}
2961
kwiberg55b97fe2016-01-28 05:22:45 -08002962int Channel::SetMinimumPlayoutDelay(int delayMs) {
2963 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2964 "Channel::SetMinimumPlayoutDelay()");
2965 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
2966 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {
2967 _engineStatisticsPtr->SetLastError(
2968 VE_INVALID_ARGUMENT, kTraceError,
2969 "SetMinimumPlayoutDelay() invalid min delay");
2970 return -1;
2971 }
2972 if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) {
2973 _engineStatisticsPtr->SetLastError(
2974 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2975 "SetMinimumPlayoutDelay() failed to set min playout delay");
2976 return -1;
2977 }
2978 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002979}
2980
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002981int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
deadbeef74375882015-08-13 12:09:10 -07002982 uint32_t playout_timestamp_rtp = 0;
2983 {
tommi31fc21f2016-01-21 10:37:37 -08002984 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07002985 playout_timestamp_rtp = playout_timestamp_rtp_;
2986 }
kwiberg55b97fe2016-01-28 05:22:45 -08002987 if (playout_timestamp_rtp == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002988 _engineStatisticsPtr->SetLastError(
skvlad4c0536b2016-07-07 13:06:26 -07002989 VE_CANNOT_RETRIEVE_VALUE, kTraceStateInfo,
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002990 "GetPlayoutTimestamp() failed to retrieve timestamp");
2991 return -1;
2992 }
deadbeef74375882015-08-13 12:09:10 -07002993 timestamp = playout_timestamp_rtp;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002994 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002995}
2996
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002997int Channel::SetInitTimestamp(unsigned int timestamp) {
2998 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002999 "Channel::SetInitTimestamp()");
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003000 if (channel_state_.Get().sending) {
3001 _engineStatisticsPtr->SetLastError(VE_SENDING, kTraceError,
3002 "SetInitTimestamp() already sending");
3003 return -1;
3004 }
3005 _rtpRtcpModule->SetStartTimestamp(timestamp);
3006 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003007}
3008
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003009int Channel::SetInitSequenceNumber(short sequenceNumber) {
3010 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3011 "Channel::SetInitSequenceNumber()");
3012 if (channel_state_.Get().sending) {
3013 _engineStatisticsPtr->SetLastError(
3014 VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending");
3015 return -1;
3016 }
3017 _rtpRtcpModule->SetSequenceNumber(sequenceNumber);
3018 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003019}
3020
int Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule,
                        RtpReceiver** rtp_receiver) const {
  // Expose non-owning pointers to the channel's RTP/RTCP module and RTP
  // receiver; the channel retains ownership. Always returns 0.
  *rtpRtcpModule = _rtpRtcpModule.get();
  *rtp_receiver = rtp_receiver_.get();
  return 0;
}
3027
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00003028// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
3029// a shared helper.
kwiberg55b97fe2016-01-28 05:22:45 -08003030int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
kwibergb7f89d62016-02-17 10:04:18 -08003031 std::unique_ptr<int16_t[]> fileBuffer(new int16_t[640]);
kwiberg55b97fe2016-01-28 05:22:45 -08003032 size_t fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00003033
kwiberg55b97fe2016-01-28 05:22:45 -08003034 {
3035 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003036
kwiberg5a25d952016-08-17 07:31:12 -07003037 if (!input_file_player_) {
kwiberg55b97fe2016-01-28 05:22:45 -08003038 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3039 "Channel::MixOrReplaceAudioWithFile() fileplayer"
3040 " doesnt exist");
3041 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003042 }
3043
kwiberg4ec01d92016-08-22 08:43:54 -07003044 if (input_file_player_->Get10msAudioFromFile(fileBuffer.get(), &fileSamples,
kwiberg5a25d952016-08-17 07:31:12 -07003045 mixingFrequency) == -1) {
kwiberg55b97fe2016-01-28 05:22:45 -08003046 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3047 "Channel::MixOrReplaceAudioWithFile() file mixing "
3048 "failed");
3049 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003050 }
kwiberg55b97fe2016-01-28 05:22:45 -08003051 if (fileSamples == 0) {
3052 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3053 "Channel::MixOrReplaceAudioWithFile() file is ended");
3054 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003055 }
kwiberg55b97fe2016-01-28 05:22:45 -08003056 }
3057
3058 assert(_audioFrame.samples_per_channel_ == fileSamples);
3059
3060 if (_mixFileWithMicrophone) {
3061 // Currently file stream is always mono.
3062 // TODO(xians): Change the code when FilePlayer supports real stereo.
3063 MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
3064 1, fileSamples);
3065 } else {
3066 // Replace ACM audio with file.
3067 // Currently file stream is always mono.
3068 // TODO(xians): Change the code when FilePlayer supports real stereo.
3069 _audioFrame.UpdateFrame(
3070 _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency,
3071 AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1);
3072 }
3073 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003074}
3075
kwiberg55b97fe2016-01-28 05:22:45 -08003076int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
3077 assert(mixingFrequency <= 48000);
niklase@google.com470e71d2011-07-07 08:21:25 +00003078
kwibergb7f89d62016-02-17 10:04:18 -08003079 std::unique_ptr<int16_t[]> fileBuffer(new int16_t[960]);
kwiberg55b97fe2016-01-28 05:22:45 -08003080 size_t fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00003081
kwiberg55b97fe2016-01-28 05:22:45 -08003082 {
3083 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003084
kwiberg5a25d952016-08-17 07:31:12 -07003085 if (!output_file_player_) {
kwiberg55b97fe2016-01-28 05:22:45 -08003086 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3087 "Channel::MixAudioWithFile() file mixing failed");
3088 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003089 }
3090
kwiberg55b97fe2016-01-28 05:22:45 -08003091 // We should get the frequency we ask for.
kwiberg4ec01d92016-08-22 08:43:54 -07003092 if (output_file_player_->Get10msAudioFromFile(
3093 fileBuffer.get(), &fileSamples, mixingFrequency) == -1) {
kwiberg55b97fe2016-01-28 05:22:45 -08003094 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3095 "Channel::MixAudioWithFile() file mixing failed");
3096 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003097 }
kwiberg55b97fe2016-01-28 05:22:45 -08003098 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003099
kwiberg55b97fe2016-01-28 05:22:45 -08003100 if (audioFrame.samples_per_channel_ == fileSamples) {
3101 // Currently file stream is always mono.
3102 // TODO(xians): Change the code when FilePlayer supports real stereo.
3103 MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
3104 fileSamples);
3105 } else {
3106 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3107 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS
3108 ") != "
3109 "fileSamples(%" PRIuS ")",
3110 audioFrame.samples_per_channel_, fileSamples);
3111 return -1;
3112 }
3113
3114 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003115}
3116
void Channel::UpdatePlayoutTimestamp(bool rtcp) {
  // Latest playout timestamp reported by NetEq (an rtc::Optional).
  jitter_buffer_playout_timestamp_ = audio_coding_->PlayoutTimestamp();

  if (!jitter_buffer_playout_timestamp_) {
    // This can happen if this channel has not received any RTP packets. In
    // this case, NetEq is not capable of computing a playout timestamp.
    return;
  }

  // Current playout delay of the audio device, needed to translate the
  // jitter-buffer timestamp into a "what is audible now" timestamp.
  uint16_t delay_ms = 0;
  if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::UpdatePlayoutTimestamp() failed to read playout"
                 " delay from the ADM");
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_RETRIEVE_VALUE, kTraceError,
        "UpdatePlayoutTimestamp() failed to retrieve playout delay");
    return;
  }

  RTC_DCHECK(jitter_buffer_playout_timestamp_);
  uint32_t playout_timestamp = *jitter_buffer_playout_timestamp_;

  // Remove the playout delay.
  // (Converted from milliseconds to RTP timestamp units.)
  playout_timestamp -= (delay_ms * (GetRtpTimestampRateHz() / 1000));

  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
               playout_timestamp);

  // Publish the result under the video-sync lock; |rtcp| selects which of
  // the two cached timestamps is updated.
  {
    rtc::CritScope lock(&video_sync_lock_);
    if (rtcp) {
      playout_timestamp_rtcp_ = playout_timestamp;
    } else {
      playout_timestamp_rtp_ = playout_timestamp;
    }
    playout_delay_ms_ = delay_ms;
  }
}
3157
kwiberg55b97fe2016-01-28 05:22:45 -08003158void Channel::RegisterReceiveCodecsToRTPModule() {
3159 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3160 "Channel::RegisterReceiveCodecsToRTPModule()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003161
kwiberg55b97fe2016-01-28 05:22:45 -08003162 CodecInst codec;
3163 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00003164
kwiberg55b97fe2016-01-28 05:22:45 -08003165 for (int idx = 0; idx < nSupportedCodecs; idx++) {
3166 // Open up the RTP/RTCP receiver for all supported codecs
3167 if ((audio_coding_->Codec(idx, &codec) == -1) ||
magjed56124bd2016-11-24 09:34:46 -08003168 (rtp_receiver_->RegisterReceivePayload(codec) == -1)) {
kwiberg55b97fe2016-01-28 05:22:45 -08003169 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3170 "Channel::RegisterReceiveCodecsToRTPModule() unable"
3171 " to register %s (%d/%d/%" PRIuS
3172 "/%d) to RTP/RTCP "
3173 "receiver",
3174 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3175 codec.rate);
3176 } else {
3177 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3178 "Channel::RegisterReceiveCodecsToRTPModule() %s "
3179 "(%d/%d/%" PRIuS
3180 "/%d) has been added to the RTP/RTCP "
3181 "receiver",
3182 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3183 codec.rate);
niklase@google.com470e71d2011-07-07 08:21:25 +00003184 }
kwiberg55b97fe2016-01-28 05:22:45 -08003185 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003186}
3187
kwiberg55b97fe2016-01-28 05:22:45 -08003188int Channel::SetSendRtpHeaderExtension(bool enable,
3189 RTPExtensionType type,
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00003190 unsigned char id) {
3191 int error = 0;
3192 _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
3193 if (enable) {
3194 error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
3195 }
3196 return error;
3197}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00003198
ossue280cde2016-10-12 11:04:10 -07003199int Channel::GetRtpTimestampRateHz() const {
3200 const auto format = audio_coding_->ReceiveFormat();
3201 // Default to the playout frequency if we've not gotten any packets yet.
3202 // TODO(ossu): Zero clockrate can only happen if we've added an external
3203 // decoder for a format we don't support internally. Remove once that way of
3204 // adding decoders is gone!
3205 return (format && format->clockrate_hz != 0)
3206 ? format->clockrate_hz
3207 : audio_coding_->PlayoutFrequency();
wu@webrtc.org94454b72014-06-05 20:34:08 +00003208}
3209
int64_t Channel::GetRTT(bool allow_associate_channel) const {
  // RTT is only meaningful when RTCP is active.
  RtcpMode method = _rtpRtcpModule->RTCP();
  if (method == RtcpMode::kOff) {
    return 0;
  }
  std::vector<RTCPReportBlock> report_blocks;
  _rtpRtcpModule->RemoteRTCPStat(&report_blocks);

  int64_t rtt = 0;
  if (report_blocks.empty()) {
    if (allow_associate_channel) {
      rtc::CritScope lock(&assoc_send_channel_lock_);
      Channel* channel = associate_send_channel_.channel();
      // Tries to get RTT from an associated channel. This is important for
      // receive-only channels.
      if (channel) {
        // To prevent infinite recursion and deadlock, calling GetRTT of
        // associate channel should always use "false" for argument:
        // |allow_associate_channel|.
        rtt = channel->GetRTT(false);
      }
    }
    return rtt;
  }

  // Prefer the report block whose sender SSRC matches the remote stream we
  // are receiving.
  uint32_t remoteSSRC = rtp_receiver_->SSRC();
  std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
  for (; it != report_blocks.end(); ++it) {
    if (it->remoteSSRC == remoteSSRC)
      break;
  }
  if (it == report_blocks.end()) {
    // We have not received packets with SSRC matching the report blocks.
    // To calculate RTT we try with the SSRC of the first report block.
    // This is very important for send-only channels where we don't know
    // the SSRC of the other end.
    remoteSSRC = report_blocks[0].remoteSSRC;
  }

  // Only the most recent RTT is returned; avg/min/max are retrieved because
  // the module's RTT() API requires the out-params.
  int64_t avg_rtt = 0;
  int64_t max_rtt = 0;
  int64_t min_rtt = 0;
  if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
      0) {
    return 0;
  }
  return rtt;
}
3258
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00003259} // namespace voe
3260} // namespace webrtc