/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/channel.h"

#include "webrtc/common.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/utility/interface/audio_frame_operations.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/utility.h"

#if defined(_WIN32)
#include <Qos.h>
#endif

namespace webrtc {
namespace voe {

int32_t
Channel::SendData(FrameType frameType,
                  uint8_t payloadType,
                  uint32_t timeStamp,
                  const uint8_t* payloadData,
                  uint16_t payloadSize,
                  const RTPFragmentationHeader* fragmentation)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
                 " payloadSize=%u, fragmentation=0x%x)",
                 frameType, payloadType, timeStamp, payloadSize, fragmentation);

    if (_includeAudioLevelIndication)
    {
        assert(_rtpAudioProc.get() != NULL);
        // Store current audio level in the RTP/RTCP module.
        // The level will be used in combination with voice-activity state
        // (frameType) to add an RTP header extension.
        _rtpRtcpModule->SetAudioLevel(_rtpAudioProc->level_estimator()->RMS());
    }

    // Push data from ACM to RTP/RTCP-module to deliver audio frame for
    // packetization.
    // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
    if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
                                         payloadType,
                                         timeStamp,
                                         // Leaving the time when this frame was
                                         // received from the capture device as
                                         // undefined for voice for now.
                                         -1,
                                         payloadData,
                                         payloadSize,
                                         fragmentation) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
            "Channel::SendData() failed to send data to RTP/RTCP module");
        return -1;
    }

    _lastLocalTimeStamp = timeStamp;
    _lastPayloadType = payloadType;

    return 0;
}

int32_t
Channel::InFrameType(int16_t frameType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::InFrameType(frameType=%d)", frameType);

    CriticalSectionScoped cs(&_callbackCritSect);
    // 1 indicates speech
    _sendFrameType = (frameType == 1) ? 1 : 0;
    return 0;
}

int32_t
Channel::OnRxVadDetected(int vadDecision)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);

    CriticalSectionScoped cs(&_callbackCritSect);
    if (_rxVadObserverPtr)
    {
        _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
    }

    return 0;
}

int
Channel::SendPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendPacket(channel=%d, len=%d)", channel, len);

    if (_transportPtr == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() failed to send RTP packet due to"
                     " invalid transport object");
        return -1;
    }

    // Insert an extra RTP packet if the user has called the
    // InsertExtraRTPPacket() API.
    if (_insertExtraRTPPacket)
    {
        uint8_t* rtpHdr = (uint8_t*)data;
        uint8_t M_PT(0);
        if (_extraMarkerBit)
        {
            M_PT = 0x80;            // set the M-bit
        }
        M_PT += _extraPayloadType;  // set the payload type
        *(++rtpHdr) = M_PT;         // modify the M|PT-byte within the RTP header
        _insertExtraRTPPacket = false;  // insert one packet only
    }
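    // Note: the byte written above is the second octet of the fixed RTP
    // header (RFC 3550), which carries the marker bit in its MSB followed by
    // the 7-bit payload type. Setting 0x80 therefore raises the marker bit,
    // and adding _extraPayloadType fills in the payload type field.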

    uint8_t* bufferToSendPtr = (uint8_t*)data;
    int32_t bufferLength = len;

    // Dump the RTP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() RTP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTPBufferPtr =
                    new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
                memset(_encryptionRTPBufferPtr, 0,
                       kVoiceEngineMaxIpPacketSizeBytes);
            }

            // Perform encryption (SRTP or external)
            int32_t encryptedBufferLength = 0;
            _encryptionPtr->encrypt(_channelId,
                                    bufferToSendPtr,
                                    _encryptionRTPBufferPtr,
                                    bufferLength,
                                    (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED,
                    kTraceError, "Channel::SendPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        int n = _transportPtr->SendPacket(channel,
                                          bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }
}

int
Channel::SendRTCPPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() failed to send RTCP packet"
                         " due to invalid transport object");
            return -1;
        }
    }

    uint8_t* bufferToSendPtr = (uint8_t*)data;
    int32_t bufferLength = len;

    // Dump the RTCP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendRTCPPacket() RTCP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTCPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTCPBufferPtr =
                    new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
            }

            // Perform encryption (SRTP or external).
            int32_t encryptedBufferLength = 0;
            _encryptionPtr->encrypt_rtcp(_channelId,
                                         bufferToSendPtr,
                                         _encryptionRTCPBufferPtr,
                                         bufferLength,
                                         (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED, kTraceError,
                    "Channel::SendRTCPPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTCPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            return -1;
        }
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }

    return len;
}

void
Channel::OnPlayTelephoneEvent(int32_t id,
                              uint8_t event,
                              uint16_t lengthMs,
                              uint8_t volume)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
                 " volume=%u)", id, event, lengthMs, volume);

    if (!_playOutbandDtmfEvent || (event > 15))
    {
        // Ignore callback since feedback is disabled or event is not a
        // Dtmf tone event.
        return;
    }

    assert(_outputMixerPtr != NULL);

    // Start playing out the Dtmf tone (if playout is enabled).
    // Reduce the tone length by 80 ms to reduce the risk of echo.
    _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
}

void
Channel::OnIncomingSSRCChanged(int32_t id, uint32_t ssrc)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
                 id, ssrc);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Update ssrc so that NTP for AV sync can be updated.
    _rtpRtcpModule->SetRemoteSSRC(ssrc);

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            // Send new SSRC to registered observer using callback
            _rtpObserverPtr->OnIncomingSSRCChanged(channel, ssrc);
        }
    }
}

void Channel::OnIncomingCSRCChanged(int32_t id,
                                    uint32_t CSRC,
                                    bool added)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
                 id, CSRC, added);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            _rtpObserverPtr->OnIncomingCSRCChanged(channel, CSRC, added);
        }
    }
}

void Channel::ResetStatistics(uint32_t ssrc) {
  StreamStatistician* statistician =
      rtp_receive_statistics_->GetStatistician(ssrc);
  if (statistician) {
    statistician->ResetStatistics();
  }
}

void
Channel::OnApplicationDataReceived(int32_t id,
                                   uint8_t subType,
                                   uint32_t name,
                                   uint16_t length,
                                   const uint8_t* data)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
                 " name=%u, length=%u)",
                 id, subType, name, length);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtcpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtcpObserverPtr)
        {
            _rtcpObserverPtr->OnApplicationDataReceived(channel,
                                                        subType,
                                                        name,
                                                        data,
                                                        length);
        }
    }
}

int32_t
Channel::OnInitializeDecoder(
    int32_t id,
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    uint8_t channels,
    uint32_t rate)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
                 "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
                 id, payloadType, payloadName, frequency, channels, rate);

    assert(VoEChannelId(id) == _channelId);

    CodecInst receiveCodec = {0};
    CodecInst dummyCodec = {0};

    receiveCodec.pltype = payloadType;
    receiveCodec.plfreq = frequency;
    receiveCodec.channels = channels;
    receiveCodec.rate = rate;
    strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

    _audioCodingModule.Codec(payloadName, &dummyCodec, frequency, channels);
    receiveCodec.pacsize = dummyCodec.pacsize;
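    // The ACM lookup above is only used to fetch the codec's default packet
    // size (pacsize); the remaining receive parameters are taken from the
    // values signaled in the RTP stream.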

    // Register the new codec to the ACM
    if (_audioCodingModule.RegisterReceiveCodec(receiveCodec) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "Channel::OnInitializeDecoder() invalid codec ("
                     "pt=%d, name=%s) received - 1", payloadType, payloadName);
        _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
        return -1;
    }

    return 0;
}

void
Channel::OnPacketTimeout(int32_t id)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPacketTimeout(id=%d)", id);

    CriticalSectionScoped cs(_callbackCritSectPtr);
    if (_voiceEngineObserverPtr)
    {
        if (_receiving || _externalTransport)
        {
            int32_t channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Ensure that next OnReceivedPacket() callback will trigger
            // a VE_PACKET_RECEIPT_RESTARTED callback.
            _rtpPacketTimedOut = true;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnPacketTimeout() => "
                         "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
            _voiceEngineObserverPtr->CallbackOnError(channel,
                                                     VE_RECEIVE_PACKET_TIMEOUT);
        }
    }
}

void
Channel::OnReceivedPacket(int32_t id,
                          RtpRtcpPacketType packetType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPacket(id=%d, packetType=%d)",
                 id, packetType);

    assert(VoEChannelId(id) == _channelId);

    // Notify only for the case when we have restarted an RTP session.
    if (_rtpPacketTimedOut && (kPacketRtp == packetType))
    {
        CriticalSectionScoped cs(_callbackCritSectPtr);
        if (_voiceEngineObserverPtr)
        {
            int32_t channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Reset timeout mechanism
            _rtpPacketTimedOut = false;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnReceivedPacket() =>"
                         " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
            _voiceEngineObserverPtr->CallbackOnError(
                channel,
                VE_PACKET_RECEIPT_RESTARTED);
        }
    }
}

void
Channel::OnPeriodicDeadOrAlive(int32_t id,
                               RTPAliveType alive)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (!_connectionObserver)
            return;
    }

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Use Alive as default to limit the risk of false Dead detections
    bool isAlive(true);

    // Always mark the connection as Dead when the module reports kRtpDead
    if (kRtpDead == alive)
    {
        isAlive = false;
    }

    // It is possible that the connection is alive even if no RTP packet has
    // been received for a long time, since the other side might use VAD/DTX
    // and a low SID-packet update rate.
    if ((kRtpNoRtp == alive) && _playing)
    {
        // Detect Alive for all NetEQ states except for the case when we are
        // in PLC_CNG state.
        // PLC_CNG <=> background noise only due to long expand or error.
        // Note that the case where the other side stops sending during CNG
        // state will be detected as Alive. Dead is not set until RTCP packets
        // have been missing for at least twelve seconds (handled internally
        // by the RTP/RTCP module).
        isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
    }

    UpdateDeadOrAliveCounters(isAlive);

    // Send callback to the registered observer
    if (_connectionObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_connectionObserverPtr)
        {
            _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
        }
    }
}

int32_t
Channel::OnReceivedPayloadData(const uint8_t* payloadData,
                               uint16_t payloadSize,
                               const WebRtcRTPHeader* rtpHeader)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPayloadData(payloadSize=%d,"
                 " payloadType=%u, audioChannel=%u)",
                 payloadSize,
                 rtpHeader->header.payloadType,
                 rtpHeader->type.Audio.channel);

    _lastRemoteTimeStamp = rtpHeader->header.timestamp;

    if (!_playing)
    {
        // Avoid inserting into NetEQ when we are not playing. Count the
        // packet as discarded.
        WEBRTC_TRACE(kTraceStream, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "received packet is discarded since playing is not"
                     " activated");
        _numberOfDiscardedPackets++;
        return 0;
    }

    // Push the incoming payload (parsed and ready for decoding) into the ACM
    if (_audioCodingModule.IncomingPacket(payloadData,
                                          payloadSize,
                                          *rtpHeader) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
            "Channel::OnReceivedPayloadData() unable to push data to the ACM");
        return -1;
    }

    // Update the packet delay.
    UpdatePacketDelay(rtpHeader->header.timestamp,
                      rtpHeader->header.sequenceNumber);

    uint16_t round_trip_time = 0;
    _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time,
                        NULL, NULL, NULL);

    std::vector<uint16_t> nack_list = _audioCodingModule.GetNackList(
        round_trip_time);
    if (!nack_list.empty()) {
        // Can't use nack_list.data() since it's not supported by all
        // compilers.
        ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
    }
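    // The list returned by GetNackList() holds the sequence numbers that the
    // ACM/NetEQ still considers missing given the measured round-trip time;
    // ResendPackets() hands them to the RTP/RTCP module, which is assumed to
    // issue a NACK (retransmission request) towards the sender.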
    return 0;
}

bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
                                int rtp_packet_length) {
  RTPHeader header;
  if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
    WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "IncomingPacket invalid RTP header");
    return false;
  }
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return false;
  return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
}

int32_t Channel::GetAudioFrame(int32_t id, AudioFrame& audioFrame)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetAudioFrame(id=%d)", id);

    // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
    if (_audioCodingModule.PlayoutData10Ms(audioFrame.sample_rate_hz_,
                                           &audioFrame) == -1)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
        // In all likelihood, the audio in this frame is garbage. We return an
        // error so that the audio mixer module doesn't add it to the mix. As
        // a result, it won't be played out and the actions skipped here are
        // irrelevant.
        return -1;
    }

    if (_RxVadDetection)
    {
        UpdateRxVadDetection(audioFrame);
    }

    // Convert module ID to internal VoE channel ID
    audioFrame.id_ = VoEChannelId(audioFrame.id_);
    // Store speech type for dead-or-alive detection
    _outputSpeechType = audioFrame.speech_type_;

    // Perform far-end AudioProcessing module processing on the received signal
    if (_rxApmIsEnabled)
    {
        ApmProcessRx(audioFrame);
    }

    // Output volume scaling
    if (_outputGain < 0.99f || _outputGain > 1.01f)
    {
        AudioFrameOperations::ScaleWithSat(_outputGain, audioFrame);
    }
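    // The 1% dead band around unity gain above avoids rewriting the samples
    // when no audible scaling has actually been requested.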

    // Scale left and/or right channel(s) if stereo and master balance is
    // active

    if (_panLeft != 1.0f || _panRight != 1.0f)
    {
        if (audioFrame.num_channels_ == 1)
        {
            // Emulate stereo mode since panning is active.
            // The mono signal is copied to both left and right channels here.
            AudioFrameOperations::MonoToStereo(&audioFrame);
        }
        // For true stereo mode (when we are receiving a stereo signal), no
        // action is needed.

        // Do the panning operation (the audio frame contains stereo at this
        // stage)
        AudioFrameOperations::Scale(_panLeft, _panRight, audioFrame);
    }

    // Mix decoded PCM output with file if file mixing is enabled
    if (_outputFilePlaying)
    {
        MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
    }

    // Place channel in on-hold state (~muted) if on-hold is activated
    if (_outputIsOnHold)
    {
        AudioFrameOperations::Mute(audioFrame);
    }

    // External media
    if (_outputExternalMedia)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        const bool isStereo = (audioFrame.num_channels_ == 2);
        if (_outputExternalMediaCallbackPtr)
        {
            _outputExternalMediaCallbackPtr->Process(
                _channelId,
                kPlaybackPerChannel,
                (int16_t*)audioFrame.data_,
                audioFrame.samples_per_channel_,
                audioFrame.sample_rate_hz_,
                isStereo);
        }
    }

    // Record playout if enabled
    {
        CriticalSectionScoped cs(&_fileCritSect);

        if (_outputFileRecording && _outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
        }
    }

    // Measure audio level (0-9)
    _outputAudioLevel.ComputeLevel(audioFrame);

    return 0;
}

int32_t
Channel::NeededFrequency(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::NeededFrequency(id=%d)", id);

    int highestNeeded = 0;

    // Determine highest needed receive frequency
    int32_t receiveFrequency = _audioCodingModule.ReceiveFrequency();

    // Return the bigger of playout and receive frequency in the ACM.
    if (_audioCodingModule.PlayoutFrequency() > receiveFrequency)
    {
        highestNeeded = _audioCodingModule.PlayoutFrequency();
    }
    else
    {
        highestNeeded = receiveFrequency;
    }

    // Special case: if we are playing a file on the playout side we take
    // that frequency into consideration as well. This is not needed on the
    // sending side, since the codec will limit the spectrum anyway.
    if (_outputFilePlaying)
    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_outputFilePlayerPtr && _outputFilePlaying)
        {
            if (_outputFilePlayerPtr->Frequency() > highestNeeded)
            {
                highestNeeded = _outputFilePlayerPtr->Frequency();
            }
        }
    }

    return highestNeeded;
}

int32_t
Channel::CreateChannel(Channel*& channel,
                       int32_t channelId,
                       uint32_t instanceId,
                       const Config& config)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
                 "Channel::CreateChannel(channelId=%d, instanceId=%d)",
                 channelId, instanceId);

    channel = new Channel(channelId, instanceId, config);
    if (channel == NULL)
    {
        WEBRTC_TRACE(kTraceMemory, kTraceVoice,
                     VoEId(instanceId,channelId),
                     "Channel::CreateChannel() unable to allocate memory for"
                     " channel");
        return -1;
    }
    return 0;
}

void
Channel::PlayNotification(int32_t id, uint32_t durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet
}

void
Channel::RecordNotification(int32_t id, uint32_t durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet
}

void
Channel::PlayFileEnded(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayFileEnded(id=%d)", id);

    if (id == _inputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _inputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => input file player module is"
                     " shutdown");
    }
    else if (id == _outputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _outputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => output file player module is"
                     " shutdown");
    }
}

void
Channel::RecordFileEnded(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded(id=%d)", id);

    assert(id == _outputFileRecorderId);

    CriticalSectionScoped cs(&_fileCritSect);

    _outputFileRecording = false;
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                 VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded() => output file recorder module is"
                 " shutdown");
}

Channel::Channel(int32_t channelId,
                 uint32_t instanceId,
                 const Config& config) :
    _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _instanceId(instanceId),
    _channelId(channelId),
    rtp_header_parser_(RtpHeaderParser::Create()),
    rtp_payload_registry_(
        new RTPPayloadRegistry(channelId,
                               RTPPayloadStrategy::CreateStrategy(true))),
    rtp_receive_statistics_(ReceiveStatistics::Create(
        Clock::GetRealTimeClock())),
    rtp_receiver_(RtpReceiver::CreateAudioReceiver(
        VoEModuleId(instanceId, channelId), Clock::GetRealTimeClock(), this,
        this, this, rtp_payload_registry_.get())),
    telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
    _audioCodingModule(*config.Get<AudioCodingModuleFactory>().Create(
        VoEModuleId(instanceId, channelId))),
    _rtpDumpIn(*RtpDump::CreateRtpDump()),
    _rtpDumpOut(*RtpDump::CreateRtpDump()),
    _outputAudioLevel(),
    _externalTransport(false),
    _inputFilePlayerPtr(NULL),
    _outputFilePlayerPtr(NULL),
    _outputFileRecorderPtr(NULL),
    // Avoid conflicts with other channels by adding 1024 - 1026;
    // we will not use as many as 1024 channels.
    _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
    _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
    _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
    _inputFilePlaying(false),
    _outputFilePlaying(false),
    _outputFileRecording(false),
    _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
    _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
    _inputExternalMedia(false),
    _outputExternalMedia(false),
    _inputExternalMediaCallbackPtr(NULL),
    _outputExternalMediaCallbackPtr(NULL),
    _encryptionRTPBufferPtr(NULL),
    _decryptionRTPBufferPtr(NULL),
    _encryptionRTCPBufferPtr(NULL),
    _decryptionRTCPBufferPtr(NULL),
    _timeStamp(0), // This is just an offset; the RTP module will add its own
                   // random offset.
    _sendTelephoneEventPayloadType(106),
    playout_timestamp_rtp_(0),
    playout_timestamp_rtcp_(0),
    _numberOfDiscardedPackets(0),
    send_sequence_number_(0),
    _engineStatisticsPtr(NULL),
    _outputMixerPtr(NULL),
    _transmitMixerPtr(NULL),
    _moduleProcessThreadPtr(NULL),
    _audioDeviceModulePtr(NULL),
    _voiceEngineObserverPtr(NULL),
    _callbackCritSectPtr(NULL),
    _transportPtr(NULL),
    _encryptionPtr(NULL),
    _rtpAudioProc(NULL),
    _rxAudioProcessingModulePtr(NULL),
    _rxVadObserverPtr(NULL),
    _oldVadDecision(-1),
    _sendFrameType(0),
    _rtpObserverPtr(NULL),
    _rtcpObserverPtr(NULL),
    _outputIsOnHold(false),
    _externalPlayout(false),
    _externalMixing(false),
    _inputIsOnHold(false),
    _playing(false),
    _sending(false),
    _receiving(false),
    _mixFileWithMicrophone(false),
    _rtpObserver(false),
    _rtcpObserver(false),
    _mute(false),
    _panLeft(1.0f),
    _panRight(1.0f),
    _outputGain(1.0f),
    _encrypting(false),
    _decrypting(false),
    _playOutbandDtmfEvent(false),
    _playInbandDtmfEvent(false),
    _extraPayloadType(0),
    _insertExtraRTPPacket(false),
    _extraMarkerBit(false),
    _lastLocalTimeStamp(0),
    _lastRemoteTimeStamp(0),
    _lastPayloadType(0),
    _includeAudioLevelIndication(false),
    _rtpPacketTimedOut(false),
    _rtpPacketTimeOutIsEnabled(false),
    _rtpTimeOutSeconds(0),
    _connectionObserver(false),
    _connectionObserverPtr(NULL),
    _countAliveDetections(0),
    _countDeadDetections(0),
    _outputSpeechType(AudioFrame::kNormalSpeech),
    _average_jitter_buffer_delay_us(0),
    least_required_delay_ms_(0),
    _previousTimestamp(0),
    _recPacketDelayMs(20),
    _RxVadDetection(false),
    _rxApmIsEnabled(false),
    _rxAgcIsEnabled(false),
    _rxNsIsEnabled(false),
    restored_packet_in_use_(false)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Channel() - ctor");
    _inbandDtmfQueue.ResetDtmf();
    _inbandDtmfGenerator.Init();
    _outputAudioLevel.Clear();

    RtpRtcp::Configuration configuration;
    configuration.id = VoEModuleId(instanceId, channelId);
    configuration.audio = true;
    configuration.outgoing_transport = this;
    configuration.rtcp_feedback = this;
    configuration.audio_messages = this;
    configuration.receive_statistics = rtp_receive_statistics_.get();

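    // The channel registers itself as the outgoing transport, the RTCP
    // feedback sink, and the audio-messages callback of its RTP/RTCP module,
    // so SendPacket()/SendRTCPPacket() and the On*() callbacks defined above
    // are invoked by this module instance.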
    _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));

    // Create far end AudioProcessing Module
    _rxAudioProcessingModulePtr = AudioProcessing::Create(
        VoEModuleId(instanceId, channelId));
}

Channel::~Channel()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::~Channel() - dtor");

    if (_outputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
    }
    if (_inputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kRecordingPerChannel);
    }
    StopSend();
    StopPlayout();

    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_inputFilePlayerPtr)
        {
            _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _inputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
            _inputFilePlayerPtr = NULL;
        }
        if (_outputFilePlayerPtr)
        {
            _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _outputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
            _outputFilePlayerPtr = NULL;
        }
        if (_outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
            _outputFileRecorderPtr->StopRecording();
            FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
            _outputFileRecorderPtr = NULL;
        }
    }

    // The order to safely shutdown modules in a channel is:
    // 1. De-register callbacks in modules
    // 2. De-register modules in process thread
    // 3. Destroy modules
    if (_audioCodingModule.RegisterTransportCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register transport callback"
                     " (Audio coding module)");
    }
    if (_audioCodingModule.RegisterVADCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register VAD callback"
                     " (Audio coding module)");
    }
    // De-register modules in process thread
    if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to deregister RTP/RTCP module");
    }

    // Destroy modules
    AudioCodingModule::Destroy(&_audioCodingModule);
    if (_rxAudioProcessingModulePtr != NULL)
    {
        AudioProcessing::Destroy(_rxAudioProcessingModulePtr); // far end APM
        _rxAudioProcessingModulePtr = NULL;
    }

    // End of modules shutdown

    // Delete other objects
    RtpDump::DestroyRtpDump(&_rtpDumpIn);
    RtpDump::DestroyRtpDump(&_rtpDumpOut);
    delete [] _encryptionRTPBufferPtr;
    delete [] _decryptionRTPBufferPtr;
    delete [] _encryptionRTCPBufferPtr;
    delete [] _decryptionRTCPBufferPtr;
    delete &_callbackCritSect;
    delete &_fileCritSect;
}

int32_t
Channel::Init()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Init()");

    // --- Initial sanity

    if ((_engineStatisticsPtr == NULL) ||
        (_moduleProcessThreadPtr == NULL))
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::Init() must call SetEngineInformation() first");
        return -1;
    }

    // --- Add modules to process thread (for periodic scheduling)

    const bool processThreadFail =
        ((_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0) ||
         false);
    if (processThreadFail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() modules not registered");
        return -1;
    }
    // --- ACM initialization

    if ((_audioCodingModule.InitializeReceiver() == -1) ||
#ifdef WEBRTC_CODEC_AVT
        // out-of-band Dtmf tones are played out by default
        (_audioCodingModule.SetDtmfPlayoutStatus(true) == -1) ||
#endif
        (_audioCodingModule.InitializeSender() == -1))
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "Channel::Init() unable to initialize the ACM - 1");
        return -1;
    }

    // --- RTP/RTCP module initialization

    // Ensure that RTCP is enabled by default for the created channel.
    // Note that the module will keep generating RTCP until it is explicitly
    // disabled by the user.
    // After StopListen (when no sockets exist), RTCP packets will no longer
    // be transmitted since the Transport object will then be invalid.
    telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
    // RTCP is enabled by default.
    if (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
            "Channel::Init() RTP/RTCP module not initialized");
        return -1;
    }

    // --- Register all permanent callbacks
    const bool fail =
        (_audioCodingModule.RegisterTransportCallback(this) == -1) ||
        (_audioCodingModule.RegisterVADCallback(this) == -1);

    if (fail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() callbacks not registered");
        return -1;
    }

    // --- Register all supported codecs to the receiving side of the
    //     RTP/RTCP module

    CodecInst codec;
    const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

    for (int idx = 0; idx < nSupportedCodecs; idx++)
    {
        // Open up the RTP/RTCP receiver for all supported codecs
        if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
            (rtp_receiver_->RegisterReceivePayload(
                codec.plname,
                codec.pltype,
                codec.plfreq,
                codec.channels,
                (codec.rate < 0) ? 0 : codec.rate) == -1))
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() unable to register %s (%d/%d/%d/%d) "
                         "to RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }
        else
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() %s (%d/%d/%d/%d) has been added to "
                         "the RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }

        // Ensure that PCMU is used as default codec on the sending side
        if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
        {
            SetSendCodec(codec);
        }

        // Register default PT for outband 'telephone-event'
        if (!STR_CASE_CMP(codec.plname, "telephone-event"))
        {
            if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register outband "
                             "'telephone-event' (%d/%d) correctly",
                             codec.pltype, codec.plfreq);
            }
        }

        if (!STR_CASE_CMP(codec.plname, "CN"))
        {
            if ((_audioCodingModule.RegisterSendCodec(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1) ||
                (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register CN (%d/%d) "
                             "correctly - 1",
                             codec.pltype, codec.plfreq);
            }
        }
#ifdef WEBRTC_CODEC_RED
        // Register RED to the receiving side of the ACM.
        // We will not receive an OnInitializeDecoder() callback for RED.
        if (!STR_CASE_CMP(codec.plname, "RED"))
        {
            if (_audioCodingModule.RegisterReceiveCodec(codec) == -1)
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register RED (%d/%d) "
                             "correctly",
                             codec.pltype, codec.plfreq);
            }
        }
#endif
    }
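    // At this point every codec the ACM knows about has been registered with
    // the RTP receiver, so its payload type can be demultiplexed on the
    // receive side; the send side only has the defaults selected above
    // (PCMU as send codec, plus the 'telephone-event' and CN send payloads).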

    // Initialize the far end AP module
    // Using 8 kHz as initial Fs, the same as in transmission. May be
    // changed when the first audio is received.
    if (_rxAudioProcessingModulePtr == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_NO_MEMORY, kTraceCritical,
            "Channel::Init() failed to create the far-end AudioProcessing"
            " module");
        return -1;
    }

    if (_rxAudioProcessingModulePtr->set_sample_rate_hz(8000))
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the sample rate to 8K for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->set_num_channels(1, 1) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_SOUNDCARD_ERROR, kTraceWarning,
            "Init() failed to set channels for the primary audio stream");
    }

    if (_rxAudioProcessingModulePtr->high_pass_filter()->Enable(
        WEBRTC_VOICE_ENGINE_RX_HP_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the high-pass filter for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(
        (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction level for far-end"
            " AP module");
    }
    if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(
        WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction state for far-end"
            " AP module");
    }

    if (_rxAudioProcessingModulePtr->gain_control()->set_mode(
        (GainControl::Mode)WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC mode for far-end AP module");
    }
    if (_rxAudioProcessingModulePtr->gain_control()->Enable(
        WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC state for far-end AP module");
    }

    return 0;
}

pbos@webrtc.org6141e132013-04-09 10:09:10 +00001351int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001352Channel::SetEngineInformation(Statistics& engineStatistics,
1353 OutputMixer& outputMixer,
1354 voe::TransmitMixer& transmitMixer,
1355 ProcessThread& moduleProcessThread,
1356 AudioDeviceModule& audioDeviceModule,
1357 VoiceEngineObserver* voiceEngineObserver,
1358 CriticalSectionWrapper* callbackCritSect)
1359{
1360 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1361 "Channel::SetEngineInformation()");
1362 _engineStatisticsPtr = &engineStatistics;
1363 _outputMixerPtr = &outputMixer;
1364    _transmitMixerPtr = &transmitMixer;
1365 _moduleProcessThreadPtr = &moduleProcessThread;
1366 _audioDeviceModulePtr = &audioDeviceModule;
1367 _voiceEngineObserverPtr = voiceEngineObserver;
1368 _callbackCritSectPtr = callbackCritSect;
1369 return 0;
1370}
1371
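// Advance the local RTP timestamp by the number of samples in the current
// audio frame.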
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001372int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001373Channel::UpdateLocalTimeStamp()
1374{
1375
andrew@webrtc.org63a50982012-05-02 23:56:37 +00001376 _timeStamp += _audioFrame.samples_per_channel_;
niklase@google.com470e71d2011-07-07 08:21:25 +00001377 return 0;
1378}
1379
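// Start playout on this channel. Unless external mixing is used, the channel
// is added to the output mixer as a mixing candidate; any ongoing file
// playout is also registered with the mixer.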
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001380int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001381Channel::StartPlayout()
1382{
1383 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1384 "Channel::StartPlayout()");
1385 if (_playing)
1386 {
1387 return 0;
1388 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00001389
1390 if (!_externalMixing) {
1391        // Add the participant as a candidate for mixing.
1392 if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
1393 {
1394 _engineStatisticsPtr->SetLastError(
1395 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1396 "StartPlayout() failed to add participant to mixer");
1397 return -1;
1398 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001399 }
1400
1401 _playing = true;
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001402
1403 if (RegisterFilePlayingToMixer() != 0)
1404 return -1;
1405
niklase@google.com470e71d2011-07-07 08:21:25 +00001406 return 0;
1407}
1408
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001409int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001410Channel::StopPlayout()
1411{
1412 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1413 "Channel::StopPlayout()");
1414 if (!_playing)
1415 {
1416 return 0;
1417 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00001418
1419 if (!_externalMixing) {
1420        // Remove the participant as a candidate for mixing.
1421 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
1422 {
1423 _engineStatisticsPtr->SetLastError(
1424 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1425 "StopPlayout() failed to remove participant from mixer");
1426 return -1;
1427 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001428 }
1429
1430 _playing = false;
1431 _outputAudioLevel.Clear();
1432
1433 return 0;
1434}
1435
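// Start sending on this channel. The sequence number saved by a previous
// StopSend() is restored (if any) before |_sending| is set, and the
// RTP/RTCP module is told to start sending.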
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001436int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001437Channel::StartSend()
1438{
1439 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1440 "Channel::StartSend()");
xians@webrtc.org09e8c472013-07-31 16:30:19 +00001441 // Resume the previous sequence number which was reset by StopSend().
1442 // This needs to be done before |_sending| is set to true.
1443 if (send_sequence_number_)
1444 SetInitSequenceNumber(send_sequence_number_);
1445
niklase@google.com470e71d2011-07-07 08:21:25 +00001446 {
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001447 // A lock is needed because |_sending| can be accessed or modified by
1448 // another thread at the same time.
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001449 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001450
1451 if (_sending)
1452 {
1453 return 0;
1454 }
1455 _sending = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00001456 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001457
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001458 if (_rtpRtcpModule->SetSendingStatus(true) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001459 {
1460 _engineStatisticsPtr->SetLastError(
1461 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1462 "StartSend() RTP/RTCP failed to start sending");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001463 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001464 _sending = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00001465 return -1;
1466 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001467
niklase@google.com470e71d2011-07-07 08:21:25 +00001468 return 0;
1469}
1470
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001471int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001472Channel::StopSend()
1473{
1474 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1475 "Channel::StopSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001476 {
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001477 // A lock is needed because |_sending| can be accessed or modified by
1478 // another thread at the same time.
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001479 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001480
1481 if (!_sending)
1482 {
1483 return 0;
1484 }
1485 _sending = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00001486 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001487
xians@webrtc.org09e8c472013-07-31 16:30:19 +00001488 // Store the sequence number to be able to pick up the same sequence for
1489 // the next StartSend(). This is needed for restarting device, otherwise
1490 // it might cause libSRTP to complain about packets being replayed.
1491 // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1492 // CL is landed. See issue
1493 // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1494 send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1495
niklase@google.com470e71d2011-07-07 08:21:25 +00001496    // Reset the sending SSRC and sequence number and trigger direct
1497    // transmission of an RTCP BYE.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001498 if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
1499 _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001500 {
1501 _engineStatisticsPtr->SetLastError(
1502 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1503            "StopSend() RTP/RTCP failed to stop sending");
1504 }
1505
niklase@google.com470e71d2011-07-07 08:21:25 +00001506 return 0;
1507}
1508
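// Mark the channel as receiving and reset the discarded-packet counter.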
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001509int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001510Channel::StartReceiving()
1511{
1512 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1513 "Channel::StartReceiving()");
1514 if (_receiving)
1515 {
1516 return 0;
1517 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001518 _receiving = true;
1519 _numberOfDiscardedPackets = 0;
1520 return 0;
1521}
1522
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001523int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001524Channel::StopReceiving()
1525{
1526 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1527 "Channel::StopReceiving()");
1528 if (!_receiving)
1529 {
1530 return 0;
1531 }
pwestin@webrtc.org684f0572013-03-13 23:20:57 +00001532
henrika@webrtc.orgaf71f0e2011-12-05 07:02:22 +00001533 // Recover DTMF detection status.
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001534 telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
niklase@google.com470e71d2011-07-07 08:21:25 +00001535 RegisterReceiveCodecsToRTPModule();
1536 _receiving = false;
1537 return 0;
1538}
1539
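// Map the requested NetEQ playout mode to the corresponding ACM playout mode
// and apply it to the audio coding module.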
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001540int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001541Channel::SetNetEQPlayoutMode(NetEqModes mode)
1542{
1543 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1544 "Channel::SetNetEQPlayoutMode()");
1545 AudioPlayoutMode playoutMode(voice);
1546 switch (mode)
1547 {
1548 case kNetEqDefault:
1549 playoutMode = voice;
1550 break;
1551 case kNetEqStreaming:
1552 playoutMode = streaming;
1553 break;
1554 case kNetEqFax:
1555 playoutMode = fax;
1556 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001557 case kNetEqOff:
1558 playoutMode = off;
1559 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00001560 }
1561 if (_audioCodingModule.SetPlayoutMode(playoutMode) != 0)
1562 {
1563 _engineStatisticsPtr->SetLastError(
1564 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1565 "SetNetEQPlayoutMode() failed to set playout mode");
1566 return -1;
1567 }
1568 return 0;
1569}
1570
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001571int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001572Channel::GetNetEQPlayoutMode(NetEqModes& mode)
1573{
1574 const AudioPlayoutMode playoutMode = _audioCodingModule.PlayoutMode();
1575 switch (playoutMode)
1576 {
1577 case voice:
1578 mode = kNetEqDefault;
1579 break;
1580 case streaming:
1581 mode = kNetEqStreaming;
1582 break;
1583 case fax:
1584 mode = kNetEqFax;
1585 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001586 case off:
1587 mode = kNetEqOff;
niklase@google.com470e71d2011-07-07 08:21:25 +00001588 }
1589 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
1590 VoEId(_instanceId,_channelId),
1591 "Channel::GetNetEQPlayoutMode() => mode=%u", mode);
1592 return 0;
1593}
1594
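// Put playout and/or transmission on hold for this channel, depending on
// |mode|.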
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001595int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001596Channel::SetOnHoldStatus(bool enable, OnHoldModes mode)
1597{
1598 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1599 "Channel::SetOnHoldStatus()");
1600 if (mode == kHoldSendAndPlay)
1601 {
1602 _outputIsOnHold = enable;
1603 _inputIsOnHold = enable;
1604 }
1605 else if (mode == kHoldPlayOnly)
1606 {
1607 _outputIsOnHold = enable;
1608 }
1609    else if (mode == kHoldSendOnly)
1610 {
1611 _inputIsOnHold = enable;
1612 }
1613 return 0;
1614}
1615
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001616int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001617Channel::GetOnHoldStatus(bool& enabled, OnHoldModes& mode)
1618{
1619 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1620 "Channel::GetOnHoldStatus()");
1621 enabled = (_outputIsOnHold || _inputIsOnHold);
1622 if (_outputIsOnHold && _inputIsOnHold)
1623 {
1624 mode = kHoldSendAndPlay;
1625 }
1626 else if (_outputIsOnHold && !_inputIsOnHold)
1627 {
1628 mode = kHoldPlayOnly;
1629 }
1630 else if (!_outputIsOnHold && _inputIsOnHold)
1631 {
1632 mode = kHoldSendOnly;
1633 }
1634 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1635 "Channel::GetOnHoldStatus() => enabled=%d, mode=%d",
1636 enabled, mode);
1637 return 0;
1638}
1639
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001640int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001641Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
1642{
1643 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1644 "Channel::RegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001645 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001646
1647 if (_voiceEngineObserverPtr)
1648 {
1649 _engineStatisticsPtr->SetLastError(
1650 VE_INVALID_OPERATION, kTraceError,
1651 "RegisterVoiceEngineObserver() observer already enabled");
1652 return -1;
1653 }
1654 _voiceEngineObserverPtr = &observer;
1655 return 0;
1656}
1657
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001658int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001659Channel::DeRegisterVoiceEngineObserver()
1660{
1661 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1662 "Channel::DeRegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001663 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001664
1665 if (!_voiceEngineObserverPtr)
1666 {
1667 _engineStatisticsPtr->SetLastError(
1668 VE_INVALID_OPERATION, kTraceWarning,
1669 "DeRegisterVoiceEngineObserver() observer already disabled");
1670 return 0;
1671 }
1672 _voiceEngineObserverPtr = NULL;
1673 return 0;
1674}
1675
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001676int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001677Channel::GetSendCodec(CodecInst& codec)
1678{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001679 return (_audioCodingModule.SendCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001680}
1681
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001682int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001683Channel::GetRecCodec(CodecInst& codec)
1684{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001685 return (_audioCodingModule.ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001686}
1687
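// Register |codec| as the send codec in the ACM and in the RTP/RTCP module.
// The RTP registration is retried once after a de-registration if the first
// attempt fails, and the RTP audio packet size is updated to match the codec.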
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001688int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001689Channel::SetSendCodec(const CodecInst& codec)
1690{
1691 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1692 "Channel::SetSendCodec()");
1693
1694 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1695 {
1696 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1697 "SetSendCodec() failed to register codec to ACM");
1698 return -1;
1699 }
1700
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001701 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001702 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001703 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1704 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001705 {
1706 WEBRTC_TRACE(
1707 kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1708 "SetSendCodec() failed to register codec to"
1709 " RTP/RTCP module");
1710 return -1;
1711 }
1712 }
1713
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001714 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001715 {
1716 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1717 "SetSendCodec() failed to set audio packet size");
1718 return -1;
1719 }
1720
1721 return 0;
1722}
1723
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001724int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001725Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
1726{
1727 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1728 "Channel::SetVADStatus(mode=%d)", mode);
1729 // To disable VAD, DTX must be disabled too
1730 disableDTX = ((enableVAD == false) ? true : disableDTX);
1731 if (_audioCodingModule.SetVAD(!disableDTX, enableVAD, mode) != 0)
1732 {
1733 _engineStatisticsPtr->SetLastError(
1734 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1735 "SetVADStatus() failed to set VAD");
1736 return -1;
1737 }
1738 return 0;
1739}
1740
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001741int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001742Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
1743{
1744 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1745 "Channel::GetVADStatus");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001746 if (_audioCodingModule.VAD(&disabledDTX, &enabledVAD, &mode) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001747 {
1748 _engineStatisticsPtr->SetLastError(
1749 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1750 "GetVADStatus() failed to get VAD status");
1751 return -1;
1752 }
1753 disabledDTX = !disabledDTX;
1754 return 0;
1755}
1756
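// Register |codec| as a receive payload in the RTP receiver and the ACM, or
// de-register it when codec.pltype is -1. Not allowed while the channel is
// playing or listening.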
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001757int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001758Channel::SetRecPayloadType(const CodecInst& codec)
1759{
1760 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1761 "Channel::SetRecPayloadType()");
1762
1763 if (_playing)
1764 {
1765 _engineStatisticsPtr->SetLastError(
1766 VE_ALREADY_PLAYING, kTraceError,
1767 "SetRecPayloadType() unable to set PT while playing");
1768 return -1;
1769 }
1770 if (_receiving)
1771 {
1772 _engineStatisticsPtr->SetLastError(
1773 VE_ALREADY_LISTENING, kTraceError,
1774 "SetRecPayloadType() unable to set PT while listening");
1775 return -1;
1776 }
1777
1778 if (codec.pltype == -1)
1779 {
1780 // De-register the selected codec (RTP/RTCP module and ACM)
1781
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001782 int8_t pltype(-1);
niklase@google.com470e71d2011-07-07 08:21:25 +00001783 CodecInst rxCodec = codec;
1784
1785 // Get payload type for the given codec
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001786 rtp_payload_registry_->ReceivePayloadType(
1787 rxCodec.plname,
1788 rxCodec.plfreq,
1789 rxCodec.channels,
1790 (rxCodec.rate < 0) ? 0 : rxCodec.rate,
1791 &pltype);
niklase@google.com470e71d2011-07-07 08:21:25 +00001792 rxCodec.pltype = pltype;
1793
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001794 if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001795 {
1796 _engineStatisticsPtr->SetLastError(
1797 VE_RTP_RTCP_MODULE_ERROR,
1798 kTraceError,
1799 "SetRecPayloadType() RTP/RTCP-module deregistration "
1800 "failed");
1801 return -1;
1802 }
1803 if (_audioCodingModule.UnregisterReceiveCodec(rxCodec.pltype) != 0)
1804 {
1805 _engineStatisticsPtr->SetLastError(
1806 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1807 "SetRecPayloadType() ACM deregistration failed - 1");
1808 return -1;
1809 }
1810 return 0;
1811 }
1812
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001813 if (rtp_receiver_->RegisterReceivePayload(
1814 codec.plname,
1815 codec.pltype,
1816 codec.plfreq,
1817 codec.channels,
1818 (codec.rate < 0) ? 0 : codec.rate) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001819 {
1820 // First attempt to register failed => de-register and try again
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001821 rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
1822 if (rtp_receiver_->RegisterReceivePayload(
1823 codec.plname,
1824 codec.pltype,
1825 codec.plfreq,
1826 codec.channels,
1827 (codec.rate < 0) ? 0 : codec.rate) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001828 {
1829 _engineStatisticsPtr->SetLastError(
1830 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1831 "SetRecPayloadType() RTP/RTCP-module registration failed");
1832 return -1;
1833 }
1834 }
1835 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1836 {
1837 _audioCodingModule.UnregisterReceiveCodec(codec.pltype);
1838 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1839 {
1840 _engineStatisticsPtr->SetLastError(
1841 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1842 "SetRecPayloadType() ACM registration failed - 1");
1843 return -1;
1844 }
1845 }
1846 return 0;
1847}
1848
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001849int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001850Channel::GetRecPayloadType(CodecInst& codec)
1851{
1852 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1853 "Channel::GetRecPayloadType()");
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001854 int8_t payloadType(-1);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001855 if (rtp_payload_registry_->ReceivePayloadType(
1856 codec.plname,
1857 codec.plfreq,
1858 codec.channels,
1859 (codec.rate < 0) ? 0 : codec.rate,
1860 &payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001861 {
1862 _engineStatisticsPtr->SetLastError(
henrika@webrtc.org37198002012-06-18 11:00:12 +00001863 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
niklase@google.com470e71d2011-07-07 08:21:25 +00001864 "GetRecPayloadType() failed to retrieve RX payload type");
1865 return -1;
1866 }
1867 codec.pltype = payloadType;
1868 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1869 "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
1870 return 0;
1871}
1872
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001873int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001874Channel::SetAMREncFormat(AmrMode mode)
1875{
1876 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1877 "Channel::SetAMREncFormat()");
1878
1879 // ACM doesn't support AMR
1880 return -1;
1881}
1882
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001883int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001884Channel::SetAMRDecFormat(AmrMode mode)
1885{
1886 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1887 "Channel::SetAMRDecFormat()");
1888
1889 // ACM doesn't support AMR
1890 return -1;
1891}
1892
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001893int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001894Channel::SetAMRWbEncFormat(AmrMode mode)
1895{
1896 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1897 "Channel::SetAMRWbEncFormat()");
1898
1899 // ACM doesn't support AMR
1900 return -1;
1901
1902}
1903
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001904int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001905Channel::SetAMRWbDecFormat(AmrMode mode)
1906{
1907 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1908 "Channel::SetAMRWbDecFormat()");
1909
1910 // ACM doesn't support AMR
1911 return -1;
1912}
1913
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001914int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001915Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
1916{
1917 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1918 "Channel::SetSendCNPayloadType()");
1919
1920 CodecInst codec;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001921 int32_t samplingFreqHz(-1);
tina.legrand@webrtc.org45175852012-06-01 09:27:35 +00001922 const int kMono = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001923 if (frequency == kFreq32000Hz)
1924 samplingFreqHz = 32000;
1925 else if (frequency == kFreq16000Hz)
1926 samplingFreqHz = 16000;
1927
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001928 if (_audioCodingModule.Codec("CN", &codec, samplingFreqHz, kMono) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001929 {
1930 _engineStatisticsPtr->SetLastError(
1931 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1932 "SetSendCNPayloadType() failed to retrieve default CN codec "
1933 "settings");
1934 return -1;
1935 }
1936
1937 // Modify the payload type (must be set to dynamic range)
1938 codec.pltype = type;
1939
1940 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1941 {
1942 _engineStatisticsPtr->SetLastError(
1943 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1944 "SetSendCNPayloadType() failed to register CN to ACM");
1945 return -1;
1946 }
1947
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001948 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001949 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001950 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1951 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001952 {
1953 _engineStatisticsPtr->SetLastError(
1954 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1955 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1956 "module");
1957 return -1;
1958 }
1959 }
1960 return 0;
1961}
1962
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001963int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001964Channel::SetISACInitTargetRate(int rateBps, bool useFixedFrameSize)
1965{
1966 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1967 "Channel::SetISACInitTargetRate()");
1968
1969 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001970 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001971 {
1972 _engineStatisticsPtr->SetLastError(
1973 VE_CODEC_ERROR, kTraceError,
1974 "SetISACInitTargetRate() failed to retrieve send codec");
1975 return -1;
1976 }
1977 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1978 {
1979 // This API is only valid if iSAC is setup to run in channel-adaptive
1980 // mode.
1981 // We do not validate the adaptive mode here. It is done later in the
1982 // ConfigISACBandwidthEstimator() API.
1983 _engineStatisticsPtr->SetLastError(
1984 VE_CODEC_ERROR, kTraceError,
1985 "SetISACInitTargetRate() send codec is not iSAC");
1986 return -1;
1987 }
1988
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001989 uint8_t initFrameSizeMsec(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00001990 if (16000 == sendCodec.plfreq)
1991 {
1992        // Note that 0 is a valid value and corresponds to "use the default".
1993 if ((rateBps != 0 &&
1994 rateBps < kVoiceEngineMinIsacInitTargetRateBpsWb) ||
1995 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsWb))
1996 {
1997 _engineStatisticsPtr->SetLastError(
1998 VE_INVALID_ARGUMENT, kTraceError,
1999 "SetISACInitTargetRate() invalid target rate - 1");
2000 return -1;
2001 }
2002 // 30 or 60ms
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002003 initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 16);
niklase@google.com470e71d2011-07-07 08:21:25 +00002004 }
2005 else if (32000 == sendCodec.plfreq)
2006 {
2007 if ((rateBps != 0 &&
2008 rateBps < kVoiceEngineMinIsacInitTargetRateBpsSwb) ||
2009 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsSwb))
2010 {
2011 _engineStatisticsPtr->SetLastError(
2012 VE_INVALID_ARGUMENT, kTraceError,
2013 "SetISACInitTargetRate() invalid target rate - 2");
2014 return -1;
2015 }
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002016 initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 32); // 30ms
niklase@google.com470e71d2011-07-07 08:21:25 +00002017 }
2018
2019 if (_audioCodingModule.ConfigISACBandwidthEstimator(
2020 initFrameSizeMsec, rateBps, useFixedFrameSize) == -1)
2021 {
2022 _engineStatisticsPtr->SetLastError(
2023 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2024 "SetISACInitTargetRate() iSAC BWE config failed");
2025 return -1;
2026 }
2027
2028 return 0;
2029}
2030
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002031int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002032Channel::SetISACMaxRate(int rateBps)
2033{
2034 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2035 "Channel::SetISACMaxRate()");
2036
2037 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00002038 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00002039 {
2040 _engineStatisticsPtr->SetLastError(
2041 VE_CODEC_ERROR, kTraceError,
2042 "SetISACMaxRate() failed to retrieve send codec");
2043 return -1;
2044 }
2045 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
2046 {
2047 // This API is only valid if iSAC is selected as sending codec.
2048 _engineStatisticsPtr->SetLastError(
2049 VE_CODEC_ERROR, kTraceError,
2050 "SetISACMaxRate() send codec is not iSAC");
2051 return -1;
2052 }
2053 if (16000 == sendCodec.plfreq)
2054 {
2055 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsWb) ||
2056 (rateBps > kVoiceEngineMaxIsacMaxRateBpsWb))
2057 {
2058 _engineStatisticsPtr->SetLastError(
2059 VE_INVALID_ARGUMENT, kTraceError,
2060 "SetISACMaxRate() invalid max rate - 1");
2061 return -1;
2062 }
2063 }
2064 else if (32000 == sendCodec.plfreq)
2065 {
2066 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsSwb) ||
2067 (rateBps > kVoiceEngineMaxIsacMaxRateBpsSwb))
2068 {
2069 _engineStatisticsPtr->SetLastError(
2070 VE_INVALID_ARGUMENT, kTraceError,
2071 "SetISACMaxRate() invalid max rate - 2");
2072 return -1;
2073 }
2074 }
2075 if (_sending)
2076 {
2077 _engineStatisticsPtr->SetLastError(
2078 VE_SENDING, kTraceError,
2079 "SetISACMaxRate() unable to set max rate while sending");
2080 return -1;
2081 }
2082
2083 // Set the maximum instantaneous rate of iSAC (works for both adaptive
2084 // and non-adaptive mode)
2085 if (_audioCodingModule.SetISACMaxRate(rateBps) == -1)
2086 {
2087 _engineStatisticsPtr->SetLastError(
2088 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2089 "SetISACMaxRate() failed to set max rate");
2090 return -1;
2091 }
2092
2093 return 0;
2094}
2095
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002096int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002097Channel::SetISACMaxPayloadSize(int sizeBytes)
2098{
2099 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2100 "Channel::SetISACMaxPayloadSize()");
2101 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00002102 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00002103 {
2104 _engineStatisticsPtr->SetLastError(
2105 VE_CODEC_ERROR, kTraceError,
2106 "SetISACMaxPayloadSize() failed to retrieve send codec");
2107 return -1;
2108 }
2109 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
2110 {
2111 _engineStatisticsPtr->SetLastError(
2112 VE_CODEC_ERROR, kTraceError,
2113 "SetISACMaxPayloadSize() send codec is not iSAC");
2114 return -1;
2115 }
2116 if (16000 == sendCodec.plfreq)
2117 {
2118 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesWb) ||
2119 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesWb))
2120 {
2121 _engineStatisticsPtr->SetLastError(
2122 VE_INVALID_ARGUMENT, kTraceError,
2123 "SetISACMaxPayloadSize() invalid max payload - 1");
2124 return -1;
2125 }
2126 }
2127 else if (32000 == sendCodec.plfreq)
2128 {
2129 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesSwb) ||
2130 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb))
2131 {
2132 _engineStatisticsPtr->SetLastError(
2133 VE_INVALID_ARGUMENT, kTraceError,
2134 "SetISACMaxPayloadSize() invalid max payload - 2");
2135 return -1;
2136 }
2137 }
2138 if (_sending)
2139 {
2140 _engineStatisticsPtr->SetLastError(
2141 VE_SENDING, kTraceError,
2142            "SetISACMaxPayloadSize() unable to set max payload size while sending");
2143 return -1;
2144 }
2145
2146 if (_audioCodingModule.SetISACMaxPayloadSize(sizeBytes) == -1)
2147 {
2148 _engineStatisticsPtr->SetLastError(
2149 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2150 "SetISACMaxPayloadSize() failed to set max payload size");
2151 return -1;
2152 }
2153 return 0;
2154}
2155
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002156int32_t Channel::RegisterExternalTransport(Transport& transport)
niklase@google.com470e71d2011-07-07 08:21:25 +00002157{
2158 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2159 "Channel::RegisterExternalTransport()");
2160
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002161 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002162
niklase@google.com470e71d2011-07-07 08:21:25 +00002163 if (_externalTransport)
2164 {
2165 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
2166 kTraceError,
2167 "RegisterExternalTransport() external transport already enabled");
2168 return -1;
2169 }
2170 _externalTransport = true;
2171 _transportPtr = &transport;
2172 return 0;
2173}
2174
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002175int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002176Channel::DeRegisterExternalTransport()
2177{
2178 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2179 "Channel::DeRegisterExternalTransport()");
2180
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002181 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.org83661f52011-11-25 10:58:15 +00002182
niklase@google.com470e71d2011-07-07 08:21:25 +00002183 if (!_transportPtr)
2184 {
2185 _engineStatisticsPtr->SetLastError(
2186 VE_INVALID_OPERATION, kTraceWarning,
2187 "DeRegisterExternalTransport() external transport already "
2188 "disabled");
2189 return 0;
2190 }
2191 _externalTransport = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002192 _transportPtr = NULL;
2193 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2194                 "DeRegisterExternalTransport() external transport is now disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002195 return 0;
2196}
2197
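// Handle an incoming RTP packet: update the playout timestamp, dump the
// packet if RTP dump is enabled, parse and validate the RTP header, update
// receive statistics and forward the payload to the RTP receiver.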
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002198int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002199 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2200 "Channel::ReceivedRTPPacket()");
2201
2202 // Store playout timestamp for the received RTP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002203 UpdatePlayoutTimestamp(false);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002204
2205 // Dump the RTP packet to a file (if RTP dump is enabled).
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002206 if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2207 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002208 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2209 VoEId(_instanceId,_channelId),
2210                 "Channel::ReceivedRTPPacket() RTP dump to input file failed");
2211 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002212 const uint8_t* received_packet = reinterpret_cast<const uint8_t*>(data);
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002213 RTPHeader header;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002214 if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
2215 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
2216 "Incoming packet: invalid RTP header");
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002217 return -1;
2218 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002219 header.payload_type_frequency =
2220 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002221 if (header.payload_type_frequency < 0)
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002222 return -1;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002223 rtp_receive_statistics_->IncomingPacket(header, length,
2224 IsPacketRetransmitted(header));
2225 rtp_payload_registry_->SetIncomingPayloadType(header);
2226 return ReceivePacket(received_packet, length, header,
2227 IsPacketInOrder(header)) ? 0 : -1;
2228}
2229
2230bool Channel::ReceivePacket(const uint8_t* packet,
2231 int packet_length,
2232 const RTPHeader& header,
2233 bool in_order) {
2234 if (rtp_payload_registry_->IsEncapsulated(header)) {
2235 return HandleEncapsulation(packet, packet_length, header);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002236 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002237 const uint8_t* payload = packet + header.headerLength;
2238 int payload_length = packet_length - header.headerLength;
2239 assert(payload_length >= 0);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002240 PayloadUnion payload_specific;
2241 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002242 &payload_specific)) {
2243 return false;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002244 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002245 return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
2246 payload_specific, in_order);
2247}
2248
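// Handle an encapsulated (RTX) packet: the RTX header is stripped, the
// original RTP packet is restored and then processed as a recovered packet.
// Non-RTX encapsulation is rejected.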
2249bool Channel::HandleEncapsulation(const uint8_t* packet,
2250 int packet_length,
2251 const RTPHeader& header) {
2252 if (!rtp_payload_registry_->IsRtx(header))
2253 return false;
2254
2255 // Remove the RTX header and parse the original RTP header.
2256 if (packet_length < header.headerLength)
2257 return false;
2258 if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
2259 return false;
2260 if (restored_packet_in_use_) {
2261 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
2262 "Multiple RTX headers detected, dropping packet");
2263 return false;
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002264 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002265 uint8_t* restored_packet_ptr = restored_packet_;
2266 if (!rtp_payload_registry_->RestoreOriginalPacket(
2267 &restored_packet_ptr, packet, &packet_length, rtp_receiver_->SSRC(),
2268 header)) {
2269 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
2270 "Incoming RTX packet: invalid RTP header");
2271 return false;
2272 }
2273 restored_packet_in_use_ = true;
2274 bool ret = OnRecoveredPacket(restored_packet_ptr, packet_length);
2275 restored_packet_in_use_ = false;
2276 return ret;
2277}
2278
2279bool Channel::IsPacketInOrder(const RTPHeader& header) const {
2280 StreamStatistician* statistician =
2281 rtp_receive_statistics_->GetStatistician(header.ssrc);
2282 if (!statistician)
2283 return false;
2284 return statistician->IsPacketInOrder(header.sequenceNumber);
niklase@google.com470e71d2011-07-07 08:21:25 +00002285}
2286
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002287bool Channel::IsPacketRetransmitted(const RTPHeader& header) const {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002288 // Retransmissions are handled separately if RTX is enabled.
2289 if (rtp_payload_registry_->RtxEnabled())
2290 return false;
2291 StreamStatistician* statistician =
2292 rtp_receive_statistics_->GetStatistician(header.ssrc);
2293 if (!statistician)
2294 return false;
2295 // Check if this is a retransmission.
2296 uint16_t min_rtt = 0;
2297 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
2298 return !IsPacketInOrder(header) &&
2299 statistician->IsRetransmitOfOldPacket(header, min_rtt);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002300}
2301
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002302int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002303 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2304 "Channel::ReceivedRTCPPacket()");
2305 // Store playout timestamp for the received RTCP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002306 UpdatePlayoutTimestamp(true);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002307
2308 // Dump the RTCP packet to a file (if RTP dump is enabled).
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002309 if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2310 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002311 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2312 VoEId(_instanceId,_channelId),
2313                 "Channel::ReceivedRTCPPacket() RTCP dump to input file failed");
2314 }
2315
2316 // Deliver RTCP packet to RTP/RTCP module for parsing
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002317 if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data,
2318 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002319 _engineStatisticsPtr->SetLastError(
2320 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2321        "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
2322 }
2323 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002324}
2325
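// Start local file playout on this channel: any existing file player is
// destroyed, a new one is created for |format| and started, and the file is
// registered with the output mixer if the channel is playing.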
niklase@google.com470e71d2011-07-07 08:21:25 +00002326int Channel::StartPlayingFileLocally(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002327 bool loop,
2328 FileFormats format,
2329 int startPosition,
2330 float volumeScaling,
2331 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002332 const CodecInst* codecInst)
2333{
2334 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2335 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
2336 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
2337 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2338 startPosition, stopPosition);
2339
2340 if (_outputFilePlaying)
2341 {
2342 _engineStatisticsPtr->SetLastError(
2343 VE_ALREADY_PLAYING, kTraceError,
2344 "StartPlayingFileLocally() is already playing");
2345 return -1;
2346 }
2347
niklase@google.com470e71d2011-07-07 08:21:25 +00002348 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002349 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002350
2351 if (_outputFilePlayerPtr)
2352 {
2353 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2354 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2355 _outputFilePlayerPtr = NULL;
2356 }
2357
2358 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2359 _outputFilePlayerId, (const FileFormats)format);
2360
2361 if (_outputFilePlayerPtr == NULL)
2362 {
2363 _engineStatisticsPtr->SetLastError(
2364 VE_INVALID_ARGUMENT, kTraceError,
henrike@webrtc.org31d30702011-11-18 19:59:32 +00002365 "StartPlayingFileLocally() filePlayer format is not correct");
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002366 return -1;
2367 }
2368
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002369 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002370
2371 if (_outputFilePlayerPtr->StartPlayingFile(
2372 fileName,
2373 loop,
2374 startPosition,
2375 volumeScaling,
2376 notificationTime,
2377 stopPosition,
2378 (const CodecInst*)codecInst) != 0)
2379 {
2380 _engineStatisticsPtr->SetLastError(
2381 VE_BAD_FILE, kTraceError,
2382 "StartPlayingFile() failed to start file playout");
2383 _outputFilePlayerPtr->StopPlayingFile();
2384 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2385 _outputFilePlayerPtr = NULL;
2386 return -1;
2387 }
2388 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2389 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002390 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002391
2392 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002393 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002394
2395 return 0;
2396}
2397
2398int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002399 FileFormats format,
2400 int startPosition,
2401 float volumeScaling,
2402 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002403 const CodecInst* codecInst)
2404{
2405 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2406 "Channel::StartPlayingFileLocally(format=%d,"
2407 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2408 format, volumeScaling, startPosition, stopPosition);
2409
2410 if(stream == NULL)
2411 {
2412 _engineStatisticsPtr->SetLastError(
2413 VE_BAD_FILE, kTraceError,
2414 "StartPlayingFileLocally() NULL as input stream");
2415 return -1;
2416 }
2417
2418
2419 if (_outputFilePlaying)
2420 {
2421 _engineStatisticsPtr->SetLastError(
2422 VE_ALREADY_PLAYING, kTraceError,
2423 "StartPlayingFileLocally() is already playing");
2424 return -1;
2425 }
2426
niklase@google.com470e71d2011-07-07 08:21:25 +00002427 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002428 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002429
2430 // Destroy the old instance
2431 if (_outputFilePlayerPtr)
2432 {
2433 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2434 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2435 _outputFilePlayerPtr = NULL;
2436 }
2437
2438 // Create the instance
2439 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2440 _outputFilePlayerId,
2441 (const FileFormats)format);
2442
2443 if (_outputFilePlayerPtr == NULL)
2444 {
2445 _engineStatisticsPtr->SetLastError(
2446 VE_INVALID_ARGUMENT, kTraceError,
2447                "StartPlayingFileLocally() filePlayer format is not correct");
2448 return -1;
2449 }
2450
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002451 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002452
2453 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2454 volumeScaling,
2455 notificationTime,
2456 stopPosition, codecInst) != 0)
2457 {
2458 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2459 "StartPlayingFile() failed to "
2460 "start file playout");
2461 _outputFilePlayerPtr->StopPlayingFile();
2462 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2463 _outputFilePlayerPtr = NULL;
2464 return -1;
2465 }
2466 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2467 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002468 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002469
2470 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002471 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002472
niklase@google.com470e71d2011-07-07 08:21:25 +00002473 return 0;
2474}
2475
2476int Channel::StopPlayingFileLocally()
2477{
2478 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2479 "Channel::StopPlayingFileLocally()");
2480
2481 if (!_outputFilePlaying)
2482 {
2483 _engineStatisticsPtr->SetLastError(
2484 VE_INVALID_OPERATION, kTraceWarning,
2485            "StopPlayingFileLocally() is not playing");
2486 return 0;
2487 }
2488
niklase@google.com470e71d2011-07-07 08:21:25 +00002489 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002490 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002491
2492 if (_outputFilePlayerPtr->StopPlayingFile() != 0)
2493 {
2494 _engineStatisticsPtr->SetLastError(
2495 VE_STOP_RECORDING_FAILED, kTraceError,
2496 "StopPlayingFile() could not stop playing");
2497 return -1;
2498 }
2499 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2500 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2501 _outputFilePlayerPtr = NULL;
2502 _outputFilePlaying = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002503 }
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002504 // _fileCritSect cannot be taken while calling
2505    // SetAnonymousMixabilityStatus(). Refer to comments in
2506 // StartPlayingFileLocally(const char* ...) for more details.
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002507 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
2508 {
2509 _engineStatisticsPtr->SetLastError(
2510 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002511 "StopPlayingFile() failed to stop participant from playing as"
2512            " file in the mixer");
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002513 return -1;
2514 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002515
2516 return 0;
2517}
2518
2519int Channel::IsPlayingFileLocally() const
2520{
2521 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2522 "Channel::IsPlayingFileLocally()");
2523
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002524 return (int32_t)_outputFilePlaying;
niklase@google.com470e71d2011-07-07 08:21:25 +00002525}
2526
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002527int Channel::RegisterFilePlayingToMixer()
2528{
2529    // Return success without registering file playing to the mixer if:
2530    // 1. a file is played before playout has been started on this channel, or
2531    // 2. playout is started without a file playing on this channel.
2532 if (!_playing || !_outputFilePlaying)
2533 {
2534 return 0;
2535 }
2536
2537 // |_fileCritSect| cannot be taken while calling
2538 // SetAnonymousMixabilityStatus() since as soon as the participant is added
2539 // frames can be pulled by the mixer. Since the frames are generated from
2540 // the file, _fileCritSect will be taken. This would result in a deadlock.
2541 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
2542 {
2543 CriticalSectionScoped cs(&_fileCritSect);
2544 _outputFilePlaying = false;
2545 _engineStatisticsPtr->SetLastError(
2546 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2547 "StartPlayingFile() failed to add participant as file to mixer");
2548 _outputFilePlayerPtr->StopPlayingFile();
2549 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2550 _outputFilePlayerPtr = NULL;
2551 return -1;
2552 }
2553
2554 return 0;
2555}
2556
pbos@webrtc.org92135212013-05-14 08:31:39 +00002557int Channel::ScaleLocalFilePlayout(float scale)
niklase@google.com470e71d2011-07-07 08:21:25 +00002558{
2559 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2560 "Channel::ScaleLocalFilePlayout(scale=%5.3f)", scale);
2561
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002562 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002563
2564 if (!_outputFilePlaying)
2565 {
2566 _engineStatisticsPtr->SetLastError(
2567 VE_INVALID_OPERATION, kTraceError,
2568            "ScaleLocalFilePlayout() is not playing");
2569 return -1;
2570 }
2571 if ((_outputFilePlayerPtr == NULL) ||
2572 (_outputFilePlayerPtr->SetAudioScaling(scale) != 0))
2573 {
2574 _engineStatisticsPtr->SetLastError(
2575 VE_BAD_ARGUMENT, kTraceError,
2576 "SetAudioScaling() failed to scale the playout");
2577 return -1;
2578 }
2579
2580 return 0;
2581}
2582
2583int Channel::GetLocalPlayoutPosition(int& positionMs)
2584{
2585 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2586 "Channel::GetLocalPlayoutPosition(position=?)");
2587
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002588 uint32_t position;
niklase@google.com470e71d2011-07-07 08:21:25 +00002589
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002590 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002591
2592 if (_outputFilePlayerPtr == NULL)
2593 {
2594 _engineStatisticsPtr->SetLastError(
2595 VE_INVALID_OPERATION, kTraceError,
2596            "GetLocalPlayoutPosition() filePlayer instance does not exist");
2597 return -1;
2598 }
2599
2600 if (_outputFilePlayerPtr->GetPlayoutPosition(position) != 0)
2601 {
2602 _engineStatisticsPtr->SetLastError(
2603 VE_BAD_FILE, kTraceError,
2604 "GetLocalPlayoutPosition() failed");
2605 return -1;
2606 }
2607 positionMs = position;
2608
2609 return 0;
2610}
2611
2612int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002613 bool loop,
2614 FileFormats format,
2615 int startPosition,
2616 float volumeScaling,
2617 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002618 const CodecInst* codecInst)
2619{
2620 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2621 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
2622 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
2623 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2624 startPosition, stopPosition);
2625
2626 if (_inputFilePlaying)
2627 {
2628 _engineStatisticsPtr->SetLastError(
2629 VE_ALREADY_PLAYING, kTraceWarning,
2630 "StartPlayingFileAsMicrophone() filePlayer is playing");
2631 return 0;
2632 }
2633
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002634 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002635
2636 // Destroy the old instance
2637 if (_inputFilePlayerPtr)
2638 {
2639 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2640 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2641 _inputFilePlayerPtr = NULL;
2642 }
2643
2644 // Create the instance
2645 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2646 _inputFilePlayerId, (const FileFormats)format);
2647
2648 if (_inputFilePlayerPtr == NULL)
2649 {
2650 _engineStatisticsPtr->SetLastError(
2651 VE_INVALID_ARGUMENT, kTraceError,
2652            "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2653 return -1;
2654 }
2655
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002656 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002657
2658 if (_inputFilePlayerPtr->StartPlayingFile(
2659 fileName,
2660 loop,
2661 startPosition,
2662 volumeScaling,
2663 notificationTime,
2664 stopPosition,
2665 (const CodecInst*)codecInst) != 0)
2666 {
2667 _engineStatisticsPtr->SetLastError(
2668 VE_BAD_FILE, kTraceError,
2669 "StartPlayingFile() failed to start file playout");
2670 _inputFilePlayerPtr->StopPlayingFile();
2671 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2672 _inputFilePlayerPtr = NULL;
2673 return -1;
2674 }
2675 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2676 _inputFilePlaying = true;
2677
2678 return 0;
2679}
2680
2681int Channel::StartPlayingFileAsMicrophone(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002682 FileFormats format,
2683 int startPosition,
2684 float volumeScaling,
2685 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002686 const CodecInst* codecInst)
2687{
2688 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2689 "Channel::StartPlayingFileAsMicrophone(format=%d, "
2690 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2691 format, volumeScaling, startPosition, stopPosition);
2692
2693 if(stream == NULL)
2694 {
2695 _engineStatisticsPtr->SetLastError(
2696 VE_BAD_FILE, kTraceError,
2697 "StartPlayingFileAsMicrophone NULL as input stream");
2698 return -1;
2699 }
2700
2701 if (_inputFilePlaying)
2702 {
2703 _engineStatisticsPtr->SetLastError(
2704 VE_ALREADY_PLAYING, kTraceWarning,
2705 "StartPlayingFileAsMicrophone() is playing");
2706 return 0;
2707 }
2708
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002709 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002710
2711 // Destroy the old instance
2712 if (_inputFilePlayerPtr)
2713 {
2714 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2715 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2716 _inputFilePlayerPtr = NULL;
2717 }
2718
2719 // Create the instance
2720 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2721 _inputFilePlayerId, (const FileFormats)format);
2722
2723 if (_inputFilePlayerPtr == NULL)
2724 {
2725 _engineStatisticsPtr->SetLastError(
2726 VE_INVALID_ARGUMENT, kTraceError,
2727            "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2728 return -1;
2729 }
2730
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002731 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002732
2733 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2734 volumeScaling, notificationTime,
2735 stopPosition, codecInst) != 0)
2736 {
2737 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2738 "StartPlayingFile() failed to start "
2739 "file playout");
2740 _inputFilePlayerPtr->StopPlayingFile();
2741 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2742 _inputFilePlayerPtr = NULL;
2743 return -1;
2744 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002745
niklase@google.com470e71d2011-07-07 08:21:25 +00002746 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2747 _inputFilePlaying = true;
2748
2749 return 0;
2750}
2751
2752int Channel::StopPlayingFileAsMicrophone()
2753{
2754 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2755 "Channel::StopPlayingFileAsMicrophone()");
2756
2757 if (!_inputFilePlaying)
2758 {
2759 _engineStatisticsPtr->SetLastError(
2760 VE_INVALID_OPERATION, kTraceWarning,
2761            "StopPlayingFileAsMicrophone() is not playing");
2762 return 0;
2763 }
2764
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002765 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002766 if (_inputFilePlayerPtr->StopPlayingFile() != 0)
2767 {
2768 _engineStatisticsPtr->SetLastError(
2769 VE_STOP_RECORDING_FAILED, kTraceError,
2770 "StopPlayingFile() could not stop playing");
2771 return -1;
2772 }
2773 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2774 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2775 _inputFilePlayerPtr = NULL;
2776 _inputFilePlaying = false;
2777
2778 return 0;
2779}
2780
2781int Channel::IsPlayingFileAsMicrophone() const
2782{
2783 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2784 "Channel::IsPlayingFileAsMicrophone()");
2785
2786 return _inputFilePlaying;
2787}
2788
pbos@webrtc.org92135212013-05-14 08:31:39 +00002789int Channel::ScaleFileAsMicrophonePlayout(float scale)
niklase@google.com470e71d2011-07-07 08:21:25 +00002790{
2791 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2792 "Channel::ScaleFileAsMicrophonePlayout(scale=%5.3f)", scale);
2793
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002794 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002795
2796 if (!_inputFilePlaying)
2797 {
2798 _engineStatisticsPtr->SetLastError(
2799 VE_INVALID_OPERATION, kTraceError,
2800            "ScaleFileAsMicrophonePlayout() is not playing");
2801 return -1;
2802 }
2803
2804 if ((_inputFilePlayerPtr == NULL) ||
2805 (_inputFilePlayerPtr->SetAudioScaling(scale) != 0))
2806 {
2807 _engineStatisticsPtr->SetLastError(
2808 VE_BAD_ARGUMENT, kTraceError,
2809 "SetAudioScaling() failed to scale playout");
2810 return -1;
2811 }
2812
2813 return 0;
2814}
2815
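// Start recording this channel's playout to |fileName|. L16/PCMU/PCMA are
// recorded as WAV, other codecs as a compressed file; when no codec is
// given, 16 kHz L16 is used.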
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00002816int Channel::StartRecordingPlayout(const char* fileName,
niklase@google.com470e71d2011-07-07 08:21:25 +00002817 const CodecInst* codecInst)
2818{
2819 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2820 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
2821
2822 if (_outputFileRecording)
2823 {
2824 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2825 "StartRecordingPlayout() is already recording");
2826 return 0;
2827 }
2828
2829 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002830 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +00002831 CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2832
niklas.enbom@webrtc.org40197d72012-03-26 08:45:47 +00002833 if ((codecInst != NULL) &&
2834 ((codecInst->channels < 1) || (codecInst->channels > 2)))
niklase@google.com470e71d2011-07-07 08:21:25 +00002835 {
2836 _engineStatisticsPtr->SetLastError(
2837 VE_BAD_ARGUMENT, kTraceError,
2838 "StartRecordingPlayout() invalid compression");
2839 return(-1);
2840 }
2841 if(codecInst == NULL)
2842 {
2843 format = kFileFormatPcm16kHzFile;
2844 codecInst=&dummyCodec;
2845 }
2846 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2847 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2848 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2849 {
2850 format = kFileFormatWavFile;
2851 }
2852 else
2853 {
2854 format = kFileFormatCompressedFile;
2855 }
2856
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002857 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002858
2859 // Destroy the old instance
2860 if (_outputFileRecorderPtr)
2861 {
2862 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2863 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2864 _outputFileRecorderPtr = NULL;
2865 }
2866
2867 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2868 _outputFileRecorderId, (const FileFormats)format);
2869 if (_outputFileRecorderPtr == NULL)
2870 {
2871 _engineStatisticsPtr->SetLastError(
2872 VE_INVALID_ARGUMENT, kTraceError,
2873            "StartRecordingPlayout() fileRecorder format is not correct");
2874 return -1;
2875 }
2876
2877 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2878 fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
2879 {
2880 _engineStatisticsPtr->SetLastError(
2881 VE_BAD_FILE, kTraceError,
2882 "StartRecordingAudioFile() failed to start file recording");
2883 _outputFileRecorderPtr->StopRecording();
2884 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2885 _outputFileRecorderPtr = NULL;
2886 return -1;
2887 }
2888 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2889 _outputFileRecording = true;
2890
2891 return 0;
2892}
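// Note on the format selection above: a NULL |codecInst| records 16 kHz linear
// PCM, L16/PCMU/PCMA map to a WAV container, and any other codec name falls
// back to the compressed-file format. Minimal caller sketch (illustrative
// values, not part of this file):
//   CodecInst pcmu = {0, "PCMU", 8000, 160, 1, 64000};
//   channel->StartRecordingPlayout("playout.wav", &pcmu);
//   ...
//   channel->StopRecordingPlayout();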
2893
2894int Channel::StartRecordingPlayout(OutStream* stream,
2895 const CodecInst* codecInst)
2896{
2897 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2898 "Channel::StartRecordingPlayout()");
2899
2900 if (_outputFileRecording)
2901 {
2902 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2903 "StartRecordingPlayout() is already recording");
2904 return 0;
2905 }
2906
2907 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002908 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +00002909 CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2910
2911 if (codecInst != NULL && codecInst->channels != 1)
2912 {
2913 _engineStatisticsPtr->SetLastError(
2914 VE_BAD_ARGUMENT, kTraceError,
2915 "StartRecordingPlayout() invalid compression");
2916 return(-1);
2917 }
2918 if(codecInst == NULL)
2919 {
2920 format = kFileFormatPcm16kHzFile;
2921 codecInst=&dummyCodec;
2922 }
2923 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2924 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2925 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2926 {
2927 format = kFileFormatWavFile;
2928 }
2929 else
2930 {
2931 format = kFileFormatCompressedFile;
2932 }
2933
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002934 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002935
2936 // Destroy the old instance
2937 if (_outputFileRecorderPtr)
2938 {
2939 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2940 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2941 _outputFileRecorderPtr = NULL;
2942 }
2943
2944 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2945 _outputFileRecorderId, (const FileFormats)format);
2946 if (_outputFileRecorderPtr == NULL)
2947 {
2948 _engineStatisticsPtr->SetLastError(
2949 VE_INVALID_ARGUMENT, kTraceError,
2950           "StartRecordingPlayout() fileRecorder format is not correct");
2951 return -1;
2952 }
2953
2954 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2955 notificationTime) != 0)
2956 {
2957 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2958 "StartRecordingPlayout() failed to "
2959 "start file recording");
2960 _outputFileRecorderPtr->StopRecording();
2961 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2962 _outputFileRecorderPtr = NULL;
2963 return -1;
2964 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002965
niklase@google.com470e71d2011-07-07 08:21:25 +00002966 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2967 _outputFileRecording = true;
2968
2969 return 0;
2970}
2971
2972int Channel::StopRecordingPlayout()
2973{
2974 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
2975 "Channel::StopRecordingPlayout()");
2976
2977 if (!_outputFileRecording)
2978 {
2979 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
2980                     "StopRecordingPlayout() is not recording");
2981 return -1;
2982 }
2983
2984
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002985 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002986
2987 if (_outputFileRecorderPtr->StopRecording() != 0)
2988 {
2989 _engineStatisticsPtr->SetLastError(
2990 VE_STOP_RECORDING_FAILED, kTraceError,
2991 "StopRecording() could not stop recording");
2992 return(-1);
2993 }
2994 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2995 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2996 _outputFileRecorderPtr = NULL;
2997 _outputFileRecording = false;
2998
2999 return 0;
3000}
3001
3002void
3003Channel::SetMixWithMicStatus(bool mix)
3004{
3005 _mixFileWithMicrophone=mix;
3006}
3007
3008int
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003009Channel::GetSpeechOutputLevel(uint32_t& level) const
niklase@google.com470e71d2011-07-07 08:21:25 +00003010{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003011 int8_t currentLevel = _outputAudioLevel.Level();
3012    level = static_cast<uint32_t> (currentLevel);
niklase@google.com470e71d2011-07-07 08:21:25 +00003013 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3014 VoEId(_instanceId,_channelId),
3015 "GetSpeechOutputLevel() => level=%u", level);
3016 return 0;
3017}
3018
3019int
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003020Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const
niklase@google.com470e71d2011-07-07 08:21:25 +00003021{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003022 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
3023    level = static_cast<uint32_t> (currentLevel);
niklase@google.com470e71d2011-07-07 08:21:25 +00003024 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3025 VoEId(_instanceId,_channelId),
3026 "GetSpeechOutputLevelFullRange() => level=%u", level);
3027 return 0;
3028}
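// The two getters above report the same measurement at different resolutions:
// Level() uses the coarse VoE scale (roughly 0-9) while LevelFullRange()
// returns the full 16-bit magnitude (roughly 0-32767); both simply read the
// most recent value tracked by |_outputAudioLevel|.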
3029
3030int
3031Channel::SetMute(bool enable)
3032{
3033 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3034 "Channel::SetMute(enable=%d)", enable);
3035 _mute = enable;
3036 return 0;
3037}
3038
3039bool
3040Channel::Mute() const
3041{
3042 return _mute;
3043}
3044
3045int
3046Channel::SetOutputVolumePan(float left, float right)
3047{
3048 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3049 "Channel::SetOutputVolumePan()");
3050 _panLeft = left;
3051 _panRight = right;
3052 return 0;
3053}
3054
3055int
3056Channel::GetOutputVolumePan(float& left, float& right) const
3057{
3058 left = _panLeft;
3059 right = _panRight;
3060 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3061 VoEId(_instanceId,_channelId),
3062 "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
3063 return 0;
3064}
3065
3066int
3067Channel::SetChannelOutputVolumeScaling(float scaling)
3068{
3069 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3070 "Channel::SetChannelOutputVolumeScaling()");
3071 _outputGain = scaling;
3072 return 0;
3073}
3074
3075int
3076Channel::GetChannelOutputVolumeScaling(float& scaling) const
3077{
3078 scaling = _outputGain;
3079 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3080 VoEId(_instanceId,_channelId),
3081 "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
3082 return 0;
3083}
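// The pan and gain values stored above are applied when the output mixer
// assembles this channel's playout signal. Sketch (assumed VoEVolumeControl-
// style calls, not part of this file):
//   volume->SetOutputVolumePan(channel, 1.0f, 0.2f);       // mostly left
//   volume->SetChannelOutputVolumeScaling(channel, 2.0f);  // roughly +6 dB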
3084
niklase@google.com470e71d2011-07-07 08:21:25 +00003085int
3086Channel::RegisterExternalEncryption(Encryption& encryption)
3087{
3088 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3089 "Channel::RegisterExternalEncryption()");
3090
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003091 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003092
3093 if (_encryptionPtr)
3094 {
3095 _engineStatisticsPtr->SetLastError(
3096 VE_INVALID_OPERATION, kTraceError,
3097 "RegisterExternalEncryption() encryption already enabled");
3098 return -1;
3099 }
3100
3101 _encryptionPtr = &encryption;
3102
3103 _decrypting = true;
3104 _encrypting = true;
3105
3106 return 0;
3107}
3108
3109int
3110Channel::DeRegisterExternalEncryption()
3111{
3112 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3113 "Channel::DeRegisterExternalEncryption()");
3114
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003115 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003116
3117 if (!_encryptionPtr)
3118 {
3119 _engineStatisticsPtr->SetLastError(
3120 VE_INVALID_OPERATION, kTraceWarning,
3121 "DeRegisterExternalEncryption() encryption already disabled");
3122 return 0;
3123 }
3124
3125 _decrypting = false;
3126 _encrypting = false;
3127
3128 _encryptionPtr = NULL;
3129
3130 return 0;
3131}
3132
3133int Channel::SendTelephoneEventOutband(unsigned char eventCode,
wu@webrtc.org822fbd82013-08-15 23:38:54 +00003134 int lengthMs, int attenuationDb,
3135 bool playDtmfEvent)
niklase@google.com470e71d2011-07-07 08:21:25 +00003136{
3137 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3138 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
3139 playDtmfEvent);
3140
3141 _playOutbandDtmfEvent = playDtmfEvent;
3142
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003143 if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
niklase@google.com470e71d2011-07-07 08:21:25 +00003144 attenuationDb) != 0)
3145 {
3146 _engineStatisticsPtr->SetLastError(
3147 VE_SEND_DTMF_FAILED,
3148 kTraceWarning,
3149 "SendTelephoneEventOutband() failed to send event");
3150 return -1;
3151 }
3152 return 0;
3153}
3154
3155int Channel::SendTelephoneEventInband(unsigned char eventCode,
3156 int lengthMs,
3157 int attenuationDb,
3158 bool playDtmfEvent)
3159{
3160 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3161 "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
3162 playDtmfEvent);
3163
3164 _playInbandDtmfEvent = playDtmfEvent;
3165 _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
3166
3167 return 0;
3168}
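// The two DTMF paths above differ in transport: the out-of-band variant sends
// RFC 4733 telephone-event packets through the RTP/RTCP module, while the
// in-band variant queues tones that are later mixed into the audio itself.
// Sketch of an assumed VoEDtmf-style call (not part of this file):
//   dtmf->SendTelephoneEvent(channel, 5, true /* out-of-band */,
//                            160 /* ms */, 10 /* attenuation dB */);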
3169
3170int
3171Channel::SetDtmfPlayoutStatus(bool enable)
3172{
3173 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3174 "Channel::SetDtmfPlayoutStatus()");
3175 if (_audioCodingModule.SetDtmfPlayoutStatus(enable) != 0)
3176 {
3177 _engineStatisticsPtr->SetLastError(
3178 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
3179 "SetDtmfPlayoutStatus() failed to set Dtmf playout");
3180 return -1;
3181 }
3182 return 0;
3183}
3184
3185bool
3186Channel::DtmfPlayoutStatus() const
3187{
3188 return _audioCodingModule.DtmfPlayoutStatus();
3189}
3190
3191int
3192Channel::SetSendTelephoneEventPayloadType(unsigned char type)
3193{
3194 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3195 "Channel::SetSendTelephoneEventPayloadType()");
andrew@webrtc.orgf81f9f82011-08-19 22:56:22 +00003196 if (type > 127)
niklase@google.com470e71d2011-07-07 08:21:25 +00003197 {
3198 _engineStatisticsPtr->SetLastError(
3199 VE_INVALID_ARGUMENT, kTraceError,
3200 "SetSendTelephoneEventPayloadType() invalid type");
3201 return -1;
3202 }
pbos@webrtc.org5b10d8f2013-07-11 15:50:07 +00003203 CodecInst codec = {};
pwestin@webrtc.org1da1ce02011-10-13 15:19:55 +00003204 codec.plfreq = 8000;
3205 codec.pltype = type;
3206 memcpy(codec.plname, "telephone-event", 16);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003207 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003208 {
henrika@webrtc.org4392d5f2013-04-17 07:34:25 +00003209 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
3210 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
3211 _engineStatisticsPtr->SetLastError(
3212 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3213 "SetSendTelephoneEventPayloadType() failed to register send"
3214            " payload type");
3215 return -1;
3216 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003217 }
3218 _sendTelephoneEventPayloadType = type;
3219 return 0;
3220}
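// If the first registration of "telephone-event" collides with a payload type
// that is already registered, the code above de-registers it and retries once.
// Typical dynamic-type assignment (illustrative only):
//   channel->SetSendTelephoneEventPayloadType(106);  // any free type <= 127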
3221
3222int
3223Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
3224{
3225 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3226 "Channel::GetSendTelephoneEventPayloadType()");
3227 type = _sendTelephoneEventPayloadType;
3228 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3229 VoEId(_instanceId,_channelId),
3230 "GetSendTelephoneEventPayloadType() => type=%u", type);
3231 return 0;
3232}
3233
niklase@google.com470e71d2011-07-07 08:21:25 +00003234int
3235Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
3236{
3237 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3238 "Channel::UpdateRxVadDetection()");
3239
3240 int vadDecision = 1;
3241
andrew@webrtc.org63a50982012-05-02 23:56:37 +00003242 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive)? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003243
3244 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
3245 {
3246 OnRxVadDetected(vadDecision);
3247 _oldVadDecision = vadDecision;
3248 }
3249
3250 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3251 "Channel::UpdateRxVadDetection() => vadDecision=%d",
3252 vadDecision);
3253 return 0;
3254}
3255
3256int
3257Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
3258{
3259 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3260 "Channel::RegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003261 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003262
3263 if (_rxVadObserverPtr)
3264 {
3265 _engineStatisticsPtr->SetLastError(
3266 VE_INVALID_OPERATION, kTraceError,
3267 "RegisterRxVadObserver() observer already enabled");
3268 return -1;
3269 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003270 _rxVadObserverPtr = &observer;
3271 _RxVadDetection = true;
3272 return 0;
3273}
3274
3275int
3276Channel::DeRegisterRxVadObserver()
3277{
3278 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3279 "Channel::DeRegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003280 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003281
3282 if (!_rxVadObserverPtr)
3283 {
3284 _engineStatisticsPtr->SetLastError(
3285 VE_INVALID_OPERATION, kTraceWarning,
3286 "DeRegisterRxVadObserver() observer already disabled");
3287 return 0;
3288 }
3289 _rxVadObserverPtr = NULL;
3290 _RxVadDetection = false;
3291 return 0;
3292}
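// A registered receive-side VAD observer gets OnRxVadDetected() callbacks from
// UpdateRxVadDetection() whenever the decision changes. Minimal sketch,
// assuming VoERxVadCallback declares OnRxVadDetected(int channel, int vad):
//   class VadLogger : public VoERxVadCallback {
//    public:
//     virtual void OnRxVadDetected(int channel, int vadDecision) {
//       printf("channel %d vad=%d\n", channel, vadDecision);
//     }
//   };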
3293
3294int
3295Channel::VoiceActivityIndicator(int &activity)
3296{
3297 activity = _sendFrameType;
3298
3299 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3300 "Channel::VoiceActivityIndicator(indicator=%d)", activity);
3301 return 0;
3302}
3303
3304#ifdef WEBRTC_VOICE_ENGINE_AGC
3305
3306int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003307Channel::SetRxAgcStatus(bool enable, AgcModes mode)
niklase@google.com470e71d2011-07-07 08:21:25 +00003308{
3309 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3310 "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
3311 (int)enable, (int)mode);
3312
3313 GainControl::Mode agcMode(GainControl::kFixedDigital);
3314 switch (mode)
3315 {
3316 case kAgcDefault:
3317 agcMode = GainControl::kAdaptiveDigital;
3318 break;
3319 case kAgcUnchanged:
3320 agcMode = _rxAudioProcessingModulePtr->gain_control()->mode();
3321 break;
3322 case kAgcFixedDigital:
3323 agcMode = GainControl::kFixedDigital;
3324 break;
3325 case kAgcAdaptiveDigital:
3326            agcMode = GainControl::kAdaptiveDigital;
3327 break;
3328 default:
3329 _engineStatisticsPtr->SetLastError(
3330 VE_INVALID_ARGUMENT, kTraceError,
3331 "SetRxAgcStatus() invalid Agc mode");
3332 return -1;
3333 }
3334
3335 if (_rxAudioProcessingModulePtr->gain_control()->set_mode(agcMode) != 0)
3336 {
3337 _engineStatisticsPtr->SetLastError(
3338 VE_APM_ERROR, kTraceError,
3339 "SetRxAgcStatus() failed to set Agc mode");
3340 return -1;
3341 }
3342 if (_rxAudioProcessingModulePtr->gain_control()->Enable(enable) != 0)
3343 {
3344 _engineStatisticsPtr->SetLastError(
3345 VE_APM_ERROR, kTraceError,
3346 "SetRxAgcStatus() failed to set Agc state");
3347 return -1;
3348 }
3349
3350 _rxAgcIsEnabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00003351 _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
3352
3353 return 0;
3354}
3355
3356int
3357Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
3358{
3359 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3360 "Channel::GetRxAgcStatus(enable=?, mode=?)");
3361
3362 bool enable = _rxAudioProcessingModulePtr->gain_control()->is_enabled();
3363 GainControl::Mode agcMode =
3364 _rxAudioProcessingModulePtr->gain_control()->mode();
3365
3366 enabled = enable;
3367
3368 switch (agcMode)
3369 {
3370 case GainControl::kFixedDigital:
3371 mode = kAgcFixedDigital;
3372 break;
3373 case GainControl::kAdaptiveDigital:
3374 mode = kAgcAdaptiveDigital;
3375 break;
3376 default:
3377 _engineStatisticsPtr->SetLastError(
3378 VE_APM_ERROR, kTraceError,
3379 "GetRxAgcStatus() invalid Agc mode");
3380 return -1;
3381 }
3382
3383 return 0;
3384}
3385
3386int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003387Channel::SetRxAgcConfig(AgcConfig config)
niklase@google.com470e71d2011-07-07 08:21:25 +00003388{
3389 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3390 "Channel::SetRxAgcConfig()");
3391
3392 if (_rxAudioProcessingModulePtr->gain_control()->set_target_level_dbfs(
3393 config.targetLeveldBOv) != 0)
3394 {
3395 _engineStatisticsPtr->SetLastError(
3396 VE_APM_ERROR, kTraceError,
3397            "SetRxAgcConfig() failed to set target peak |level| "
3398 "(or envelope) of the Agc");
3399 return -1;
3400 }
3401 if (_rxAudioProcessingModulePtr->gain_control()->set_compression_gain_db(
3402 config.digitalCompressionGaindB) != 0)
3403 {
3404 _engineStatisticsPtr->SetLastError(
3405 VE_APM_ERROR, kTraceError,
3406 "SetRxAgcConfig() failed to set the range in |gain| the"
3407 " digital compression stage may apply");
3408 return -1;
3409 }
3410 if (_rxAudioProcessingModulePtr->gain_control()->enable_limiter(
3411 config.limiterEnable) != 0)
3412 {
3413 _engineStatisticsPtr->SetLastError(
3414 VE_APM_ERROR, kTraceError,
3415 "SetRxAgcConfig() failed to set hard limiter to the signal");
3416 return -1;
3417 }
3418
3419 return 0;
3420}
3421
3422int
3423Channel::GetRxAgcConfig(AgcConfig& config)
3424{
3425 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3426                 "Channel::GetRxAgcConfig(config=?)");
3427
3428 config.targetLeveldBOv =
3429 _rxAudioProcessingModulePtr->gain_control()->target_level_dbfs();
3430 config.digitalCompressionGaindB =
3431 _rxAudioProcessingModulePtr->gain_control()->compression_gain_db();
3432 config.limiterEnable =
3433 _rxAudioProcessingModulePtr->gain_control()->is_limiter_enabled();
3434
3435 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3436 VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
3437 "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
3438 " limiterEnable=%d",
3439 config.targetLeveldBOv,
3440 config.digitalCompressionGaindB,
3441 config.limiterEnable);
3442
3443 return 0;
3444}
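// AgcConfig mirrors the three gain_control() knobs used above. Receive-side
// configuration sketch (illustrative values):
//   AgcConfig config;
//   config.targetLeveldBOv = 3;            // target level below full scale
//   config.digitalCompressionGaindB = 9;   // maximum digital gain
//   config.limiterEnable = true;
//   channel->SetRxAgcConfig(config);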
3445
3446#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
3447
3448#ifdef WEBRTC_VOICE_ENGINE_NR
3449
3450int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003451Channel::SetRxNsStatus(bool enable, NsModes mode)
niklase@google.com470e71d2011-07-07 08:21:25 +00003452{
3453 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3454 "Channel::SetRxNsStatus(enable=%d, mode=%d)",
3455 (int)enable, (int)mode);
3456
3457 NoiseSuppression::Level nsLevel(
3458 (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE);
3459 switch (mode)
3460 {
3461
3462 case kNsDefault:
3463 nsLevel = (NoiseSuppression::Level)
3464 WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE;
3465 break;
3466 case kNsUnchanged:
3467 nsLevel = _rxAudioProcessingModulePtr->noise_suppression()->level();
3468 break;
3469 case kNsConference:
3470 nsLevel = NoiseSuppression::kHigh;
3471 break;
3472 case kNsLowSuppression:
3473 nsLevel = NoiseSuppression::kLow;
3474 break;
3475 case kNsModerateSuppression:
3476 nsLevel = NoiseSuppression::kModerate;
3477 break;
3478 case kNsHighSuppression:
3479 nsLevel = NoiseSuppression::kHigh;
3480 break;
3481 case kNsVeryHighSuppression:
3482 nsLevel = NoiseSuppression::kVeryHigh;
3483 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003484 }
3485
3486 if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(nsLevel)
3487 != 0)
3488 {
3489 _engineStatisticsPtr->SetLastError(
3490 VE_APM_ERROR, kTraceError,
3491            "SetRxNsStatus() failed to set NS level");
3492 return -1;
3493 }
3494 if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(enable) != 0)
3495 {
3496 _engineStatisticsPtr->SetLastError(
3497 VE_APM_ERROR, kTraceError,
3498            "SetRxNsStatus() failed to set NS state");
3499 return -1;
3500 }
3501
3502 _rxNsIsEnabled = enable;
3503 _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
3504
3505 return 0;
3506}
3507
3508int
3509Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
3510{
3511 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3512 "Channel::GetRxNsStatus(enable=?, mode=?)");
3513
3514 bool enable =
3515 _rxAudioProcessingModulePtr->noise_suppression()->is_enabled();
3516 NoiseSuppression::Level ncLevel =
3517 _rxAudioProcessingModulePtr->noise_suppression()->level();
3518
3519 enabled = enable;
3520
3521 switch (ncLevel)
3522 {
3523 case NoiseSuppression::kLow:
3524 mode = kNsLowSuppression;
3525 break;
3526 case NoiseSuppression::kModerate:
3527 mode = kNsModerateSuppression;
3528 break;
3529 case NoiseSuppression::kHigh:
3530 mode = kNsHighSuppression;
3531 break;
3532 case NoiseSuppression::kVeryHigh:
3533 mode = kNsVeryHighSuppression;
3534 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003535 }
3536
3537 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3538 VoEId(_instanceId,_channelId),
3539 "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
3540 return 0;
3541}
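// Receive-side NS shares the dedicated |_rxAudioProcessingModulePtr| instance
// with receive-side AGC, and |_rxApmIsEnabled| stays true as long as either
// feature is enabled. Sketch:
//   channel->SetRxNsStatus(true, kNsModerateSuppression);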
3542
3543#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
3544
3545int
3546Channel::RegisterRTPObserver(VoERTPObserver& observer)
3547{
3548 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3549 "Channel::RegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003550 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003551
3552 if (_rtpObserverPtr)
3553 {
3554 _engineStatisticsPtr->SetLastError(
3555 VE_INVALID_OPERATION, kTraceError,
3556 "RegisterRTPObserver() observer already enabled");
3557 return -1;
3558 }
3559
3560 _rtpObserverPtr = &observer;
3561 _rtpObserver = true;
3562
3563 return 0;
3564}
3565
3566int
3567Channel::DeRegisterRTPObserver()
3568{
3569 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3570 "Channel::DeRegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003571 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003572
3573 if (!_rtpObserverPtr)
3574 {
3575 _engineStatisticsPtr->SetLastError(
3576 VE_INVALID_OPERATION, kTraceWarning,
3577 "DeRegisterRTPObserver() observer already disabled");
3578 return 0;
3579 }
3580
3581 _rtpObserver = false;
3582 _rtpObserverPtr = NULL;
3583
3584 return 0;
3585}
3586
3587int
3588Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
3589{
3590 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3591 "Channel::RegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003592 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003593
3594 if (_rtcpObserverPtr)
3595 {
3596 _engineStatisticsPtr->SetLastError(
3597 VE_INVALID_OPERATION, kTraceError,
3598 "RegisterRTCPObserver() observer already enabled");
3599 return -1;
3600 }
3601
3602 _rtcpObserverPtr = &observer;
3603 _rtcpObserver = true;
3604
3605 return 0;
3606}
3607
3608int
3609Channel::DeRegisterRTCPObserver()
3610{
3611 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3612 "Channel::DeRegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003613 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003614
3615 if (!_rtcpObserverPtr)
3616 {
3617 _engineStatisticsPtr->SetLastError(
3618 VE_INVALID_OPERATION, kTraceWarning,
3619 "DeRegisterRTCPObserver() observer already disabled");
3620 return 0;
3621 }
3622
3623 _rtcpObserver = false;
3624 _rtcpObserverPtr = NULL;
3625
3626 return 0;
3627}
3628
3629int
3630Channel::SetLocalSSRC(unsigned int ssrc)
3631{
3632 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3633 "Channel::SetLocalSSRC()");
3634 if (_sending)
3635 {
3636 _engineStatisticsPtr->SetLastError(
3637 VE_ALREADY_SENDING, kTraceError,
3638 "SetLocalSSRC() already sending");
3639 return -1;
3640 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003641 if (_rtpRtcpModule->SetSSRC(ssrc) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003642 {
3643 _engineStatisticsPtr->SetLastError(
3644 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3645 "SetLocalSSRC() failed to set SSRC");
3646 return -1;
3647 }
3648 return 0;
3649}
3650
3651int
3652Channel::GetLocalSSRC(unsigned int& ssrc)
3653{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003654 ssrc = _rtpRtcpModule->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003655 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3656 VoEId(_instanceId,_channelId),
3657 "GetLocalSSRC() => ssrc=%lu", ssrc);
3658 return 0;
3659}
3660
3661int
3662Channel::GetRemoteSSRC(unsigned int& ssrc)
3663{
wu@webrtc.org822fbd82013-08-15 23:38:54 +00003664 ssrc = rtp_receiver_->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003665 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3666 VoEId(_instanceId,_channelId),
3667 "GetRemoteSSRC() => ssrc=%lu", ssrc);
3668 return 0;
3669}
3670
3671int
3672Channel::GetRemoteCSRCs(unsigned int arrCSRC[15])
3673{
3674 if (arrCSRC == NULL)
3675 {
3676 _engineStatisticsPtr->SetLastError(
3677 VE_INVALID_ARGUMENT, kTraceError,
3678 "GetRemoteCSRCs() invalid array argument");
3679 return -1;
3680 }
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003681 uint32_t arrOfCSRC[kRtpCsrcSize];
3682 int32_t CSRCs(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003683 CSRCs = _rtpRtcpModule->CSRCs(arrOfCSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00003684 if (CSRCs > 0)
3685 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003686 memcpy(arrCSRC, arrOfCSRC, CSRCs * sizeof(uint32_t));
niklase@google.com470e71d2011-07-07 08:21:25 +00003687 for (int i = 0; i < (int) CSRCs; i++)
3688 {
3689 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3690 VoEId(_instanceId, _channelId),
3691 "GetRemoteCSRCs() => arrCSRC[%d]=%lu", i, arrCSRC[i]);
3692 }
3693 } else
3694 {
3695 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3696 VoEId(_instanceId, _channelId),
3697 "GetRemoteCSRCs() => list is empty!");
3698 }
3699 return CSRCs;
3700}
3701
3702int
3703Channel::SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID)
3704{
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00003705 if (_rtpAudioProc.get() == NULL)
3706 {
3707 _rtpAudioProc.reset(AudioProcessing::Create(VoEModuleId(_instanceId,
3708 _channelId)));
3709 if (_rtpAudioProc.get() == NULL)
3710 {
3711 _engineStatisticsPtr->SetLastError(VE_NO_MEMORY, kTraceCritical,
3712 "Failed to create AudioProcessing");
3713 return -1;
3714 }
3715 }
3716
3717 if (_rtpAudioProc->level_estimator()->Enable(enable) !=
3718 AudioProcessing::kNoError)
3719 {
3720 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceWarning,
3721 "Failed to enable AudioProcessing::level_estimator()");
3722 }
3723
niklase@google.com470e71d2011-07-07 08:21:25 +00003724 _includeAudioLevelIndication = enable;
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00003725 if (enable) {
3726 rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
3727 ID);
3728 } else {
3729 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
3730 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003731 return _rtpRtcpModule->SetRTPAudioLevelIndicationStatus(enable, ID);
niklase@google.com470e71d2011-07-07 08:21:25 +00003732}
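// Enabling the audio-level indication registers the kRtpExtensionAudioLevel
// header extension (the RFC 6464 client-to-mixer level) with both the header
// parser and the RTP/RTCP module, and creates the per-channel |_rtpAudioProc|
// level estimator on first use. Sketch:
//   channel->SetRTPAudioLevelIndicationStatus(true, 1 /* extension id */);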
3733int
3734Channel::GetRTPAudioLevelIndicationStatus(bool& enabled, unsigned char& ID)
3735{
3736 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3737 VoEId(_instanceId,_channelId),
3738 "GetRTPAudioLevelIndicationStatus() => enabled=%d, ID=%u",
3739 enabled, ID);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003740 return _rtpRtcpModule->GetRTPAudioLevelIndicationStatus(enabled, ID);
niklase@google.com470e71d2011-07-07 08:21:25 +00003741}
3742
3743int
3744Channel::SetRTCPStatus(bool enable)
3745{
3746 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3747 "Channel::SetRTCPStatus()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003748 if (_rtpRtcpModule->SetRTCPStatus(enable ?
niklase@google.com470e71d2011-07-07 08:21:25 +00003749 kRtcpCompound : kRtcpOff) != 0)
3750 {
3751 _engineStatisticsPtr->SetLastError(
3752 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3753 "SetRTCPStatus() failed to set RTCP status");
3754 return -1;
3755 }
3756 return 0;
3757}
3758
3759int
3760Channel::GetRTCPStatus(bool& enabled)
3761{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003762 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003763 enabled = (method != kRtcpOff);
3764 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3765 VoEId(_instanceId,_channelId),
3766 "GetRTCPStatus() => enabled=%d", enabled);
3767 return 0;
3768}
3769
3770int
3771Channel::SetRTCP_CNAME(const char cName[256])
3772{
3773 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3774 "Channel::SetRTCP_CNAME()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003775 if (_rtpRtcpModule->SetCNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003776 {
3777 _engineStatisticsPtr->SetLastError(
3778 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3779 "SetRTCP_CNAME() failed to set RTCP CNAME");
3780 return -1;
3781 }
3782 return 0;
3783}
3784
3785int
3786Channel::GetRTCP_CNAME(char cName[256])
3787{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003788 if (_rtpRtcpModule->CNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003789 {
3790 _engineStatisticsPtr->SetLastError(
3791 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3792 "GetRTCP_CNAME() failed to retrieve RTCP CNAME");
3793 return -1;
3794 }
3795 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3796 VoEId(_instanceId, _channelId),
3797 "GetRTCP_CNAME() => cName=%s", cName);
3798 return 0;
3799}
3800
3801int
3802Channel::GetRemoteRTCP_CNAME(char cName[256])
3803{
3804 if (cName == NULL)
3805 {
3806 _engineStatisticsPtr->SetLastError(
3807 VE_INVALID_ARGUMENT, kTraceError,
3808 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
3809 return -1;
3810 }
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00003811 char cname[RTCP_CNAME_SIZE];
wu@webrtc.org822fbd82013-08-15 23:38:54 +00003812 const uint32_t remoteSSRC = rtp_receiver_->SSRC();
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003813 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003814 {
3815 _engineStatisticsPtr->SetLastError(
3816 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
3817 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
3818 return -1;
3819 }
3820 strcpy(cName, cname);
3821 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3822 VoEId(_instanceId, _channelId),
3823 "GetRemoteRTCP_CNAME() => cName=%s", cName);
3824 return 0;
3825}
3826
3827int
3828Channel::GetRemoteRTCPData(
3829 unsigned int& NTPHigh,
3830 unsigned int& NTPLow,
3831 unsigned int& timestamp,
3832 unsigned int& playoutTimestamp,
3833 unsigned int* jitter,
3834 unsigned short* fractionLost)
3835{
3836 // --- Information from sender info in received Sender Reports
3837
3838 RTCPSenderInfo senderInfo;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003839 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003840 {
3841 _engineStatisticsPtr->SetLastError(
3842 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003843 "GetRemoteRTCPData() failed to retrieve sender info for remote "
niklase@google.com470e71d2011-07-07 08:21:25 +00003844 "side");
3845 return -1;
3846 }
3847
3848    // We only use 12 out of the 20 bytes in the sender info (the packet and
3849    // octet counts are ignored).
3850 NTPHigh = senderInfo.NTPseconds;
3851 NTPLow = senderInfo.NTPfraction;
3852 timestamp = senderInfo.RTPtimeStamp;
3853
3854 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3855 VoEId(_instanceId, _channelId),
3856 "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
3857 "timestamp=%lu",
3858 NTPHigh, NTPLow, timestamp);
3859
3860 // --- Locally derived information
3861
3862 // This value is updated on each incoming RTCP packet (0 when no packet
3863 // has been received)
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003864 playoutTimestamp = playout_timestamp_rtcp_;
niklase@google.com470e71d2011-07-07 08:21:25 +00003865
3866 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3867 VoEId(_instanceId, _channelId),
3868 "GetRemoteRTCPData() => playoutTimestamp=%lu",
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003869 playout_timestamp_rtcp_);
niklase@google.com470e71d2011-07-07 08:21:25 +00003870
3871 if (NULL != jitter || NULL != fractionLost)
3872 {
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003873 // Get all RTCP receiver report blocks that have been received on this
3874 // channel. If we receive RTP packets from a remote source we know the
3875    // remote SSRC and use the report block from that source.
3876 // Otherwise use the first report block.
3877 std::vector<RTCPReportBlock> remote_stats;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003878 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003879 remote_stats.empty()) {
3880 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3881 VoEId(_instanceId, _channelId),
3882 "GetRemoteRTCPData() failed to measure statistics due"
3883 " to lack of received RTP and/or RTCP packets");
3884 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003885 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003886
wu@webrtc.org822fbd82013-08-15 23:38:54 +00003887 uint32_t remoteSSRC = rtp_receiver_->SSRC();
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003888 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
3889 for (; it != remote_stats.end(); ++it) {
3890 if (it->remoteSSRC == remoteSSRC)
3891 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003892 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003893
3894 if (it == remote_stats.end()) {
3895 // If we have not received any RTCP packets from this SSRC it probably
3896 // means that we have not received any RTP packets.
3897 // Use the first received report block instead.
3898 it = remote_stats.begin();
3899 remoteSSRC = it->remoteSSRC;
niklase@google.com470e71d2011-07-07 08:21:25 +00003900 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003901
xians@webrtc.org79af7342012-01-31 12:22:14 +00003902 if (jitter) {
3903 *jitter = it->jitter;
3904 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3905 VoEId(_instanceId, _channelId),
3906 "GetRemoteRTCPData() => jitter = %lu", *jitter);
3907 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003908
xians@webrtc.org79af7342012-01-31 12:22:14 +00003909 if (fractionLost) {
3910 *fractionLost = it->fractionLost;
3911 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3912 VoEId(_instanceId, _channelId),
3913 "GetRemoteRTCPData() => fractionLost = %lu",
3914 *fractionLost);
3915 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003916 }
3917 return 0;
3918}
3919
3920int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003921Channel::SendApplicationDefinedRTCPPacket(unsigned char subType,
niklase@google.com470e71d2011-07-07 08:21:25 +00003922 unsigned int name,
3923 const char* data,
3924 unsigned short dataLengthInBytes)
3925{
3926 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3927 "Channel::SendApplicationDefinedRTCPPacket()");
3928 if (!_sending)
3929 {
3930 _engineStatisticsPtr->SetLastError(
3931 VE_NOT_SENDING, kTraceError,
3932 "SendApplicationDefinedRTCPPacket() not sending");
3933 return -1;
3934 }
3935 if (NULL == data)
3936 {
3937 _engineStatisticsPtr->SetLastError(
3938 VE_INVALID_ARGUMENT, kTraceError,
3939 "SendApplicationDefinedRTCPPacket() invalid data value");
3940 return -1;
3941 }
3942 if (dataLengthInBytes % 4 != 0)
3943 {
3944 _engineStatisticsPtr->SetLastError(
3945 VE_INVALID_ARGUMENT, kTraceError,
3946 "SendApplicationDefinedRTCPPacket() invalid length value");
3947 return -1;
3948 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003949 RTCPMethod status = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003950 if (status == kRtcpOff)
3951 {
3952 _engineStatisticsPtr->SetLastError(
3953 VE_RTCP_ERROR, kTraceError,
3954 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
3955 return -1;
3956 }
3957
3958 // Create and schedule the RTCP APP packet for transmission
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003959 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
niklase@google.com470e71d2011-07-07 08:21:25 +00003960 subType,
3961 name,
3962 (const unsigned char*) data,
3963 dataLengthInBytes) != 0)
3964 {
3965 _engineStatisticsPtr->SetLastError(
3966 VE_SEND_ERROR, kTraceError,
3967 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
3968 return -1;
3969 }
3970 return 0;
3971}
3972
3973int
3974Channel::GetRTPStatistics(
3975 unsigned int& averageJitterMs,
3976 unsigned int& maxJitterMs,
3977 unsigned int& discardedPackets)
3978{
niklase@google.com470e71d2011-07-07 08:21:25 +00003979 // The jitter statistics is updated for each received RTP packet and is
3980 // based on received packets.
stefan@webrtc.org286fe0b2013-08-21 20:58:21 +00003981 StreamStatistician::Statistics statistics;
3982 StreamStatistician* statistician =
3983 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
3984 if (!statistician || !statistician->GetStatistics(
wu@webrtc.org822fbd82013-08-15 23:38:54 +00003985 &statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
3986 _engineStatisticsPtr->SetLastError(
3987 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
3988 "GetRTPStatistics() failed to read RTP statistics from the "
3989 "RTP/RTCP module");
niklase@google.com470e71d2011-07-07 08:21:25 +00003990 }
3991
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003992 const int32_t playoutFrequency =
niklase@google.com470e71d2011-07-07 08:21:25 +00003993 _audioCodingModule.PlayoutFrequency();
3994 if (playoutFrequency > 0)
3995 {
3996 // Scale RTP statistics given the current playout frequency
wu@webrtc.org822fbd82013-08-15 23:38:54 +00003997 maxJitterMs = statistics.max_jitter / (playoutFrequency / 1000);
3998 averageJitterMs = statistics.jitter / (playoutFrequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00003999 }
4000
4001 discardedPackets = _numberOfDiscardedPackets;
4002
4003 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4004 VoEId(_instanceId, _channelId),
4005 "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004006 " discardedPackets = %lu)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004007 averageJitterMs, maxJitterMs, discardedPackets);
4008 return 0;
4009}
4010
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00004011int Channel::GetRemoteRTCPSenderInfo(SenderInfo* sender_info) {
4012 if (sender_info == NULL) {
4013 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
4014 "GetRemoteRTCPSenderInfo() invalid sender_info.");
4015 return -1;
4016 }
4017
4018 // Get the sender info from the latest received RTCP Sender Report.
4019 RTCPSenderInfo rtcp_sender_info;
4020 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_sender_info) != 0) {
4021 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4022 "GetRemoteRTCPSenderInfo() failed to read RTCP SR sender info.");
4023 return -1;
4024 }
4025
4026 sender_info->NTP_timestamp_high = rtcp_sender_info.NTPseconds;
4027 sender_info->NTP_timestamp_low = rtcp_sender_info.NTPfraction;
4028 sender_info->RTP_timestamp = rtcp_sender_info.RTPtimeStamp;
4029 sender_info->sender_packet_count = rtcp_sender_info.sendPacketCount;
4030 sender_info->sender_octet_count = rtcp_sender_info.sendOctetCount;
4031 return 0;
4032}
4033
4034int Channel::GetRemoteRTCPReportBlocks(
4035 std::vector<ReportBlock>* report_blocks) {
4036 if (report_blocks == NULL) {
4037 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
4038 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
4039 return -1;
4040 }
4041
4042 // Get the report blocks from the latest received RTCP Sender or Receiver
4043 // Report. Each element in the vector contains the sender's SSRC and a
4044 // report block according to RFC 3550.
4045 std::vector<RTCPReportBlock> rtcp_report_blocks;
4046 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
4047 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4048 "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
4049 return -1;
4050 }
4051
4052 if (rtcp_report_blocks.empty())
4053 return 0;
4054
4055 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
4056 for (; it != rtcp_report_blocks.end(); ++it) {
4057 ReportBlock report_block;
4058 report_block.sender_SSRC = it->remoteSSRC;
4059 report_block.source_SSRC = it->sourceSSRC;
4060 report_block.fraction_lost = it->fractionLost;
4061 report_block.cumulative_num_packets_lost = it->cumulativeLost;
4062 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
4063 report_block.interarrival_jitter = it->jitter;
4064 report_block.last_SR_timestamp = it->lastSR;
4065 report_block.delay_since_last_SR = it->delaySinceLastSR;
4066 report_blocks->push_back(report_block);
4067 }
4068 return 0;
4069}
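// Each ReportBlock filled in above corresponds to one RFC 3550 report block,
// keyed by the reporting sender's SSRC. Caller sketch (illustrative only):
//   std::vector<ReportBlock> blocks;
//   if (channel->GetRemoteRTCPReportBlocks(&blocks) == 0) {
//     for (size_t i = 0; i < blocks.size(); ++i)
//       printf("SSRC %u: %u packets lost\n", blocks[i].source_SSRC,
//              blocks[i].cumulative_num_packets_lost);
//   }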
4070
niklase@google.com470e71d2011-07-07 08:21:25 +00004071int
4072Channel::GetRTPStatistics(CallStatistics& stats)
4073{
niklase@google.com470e71d2011-07-07 08:21:25 +00004074 // --- Part one of the final structure (four values)
4075
4076 // The jitter statistics is updated for each received RTP packet and is
4077 // based on received packets.
stefan@webrtc.org286fe0b2013-08-21 20:58:21 +00004078 StreamStatistician::Statistics statistics;
4079 StreamStatistician* statistician =
4080 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
4081 if (!statistician || !statistician->GetStatistics(
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004082 &statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
4083 _engineStatisticsPtr->SetLastError(
4084 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
4085 "GetRTPStatistics() failed to read RTP statistics from the "
4086 "RTP/RTCP module");
niklase@google.com470e71d2011-07-07 08:21:25 +00004087 }
4088
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004089 stats.fractionLost = statistics.fraction_lost;
4090 stats.cumulativeLost = statistics.cumulative_lost;
4091 stats.extendedMax = statistics.extended_max_sequence_number;
4092 stats.jitterSamples = statistics.jitter;
niklase@google.com470e71d2011-07-07 08:21:25 +00004093
4094 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4095 VoEId(_instanceId, _channelId),
4096 "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004097 " extendedMax=%lu, jitterSamples=%li)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004098 stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
4099 stats.jitterSamples);
4100
4101 // --- Part two of the final structure (one value)
4102
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004103 uint16_t RTT(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004104 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00004105 if (method == kRtcpOff)
4106 {
4107 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4108 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004109 "GetRTPStatistics() RTCP is disabled => valid RTT "
niklase@google.com470e71d2011-07-07 08:21:25 +00004110 "measurements cannot be retrieved");
4111 } else
4112 {
4113 // The remote SSRC will be zero if no RTP packet has been received.
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004114 uint32_t remoteSSRC = rtp_receiver_->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004115 if (remoteSSRC > 0)
4116 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004117 uint16_t avgRTT(0);
4118 uint16_t maxRTT(0);
4119 uint16_t minRTT(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004120
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004121 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT)
niklase@google.com470e71d2011-07-07 08:21:25 +00004122 != 0)
4123 {
4124 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4125 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004126 "GetRTPStatistics() failed to retrieve RTT from "
niklase@google.com470e71d2011-07-07 08:21:25 +00004127 "the RTP/RTCP module");
4128 }
4129 } else
4130 {
4131 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4132 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004133 "GetRTPStatistics() failed to measure RTT since no "
niklase@google.com470e71d2011-07-07 08:21:25 +00004134 "RTP packets have been received yet");
4135 }
4136 }
4137
4138 stats.rttMs = static_cast<int> (RTT);
4139
4140 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4141 VoEId(_instanceId, _channelId),
4142 "GetRTPStatistics() => rttMs=%d", stats.rttMs);
4143
4144 // --- Part three of the final structure (four values)
4145
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004146 uint32_t bytesSent(0);
4147 uint32_t packetsSent(0);
4148 uint32_t bytesReceived(0);
4149 uint32_t packetsReceived(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004150
stefan@webrtc.org286fe0b2013-08-21 20:58:21 +00004151 if (statistician) {
4152 statistician->GetDataCounters(&bytesReceived, &packetsReceived);
4153 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004154
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004155 if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004156 &packetsSent) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004157 {
4158 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4159 VoEId(_instanceId, _channelId),
4160 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004161 " output will not be complete");
niklase@google.com470e71d2011-07-07 08:21:25 +00004162 }
4163
4164 stats.bytesSent = bytesSent;
4165 stats.packetsSent = packetsSent;
4166 stats.bytesReceived = bytesReceived;
4167 stats.packetsReceived = packetsReceived;
4168
4169 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4170 VoEId(_instanceId, _channelId),
4171 "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004172 " bytesReceived=%d, packetsReceived=%d)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004173 stats.bytesSent, stats.packetsSent, stats.bytesReceived,
4174 stats.packetsReceived);
4175
4176 return 0;
4177}
4178
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004179int Channel::SetFECStatus(bool enable, int redPayloadtype) {
4180 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4181 "Channel::SetFECStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00004182
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00004183 if (enable) {
4184 if (redPayloadtype < 0 || redPayloadtype > 127) {
4185 _engineStatisticsPtr->SetLastError(
4186 VE_PLTYPE_ERROR, kTraceError,
4187 "SetFECStatus() invalid RED payload type");
4188 return -1;
4189 }
4190
4191 if (SetRedPayloadType(redPayloadtype) < 0) {
4192 _engineStatisticsPtr->SetLastError(
4193 VE_CODEC_ERROR, kTraceError,
4194          "SetFECStatus() failed to register the RED payload type in the ACM");
4195 return -1;
4196 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004197 }
niklase@google.com470e71d2011-07-07 08:21:25 +00004198
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004199 if (_audioCodingModule.SetFECStatus(enable) != 0) {
4200 _engineStatisticsPtr->SetLastError(
4201 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4202 "SetFECStatus() failed to set FEC state in the ACM");
4203 return -1;
4204 }
4205 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004206}
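// FEC for audio is carried as RED-encapsulated redundancy (RFC 2198): enabling
// it first registers the RED payload type via the SetRedPayloadType() helper
// and then switches the ACM's FEC state. Sketch:
//   channel->SetFECStatus(true, 127 /* RED payload type */);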
4207
4208int
4209Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
4210{
4211 enabled = _audioCodingModule.FECStatus();
4212 if (enabled)
4213 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004214 int8_t payloadType(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004215 if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004216 {
4217 _engineStatisticsPtr->SetLastError(
4218 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4219 "GetFECStatus() failed to retrieve RED PT from RTP/RTCP "
4220 "module");
4221 return -1;
4222 }
        redPayloadtype = payloadType;  // Report the registered RED payload type.
4223        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4224 VoEId(_instanceId, _channelId),
4225 "GetFECStatus() => enabled=%d, redPayloadtype=%d",
4226 enabled, redPayloadtype);
4227 return 0;
4228 }
4229 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4230 VoEId(_instanceId, _channelId),
4231 "GetFECStatus() => enabled=%d", enabled);
4232 return 0;
4233}
4234
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004235void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
4236 // None of these functions can fail.
4237 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00004238 rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
4239 rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00004240 if (enable)
4241 _audioCodingModule.EnableNack(maxNumberOfPackets);
4242 else
4243 _audioCodingModule.DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004244}
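// SetNACKStatus() touches three components at once: the RTP/RTCP module keeps
// a history of up to |maxNumberOfPackets| sent packets for retransmission, the
// receive statistics widen their reordering threshold to match, and the ACM is
// told which NACK list size to maintain. Sketch:
//   channel->SetNACKStatus(true, 250);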
4245
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00004246// Called when we are missing one or more packets.
4247int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004248 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
4249}
4250
niklase@google.com470e71d2011-07-07 08:21:25 +00004251int
niklase@google.com470e71d2011-07-07 08:21:25 +00004252Channel::StartRTPDump(const char fileNameUTF8[1024],
4253 RTPDirections direction)
4254{
4255 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4256 "Channel::StartRTPDump()");
4257 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4258 {
4259 _engineStatisticsPtr->SetLastError(
4260 VE_INVALID_ARGUMENT, kTraceError,
4261 "StartRTPDump() invalid RTP direction");
4262 return -1;
4263 }
4264 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4265 &_rtpDumpIn : &_rtpDumpOut;
4266 if (rtpDumpPtr == NULL)
4267 {
4268 assert(false);
4269 return -1;
4270 }
4271 if (rtpDumpPtr->IsActive())
4272 {
4273 rtpDumpPtr->Stop();
4274 }
4275 if (rtpDumpPtr->Start(fileNameUTF8) != 0)
4276 {
4277 _engineStatisticsPtr->SetLastError(
4278 VE_BAD_FILE, kTraceError,
4279 "StartRTPDump() failed to create file");
4280 return -1;
4281 }
4282 return 0;
4283}
4284
4285int
4286Channel::StopRTPDump(RTPDirections direction)
4287{
4288 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4289 "Channel::StopRTPDump()");
4290 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4291 {
4292 _engineStatisticsPtr->SetLastError(
4293 VE_INVALID_ARGUMENT, kTraceError,
4294 "StopRTPDump() invalid RTP direction");
4295 return -1;
4296 }
4297 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4298 &_rtpDumpIn : &_rtpDumpOut;
4299 if (rtpDumpPtr == NULL)
4300 {
4301 assert(false);
4302 return -1;
4303 }
4304 if (!rtpDumpPtr->IsActive())
4305 {
4306 return 0;
4307 }
4308 return rtpDumpPtr->Stop();
4309}
4310
4311bool
4312Channel::RTPDumpIsActive(RTPDirections direction)
4313{
4314 if ((direction != kRtpIncoming) &&
4315 (direction != kRtpOutgoing))
4316 {
4317 _engineStatisticsPtr->SetLastError(
4318 VE_INVALID_ARGUMENT, kTraceError,
4319 "RTPDumpIsActive() invalid RTP direction");
4320 return false;
4321 }
4322 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4323 &_rtpDumpIn : &_rtpDumpOut;
4324 return rtpDumpPtr->IsActive();
4325}
4326
4327int
4328Channel::InsertExtraRTPPacket(unsigned char payloadType,
4329 bool markerBit,
4330 const char* payloadData,
4331 unsigned short payloadSize)
4332{
4333 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4334 "Channel::InsertExtraRTPPacket()");
4335 if (payloadType > 127)
4336 {
4337 _engineStatisticsPtr->SetLastError(
4338 VE_INVALID_PLTYPE, kTraceError,
4339 "InsertExtraRTPPacket() invalid payload type");
4340 return -1;
4341 }
4342 if (payloadData == NULL)
4343 {
4344 _engineStatisticsPtr->SetLastError(
4345 VE_INVALID_ARGUMENT, kTraceError,
4346 "InsertExtraRTPPacket() invalid payload data");
4347 return -1;
4348 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004349 if (payloadSize > _rtpRtcpModule->MaxDataPayloadLength())
niklase@google.com470e71d2011-07-07 08:21:25 +00004350 {
4351 _engineStatisticsPtr->SetLastError(
4352 VE_INVALID_ARGUMENT, kTraceError,
4353 "InsertExtraRTPPacket() invalid payload size");
4354 return -1;
4355 }
4356 if (!_sending)
4357 {
4358 _engineStatisticsPtr->SetLastError(
4359 VE_NOT_SENDING, kTraceError,
4360 "InsertExtraRTPPacket() not sending");
4361 return -1;
4362 }
4363
4364 // Create extra RTP packet by calling RtpRtcp::SendOutgoingData().
4365 // Transport::SendPacket() will be called by the module when the RTP packet
4366 // is created.
4367 // The call to SendOutgoingData() does *not* modify the timestamp and
4368 // payloadtype to ensure that the RTP module generates a valid RTP packet
4369 // (user might utilize a non-registered payload type).
4370 // The marker bit and payload type will be replaced just before the actual
4371 // transmission, i.e., the actual modification is done *after* the RTP
4372 // module has delivered its RTP packet back to the VoE.
4373 // We will use the stored values above when the packet is modified
4374 // (see Channel::SendPacket()).
4375
4376 _extraPayloadType = payloadType;
4377 _extraMarkerBit = markerBit;
4378 _insertExtraRTPPacket = true;
4379
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004380 if (_rtpRtcpModule->SendOutgoingData(kAudioFrameSpeech,
niklase@google.com470e71d2011-07-07 08:21:25 +00004381 _lastPayloadType,
4382 _lastLocalTimeStamp,
stefan@webrtc.orgddfdfed2012-07-03 13:21:22 +00004383 // Leaving the time when this frame was
4384 // received from the capture device as
4385 // undefined for voice for now.
4386 -1,
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004387 (const uint8_t*) payloadData,
niklase@google.com470e71d2011-07-07 08:21:25 +00004388 payloadSize) != 0)
4389 {
4390 _engineStatisticsPtr->SetLastError(
4391 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4392 "InsertExtraRTPPacket() failed to send extra RTP packet");
4393 return -1;
4394 }
4395
4396 return 0;
4397}
4398
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004399uint32_t
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004400Channel::Demultiplex(const AudioFrame& audioFrame)
niklase@google.com470e71d2011-07-07 08:21:25 +00004401{
4402 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004403 "Channel::Demultiplex()");
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004404 _audioFrame.CopyFrom(audioFrame);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004405 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004406 return 0;
4407}
4408
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00004409// TODO(xians): This method duplicates a fair amount of code from
 4410// TransmitMixer::GenerateAudioFrame(); refactor the two methods to reduce
 4411// the duplication.
4412void Channel::Demultiplex(const int16_t* audio_data,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00004413 int sample_rate,
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00004414 int number_of_frames,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00004415 int number_of_channels) {
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00004416 // The highest sample rate that WebRTC supports for mono audio is 96kHz.
4417 static const int kMaxNumberOfFrames = 960;
4418 assert(number_of_frames <= kMaxNumberOfFrames);
4419
 4420 // Get the send codec; it determines the downmixing and resampling below.
4421 CodecInst codec;
4422 GetSendCodec(codec);
4423 assert(codec.channels == 1 || codec.channels == 2);
4424 int support_sample_rate = std::min(32000,
4425 std::min(sample_rate, codec.plfreq));
4426
4427 // Downmix the data to mono if needed.
4428 const int16_t* audio_ptr = audio_data;
4429 if (number_of_channels == 2 && codec.channels == 1) {
4430 if (!mono_recording_audio_.get())
4431 mono_recording_audio_.reset(new int16_t[kMaxNumberOfFrames]);
4432
4433 AudioFrameOperations::StereoToMono(audio_data, number_of_frames,
4434 mono_recording_audio_.get());
4435 audio_ptr = mono_recording_audio_.get();
4436 }
4437
4438 // Resample the data to the sample rate that the codec is using.
4439 if (input_resampler_.InitializeIfNeeded(sample_rate,
4440 support_sample_rate,
4441 codec.channels)) {
4442 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
4443 "Channel::Demultiplex() unable to resample");
4444 return;
4445 }
4446
4447 int out_length = input_resampler_.Resample(audio_ptr,
4448 number_of_frames * codec.channels,
4449 _audioFrame.data_,
4450 AudioFrame::kMaxDataSizeSamples);
4451 if (out_length == -1) {
4452 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
4453 "Channel::Demultiplex() resampling failed");
4454 return;
4455 }
4456
4457 _audioFrame.samples_per_channel_ = out_length / codec.channels;
4458 _audioFrame.timestamp_ = -1;
4459 _audioFrame.sample_rate_hz_ = support_sample_rate;
4460 _audioFrame.speech_type_ = AudioFrame::kNormalSpeech;
4461 _audioFrame.vad_activity_ = AudioFrame::kVadUnknown;
4462 _audioFrame.num_channels_ = codec.channels;
4463 _audioFrame.id_ = _channelId;
4464}
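// Worked example of the clamping above, with illustrative rates: a 48000 Hz
// stereo capture stream and a mono 16000 Hz send codec give
// support_sample_rate = min(32000, min(48000, 16000)) = 16000, so the input
// is downmixed to mono and resampled from 48000 Hz to 16000 Hz. With a
// 48000 Hz Opus send codec the result is min(32000, min(48000, 48000)) =
// 32000, i.e. the rate is capped at 32 kHz.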
4465
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004466uint32_t
xians@google.com0b0665a2011-08-08 08:18:44 +00004467Channel::PrepareEncodeAndSend(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004468{
4469 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4470 "Channel::PrepareEncodeAndSend()");
4471
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004472 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004473 {
4474 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4475 "Channel::PrepareEncodeAndSend() invalid audio frame");
4476 return -1;
4477 }
4478
4479 if (_inputFilePlaying)
4480 {
4481 MixOrReplaceAudioWithFile(mixingFrequency);
4482 }
4483
4484 if (_mute)
4485 {
4486 AudioFrameOperations::Mute(_audioFrame);
4487 }
4488
4489 if (_inputExternalMedia)
4490 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004491 CriticalSectionScoped cs(&_callbackCritSect);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004492 const bool isStereo = (_audioFrame.num_channels_ == 2);
niklase@google.com470e71d2011-07-07 08:21:25 +00004493 if (_inputExternalMediaCallbackPtr)
4494 {
4495 _inputExternalMediaCallbackPtr->Process(
4496 _channelId,
4497 kRecordingPerChannel,
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004498 (int16_t*)_audioFrame.data_,
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004499 _audioFrame.samples_per_channel_,
4500 _audioFrame.sample_rate_hz_,
niklase@google.com470e71d2011-07-07 08:21:25 +00004501 isStereo);
4502 }
4503 }
4504
4505 InsertInbandDtmfTone();
4506
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004507 if (_includeAudioLevelIndication)
4508 {
4509 assert(_rtpAudioProc.get() != NULL);
4510
4511 // Check if settings need to be updated.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004512 if (_rtpAudioProc->sample_rate_hz() != _audioFrame.sample_rate_hz_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004513 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004514 if (_rtpAudioProc->set_sample_rate_hz(_audioFrame.sample_rate_hz_) !=
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004515 AudioProcessing::kNoError)
4516 {
4517 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4518 VoEId(_instanceId, _channelId),
4519 "Error setting AudioProcessing sample rate");
4520 return -1;
4521 }
4522 }
4523
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004524 if (_rtpAudioProc->num_input_channels() != _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004525 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004526 if (_rtpAudioProc->set_num_channels(_audioFrame.num_channels_,
4527 _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004528 != AudioProcessing::kNoError)
4529 {
4530 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4531 VoEId(_instanceId, _channelId),
4532 "Error setting AudioProcessing channels");
4533 return -1;
4534 }
4535 }
4536
4537 // Performs level analysis only; does not affect the signal.
4538 _rtpAudioProc->ProcessStream(&_audioFrame);
4539 }
4540
niklase@google.com470e71d2011-07-07 08:21:25 +00004541 return 0;
4542}
4543
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004544uint32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00004545Channel::EncodeAndSend()
4546{
4547 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4548 "Channel::EncodeAndSend()");
4549
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004550 assert(_audioFrame.num_channels_ <= 2);
4551 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004552 {
4553 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4554 "Channel::EncodeAndSend() invalid audio frame");
4555 return -1;
4556 }
4557
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004558 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004559
 4560 // --- Add 10ms of raw (PCM) audio data to the encoder.
4561
4562 // The ACM resamples internally.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004563 _audioFrame.timestamp_ = _timeStamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00004564 if (_audioCodingModule.Add10MsData((AudioFrame&)_audioFrame) != 0)
4565 {
4566 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
4567 "Channel::EncodeAndSend() ACM encoding failed");
4568 return -1;
4569 }
4570
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004571 _timeStamp += _audioFrame.samples_per_channel_;
niklase@google.com470e71d2011-07-07 08:21:25 +00004572
4573 // --- Encode if complete frame is ready
4574
4575 // This call will trigger AudioPacketizationCallback::SendData if encoding
4576 // is done and payload is ready for packetization and transmission.
4577 return _audioCodingModule.Process();
4578}
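// Sketch of the order in which the transmit side is assumed to drive the
// methods above for every 10 ms block of captured audio (|channel|,
// |mixed_frame| and |mixing_freq_hz| are placeholders):
//
//   channel->Demultiplex(mixed_frame);             // copy the capture frame
//   channel->PrepareEncodeAndSend(mixing_freq_hz); // file mix, mute, DTMF,
//                                                  // level analysis
//   channel->EncodeAndSend();                      // feed the ACM; SendData()
//                                                  // fires when a full frame
//                                                  // has been encoded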
4579
4580int Channel::RegisterExternalMediaProcessing(
4581 ProcessingTypes type,
4582 VoEMediaProcess& processObject)
4583{
4584 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4585 "Channel::RegisterExternalMediaProcessing()");
4586
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004587 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004588
4589 if (kPlaybackPerChannel == type)
4590 {
4591 if (_outputExternalMediaCallbackPtr)
4592 {
4593 _engineStatisticsPtr->SetLastError(
4594 VE_INVALID_OPERATION, kTraceError,
4595 "Channel::RegisterExternalMediaProcessing() "
4596 "output external media already enabled");
4597 return -1;
4598 }
4599 _outputExternalMediaCallbackPtr = &processObject;
4600 _outputExternalMedia = true;
4601 }
4602 else if (kRecordingPerChannel == type)
4603 {
4604 if (_inputExternalMediaCallbackPtr)
4605 {
4606 _engineStatisticsPtr->SetLastError(
4607 VE_INVALID_OPERATION, kTraceError,
4608 "Channel::RegisterExternalMediaProcessing() "
 4609 "input external media already enabled");
4610 return -1;
4611 }
4612 _inputExternalMediaCallbackPtr = &processObject;
4613 _inputExternalMedia = true;
4614 }
4615 return 0;
4616}
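// Sketch of a VoEMediaProcess implementation that could be registered with
// the method above. The Process() parameters mirror the call sites in this
// file; see voe_external_media.h for the exact declaration.
//
//   class HalveVolume : public VoEMediaProcess {
//    public:
//     virtual void Process(int channel, ProcessingTypes type,
//                          int16_t audio10ms[], int length,
//                          int samplingFreq, bool isStereo) {
//       // |length| is samples per channel; the buffer is interleaved.
//       const int samples = isStereo ? 2 * length : length;
//       for (int i = 0; i < samples; ++i)
//         audio10ms[i] /= 2;
//     }
//   };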
4617
4618int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
4619{
4620 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4621 "Channel::DeRegisterExternalMediaProcessing()");
4622
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004623 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004624
4625 if (kPlaybackPerChannel == type)
4626 {
4627 if (!_outputExternalMediaCallbackPtr)
4628 {
4629 _engineStatisticsPtr->SetLastError(
4630 VE_INVALID_OPERATION, kTraceWarning,
4631 "Channel::DeRegisterExternalMediaProcessing() "
4632 "output external media already disabled");
4633 return 0;
4634 }
4635 _outputExternalMedia = false;
4636 _outputExternalMediaCallbackPtr = NULL;
4637 }
4638 else if (kRecordingPerChannel == type)
4639 {
4640 if (!_inputExternalMediaCallbackPtr)
4641 {
4642 _engineStatisticsPtr->SetLastError(
4643 VE_INVALID_OPERATION, kTraceWarning,
4644 "Channel::DeRegisterExternalMediaProcessing() "
4645 "input external media already disabled");
4646 return 0;
4647 }
4648 _inputExternalMedia = false;
4649 _inputExternalMediaCallbackPtr = NULL;
4650 }
4651
4652 return 0;
4653}
4654
roosa@google.com1b60ceb2012-12-12 23:00:29 +00004655int Channel::SetExternalMixing(bool enabled) {
4656 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4657 "Channel::SetExternalMixing(enabled=%d)", enabled);
4658
4659 if (_playing)
4660 {
4661 _engineStatisticsPtr->SetLastError(
4662 VE_INVALID_OPERATION, kTraceError,
4663 "Channel::SetExternalMixing() "
4664 "external mixing cannot be changed while playing.");
4665 return -1;
4666 }
4667
4668 _externalMixing = enabled;
4669
4670 return 0;
4671}
4672
niklase@google.com470e71d2011-07-07 08:21:25 +00004673int
4674Channel::ResetRTCPStatistics()
4675{
4676 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4677 "Channel::ResetRTCPStatistics()");
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004678 uint32_t remoteSSRC(0);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004679 remoteSSRC = rtp_receiver_->SSRC();
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004680 return _rtpRtcpModule->ResetRTT(remoteSSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00004681}
4682
4683int
4684Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
4685{
4686 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4687 "Channel::GetRoundTripTimeSummary()");
 4688 // Override the default module outputs when RTCP is disabled. This keeps
 4689 // the behavior backward compatible with older VoiceEngine versions that
 4690 // did not use the RTP/RTCP module.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004691 if (!_rtpRtcpModule->RTCP())
niklase@google.com470e71d2011-07-07 08:21:25 +00004692 {
4693 delaysMs.min = -1;
4694 delaysMs.max = -1;
4695 delaysMs.average = -1;
4696 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4697 "Channel::GetRoundTripTimeSummary() RTCP is disabled =>"
4698 " valid RTT measurements cannot be retrieved");
4699 return 0;
4700 }
4701
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004702 uint32_t remoteSSRC;
4703 uint16_t RTT;
4704 uint16_t avgRTT;
4705 uint16_t maxRTT;
4706 uint16_t minRTT;
niklase@google.com470e71d2011-07-07 08:21:25 +00004707 // The remote SSRC will be zero if no RTP packet has been received.
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004708 remoteSSRC = rtp_receiver_->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004709 if (remoteSSRC == 0)
4710 {
4711 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4712 "Channel::GetRoundTripTimeSummary() unable to measure RTT"
4713 " since no RTP packet has been received yet");
4714 }
4715
4716 // Retrieve RTT statistics from the RTP/RTCP module for the specified
4717 // channel and SSRC. The SSRC is required to parse out the correct source
4718 // in conference scenarios.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004719 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT,&maxRTT) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004720 {
4721 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4722 "GetRoundTripTimeSummary unable to retrieve RTT values"
4723 " from the RTCP layer");
4724 delaysMs.min = -1; delaysMs.max = -1; delaysMs.average = -1;
4725 }
4726 else
4727 {
4728 delaysMs.min = minRTT;
4729 delaysMs.max = maxRTT;
4730 delaysMs.average = avgRTT;
4731 }
4732 return 0;
4733}
4734
4735int
4736Channel::GetNetworkStatistics(NetworkStatistics& stats)
4737{
4738 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4739 "Channel::GetNetworkStatistics()");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00004740 ACMNetworkStatistics acm_stats;
4741 int return_value = _audioCodingModule.NetworkStatistics(&acm_stats);
4742 if (return_value >= 0) {
4743 memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
4744 }
4745 return return_value;
niklase@google.com470e71d2011-07-07 08:21:25 +00004746}
4747
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004748bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
4749 int* playout_buffer_delay_ms) const {
4750 if (_average_jitter_buffer_delay_us == 0) {
niklase@google.com470e71d2011-07-07 08:21:25 +00004751 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004752 "Channel::GetDelayEstimate() no valid estimate.");
4753 return false;
4754 }
4755 *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 +
4756 _recPacketDelayMs;
4757 *playout_buffer_delay_ms = playout_delay_ms_;
4758 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4759 "Channel::GetDelayEstimate()");
4760 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00004761}
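// Numeric example of the conversion above, with illustrative values: if
// _average_jitter_buffer_delay_us is 47600 and _recPacketDelayMs is 20, the
// reported jitter buffer delay is (47600 + 500) / 1000 + 20 = 68 ms, while
// the playout buffer delay is the last delay read from the audio device.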
4762
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +00004763int Channel::SetInitialPlayoutDelay(int delay_ms)
4764{
4765 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4766 "Channel::SetInitialPlayoutDelay()");
4767 if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
4768 (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
4769 {
4770 _engineStatisticsPtr->SetLastError(
4771 VE_INVALID_ARGUMENT, kTraceError,
4772 "SetInitialPlayoutDelay() invalid min delay");
4773 return -1;
4774 }
4775 if (_audioCodingModule.SetInitialPlayoutDelay(delay_ms) != 0)
4776 {
4777 _engineStatisticsPtr->SetLastError(
4778 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4779 "SetInitialPlayoutDelay() failed to set min playout delay");
4780 return -1;
4781 }
4782 return 0;
4783}
4784
4785
niklase@google.com470e71d2011-07-07 08:21:25 +00004786int
4787Channel::SetMinimumPlayoutDelay(int delayMs)
4788{
4789 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4790 "Channel::SetMinimumPlayoutDelay()");
4791 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
4792 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
4793 {
4794 _engineStatisticsPtr->SetLastError(
4795 VE_INVALID_ARGUMENT, kTraceError,
4796 "SetMinimumPlayoutDelay() invalid min delay");
4797 return -1;
4798 }
4799 if (_audioCodingModule.SetMinimumPlayoutDelay(delayMs) != 0)
4800 {
4801 _engineStatisticsPtr->SetLastError(
4802 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4803 "SetMinimumPlayoutDelay() failed to set min playout delay");
4804 return -1;
4805 }
4806 return 0;
4807}
4808
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004809void Channel::UpdatePlayoutTimestamp(bool rtcp) {
4810 uint32_t playout_timestamp = 0;
4811
4812 if (_audioCodingModule.PlayoutTimestamp(&playout_timestamp) == -1) {
4813 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4814 "Channel::UpdatePlayoutTimestamp() failed to read playout"
4815 " timestamp from the ACM");
4816 _engineStatisticsPtr->SetLastError(
4817 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4818 "UpdatePlayoutTimestamp() failed to retrieve timestamp");
4819 return;
4820 }
4821
4822 uint16_t delay_ms = 0;
4823 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
4824 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4825 "Channel::UpdatePlayoutTimestamp() failed to read playout"
4826 " delay from the ADM");
4827 _engineStatisticsPtr->SetLastError(
4828 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4829 "UpdatePlayoutTimestamp() failed to retrieve playout delay");
4830 return;
4831 }
4832
4833 int32_t playout_frequency = _audioCodingModule.PlayoutFrequency();
 4834  CodecInst current_receive_codec;
 4835  if (_audioCodingModule.ReceiveCodec(&current_receive_codec) == 0) {
 4836    if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
 4837      playout_frequency = 8000;
 4838    } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
4839 playout_frequency = 48000;
niklase@google.com470e71d2011-07-07 08:21:25 +00004840 }
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004841 }
4842
4843 // Remove the playout delay.
4844 playout_timestamp -= (delay_ms * (playout_frequency / 1000));
4845
4846 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4847 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
4848 playout_timestamp);
4849
4850 if (rtcp) {
4851 playout_timestamp_rtcp_ = playout_timestamp;
4852 } else {
4853 playout_timestamp_rtp_ = playout_timestamp;
4854 }
4855 playout_delay_ms_ = delay_ms;
4856}
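// Example of the delay removal above, with illustrative values: for a G722
// stream (RTP clock rate forced to 8000 Hz) and a playout delay of 60 ms
// reported by the ADM, the timestamp is moved back by 60 * (8000 / 1000) =
// 480 RTP ticks, so the stored playout timestamp roughly corresponds to the
// sample currently being played out rather than the last decoded one.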
4857
4858int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
4859 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4860 "Channel::GetPlayoutTimestamp()");
4861 if (playout_timestamp_rtp_ == 0) {
4862 _engineStatisticsPtr->SetLastError(
4863 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4864 "GetPlayoutTimestamp() failed to retrieve timestamp");
4865 return -1;
4866 }
4867 timestamp = playout_timestamp_rtp_;
4868 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4869 VoEId(_instanceId,_channelId),
4870 "GetPlayoutTimestamp() => timestamp=%u", timestamp);
4871 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004872}
4873
4874int
4875Channel::SetInitTimestamp(unsigned int timestamp)
4876{
4877 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4878 "Channel::SetInitTimestamp()");
4879 if (_sending)
4880 {
4881 _engineStatisticsPtr->SetLastError(
4882 VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
4883 return -1;
4884 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004885 if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004886 {
4887 _engineStatisticsPtr->SetLastError(
4888 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4889 "SetInitTimestamp() failed to set timestamp");
4890 return -1;
4891 }
4892 return 0;
4893}
4894
4895int
4896Channel::SetInitSequenceNumber(short sequenceNumber)
4897{
4898 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4899 "Channel::SetInitSequenceNumber()");
4900 if (_sending)
4901 {
4902 _engineStatisticsPtr->SetLastError(
4903 VE_SENDING, kTraceError,
4904 "SetInitSequenceNumber() already sending");
4905 return -1;
4906 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004907 if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004908 {
4909 _engineStatisticsPtr->SetLastError(
4910 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4911 "SetInitSequenceNumber() failed to set sequence number");
4912 return -1;
4913 }
4914 return 0;
4915}
4916
4917int
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004918Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const
niklase@google.com470e71d2011-07-07 08:21:25 +00004919{
4920 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4921 "Channel::GetRtpRtcp()");
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004922 *rtpRtcpModule = _rtpRtcpModule.get();
4923 *rtp_receiver = rtp_receiver_.get();
niklase@google.com470e71d2011-07-07 08:21:25 +00004924 return 0;
4925}
4926
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004927// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
4928// a shared helper.
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004929int32_t
pbos@webrtc.org92135212013-05-14 08:31:39 +00004930Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004931{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004932 scoped_array<int16_t> fileBuffer(new int16_t[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004933 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004934
4935 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004936 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004937
4938 if (_inputFilePlayerPtr == NULL)
4939 {
4940 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4941 VoEId(_instanceId, _channelId),
 4942 "Channel::MixOrReplaceAudioWithFile() file player"
 4943 " doesn't exist");
4944 return -1;
4945 }
4946
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004947 if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00004948 fileSamples,
4949 mixingFrequency) == -1)
4950 {
4951 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4952 VoEId(_instanceId, _channelId),
4953 "Channel::MixOrReplaceAudioWithFile() file mixing "
4954 "failed");
4955 return -1;
4956 }
4957 if (fileSamples == 0)
4958 {
4959 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4960 VoEId(_instanceId, _channelId),
4961 "Channel::MixOrReplaceAudioWithFile() file is ended");
4962 return 0;
4963 }
4964 }
4965
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004966 assert(_audioFrame.samples_per_channel_ == fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004967
4968 if (_mixFileWithMicrophone)
4969 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004970 // Currently file stream is always mono.
4971 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004972 Utility::MixWithSat(_audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004973 _audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004974 fileBuffer.get(),
4975 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004976 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004977 }
4978 else
4979 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004980 // Replace ACM audio with file.
4981 // Currently file stream is always mono.
4982 // TODO(xians): Change the code when FilePlayer supports real stereo.
niklase@google.com470e71d2011-07-07 08:21:25 +00004983 _audioFrame.UpdateFrame(_channelId,
4984 -1,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004985 fileBuffer.get(),
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004986 fileSamples,
niklase@google.com470e71d2011-07-07 08:21:25 +00004987 mixingFrequency,
4988 AudioFrame::kNormalSpeech,
4989 AudioFrame::kVadUnknown,
4990 1);
4991
4992 }
4993 return 0;
4994}
4995
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004996int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00004997Channel::MixAudioWithFile(AudioFrame& audioFrame,
pbos@webrtc.org92135212013-05-14 08:31:39 +00004998 int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004999{
5000 assert(mixingFrequency <= 32000);
5001
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005002 scoped_array<int16_t> fileBuffer(new int16_t[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005003 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005004
5005 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00005006 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00005007
5008 if (_outputFilePlayerPtr == NULL)
5009 {
5010 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5011 VoEId(_instanceId, _channelId),
 5012 "Channel::MixAudioWithFile() file player doesn't exist");
5013 return -1;
5014 }
5015
5016 // We should get the frequency we ask for.
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005017 if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00005018 fileSamples,
5019 mixingFrequency) == -1)
5020 {
5021 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5022 VoEId(_instanceId, _channelId),
5023 "Channel::MixAudioWithFile() file mixing failed");
5024 return -1;
5025 }
5026 }
5027
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005028 if (audioFrame.samples_per_channel_ == fileSamples)
niklase@google.com470e71d2011-07-07 08:21:25 +00005029 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005030 // Currently file stream is always mono.
5031 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005032 Utility::MixWithSat(audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005033 audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005034 fileBuffer.get(),
5035 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005036 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005037 }
5038 else
5039 {
5040 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005041 "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
niklase@google.com470e71d2011-07-07 08:21:25 +00005042 "fileSamples(%d)",
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005043 audioFrame.samples_per_channel_, fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005044 return -1;
5045 }
5046
5047 return 0;
5048}
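// The MixWithSat() calls above add the file audio onto the live audio with
// saturation. A sketch of that operation for one mono sample, where |live|
// and |file| are illustrative int16_t values (not the actual Utility
// implementation):
//
//   int32_t sum = static_cast<int32_t>(live) + static_cast<int32_t>(file);
//   if (sum > 32767) sum = 32767;    // clamp to the int16_t range
//   if (sum < -32768) sum = -32768;
//   live = static_cast<int16_t>(sum);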
5049
5050int
5051Channel::InsertInbandDtmfTone()
5052{
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005053 // Check if we should start a new tone.
niklase@google.com470e71d2011-07-07 08:21:25 +00005054 if (_inbandDtmfQueue.PendingDtmf() &&
5055 !_inbandDtmfGenerator.IsAddingTone() &&
5056 _inbandDtmfGenerator.DelaySinceLastTone() >
5057 kMinTelephoneEventSeparationMs)
5058 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005059 int8_t eventCode(0);
5060 uint16_t lengthMs(0);
5061 uint8_t attenuationDb(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005062
5063 eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
5064 _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
5065 if (_playInbandDtmfEvent)
5066 {
5067 // Add tone to output mixer using a reduced length to minimize
5068 // risk of echo.
5069 _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
5070 attenuationDb);
5071 }
5072 }
5073
5074 if (_inbandDtmfGenerator.IsAddingTone())
5075 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005076 uint16_t frequency(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005077 _inbandDtmfGenerator.GetSampleRate(frequency);
5078
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005079 if (frequency != _audioFrame.sample_rate_hz_)
niklase@google.com470e71d2011-07-07 08:21:25 +00005080 {
5081 // Update sample rate of Dtmf tone since the mixing frequency
5082 // has changed.
5083 _inbandDtmfGenerator.SetSampleRate(
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005084 (uint16_t) (_audioFrame.sample_rate_hz_));
niklase@google.com470e71d2011-07-07 08:21:25 +00005085 // Reset the tone to be added taking the new sample rate into
5086 // account.
5087 _inbandDtmfGenerator.ResetTone();
5088 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005089
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005090 int16_t toneBuffer[320];
5091 uint16_t toneSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005092 // Get 10ms tone segment and set time since last tone to zero
5093 if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
5094 {
5095 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5096 VoEId(_instanceId, _channelId),
 5097 "Channel::InsertInbandDtmfTone() inserting Dtmf failed");
5098 return -1;
5099 }
5100
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005101 // Replace mixed audio with DTMF tone.
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005102 for (int sample = 0;
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005103 sample < _audioFrame.samples_per_channel_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005104 sample++)
5105 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005106 for (int channel = 0;
5107 channel < _audioFrame.num_channels_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005108 channel++)
5109 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005110 const int index = sample * _audioFrame.num_channels_ + channel;
5111 _audioFrame.data_[index] = toneBuffer[sample];
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005112 }
5113 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005114
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005115 assert(_audioFrame.samples_per_channel_ == toneSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005116 } else
5117 {
5118 // Add 10ms to "delay-since-last-tone" counter
5119 _inbandDtmfGenerator.UpdateDelaySinceLastTone();
5120 }
5121 return 0;
5122}
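// The nested loop above writes the mono tone to every interleaved channel.
// For a stereo frame (num_channels_ == 2), sample s is written to
// data_[2 * s] (left) and data_[2 * s + 1] (right), so the DTMF tone fully
// replaces the mixed audio on both channels.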
5123
niklase@google.com470e71d2011-07-07 08:21:25 +00005124void
5125Channel::ResetDeadOrAliveCounters()
5126{
5127 _countDeadDetections = 0;
5128 _countAliveDetections = 0;
5129}
5130
5131void
5132Channel::UpdateDeadOrAliveCounters(bool alive)
5133{
5134 if (alive)
5135 _countAliveDetections++;
5136 else
5137 _countDeadDetections++;
5138}
5139
5140int
5141Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
5142{
niklase@google.com470e71d2011-07-07 08:21:25 +00005143 return 0;
5144}
5145
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005146int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00005147Channel::SendPacketRaw(const void *data, int len, bool RTCP)
5148{
5149 if (_transportPtr == NULL)
5150 {
5151 return -1;
5152 }
5153 if (!RTCP)
5154 {
5155 return _transportPtr->SendPacket(_channelId, data, len);
5156 }
5157 else
5158 {
5159 return _transportPtr->SendRTCPPacket(_channelId, data, len);
5160 }
5161}
5162
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005163// Called for incoming RTP packets after successful RTP header parsing.
5164void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
5165 uint16_t sequence_number) {
5166 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
5167 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
5168 rtp_timestamp, sequence_number);
niklase@google.com470e71d2011-07-07 08:21:25 +00005169
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005170 // Get frequency of last received payload
5171 int rtp_receive_frequency = _audioCodingModule.ReceiveFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +00005172
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005173 CodecInst current_receive_codec;
5174 if (_audioCodingModule.ReceiveCodec(&current_receive_codec) != 0) {
5175 return;
5176 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005177
turaj@webrtc.orge46c8d32013-05-22 20:39:43 +00005178 // Update the least required delay.
5179 least_required_delay_ms_ = _audioCodingModule.LeastRequiredDelayMs();
5180
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005181 if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
5182 // Even though the actual sampling rate for G.722 audio is
5183 // 16,000 Hz, the RTP clock rate for the G722 payload format is
5184 // 8,000 Hz because that value was erroneously assigned in
5185 // RFC 1890 and must remain unchanged for backward compatibility.
5186 rtp_receive_frequency = 8000;
5187 } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
5188 // We are resampling Opus internally to 32,000 Hz until all our
5189 // DSP routines can operate at 48,000 Hz, but the RTP clock
5190 // rate for the Opus payload format is standardized to 48,000 Hz,
5191 // because that is the maximum supported decoding sampling rate.
5192 rtp_receive_frequency = 48000;
5193 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005194
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005195 // playout_timestamp_rtp_ is updated in UpdatePlayoutTimestamp() for every
 5196 // incoming packet.
5197 uint32_t timestamp_diff_ms = (rtp_timestamp - playout_timestamp_rtp_) /
5198 (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00005199
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005200 uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) /
5201 (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00005202
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005203 _previousTimestamp = rtp_timestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00005204
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005205 if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
5206 timestamp_diff_ms = 0;
5207 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005208
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005209 if (timestamp_diff_ms == 0) return;
niklase@google.com470e71d2011-07-07 08:21:25 +00005210
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005211 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
5212 _recPacketDelayMs = packet_delay_ms;
5213 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005214
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005215 if (_average_jitter_buffer_delay_us == 0) {
5216 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
5217 return;
5218 }
5219
 5220 // Filter the average delay with an exponential filter (alpha = 7/8). The
 5221 // value is kept in microseconds, i.e. 1000 * the millisecond delay, to
 5222 // reduce the rounding error; GetDelayEstimate() converts it back to
 5223 // milliseconds.
5224 _average_jitter_buffer_delay_us = (_average_jitter_buffer_delay_us * 7 +
5225 1000 * timestamp_diff_ms + 500) / 8;
niklase@google.com470e71d2011-07-07 08:21:25 +00005226}
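// Numeric example of the filter above, with illustrative values: a stored
// average of 40000 us and a new timestamp_diff_ms of 50 give
// (40000 * 7 + 1000 * 50 + 500) / 8 = 330500 / 8 = 41312 us, i.e. the
// estimate moves roughly 1/8 of the way towards the new observation.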
5227
5228void
5229Channel::RegisterReceiveCodecsToRTPModule()
5230{
5231 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
5232 "Channel::RegisterReceiveCodecsToRTPModule()");
5233
5234
5235 CodecInst codec;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005236 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00005237
5238 for (int idx = 0; idx < nSupportedCodecs; idx++)
5239 {
5240 // Open up the RTP/RTCP receiver for all supported codecs
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005241 if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
wu@webrtc.org822fbd82013-08-15 23:38:54 +00005242 (rtp_receiver_->RegisterReceivePayload(
5243 codec.plname,
5244 codec.pltype,
5245 codec.plfreq,
5246 codec.channels,
5247 (codec.rate < 0) ? 0 : codec.rate) == -1))
niklase@google.com470e71d2011-07-07 08:21:25 +00005248 {
5249 WEBRTC_TRACE(
5250 kTraceWarning,
5251 kTraceVoice,
5252 VoEId(_instanceId, _channelId),
5253 "Channel::RegisterReceiveCodecsToRTPModule() unable"
5254 " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
5255 codec.plname, codec.pltype, codec.plfreq,
5256 codec.channels, codec.rate);
5257 }
5258 else
5259 {
5260 WEBRTC_TRACE(
5261 kTraceInfo,
5262 kTraceVoice,
5263 VoEId(_instanceId, _channelId),
5264 "Channel::RegisterReceiveCodecsToRTPModule() %s "
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00005265 "(%d/%d/%d/%d) has been added to the RTP/RTCP "
niklase@google.com470e71d2011-07-07 08:21:25 +00005266 "receiver",
5267 codec.plname, codec.pltype, codec.plfreq,
5268 codec.channels, codec.rate);
5269 }
5270 }
5271}
5272
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005273int Channel::ApmProcessRx(AudioFrame& frame) {
5274 AudioProcessing* audioproc = _rxAudioProcessingModulePtr;
5275 // Register the (possibly new) frame parameters.
5276 if (audioproc->set_sample_rate_hz(frame.sample_rate_hz_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005277 LOG_FERR1(LS_WARNING, set_sample_rate_hz, frame.sample_rate_hz_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005278 }
5279 if (audioproc->set_num_channels(frame.num_channels_,
5280 frame.num_channels_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005281 LOG_FERR1(LS_WARNING, set_num_channels, frame.num_channels_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005282 }
5283 if (audioproc->ProcessStream(&frame) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005284 LOG_FERR0(LS_WARNING, ProcessStream);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005285 }
5286 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00005287}
5288
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005289int Channel::SetSecondarySendCodec(const CodecInst& codec,
5290 int red_payload_type) {
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00005291 // Sanity check for payload type.
5292 if (red_payload_type < 0 || red_payload_type > 127) {
5293 _engineStatisticsPtr->SetLastError(
5294 VE_PLTYPE_ERROR, kTraceError,
 5295 "SetSecondarySendCodec() invalid RED payload type");
5296 return -1;
5297 }
5298
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005299 if (SetRedPayloadType(red_payload_type) < 0) {
5300 _engineStatisticsPtr->SetLastError(
5301 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5302 "SetSecondarySendCodec() Failed to register RED ACM");
5303 return -1;
5304 }
5305 if (_audioCodingModule.RegisterSecondarySendCodec(codec) < 0) {
5306 _engineStatisticsPtr->SetLastError(
5307 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5308 "SetSecondarySendCodec() Failed to register secondary send codec in "
5309 "ACM");
5310 return -1;
5311 }
5312
5313 return 0;
5314}
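// Usage sketch for dual-stream sending with the method above. The codec
// settings and the RED payload type (96) below are illustrative assumptions,
// not values taken from this file.
//
//   CodecInst secondary = {0};
//   secondary.pltype = 0;        // PCMU
//   strncpy(secondary.plname, "PCMU", sizeof(secondary.plname) - 1);
//   secondary.plfreq = 8000;
//   secondary.pacsize = 160;     // 20 ms at 8 kHz
//   secondary.channels = 1;
//   secondary.rate = 64000;
//   if (channel->SetSecondarySendCodec(secondary, 96) != 0) {
//     // RED or the secondary codec could not be registered in the ACM.
//   }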
5315
5316void Channel::RemoveSecondarySendCodec() {
5317 _audioCodingModule.UnregisterSecondarySendCodec();
5318}
5319
5320int Channel::GetSecondarySendCodec(CodecInst* codec) {
5321 if (_audioCodingModule.SecondarySendCodec(codec) < 0) {
5322 _engineStatisticsPtr->SetLastError(
5323 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5324 "GetSecondarySendCodec() Failed to get secondary sent codec from ACM");
5325 return -1;
5326 }
5327 return 0;
5328}
5329
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00005330// Assuming this method is called with a valid payload type.
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005331int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005332 CodecInst codec;
5333 bool found_red = false;
5334
5335 // Get default RED settings from the ACM database
5336 const int num_codecs = AudioCodingModule::NumberOfCodecs();
5337 for (int idx = 0; idx < num_codecs; idx++) {
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005338 _audioCodingModule.Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005339 if (!STR_CASE_CMP(codec.plname, "RED")) {
5340 found_red = true;
5341 break;
5342 }
5343 }
5344
5345 if (!found_red) {
5346 _engineStatisticsPtr->SetLastError(
5347 VE_CODEC_ERROR, kTraceError,
5348 "SetRedPayloadType() RED is not supported");
5349 return -1;
5350 }
5351
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00005352 codec.pltype = red_payload_type;
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005353 if (_audioCodingModule.RegisterSendCodec(codec) < 0) {
5354 _engineStatisticsPtr->SetLastError(
5355 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5356 "SetRedPayloadType() RED registration in ACM module failed");
5357 return -1;
5358 }
5359
5360 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
5361 _engineStatisticsPtr->SetLastError(
5362 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
5363 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
5364 return -1;
5365 }
5366 return 0;
5367}
5368
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00005369} // namespace voe
5370} // namespace webrtc