/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/channel.h"

#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/utility/interface/audio_frame_operations.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/utility.h"

#if defined(_WIN32)
#include <Qos.h>
#endif

namespace webrtc {
namespace voe {

int32_t
Channel::SendData(FrameType frameType,
                  uint8_t payloadType,
                  uint32_t timeStamp,
                  const uint8_t* payloadData,
                  uint16_t payloadSize,
                  const RTPFragmentationHeader* fragmentation)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
                 " payloadSize=%u, fragmentation=0x%x)",
                 frameType, payloadType, timeStamp, payloadSize, fragmentation);

    if (_includeAudioLevelIndication)
    {
        assert(_rtpAudioProc.get() != NULL);
        // Store current audio level in the RTP/RTCP module.
        // The level will be used in combination with voice-activity state
        // (frameType) to add an RTP header extension
        _rtpRtcpModule->SetAudioLevel(_rtpAudioProc->level_estimator()->RMS());
    }

    // Push data from ACM to RTP/RTCP-module to deliver audio frame for
    // packetization.
    // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
    if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
                                         payloadType,
                                         timeStamp,
                                         // Leaving the time when this frame was
                                         // received from the capture device as
                                         // undefined for voice for now.
                                         -1,
                                         payloadData,
                                         payloadSize,
                                         fragmentation) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
            "Channel::SendData() failed to send data to RTP/RTCP module");
        return -1;
    }

    _lastLocalTimeStamp = timeStamp;
    _lastPayloadType = payloadType;

    return 0;
}

int32_t
Channel::InFrameType(int16_t frameType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::InFrameType(frameType=%d)", frameType);

    CriticalSectionScoped cs(&_callbackCritSect);
    // 1 indicates speech
    _sendFrameType = (frameType == 1) ? 1 : 0;
    return 0;
}

int32_t
Channel::OnRxVadDetected(int vadDecision)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);

    CriticalSectionScoped cs(&_callbackCritSect);
    if (_rxVadObserverPtr)
    {
        _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
    }

    return 0;
}

int
Channel::SendPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendPacket(channel=%d, len=%d)", channel, len);

    if (_transportPtr == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() failed to send RTP packet due to"
                     " invalid transport object");
        return -1;
    }

    // Insert an extra RTP packet if the user has called the
    // InsertExtraRTPPacket API.
    if (_insertExtraRTPPacket)
    {
        uint8_t* rtpHdr = (uint8_t*)data;
        uint8_t M_PT(0);
        if (_extraMarkerBit)
        {
            M_PT = 0x80;            // set the M-bit
        }
        M_PT += _extraPayloadType;  // set the payload type
        *(++rtpHdr) = M_PT;         // modify the M|PT-byte within the RTP header
        _insertExtraRTPPacket = false;  // insert one packet only
    }

    uint8_t* bufferToSendPtr = (uint8_t*)data;
    int32_t bufferLength = len;

    // Dump the RTP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() RTP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTPBufferPtr =
                    new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
                memset(_encryptionRTPBufferPtr, 0,
                       kVoiceEngineMaxIpPacketSizeBytes);
            }

            // Perform encryption (SRTP or external)
            int32_t encryptedBufferLength = 0;
            _encryptionPtr->encrypt(_channelId,
                                    bufferToSendPtr,
                                    _encryptionRTPBufferPtr,
                                    bufferLength,
                                    (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED,
                    kTraceError, "Channel::SendPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        int n = _transportPtr->SendPacket(channel,
                                          bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }
}

int
Channel::SendRTCPPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() failed to send RTCP packet"
                         " due to invalid transport object");
            return -1;
        }
    }

    uint8_t* bufferToSendPtr = (uint8_t*)data;
    int32_t bufferLength = len;

    // Dump the RTCP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendRTCPPacket() RTCP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTCPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTCPBufferPtr =
                    new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
            }

            // Perform encryption (SRTP or external).
            int32_t encryptedBufferLength = 0;
            _encryptionPtr->encrypt_rtcp(_channelId,
                                         bufferToSendPtr,
                                         _encryptionRTCPBufferPtr,
                                         bufferLength,
                                         (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED, kTraceError,
                    "Channel::SendRTCPPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTCPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            return -1;
        }
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }

    return len;
}

void
Channel::OnPlayTelephoneEvent(int32_t id,
                              uint8_t event,
                              uint16_t lengthMs,
                              uint8_t volume)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
                 " volume=%u)", id, event, lengthMs, volume);

    if (!_playOutbandDtmfEvent || (event > 15))
    {
        // Ignore callback since feedback is disabled or event is not a
        // Dtmf tone event.
        return;
    }

    assert(_outputMixerPtr != NULL);

    // Start playing out the Dtmf tone (if playout is enabled).
    // Reduce the tone length by 80 ms to reduce the risk of echo.
    _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
}

void
Channel::OnIncomingSSRCChanged(int32_t id,
                               uint32_t SSRC)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
                 id, SSRC);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Reset RTP-module counters since a new incoming RTP stream is detected
    rtp_receive_statistics_->ResetDataCounters();
    rtp_receive_statistics_->ResetStatistics();

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            // Send new SSRC to registered observer using callback
            _rtpObserverPtr->OnIncomingSSRCChanged(channel, SSRC);
        }
    }
}

void Channel::OnIncomingCSRCChanged(int32_t id,
                                    uint32_t CSRC,
                                    bool added)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
                 id, CSRC, added);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            _rtpObserverPtr->OnIncomingCSRCChanged(channel, CSRC, added);
        }
    }
}

void Channel::OnResetStatistics() {
  rtp_receive_statistics_->ResetStatistics();
}

void
Channel::OnApplicationDataReceived(int32_t id,
                                   uint8_t subType,
                                   uint32_t name,
                                   uint16_t length,
                                   const uint8_t* data)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
                 " name=%u, length=%u)",
                 id, subType, name, length);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtcpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtcpObserverPtr)
        {
            _rtcpObserverPtr->OnApplicationDataReceived(channel,
                                                        subType,
                                                        name,
                                                        data,
                                                        length);
        }
    }
}

int32_t
Channel::OnInitializeDecoder(
    int32_t id,
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    uint8_t channels,
    uint32_t rate)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
                 "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
                 id, payloadType, payloadName, frequency, channels, rate);

    assert(VoEChannelId(id) == _channelId);

    CodecInst receiveCodec = {0};
    CodecInst dummyCodec = {0};

    receiveCodec.pltype = payloadType;
    receiveCodec.plfreq = frequency;
    receiveCodec.channels = channels;
    receiveCodec.rate = rate;
    strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

    _audioCodingModule.Codec(payloadName, &dummyCodec, frequency, channels);
    receiveCodec.pacsize = dummyCodec.pacsize;

    // Register the new codec to the ACM
    if (_audioCodingModule.RegisterReceiveCodec(receiveCodec) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "Channel::OnInitializeDecoder() invalid codec ("
                     "pt=%d, name=%s) received - 1", payloadType, payloadName);
        _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
        return -1;
    }

    return 0;
}

void
Channel::OnPacketTimeout(int32_t id)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPacketTimeout(id=%d)", id);

    CriticalSectionScoped cs(_callbackCritSectPtr);
    if (_voiceEngineObserverPtr)
    {
        if (_receiving || _externalTransport)
        {
            int32_t channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Ensure that next OnReceivedPacket() callback will trigger
            // a VE_PACKET_RECEIPT_RESTARTED callback.
            _rtpPacketTimedOut = true;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnPacketTimeout() => "
                         "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
            _voiceEngineObserverPtr->CallbackOnError(channel,
                                                     VE_RECEIVE_PACKET_TIMEOUT);
        }
    }
}

void
Channel::OnReceivedPacket(int32_t id,
                          RtpRtcpPacketType packetType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPacket(id=%d, packetType=%d)",
                 id, packetType);

    assert(VoEChannelId(id) == _channelId);

    // Notify only for the case when we have restarted an RTP session.
    if (_rtpPacketTimedOut && (kPacketRtp == packetType))
    {
        CriticalSectionScoped cs(_callbackCritSectPtr);
        if (_voiceEngineObserverPtr)
        {
            int32_t channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Reset timeout mechanism
            _rtpPacketTimedOut = false;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnReceivedPacket() =>"
                         " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
            _voiceEngineObserverPtr->CallbackOnError(
                channel,
                VE_PACKET_RECEIPT_RESTARTED);
        }
    }
}

void
Channel::OnPeriodicDeadOrAlive(int32_t id,
                               RTPAliveType alive)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (!_connectionObserver)
            return;
    }

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Use Alive as default to limit risk of false Dead detections
    bool isAlive(true);

    // Always mark the connection as Dead when the module reports kRtpDead
    if (kRtpDead == alive)
    {
        isAlive = false;
    }

    // It is possible that the connection is alive even if no RTP packet has
    // been received for a long time since the other side might use VAD/DTX
    // and a low SID-packet update rate.
    if ((kRtpNoRtp == alive) && _playing)
    {
        // Detect Alive for all NetEQ states except for the case when we are
        // in PLC_CNG state.
        // PLC_CNG <=> background noise only due to long expand or error.
        // Note that the case where the other side stops sending during CNG
        // state will be detected as Alive. Dead is not set until RTCP
        // packets have been missing for at least twelve seconds (handled
        // internally by the RTP/RTCP module).
        isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
    }

    UpdateDeadOrAliveCounters(isAlive);

    // Send callback to the registered observer
    if (_connectionObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_connectionObserverPtr)
        {
            _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
        }
    }
}

int32_t
Channel::OnReceivedPayloadData(const uint8_t* payloadData,
                               uint16_t payloadSize,
                               const WebRtcRTPHeader* rtpHeader)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPayloadData(payloadSize=%d,"
                 " payloadType=%u, audioChannel=%u)",
                 payloadSize,
                 rtpHeader->header.payloadType,
                 rtpHeader->type.Audio.channel);

    _lastRemoteTimeStamp = rtpHeader->header.timestamp;

    if (!_playing)
    {
        // Avoid inserting into NetEQ when we are not playing. Count the
        // packet as discarded.
        WEBRTC_TRACE(kTraceStream, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "received packet is discarded since playing is not"
                     " activated");
        _numberOfDiscardedPackets++;
        return 0;
    }

    // Push the incoming payload (parsed and ready for decoding) into the ACM
    if (_audioCodingModule.IncomingPacket(payloadData,
                                          payloadSize,
                                          *rtpHeader) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
            "Channel::OnReceivedPayloadData() unable to push data to the ACM");
        return -1;
    }

    // Update the packet delay.
    UpdatePacketDelay(rtpHeader->header.timestamp,
                      rtpHeader->header.sequenceNumber);
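
    // Query the current round-trip time and let the ACM build the list of
    // sequence numbers, if any, that should be requested again via NACK.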
    uint16_t round_trip_time = 0;
    _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time,
                        NULL, NULL, NULL);

    std::vector<uint16_t> nack_list = _audioCodingModule.GetNackList(
        round_trip_time);
    if (!nack_list.empty()) {
        // Can't use nack_list.data() since it's not supported by all
        // compilers.
        ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
    }
    return 0;
}

int32_t Channel::GetAudioFrame(int32_t id, AudioFrame& audioFrame)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetAudioFrame(id=%d)", id);

    // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
    if (_audioCodingModule.PlayoutData10Ms(audioFrame.sample_rate_hz_,
                                           &audioFrame) == -1)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
        // In all likelihood, the audio in this frame is garbage. We return an
        // error so that the audio mixer module doesn't add it to the mix. As
        // a result, it won't be played out and the actions skipped here are
        // irrelevant.
        return -1;
    }
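
    // The decoded frame now passes through the channel's playout chain:
    // receive-side VAD, far-end audio processing, output volume scaling,
    // panning, optional file mixing, on-hold muting, external media
    // processing, file recording and output level measurement.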
    if (_RxVadDetection)
    {
        UpdateRxVadDetection(audioFrame);
    }

    // Convert module ID to internal VoE channel ID
    audioFrame.id_ = VoEChannelId(audioFrame.id_);
    // Store speech type for dead-or-alive detection
    _outputSpeechType = audioFrame.speech_type_;

    // Perform far-end AudioProcessing module processing on the received signal
    if (_rxApmIsEnabled)
    {
        ApmProcessRx(audioFrame);
    }

    // Output volume scaling
    if (_outputGain < 0.99f || _outputGain > 1.01f)
    {
        AudioFrameOperations::ScaleWithSat(_outputGain, audioFrame);
    }

    // Scale left and/or right channel(s) if stereo and master balance is
    // active

    if (_panLeft != 1.0f || _panRight != 1.0f)
    {
        if (audioFrame.num_channels_ == 1)
        {
            // Emulate stereo mode since panning is active.
            // The mono signal is copied to both left and right channels here.
            AudioFrameOperations::MonoToStereo(&audioFrame);
        }
        // For true stereo mode (when we are receiving a stereo signal), no
        // action is needed.

        // Do the panning operation (the audio frame contains stereo at this
        // stage)
        AudioFrameOperations::Scale(_panLeft, _panRight, audioFrame);
    }

    // Mix decoded PCM output with file if file mixing is enabled
    if (_outputFilePlaying)
    {
        MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
    }

    // Place channel in on-hold state (~muted) if on-hold is activated
    if (_outputIsOnHold)
    {
        AudioFrameOperations::Mute(audioFrame);
    }

    // External media
    if (_outputExternalMedia)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        const bool isStereo = (audioFrame.num_channels_ == 2);
        if (_outputExternalMediaCallbackPtr)
        {
            _outputExternalMediaCallbackPtr->Process(
                _channelId,
                kPlaybackPerChannel,
                (int16_t*)audioFrame.data_,
                audioFrame.samples_per_channel_,
                audioFrame.sample_rate_hz_,
                isStereo);
        }
    }

    // Record playout if enabled
    {
        CriticalSectionScoped cs(&_fileCritSect);

        if (_outputFileRecording && _outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
        }
    }

    // Measure audio level (0-9)
    _outputAudioLevel.ComputeLevel(audioFrame);

    return 0;
}

int32_t
Channel::NeededFrequency(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::NeededFrequency(id=%d)", id);

    int highestNeeded = 0;

    // Determine highest needed receive frequency
    int32_t receiveFrequency = _audioCodingModule.ReceiveFrequency();

    // Return the bigger of playout and receive frequency in the ACM.
    if (_audioCodingModule.PlayoutFrequency() > receiveFrequency)
    {
        highestNeeded = _audioCodingModule.PlayoutFrequency();
    }
    else
    {
        highestNeeded = receiveFrequency;
    }

    // Special case, if we're playing a file on the playout side
    // we take that frequency into consideration as well
    // This is not needed on sending side, since the codec will
    // limit the spectrum anyway.
    if (_outputFilePlaying)
    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_outputFilePlayerPtr && _outputFilePlaying)
        {
            if (_outputFilePlayerPtr->Frequency() > highestNeeded)
            {
                highestNeeded = _outputFilePlayerPtr->Frequency();
            }
        }
    }

    return (highestNeeded);
}

int32_t
Channel::CreateChannel(Channel*& channel,
                       int32_t channelId,
                       uint32_t instanceId)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
                 "Channel::CreateChannel(channelId=%d, instanceId=%d)",
                 channelId, instanceId);

    channel = new Channel(channelId, instanceId);
    if (channel == NULL)
    {
        WEBRTC_TRACE(kTraceMemory, kTraceVoice,
                     VoEId(instanceId,channelId),
                     "Channel::CreateChannel() unable to allocate memory for"
                     " channel");
        return -1;
    }
    return 0;
}

void
Channel::PlayNotification(int32_t id, uint32_t durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet
}

void
Channel::RecordNotification(int32_t id, uint32_t durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet
}

void
Channel::PlayFileEnded(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayFileEnded(id=%d)", id);

    if (id == _inputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _inputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => input file player module is"
                     " shutdown");
    }
    else if (id == _outputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _outputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => output file player module is"
                     " shutdown");
    }
}

void
Channel::RecordFileEnded(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded(id=%d)", id);

    assert(id == _outputFileRecorderId);

    CriticalSectionScoped cs(&_fileCritSect);

    _outputFileRecording = false;
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                 VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded() => output file recorder module is"
                 " shutdown");
}

Channel::Channel(int32_t channelId,
                 uint32_t instanceId) :
    _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _instanceId(instanceId),
    _channelId(channelId),
    rtp_header_parser_(RtpHeaderParser::Create()),
    rtp_payload_registry_(
        new RTPPayloadRegistry(channelId,
                               RTPPayloadStrategy::CreateStrategy(true))),
    rtp_receive_statistics_(ReceiveStatistics::Create(
        Clock::GetRealTimeClock())),
    rtp_receiver_(RtpReceiver::CreateAudioReceiver(
        VoEModuleId(instanceId, channelId), Clock::GetRealTimeClock(), this,
        this, this, rtp_payload_registry_.get())),
    telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
    _audioCodingModule(*AudioCodingModule::Create(
        VoEModuleId(instanceId, channelId))),
    _rtpDumpIn(*RtpDump::CreateRtpDump()),
    _rtpDumpOut(*RtpDump::CreateRtpDump()),
    _outputAudioLevel(),
    _externalTransport(false),
    _inputFilePlayerPtr(NULL),
    _outputFilePlayerPtr(NULL),
    _outputFileRecorderPtr(NULL),
    // Avoid conflicts with other channels by adding 1024 - 1026;
    // we will never use as many as 1024 channels.
    _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
    _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
    _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
    _inputFilePlaying(false),
    _outputFilePlaying(false),
    _outputFileRecording(false),
    _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
    _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
    _inputExternalMedia(false),
    _outputExternalMedia(false),
    _inputExternalMediaCallbackPtr(NULL),
    _outputExternalMediaCallbackPtr(NULL),
    _encryptionRTPBufferPtr(NULL),
    _decryptionRTPBufferPtr(NULL),
    _encryptionRTCPBufferPtr(NULL),
    _decryptionRTCPBufferPtr(NULL),
    _timeStamp(0),  // This is just an offset, the RTP module will add its own
                    // random offset.
    _sendTelephoneEventPayloadType(106),
    playout_timestamp_rtp_(0),
    playout_timestamp_rtcp_(0),
    _numberOfDiscardedPackets(0),
    _engineStatisticsPtr(NULL),
    _outputMixerPtr(NULL),
    _transmitMixerPtr(NULL),
    _moduleProcessThreadPtr(NULL),
    _audioDeviceModulePtr(NULL),
    _voiceEngineObserverPtr(NULL),
    _callbackCritSectPtr(NULL),
    _transportPtr(NULL),
    _encryptionPtr(NULL),
    _rtpAudioProc(NULL),
    _rxAudioProcessingModulePtr(NULL),
    _rxVadObserverPtr(NULL),
    _oldVadDecision(-1),
    _sendFrameType(0),
    _rtpObserverPtr(NULL),
    _rtcpObserverPtr(NULL),
    _outputIsOnHold(false),
    _externalPlayout(false),
    _externalMixing(false),
    _inputIsOnHold(false),
    _playing(false),
    _sending(false),
    _receiving(false),
    _mixFileWithMicrophone(false),
    _rtpObserver(false),
    _rtcpObserver(false),
    _mute(false),
    _panLeft(1.0f),
    _panRight(1.0f),
    _outputGain(1.0f),
    _encrypting(false),
    _decrypting(false),
    _playOutbandDtmfEvent(false),
    _playInbandDtmfEvent(false),
    _extraPayloadType(0),
    _insertExtraRTPPacket(false),
    _extraMarkerBit(false),
    _lastLocalTimeStamp(0),
    _lastRemoteTimeStamp(0),
    _lastPayloadType(0),
    _includeAudioLevelIndication(false),
    _rtpPacketTimedOut(false),
    _rtpPacketTimeOutIsEnabled(false),
    _rtpTimeOutSeconds(0),
    _connectionObserver(false),
    _connectionObserverPtr(NULL),
    _countAliveDetections(0),
    _countDeadDetections(0),
    _outputSpeechType(AudioFrame::kNormalSpeech),
    _average_jitter_buffer_delay_us(0),
    least_required_delay_ms_(0),
    _previousTimestamp(0),
    _recPacketDelayMs(20),
    _RxVadDetection(false),
    _rxApmIsEnabled(false),
    _rxAgcIsEnabled(false),
    _rxNsIsEnabled(false)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Channel() - ctor");
    _inbandDtmfQueue.ResetDtmf();
    _inbandDtmfGenerator.Init();
    _outputAudioLevel.Clear();
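
    // Configure the RTP/RTCP module for audio, with this channel acting as
    // its outgoing transport, RTCP feedback observer and audio-message
    // callback, and with the shared receive-statistics object attached.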
    RtpRtcp::Configuration configuration;
    configuration.id = VoEModuleId(instanceId, channelId);
    configuration.audio = true;
    configuration.outgoing_transport = this;
    configuration.rtcp_feedback = this;
    configuration.audio_messages = this;
    configuration.receive_statistics = rtp_receive_statistics_.get();

    _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));

    // Create far end AudioProcessing Module
    _rxAudioProcessingModulePtr = AudioProcessing::Create(
        VoEModuleId(instanceId, channelId));
}

Channel::~Channel()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::~Channel() - dtor");

    if (_outputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
    }
    if (_inputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kRecordingPerChannel);
    }
    StopSend();
    StopPlayout();

    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_inputFilePlayerPtr)
        {
            _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _inputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
            _inputFilePlayerPtr = NULL;
        }
        if (_outputFilePlayerPtr)
        {
            _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _outputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
            _outputFilePlayerPtr = NULL;
        }
        if (_outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
            _outputFileRecorderPtr->StopRecording();
            FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
            _outputFileRecorderPtr = NULL;
        }
    }

    // The order to safely shutdown modules in a channel is:
    // 1. De-register callbacks in modules
    // 2. De-register modules in process thread
    // 3. Destroy modules
    if (_audioCodingModule.RegisterTransportCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register transport callback"
                     " (Audio coding module)");
    }
    if (_audioCodingModule.RegisterVADCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register VAD callback"
                     " (Audio coding module)");
    }
    // De-register modules in process thread
    if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to deregister RTP/RTCP module");
    }

    // Destroy modules
    AudioCodingModule::Destroy(&_audioCodingModule);
    if (_rxAudioProcessingModulePtr != NULL)
    {
        AudioProcessing::Destroy(_rxAudioProcessingModulePtr); // far end APM
        _rxAudioProcessingModulePtr = NULL;
    }

    // End of modules shutdown

    // Delete other objects
    RtpDump::DestroyRtpDump(&_rtpDumpIn);
    RtpDump::DestroyRtpDump(&_rtpDumpOut);
    delete [] _encryptionRTPBufferPtr;
    delete [] _decryptionRTPBufferPtr;
    delete [] _encryptionRTCPBufferPtr;
    delete [] _decryptionRTCPBufferPtr;
    delete &_callbackCritSect;
    delete &_fileCritSect;
}

int32_t
Channel::Init()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Init()");

    // --- Initial sanity

    if ((_engineStatisticsPtr == NULL) ||
        (_moduleProcessThreadPtr == NULL))
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::Init() must call SetEngineInformation() first");
        return -1;
    }

    // --- Add modules to process thread (for periodic scheduling)

    const bool processThreadFail =
        ((_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0) ||
        false);
    if (processThreadFail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() modules not registered");
        return -1;
    }
    // --- ACM initialization

    if ((_audioCodingModule.InitializeReceiver() == -1) ||
#ifdef WEBRTC_CODEC_AVT
        // out-of-band Dtmf tones are played out by default
        (_audioCodingModule.SetDtmfPlayoutStatus(true) == -1) ||
#endif
        (_audioCodingModule.InitializeSender() == -1))
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "Channel::Init() unable to initialize the ACM - 1");
        return -1;
    }

    // --- RTP/RTCP module initialization

    // Ensure that RTCP is enabled by default for the created channel.
    // Note that the module will keep generating RTCP until it is explicitly
    // disabled by the user.
    // After StopListen (when no sockets exist), RTCP packets will no longer
    // be transmitted since the Transport object will then be invalid.
    telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
    // RTCP is enabled by default.
    if (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
            "Channel::Init() RTP/RTCP module not initialized");
        return -1;
    }

    // --- Register all permanent callbacks
    const bool fail =
        (_audioCodingModule.RegisterTransportCallback(this) == -1) ||
        (_audioCodingModule.RegisterVADCallback(this) == -1);

    if (fail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() callbacks not registered");
        return -1;
    }

    // --- Register all supported codecs to the receiving side of the
    //     RTP/RTCP module

    CodecInst codec;
    const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

    for (int idx = 0; idx < nSupportedCodecs; idx++)
    {
        // Open up the RTP/RTCP receiver for all supported codecs
        if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
            (rtp_receiver_->RegisterReceivePayload(
                codec.plname,
                codec.pltype,
                codec.plfreq,
                codec.channels,
                (codec.rate < 0) ? 0 : codec.rate) == -1))
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() unable to register %s (%d/%d/%d/%d) "
                         "to RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }
        else
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() %s (%d/%d/%d/%d) has been added to "
                         "the RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }

        // Ensure that PCMU is used as default codec on the sending side
        if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
        {
            SetSendCodec(codec);
        }

        // Register default PT for outband 'telephone-event'
        if (!STR_CASE_CMP(codec.plname, "telephone-event"))
        {
            if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register outband "
                             "'telephone-event' (%d/%d) correctly",
                             codec.pltype, codec.plfreq);
            }
        }

        if (!STR_CASE_CMP(codec.plname, "CN"))
        {
            if ((_audioCodingModule.RegisterSendCodec(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1) ||
                (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register CN (%d/%d) "
                             "correctly - 1",
                             codec.pltype, codec.plfreq);
            }
        }
#ifdef WEBRTC_CODEC_RED
        // Register RED to the receiving side of the ACM.
        // We will not receive an OnInitializeDecoder() callback for RED.
        if (!STR_CASE_CMP(codec.plname, "RED"))
        {
            if (_audioCodingModule.RegisterReceiveCodec(codec) == -1)
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register RED (%d/%d) "
                             "correctly",
                             codec.pltype, codec.plfreq);
            }
        }
#endif
    }

    // Initialize the far end AP module
    // Using 8 kHz as initial Fs, the same as in transmission. Might be
    // changed at the first receiving audio.
    if (_rxAudioProcessingModulePtr == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_NO_MEMORY, kTraceCritical,
            "Channel::Init() failed to create the far-end AudioProcessing"
            " module");
        return -1;
    }

    if (_rxAudioProcessingModulePtr->set_sample_rate_hz(8000))
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the sample rate to 8K for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->set_num_channels(1, 1) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_SOUNDCARD_ERROR, kTraceWarning,
            "Init() failed to set channels for the primary audio stream");
    }

    if (_rxAudioProcessingModulePtr->high_pass_filter()->Enable(
        WEBRTC_VOICE_ENGINE_RX_HP_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the high-pass filter for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(
        (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction level for far-end"
            " AP module");
    }
    if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(
        WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction state for far-end"
            " AP module");
    }

    if (_rxAudioProcessingModulePtr->gain_control()->set_mode(
        (GainControl::Mode)WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC mode for far-end AP module");
    }
    if (_rxAudioProcessingModulePtr->gain_control()->Enable(
        WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC state for far-end AP module");
    }

    return 0;
}

pbos@webrtc.org6141e132013-04-09 10:09:10 +00001329int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001330Channel::SetEngineInformation(Statistics& engineStatistics,
1331 OutputMixer& outputMixer,
1332 voe::TransmitMixer& transmitMixer,
1333 ProcessThread& moduleProcessThread,
1334 AudioDeviceModule& audioDeviceModule,
1335 VoiceEngineObserver* voiceEngineObserver,
1336 CriticalSectionWrapper* callbackCritSect)
1337{
1338 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1339 "Channel::SetEngineInformation()");
1340 _engineStatisticsPtr = &engineStatistics;
1341 _outputMixerPtr = &outputMixer;
1342 _transmitMixerPtr = &transmitMixer,
1343 _moduleProcessThreadPtr = &moduleProcessThread;
1344 _audioDeviceModulePtr = &audioDeviceModule;
1345 _voiceEngineObserverPtr = voiceEngineObserver;
1346 _callbackCritSectPtr = callbackCritSect;
1347 return 0;
1348}
1349
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001350int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001351Channel::UpdateLocalTimeStamp()
1352{
1353
andrew@webrtc.org63a50982012-05-02 23:56:37 +00001354 _timeStamp += _audioFrame.samples_per_channel_;
niklase@google.com470e71d2011-07-07 08:21:25 +00001355 return 0;
1356}
1357
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001358int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001359Channel::StartPlayout()
1360{
1361 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1362 "Channel::StartPlayout()");
1363 if (_playing)
1364 {
1365 return 0;
1366 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00001367
1368 if (!_externalMixing) {
1369 // Add participant as candidates for mixing.
1370 if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
1371 {
1372 _engineStatisticsPtr->SetLastError(
1373 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1374 "StartPlayout() failed to add participant to mixer");
1375 return -1;
1376 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001377 }
1378
1379 _playing = true;
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001380
1381 if (RegisterFilePlayingToMixer() != 0)
1382 return -1;
1383
niklase@google.com470e71d2011-07-07 08:21:25 +00001384 return 0;
1385}
1386
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001387int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001388Channel::StopPlayout()
1389{
1390 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1391 "Channel::StopPlayout()");
1392 if (!_playing)
1393 {
1394 return 0;
1395 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00001396
1397 if (!_externalMixing) {
1398        // Remove the participant as a candidate for mixing.
1399 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
1400 {
1401 _engineStatisticsPtr->SetLastError(
1402 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1403 "StopPlayout() failed to remove participant from mixer");
1404 return -1;
1405 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001406 }
1407
1408 _playing = false;
1409 _outputAudioLevel.Clear();
1410
1411 return 0;
1412}
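
// Illustrative usage note (not part of the original source): when
// |_externalMixing| is false, StartPlayout()/StopPlayout() add and remove this
// channel as a mixing candidate in the shared OutputMixer. A hypothetical
// caller-side sketch, assuming |channel| has already been initialized through
// the usual VoiceEngine path:
//
//   if (channel->StartPlayout() != 0) {
//     // Mixer registration failed; the engine statistics carry
//     // VE_AUDIO_CONF_MIX_MODULE_ERROR with details.
//   }
//   ...
//   channel->StopPlayout();  // Also clears |_outputAudioLevel|.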
1413
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001414int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001415Channel::StartSend()
1416{
1417 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1418 "Channel::StartSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001419 {
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001420 // A lock is needed because |_sending| can be accessed or modified by
1421 // another thread at the same time.
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001422 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001423
1424 if (_sending)
1425 {
1426 return 0;
1427 }
1428 _sending = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00001429 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001430
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001431 if (_rtpRtcpModule->SetSendingStatus(true) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001432 {
1433 _engineStatisticsPtr->SetLastError(
1434 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1435 "StartSend() RTP/RTCP failed to start sending");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001436 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001437 _sending = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00001438 return -1;
1439 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001440
niklase@google.com470e71d2011-07-07 08:21:25 +00001441 return 0;
1442}
1443
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001444int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001445Channel::StopSend()
1446{
1447 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1448 "Channel::StopSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001449 {
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001450 // A lock is needed because |_sending| can be accessed or modified by
1451 // another thread at the same time.
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001452 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001453
1454 if (!_sending)
1455 {
1456 return 0;
1457 }
1458 _sending = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00001459 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001460
niklase@google.com470e71d2011-07-07 08:21:25 +00001461    // Reset sending SSRC and sequence number and trigger direct transmission
1462 // of RTCP BYE
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001463 if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
1464 _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001465 {
1466 _engineStatisticsPtr->SetLastError(
1467 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1468            "StopSend() RTP/RTCP failed to stop sending");
1469 }
1470
niklase@google.com470e71d2011-07-07 08:21:25 +00001471 return 0;
1472}
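
// Illustrative usage note (assumption, not original code): StartSend() flips
// |_sending| under |_callbackCritSect| before touching the RTP/RTCP module and
// rolls the flag back if SetSendingStatus(true) fails; StopSend() triggers an
// RTCP BYE and resets the RTP send counters. A minimal caller sketch:
//
//   if (channel->StartSend() != 0) {
//     // RTP/RTCP refused to start sending; |_sending| has been reset,
//     // so the call can simply be retried later.
//   }
//   ...
//   channel->StopSend();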
1473
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001474int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001475Channel::StartReceiving()
1476{
1477 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1478 "Channel::StartReceiving()");
1479 if (_receiving)
1480 {
1481 return 0;
1482 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001483 _receiving = true;
1484 _numberOfDiscardedPackets = 0;
1485 return 0;
1486}
1487
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001488int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001489Channel::StopReceiving()
1490{
1491 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1492 "Channel::StopReceiving()");
1493 if (!_receiving)
1494 {
1495 return 0;
1496 }
pwestin@webrtc.org684f0572013-03-13 23:20:57 +00001497
henrika@webrtc.orgaf71f0e2011-12-05 07:02:22 +00001498 // Recover DTMF detection status.
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00001499 telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
niklase@google.com470e71d2011-07-07 08:21:25 +00001500 RegisterReceiveCodecsToRTPModule();
1501 _receiving = false;
1502 return 0;
1503}
1504
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001505int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001506Channel::SetNetEQPlayoutMode(NetEqModes mode)
1507{
1508 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1509 "Channel::SetNetEQPlayoutMode()");
1510 AudioPlayoutMode playoutMode(voice);
1511 switch (mode)
1512 {
1513 case kNetEqDefault:
1514 playoutMode = voice;
1515 break;
1516 case kNetEqStreaming:
1517 playoutMode = streaming;
1518 break;
1519 case kNetEqFax:
1520 playoutMode = fax;
1521 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001522 case kNetEqOff:
1523 playoutMode = off;
1524 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00001525 }
1526 if (_audioCodingModule.SetPlayoutMode(playoutMode) != 0)
1527 {
1528 _engineStatisticsPtr->SetLastError(
1529 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1530 "SetNetEQPlayoutMode() failed to set playout mode");
1531 return -1;
1532 }
1533 return 0;
1534}
1535
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001536int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001537Channel::GetNetEQPlayoutMode(NetEqModes& mode)
1538{
1539 const AudioPlayoutMode playoutMode = _audioCodingModule.PlayoutMode();
1540 switch (playoutMode)
1541 {
1542 case voice:
1543 mode = kNetEqDefault;
1544 break;
1545 case streaming:
1546 mode = kNetEqStreaming;
1547 break;
1548 case fax:
1549 mode = kNetEqFax;
1550 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001551 case off:
1552 mode = kNetEqOff;
niklase@google.com470e71d2011-07-07 08:21:25 +00001553 }
1554 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
1555 VoEId(_instanceId,_channelId),
1556 "Channel::GetNetEQPlayoutMode() => mode=%u", mode);
1557 return 0;
1558}
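
// Illustrative note (not in the original file): SetNetEQPlayoutMode() and
// GetNetEQPlayoutMode() translate between the public NetEqModes enum and the
// ACM's AudioPlayoutMode (kNetEqDefault <-> voice, kNetEqStreaming <->
// streaming, kNetEqFax <-> fax, kNetEqOff <-> off). A hypothetical round trip:
//
//   channel->SetNetEQPlayoutMode(kNetEqFax);  // ACM switches to fax playout.
//   NetEqModes mode;
//   channel->GetNetEQPlayoutMode(mode);       // mode is now kNetEqFax.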
1559
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001560int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001561Channel::SetOnHoldStatus(bool enable, OnHoldModes mode)
1562{
1563 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1564 "Channel::SetOnHoldStatus()");
1565 if (mode == kHoldSendAndPlay)
1566 {
1567 _outputIsOnHold = enable;
1568 _inputIsOnHold = enable;
1569 }
1570 else if (mode == kHoldPlayOnly)
1571 {
1572 _outputIsOnHold = enable;
1573 }
1574    else if (mode == kHoldSendOnly)
1575 {
1576 _inputIsOnHold = enable;
1577 }
1578 return 0;
1579}
1580
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001581int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001582Channel::GetOnHoldStatus(bool& enabled, OnHoldModes& mode)
1583{
1584 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1585 "Channel::GetOnHoldStatus()");
1586 enabled = (_outputIsOnHold || _inputIsOnHold);
1587 if (_outputIsOnHold && _inputIsOnHold)
1588 {
1589 mode = kHoldSendAndPlay;
1590 }
1591 else if (_outputIsOnHold && !_inputIsOnHold)
1592 {
1593 mode = kHoldPlayOnly;
1594 }
1595 else if (!_outputIsOnHold && _inputIsOnHold)
1596 {
1597 mode = kHoldSendOnly;
1598 }
1599 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1600 "Channel::GetOnHoldStatus() => enabled=%d, mode=%d",
1601 enabled, mode);
1602 return 0;
1603}
1604
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001605int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001606Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
1607{
1608 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1609 "Channel::RegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001610 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001611
1612 if (_voiceEngineObserverPtr)
1613 {
1614 _engineStatisticsPtr->SetLastError(
1615 VE_INVALID_OPERATION, kTraceError,
1616 "RegisterVoiceEngineObserver() observer already enabled");
1617 return -1;
1618 }
1619 _voiceEngineObserverPtr = &observer;
1620 return 0;
1621}
1622
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001623int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001624Channel::DeRegisterVoiceEngineObserver()
1625{
1626 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1627 "Channel::DeRegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001628 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001629
1630 if (!_voiceEngineObserverPtr)
1631 {
1632 _engineStatisticsPtr->SetLastError(
1633 VE_INVALID_OPERATION, kTraceWarning,
1634 "DeRegisterVoiceEngineObserver() observer already disabled");
1635 return 0;
1636 }
1637 _voiceEngineObserverPtr = NULL;
1638 return 0;
1639}
1640
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001641int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001642Channel::GetSendCodec(CodecInst& codec)
1643{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001644 return (_audioCodingModule.SendCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001645}
1646
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001647int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001648Channel::GetRecCodec(CodecInst& codec)
1649{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001650 return (_audioCodingModule.ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001651}
1652
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001653int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001654Channel::SetSendCodec(const CodecInst& codec)
1655{
1656 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1657 "Channel::SetSendCodec()");
1658
1659 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1660 {
1661 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1662 "SetSendCodec() failed to register codec to ACM");
1663 return -1;
1664 }
1665
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001666 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001667 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001668 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1669 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001670 {
1671 WEBRTC_TRACE(
1672 kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1673 "SetSendCodec() failed to register codec to"
1674 " RTP/RTCP module");
1675 return -1;
1676 }
1677 }
1678
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001679 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001680 {
1681 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1682 "SetSendCodec() failed to set audio packet size");
1683 return -1;
1684 }
1685
1686 return 0;
1687}
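
// Illustrative usage sketch (assumption, not original code): SetSendCodec()
// registers |codec| with both the ACM and the RTP/RTCP module and then locks
// the RTP audio packet size to |codec.pacsize|. Using the same CodecInst field
// order as the dummy codec used further down in this file
// ({pltype, plname, plfreq, pacsize, channels, rate}):
//
//   CodecInst l16 = {100, "L16", 16000, 320, 1, 320000};  // 20 ms mono L16.
//   if (channel->SetSendCodec(l16) != 0) {
//     // Registration failed in the ACM or in the RTP/RTCP module.
//   }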
1688
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001689int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001690Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
1691{
1692 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1693 "Channel::SetVADStatus(mode=%d)", mode);
1694 // To disable VAD, DTX must be disabled too
1695 disableDTX = ((enableVAD == false) ? true : disableDTX);
1696 if (_audioCodingModule.SetVAD(!disableDTX, enableVAD, mode) != 0)
1697 {
1698 _engineStatisticsPtr->SetLastError(
1699 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1700 "SetVADStatus() failed to set VAD");
1701 return -1;
1702 }
1703 return 0;
1704}
1705
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001706int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001707Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
1708{
1709 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1710 "Channel::GetVADStatus");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001711 if (_audioCodingModule.VAD(&disabledDTX, &enabledVAD, &mode) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001712 {
1713 _engineStatisticsPtr->SetLastError(
1714 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1715 "GetVADStatus() failed to get VAD status");
1716 return -1;
1717 }
1718 disabledDTX = !disabledDTX;
1719 return 0;
1720}
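
// Illustrative note (not part of the original source): the ACM API is phrased
// in terms of "DTX enabled" while this wrapper exposes "DTX disabled", hence
// the !disableDTX / !disabledDTX inversions above. A hypothetical caller
// enabling VAD with default aggressiveness (assuming the ACMVADMode
// enumerator VADNormal):
//
//   channel->SetVADStatus(true, VADNormal, false);  // VAD on, DTX stays on.
//   bool vad, dtxDisabled; ACMVADMode mode;
//   channel->GetVADStatus(vad, mode, dtxDisabled);  // vad == true,
//                                                   // dtxDisabled == false.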
1721
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001722int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001723Channel::SetRecPayloadType(const CodecInst& codec)
1724{
1725 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1726 "Channel::SetRecPayloadType()");
1727
1728 if (_playing)
1729 {
1730 _engineStatisticsPtr->SetLastError(
1731 VE_ALREADY_PLAYING, kTraceError,
1732 "SetRecPayloadType() unable to set PT while playing");
1733 return -1;
1734 }
1735 if (_receiving)
1736 {
1737 _engineStatisticsPtr->SetLastError(
1738 VE_ALREADY_LISTENING, kTraceError,
1739 "SetRecPayloadType() unable to set PT while listening");
1740 return -1;
1741 }
1742
1743 if (codec.pltype == -1)
1744 {
1745 // De-register the selected codec (RTP/RTCP module and ACM)
1746
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001747 int8_t pltype(-1);
niklase@google.com470e71d2011-07-07 08:21:25 +00001748 CodecInst rxCodec = codec;
1749
1750 // Get payload type for the given codec
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00001751 rtp_payload_registry_->ReceivePayloadType(
1752 rxCodec.plname,
1753 rxCodec.plfreq,
1754 rxCodec.channels,
1755 (rxCodec.rate < 0) ? 0 : rxCodec.rate,
1756 &pltype);
niklase@google.com470e71d2011-07-07 08:21:25 +00001757 rxCodec.pltype = pltype;
1758
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00001759 if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001760 {
1761 _engineStatisticsPtr->SetLastError(
1762 VE_RTP_RTCP_MODULE_ERROR,
1763 kTraceError,
1764 "SetRecPayloadType() RTP/RTCP-module deregistration "
1765 "failed");
1766 return -1;
1767 }
1768 if (_audioCodingModule.UnregisterReceiveCodec(rxCodec.pltype) != 0)
1769 {
1770 _engineStatisticsPtr->SetLastError(
1771 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1772 "SetRecPayloadType() ACM deregistration failed - 1");
1773 return -1;
1774 }
1775 return 0;
1776 }
1777
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00001778 if (rtp_receiver_->RegisterReceivePayload(
1779 codec.plname,
1780 codec.pltype,
1781 codec.plfreq,
1782 codec.channels,
1783 (codec.rate < 0) ? 0 : codec.rate) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001784 {
1785 // First attempt to register failed => de-register and try again
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00001786 rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
1787 if (rtp_receiver_->RegisterReceivePayload(
1788 codec.plname,
1789 codec.pltype,
1790 codec.plfreq,
1791 codec.channels,
1792 (codec.rate < 0) ? 0 : codec.rate) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001793 {
1794 _engineStatisticsPtr->SetLastError(
1795 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1796 "SetRecPayloadType() RTP/RTCP-module registration failed");
1797 return -1;
1798 }
1799 }
1800 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1801 {
1802 _audioCodingModule.UnregisterReceiveCodec(codec.pltype);
1803 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1804 {
1805 _engineStatisticsPtr->SetLastError(
1806 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1807 "SetRecPayloadType() ACM registration failed - 1");
1808 return -1;
1809 }
1810 }
1811 return 0;
1812}
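
// Illustrative usage sketch (assumption, not original code): a payload type of
// -1 deregisters the codec from both rtp_receiver_ and the ACM; any other
// value (re)registers it, retrying once after an explicit deregistration if
// the first attempt fails. Neither playout nor listening may be active:
//
//   CodecInst l16 = {100, "L16", 16000, 320, 1, 320000};  // hypothetical PT.
//   channel->SetRecPayloadType(l16);   // Register payload type 100.
//   l16.pltype = -1;
//   channel->SetRecPayloadType(l16);   // Deregister it again.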
1813
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001814int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001815Channel::GetRecPayloadType(CodecInst& codec)
1816{
1817 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1818 "Channel::GetRecPayloadType()");
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001819 int8_t payloadType(-1);
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00001820 if (rtp_payload_registry_->ReceivePayloadType(
1821 codec.plname,
1822 codec.plfreq,
1823 codec.channels,
1824 (codec.rate < 0) ? 0 : codec.rate,
1825 &payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001826 {
1827 _engineStatisticsPtr->SetLastError(
henrika@webrtc.org37198002012-06-18 11:00:12 +00001828 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
niklase@google.com470e71d2011-07-07 08:21:25 +00001829 "GetRecPayloadType() failed to retrieve RX payload type");
1830 return -1;
1831 }
1832 codec.pltype = payloadType;
1833 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1834 "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
1835 return 0;
1836}
1837
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001838int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001839Channel::SetAMREncFormat(AmrMode mode)
1840{
1841 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1842 "Channel::SetAMREncFormat()");
1843
1844 // ACM doesn't support AMR
1845 return -1;
1846}
1847
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001848int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001849Channel::SetAMRDecFormat(AmrMode mode)
1850{
1851 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1852 "Channel::SetAMRDecFormat()");
1853
1854 // ACM doesn't support AMR
1855 return -1;
1856}
1857
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001858int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001859Channel::SetAMRWbEncFormat(AmrMode mode)
1860{
1861 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1862 "Channel::SetAMRWbEncFormat()");
1863
1864 // ACM doesn't support AMR
1865 return -1;
1866
1867}
1868
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001869int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001870Channel::SetAMRWbDecFormat(AmrMode mode)
1871{
1872 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1873 "Channel::SetAMRWbDecFormat()");
1874
1875 // ACM doesn't support AMR
1876 return -1;
1877}
1878
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001879int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001880Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
1881{
1882 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1883 "Channel::SetSendCNPayloadType()");
1884
1885 CodecInst codec;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001886 int32_t samplingFreqHz(-1);
tina.legrand@webrtc.org45175852012-06-01 09:27:35 +00001887 const int kMono = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001888 if (frequency == kFreq32000Hz)
1889 samplingFreqHz = 32000;
1890 else if (frequency == kFreq16000Hz)
1891 samplingFreqHz = 16000;
1892
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001893 if (_audioCodingModule.Codec("CN", &codec, samplingFreqHz, kMono) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001894 {
1895 _engineStatisticsPtr->SetLastError(
1896 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1897 "SetSendCNPayloadType() failed to retrieve default CN codec "
1898 "settings");
1899 return -1;
1900 }
1901
1902 // Modify the payload type (must be set to dynamic range)
1903 codec.pltype = type;
1904
1905 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1906 {
1907 _engineStatisticsPtr->SetLastError(
1908 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1909 "SetSendCNPayloadType() failed to register CN to ACM");
1910 return -1;
1911 }
1912
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001913 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001914 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001915 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1916 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001917 {
1918 _engineStatisticsPtr->SetLastError(
1919 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1920 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1921 "module");
1922 return -1;
1923 }
1924 }
1925 return 0;
1926}
1927
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001928int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001929Channel::SetISACInitTargetRate(int rateBps, bool useFixedFrameSize)
1930{
1931 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1932 "Channel::SetISACInitTargetRate()");
1933
1934 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001935 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001936 {
1937 _engineStatisticsPtr->SetLastError(
1938 VE_CODEC_ERROR, kTraceError,
1939 "SetISACInitTargetRate() failed to retrieve send codec");
1940 return -1;
1941 }
1942 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1943 {
1944 // This API is only valid if iSAC is setup to run in channel-adaptive
1945 // mode.
1946 // We do not validate the adaptive mode here. It is done later in the
1947 // ConfigISACBandwidthEstimator() API.
1948 _engineStatisticsPtr->SetLastError(
1949 VE_CODEC_ERROR, kTraceError,
1950 "SetISACInitTargetRate() send codec is not iSAC");
1951 return -1;
1952 }
1953
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001954 uint8_t initFrameSizeMsec(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00001955 if (16000 == sendCodec.plfreq)
1956 {
1957        // Note that 0 is a valid value and corresponds to "use default".
1958 if ((rateBps != 0 &&
1959 rateBps < kVoiceEngineMinIsacInitTargetRateBpsWb) ||
1960 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsWb))
1961 {
1962 _engineStatisticsPtr->SetLastError(
1963 VE_INVALID_ARGUMENT, kTraceError,
1964 "SetISACInitTargetRate() invalid target rate - 1");
1965 return -1;
1966 }
1967 // 30 or 60ms
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001968 initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 16);
niklase@google.com470e71d2011-07-07 08:21:25 +00001969 }
1970 else if (32000 == sendCodec.plfreq)
1971 {
1972 if ((rateBps != 0 &&
1973 rateBps < kVoiceEngineMinIsacInitTargetRateBpsSwb) ||
1974 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsSwb))
1975 {
1976 _engineStatisticsPtr->SetLastError(
1977 VE_INVALID_ARGUMENT, kTraceError,
1978 "SetISACInitTargetRate() invalid target rate - 2");
1979 return -1;
1980 }
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001981 initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 32); // 30ms
niklase@google.com470e71d2011-07-07 08:21:25 +00001982 }
1983
1984 if (_audioCodingModule.ConfigISACBandwidthEstimator(
1985 initFrameSizeMsec, rateBps, useFixedFrameSize) == -1)
1986 {
1987 _engineStatisticsPtr->SetLastError(
1988 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1989 "SetISACInitTargetRate() iSAC BWE config failed");
1990 return -1;
1991 }
1992
1993 return 0;
1994}
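
// Illustrative note (not in the original file): the init frame size passed to
// ConfigISACBandwidthEstimator() is derived from the send codec's pacsize as
// samples / (samples per ms). For wideband iSAC at 16 kHz, pacsize 480 gives
// 480 / 16 = 30 ms and pacsize 960 gives 60 ms; for super-wideband iSAC at
// 32 kHz, pacsize 960 gives 960 / 32 = 30 ms.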
1995
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001996int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001997Channel::SetISACMaxRate(int rateBps)
1998{
1999 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2000 "Channel::SetISACMaxRate()");
2001
2002 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00002003 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00002004 {
2005 _engineStatisticsPtr->SetLastError(
2006 VE_CODEC_ERROR, kTraceError,
2007 "SetISACMaxRate() failed to retrieve send codec");
2008 return -1;
2009 }
2010 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
2011 {
2012 // This API is only valid if iSAC is selected as sending codec.
2013 _engineStatisticsPtr->SetLastError(
2014 VE_CODEC_ERROR, kTraceError,
2015 "SetISACMaxRate() send codec is not iSAC");
2016 return -1;
2017 }
2018 if (16000 == sendCodec.plfreq)
2019 {
2020 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsWb) ||
2021 (rateBps > kVoiceEngineMaxIsacMaxRateBpsWb))
2022 {
2023 _engineStatisticsPtr->SetLastError(
2024 VE_INVALID_ARGUMENT, kTraceError,
2025 "SetISACMaxRate() invalid max rate - 1");
2026 return -1;
2027 }
2028 }
2029 else if (32000 == sendCodec.plfreq)
2030 {
2031 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsSwb) ||
2032 (rateBps > kVoiceEngineMaxIsacMaxRateBpsSwb))
2033 {
2034 _engineStatisticsPtr->SetLastError(
2035 VE_INVALID_ARGUMENT, kTraceError,
2036 "SetISACMaxRate() invalid max rate - 2");
2037 return -1;
2038 }
2039 }
2040 if (_sending)
2041 {
2042 _engineStatisticsPtr->SetLastError(
2043 VE_SENDING, kTraceError,
2044 "SetISACMaxRate() unable to set max rate while sending");
2045 return -1;
2046 }
2047
2048 // Set the maximum instantaneous rate of iSAC (works for both adaptive
2049 // and non-adaptive mode)
2050 if (_audioCodingModule.SetISACMaxRate(rateBps) == -1)
2051 {
2052 _engineStatisticsPtr->SetLastError(
2053 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2054 "SetISACMaxRate() failed to set max rate");
2055 return -1;
2056 }
2057
2058 return 0;
2059}
2060
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002061int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002062Channel::SetISACMaxPayloadSize(int sizeBytes)
2063{
2064 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2065 "Channel::SetISACMaxPayloadSize()");
2066 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00002067 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00002068 {
2069 _engineStatisticsPtr->SetLastError(
2070 VE_CODEC_ERROR, kTraceError,
2071 "SetISACMaxPayloadSize() failed to retrieve send codec");
2072 return -1;
2073 }
2074 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
2075 {
2076 _engineStatisticsPtr->SetLastError(
2077 VE_CODEC_ERROR, kTraceError,
2078 "SetISACMaxPayloadSize() send codec is not iSAC");
2079 return -1;
2080 }
2081 if (16000 == sendCodec.plfreq)
2082 {
2083 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesWb) ||
2084 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesWb))
2085 {
2086 _engineStatisticsPtr->SetLastError(
2087 VE_INVALID_ARGUMENT, kTraceError,
2088 "SetISACMaxPayloadSize() invalid max payload - 1");
2089 return -1;
2090 }
2091 }
2092 else if (32000 == sendCodec.plfreq)
2093 {
2094 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesSwb) ||
2095 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb))
2096 {
2097 _engineStatisticsPtr->SetLastError(
2098 VE_INVALID_ARGUMENT, kTraceError,
2099 "SetISACMaxPayloadSize() invalid max payload - 2");
2100 return -1;
2101 }
2102 }
2103 if (_sending)
2104 {
2105 _engineStatisticsPtr->SetLastError(
2106 VE_SENDING, kTraceError,
2107            "SetISACMaxPayloadSize() unable to set max payload size while sending");
2108 return -1;
2109 }
2110
2111 if (_audioCodingModule.SetISACMaxPayloadSize(sizeBytes) == -1)
2112 {
2113 _engineStatisticsPtr->SetLastError(
2114 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2115 "SetISACMaxPayloadSize() failed to set max payload size");
2116 return -1;
2117 }
2118 return 0;
2119}
2120
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002121int32_t Channel::RegisterExternalTransport(Transport& transport)
niklase@google.com470e71d2011-07-07 08:21:25 +00002122{
2123 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2124 "Channel::RegisterExternalTransport()");
2125
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002126 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002127
niklase@google.com470e71d2011-07-07 08:21:25 +00002128 if (_externalTransport)
2129 {
2130 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
2131 kTraceError,
2132 "RegisterExternalTransport() external transport already enabled");
2133 return -1;
2134 }
2135 _externalTransport = true;
2136 _transportPtr = &transport;
2137 return 0;
2138}
2139
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002140int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002141Channel::DeRegisterExternalTransport()
2142{
2143 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2144 "Channel::DeRegisterExternalTransport()");
2145
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002146 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.org83661f52011-11-25 10:58:15 +00002147
niklase@google.com470e71d2011-07-07 08:21:25 +00002148 if (!_transportPtr)
2149 {
2150 _engineStatisticsPtr->SetLastError(
2151 VE_INVALID_OPERATION, kTraceWarning,
2152 "DeRegisterExternalTransport() external transport already "
2153 "disabled");
2154 return 0;
2155 }
2156 _externalTransport = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002157 _transportPtr = NULL;
2158 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2159                 "DeRegisterExternalTransport() external transport is now disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002160 return 0;
2161}
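
// Illustrative usage sketch (assumption, not original code): with an external
// transport registered, outgoing RTP/RTCP is handed to |transport| instead of
// an internal socket, and the application feeds received packets back through
// ReceivedRTPPacket()/ReceivedRTCPPacket():
//
//   MyTransport transport;  // Hypothetical webrtc::Transport implementation.
//   channel->RegisterExternalTransport(transport);
//   ...
//   channel->DeRegisterExternalTransport();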
2162
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002163int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002164 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2165 "Channel::ReceivedRTPPacket()");
2166
2167 // Store playout timestamp for the received RTP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002168 UpdatePlayoutTimestamp(false);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002169
2170 // Dump the RTP packet to a file (if RTP dump is enabled).
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002171 if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2172 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002173 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2174 VoEId(_instanceId,_channelId),
2175                  "Channel::ReceivedRTPPacket() RTP dump to input file failed");
2176 }
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002177 RTPHeader header;
2178 if (!rtp_header_parser_->Parse(reinterpret_cast<const uint8_t*>(data),
2179 static_cast<uint16_t>(length), &header)) {
2180    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice,
2181 VoEId(_instanceId,_channelId),
2182 "IncomingPacket invalid RTP header");
2183 return -1;
2184 }
pbos@webrtc.org08933a52013-07-10 10:06:29 +00002185 header.payload_type_frequency =
2186 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
stefan@webrtc.org9de89a62013-07-10 12:42:15 +00002187 if (header.payload_type_frequency < 0) {
2188 return -1;
2189 }
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00002190 bool retransmitted = IsPacketRetransmitted(header);
2191 bool in_order = rtp_receiver_->InOrderPacket(header.sequenceNumber);
2192 rtp_receive_statistics_->IncomingPacket(header, static_cast<uint16_t>(length),
2193 retransmitted, in_order);
2194 PayloadUnion payload_specific;
2195 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
2196 &payload_specific)) {
2197 return -1;
2198 }
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002199 // Deliver RTP packet to RTP/RTCP module for parsing
2200  // The packet will be pushed back to the channel through the
2201  // OnReceivedPayloadData callback, so we don't push it to the ACM here.
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00002202 if (!rtp_receiver_->IncomingRtpPacket(&header,
2203 reinterpret_cast<const uint8_t*>(data),
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002204 static_cast<uint16_t>(length),
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00002205 payload_specific, in_order)) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002206 _engineStatisticsPtr->SetLastError(
2207 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2208        "Channel::ReceivedRTPPacket() RTP packet is invalid");
2209 }
2210 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002211}
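
// Illustrative note (not part of the original source): ReceivedRTPPacket()
// parses the RTP header, patches in the payload-type frequency from
// rtp_payload_registry_, updates receive statistics (including the
// retransmission check below), and finally hands the packet to rtp_receiver_,
// which calls back into this channel for ACM insertion. A hypothetical
// external-transport caller:
//
//   // |packet| and |length| as delivered by the application's own socket.
//   channel->ReceivedRTPPacket(reinterpret_cast<const int8_t*>(packet),
//                              length);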
2212
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00002213bool Channel::IsPacketRetransmitted(const RTPHeader& header) const {
2214 bool rtx_enabled = false;
2215 uint32_t rtx_ssrc = 0;
2216 int rtx_payload_type = 0;
2217 rtp_receiver_->RTXStatus(&rtx_enabled, &rtx_ssrc, &rtx_payload_type);
2218 if (!rtx_enabled) {
2219 // Check if this is a retransmission.
2220 ReceiveStatistics::RtpReceiveStatistics stats;
2221 if (rtp_receive_statistics_->Statistics(&stats, false)) {
2222 uint16_t min_rtt = 0;
2223 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
2224 return rtp_receiver_->RetransmitOfOldPacket(header, stats.jitter,
2225 min_rtt);
2226 }
2227 }
2228 return false;
2229}
2230
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002231int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002232 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2233 "Channel::ReceivedRTCPPacket()");
2234 // Store playout timestamp for the received RTCP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002235 UpdatePlayoutTimestamp(true);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002236
2237 // Dump the RTCP packet to a file (if RTP dump is enabled).
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002238 if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2239 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002240 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2241 VoEId(_instanceId,_channelId),
2242                  "Channel::ReceivedRTCPPacket() RTCP dump to input file failed");
2243 }
2244
2245 // Deliver RTCP packet to RTP/RTCP module for parsing
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002246 if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data,
2247 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002248 _engineStatisticsPtr->SetLastError(
2249 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2250        "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
2251 }
2252 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002253}
2254
niklase@google.com470e71d2011-07-07 08:21:25 +00002255int Channel::StartPlayingFileLocally(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002256 bool loop,
2257 FileFormats format,
2258 int startPosition,
2259 float volumeScaling,
2260 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002261 const CodecInst* codecInst)
2262{
2263 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2264 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
2265 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
2266 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2267 startPosition, stopPosition);
2268
2269 if (_outputFilePlaying)
2270 {
2271 _engineStatisticsPtr->SetLastError(
2272 VE_ALREADY_PLAYING, kTraceError,
2273 "StartPlayingFileLocally() is already playing");
2274 return -1;
2275 }
2276
niklase@google.com470e71d2011-07-07 08:21:25 +00002277 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002278 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002279
2280 if (_outputFilePlayerPtr)
2281 {
2282 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2283 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2284 _outputFilePlayerPtr = NULL;
2285 }
2286
2287 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2288 _outputFilePlayerId, (const FileFormats)format);
2289
2290 if (_outputFilePlayerPtr == NULL)
2291 {
2292 _engineStatisticsPtr->SetLastError(
2293 VE_INVALID_ARGUMENT, kTraceError,
henrike@webrtc.org31d30702011-11-18 19:59:32 +00002294 "StartPlayingFileLocally() filePlayer format is not correct");
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002295 return -1;
2296 }
2297
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002298 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002299
2300 if (_outputFilePlayerPtr->StartPlayingFile(
2301 fileName,
2302 loop,
2303 startPosition,
2304 volumeScaling,
2305 notificationTime,
2306 stopPosition,
2307 (const CodecInst*)codecInst) != 0)
2308 {
2309 _engineStatisticsPtr->SetLastError(
2310 VE_BAD_FILE, kTraceError,
2311 "StartPlayingFile() failed to start file playout");
2312 _outputFilePlayerPtr->StopPlayingFile();
2313 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2314 _outputFilePlayerPtr = NULL;
2315 return -1;
2316 }
2317 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2318 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002319 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002320
2321 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002322 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002323
2324 return 0;
2325}
2326
2327int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002328 FileFormats format,
2329 int startPosition,
2330 float volumeScaling,
2331 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002332 const CodecInst* codecInst)
2333{
2334 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2335 "Channel::StartPlayingFileLocally(format=%d,"
2336 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2337 format, volumeScaling, startPosition, stopPosition);
2338
2339 if(stream == NULL)
2340 {
2341 _engineStatisticsPtr->SetLastError(
2342 VE_BAD_FILE, kTraceError,
2343 "StartPlayingFileLocally() NULL as input stream");
2344 return -1;
2345 }
2346
2348 if (_outputFilePlaying)
2349 {
2350 _engineStatisticsPtr->SetLastError(
2351 VE_ALREADY_PLAYING, kTraceError,
2352 "StartPlayingFileLocally() is already playing");
2353 return -1;
2354 }
2355
niklase@google.com470e71d2011-07-07 08:21:25 +00002356 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002357 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002358
2359 // Destroy the old instance
2360 if (_outputFilePlayerPtr)
2361 {
2362 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2363 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2364 _outputFilePlayerPtr = NULL;
2365 }
2366
2367 // Create the instance
2368 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2369 _outputFilePlayerId,
2370 (const FileFormats)format);
2371
2372 if (_outputFilePlayerPtr == NULL)
2373 {
2374 _engineStatisticsPtr->SetLastError(
2375 VE_INVALID_ARGUMENT, kTraceError,
2376                "StartPlayingFileLocally() filePlayer format is not correct");
2377 return -1;
2378 }
2379
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002380 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002381
2382 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2383 volumeScaling,
2384 notificationTime,
2385 stopPosition, codecInst) != 0)
2386 {
2387 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2388 "StartPlayingFile() failed to "
2389 "start file playout");
2390 _outputFilePlayerPtr->StopPlayingFile();
2391 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2392 _outputFilePlayerPtr = NULL;
2393 return -1;
2394 }
2395 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2396 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002397 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002398
2399 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002400 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002401
niklase@google.com470e71d2011-07-07 08:21:25 +00002402 return 0;
2403}
2404
2405int Channel::StopPlayingFileLocally()
2406{
2407 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2408 "Channel::StopPlayingFileLocally()");
2409
2410 if (!_outputFilePlaying)
2411 {
2412 _engineStatisticsPtr->SetLastError(
2413 VE_INVALID_OPERATION, kTraceWarning,
2414            "StopPlayingFileLocally() is not playing");
2415 return 0;
2416 }
2417
niklase@google.com470e71d2011-07-07 08:21:25 +00002418 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002419 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002420
2421 if (_outputFilePlayerPtr->StopPlayingFile() != 0)
2422 {
2423 _engineStatisticsPtr->SetLastError(
2424 VE_STOP_RECORDING_FAILED, kTraceError,
2425 "StopPlayingFile() could not stop playing");
2426 return -1;
2427 }
2428 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2429 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2430 _outputFilePlayerPtr = NULL;
2431 _outputFilePlaying = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002432 }
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002433 // _fileCritSect cannot be taken while calling
2434    // SetAnonymousMixabilityStatus(). Refer to comments in
2435 // StartPlayingFileLocally(const char* ...) for more details.
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002436 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
2437 {
2438 _engineStatisticsPtr->SetLastError(
2439 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002440            "StopPlayingFile() failed to stop participant from playing as a"
2441            " file in the mixer");
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002442 return -1;
2443 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002444
2445 return 0;
2446}
2447
2448int Channel::IsPlayingFileLocally() const
2449{
2450 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2451 "Channel::IsPlayingFileLocally()");
2452
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002453 return (int32_t)_outputFilePlaying;
niklase@google.com470e71d2011-07-07 08:21:25 +00002454}
2455
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002456int Channel::RegisterFilePlayingToMixer()
2457{
2458    // Return success without registering the file playing with the mixer if:
2459    // 1. a file is played before playout has been started on this channel, or
2460    // 2. playout is started without a file playing on this channel.
2461 if (!_playing || !_outputFilePlaying)
2462 {
2463 return 0;
2464 }
2465
2466 // |_fileCritSect| cannot be taken while calling
2467 // SetAnonymousMixabilityStatus() since as soon as the participant is added
2468 // frames can be pulled by the mixer. Since the frames are generated from
2469 // the file, _fileCritSect will be taken. This would result in a deadlock.
2470 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
2471 {
2472 CriticalSectionScoped cs(&_fileCritSect);
2473 _outputFilePlaying = false;
2474 _engineStatisticsPtr->SetLastError(
2475 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2476 "StartPlayingFile() failed to add participant as file to mixer");
2477 _outputFilePlayerPtr->StopPlayingFile();
2478 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2479 _outputFilePlayerPtr = NULL;
2480 return -1;
2481 }
2482
2483 return 0;
2484}
2485
pbos@webrtc.org92135212013-05-14 08:31:39 +00002486int Channel::ScaleLocalFilePlayout(float scale)
niklase@google.com470e71d2011-07-07 08:21:25 +00002487{
2488 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2489 "Channel::ScaleLocalFilePlayout(scale=%5.3f)", scale);
2490
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002491 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002492
2493 if (!_outputFilePlaying)
2494 {
2495 _engineStatisticsPtr->SetLastError(
2496 VE_INVALID_OPERATION, kTraceError,
2497            "ScaleLocalFilePlayout() is not playing");
2498 return -1;
2499 }
2500 if ((_outputFilePlayerPtr == NULL) ||
2501 (_outputFilePlayerPtr->SetAudioScaling(scale) != 0))
2502 {
2503 _engineStatisticsPtr->SetLastError(
2504 VE_BAD_ARGUMENT, kTraceError,
2505 "SetAudioScaling() failed to scale the playout");
2506 return -1;
2507 }
2508
2509 return 0;
2510}
2511
2512int Channel::GetLocalPlayoutPosition(int& positionMs)
2513{
2514 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2515 "Channel::GetLocalPlayoutPosition(position=?)");
2516
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002517 uint32_t position;
niklase@google.com470e71d2011-07-07 08:21:25 +00002518
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002519 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002520
2521 if (_outputFilePlayerPtr == NULL)
2522 {
2523 _engineStatisticsPtr->SetLastError(
2524 VE_INVALID_OPERATION, kTraceError,
2525            "GetLocalPlayoutPosition() filePlayer instance does not exist");
2526 return -1;
2527 }
2528
2529 if (_outputFilePlayerPtr->GetPlayoutPosition(position) != 0)
2530 {
2531 _engineStatisticsPtr->SetLastError(
2532 VE_BAD_FILE, kTraceError,
2533 "GetLocalPlayoutPosition() failed");
2534 return -1;
2535 }
2536 positionMs = position;
2537
2538 return 0;
2539}
2540
2541int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002542 bool loop,
2543 FileFormats format,
2544 int startPosition,
2545 float volumeScaling,
2546 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002547 const CodecInst* codecInst)
2548{
2549 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2550 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
2551 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
2552 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2553 startPosition, stopPosition);
2554
2555 if (_inputFilePlaying)
2556 {
2557 _engineStatisticsPtr->SetLastError(
2558 VE_ALREADY_PLAYING, kTraceWarning,
2559 "StartPlayingFileAsMicrophone() filePlayer is playing");
2560 return 0;
2561 }
2562
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002563 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002564
2565 // Destroy the old instance
2566 if (_inputFilePlayerPtr)
2567 {
2568 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2569 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2570 _inputFilePlayerPtr = NULL;
2571 }
2572
2573 // Create the instance
2574 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2575 _inputFilePlayerId, (const FileFormats)format);
2576
2577 if (_inputFilePlayerPtr == NULL)
2578 {
2579 _engineStatisticsPtr->SetLastError(
2580 VE_INVALID_ARGUMENT, kTraceError,
2581        "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2582 return -1;
2583 }
2584
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002585 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002586
2587 if (_inputFilePlayerPtr->StartPlayingFile(
2588 fileName,
2589 loop,
2590 startPosition,
2591 volumeScaling,
2592 notificationTime,
2593 stopPosition,
2594 (const CodecInst*)codecInst) != 0)
2595 {
2596 _engineStatisticsPtr->SetLastError(
2597 VE_BAD_FILE, kTraceError,
2598 "StartPlayingFile() failed to start file playout");
2599 _inputFilePlayerPtr->StopPlayingFile();
2600 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2601 _inputFilePlayerPtr = NULL;
2602 return -1;
2603 }
2604 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2605 _inputFilePlaying = true;
2606
2607 return 0;
2608}
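
// Illustrative usage sketch (assumption, not original code): a file played "as
// microphone" either replaces or is mixed with the captured audio, depending
// on SetMixWithMicStatus(). File name, format and default codec are
// hypothetical:
//
//   channel->SetMixWithMicStatus(true);  // Mix with the real microphone.
//   channel->StartPlayingFileAsMicrophone("/tmp/prompt.wav", false,
//                                         kFileFormatWavFile, 0, 1.0f, 0,
//                                         NULL);
//   ...
//   channel->StopPlayingFileAsMicrophone();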
2609
2610int Channel::StartPlayingFileAsMicrophone(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002611 FileFormats format,
2612 int startPosition,
2613 float volumeScaling,
2614 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002615 const CodecInst* codecInst)
2616{
2617 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2618 "Channel::StartPlayingFileAsMicrophone(format=%d, "
2619 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2620 format, volumeScaling, startPosition, stopPosition);
2621
2622 if(stream == NULL)
2623 {
2624 _engineStatisticsPtr->SetLastError(
2625 VE_BAD_FILE, kTraceError,
2626 "StartPlayingFileAsMicrophone NULL as input stream");
2627 return -1;
2628 }
2629
2630 if (_inputFilePlaying)
2631 {
2632 _engineStatisticsPtr->SetLastError(
2633 VE_ALREADY_PLAYING, kTraceWarning,
2634 "StartPlayingFileAsMicrophone() is playing");
2635 return 0;
2636 }
2637
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002638 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002639
2640 // Destroy the old instance
2641 if (_inputFilePlayerPtr)
2642 {
2643 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2644 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2645 _inputFilePlayerPtr = NULL;
2646 }
2647
2648 // Create the instance
2649 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2650 _inputFilePlayerId, (const FileFormats)format);
2651
2652 if (_inputFilePlayerPtr == NULL)
2653 {
2654 _engineStatisticsPtr->SetLastError(
2655 VE_INVALID_ARGUMENT, kTraceError,
2656            "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2657 return -1;
2658 }
2659
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002660 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002661
2662 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2663 volumeScaling, notificationTime,
2664 stopPosition, codecInst) != 0)
2665 {
2666 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2667 "StartPlayingFile() failed to start "
2668 "file playout");
2669 _inputFilePlayerPtr->StopPlayingFile();
2670 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2671 _inputFilePlayerPtr = NULL;
2672 return -1;
2673 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002674
niklase@google.com470e71d2011-07-07 08:21:25 +00002675 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2676 _inputFilePlaying = true;
2677
2678 return 0;
2679}
2680
2681int Channel::StopPlayingFileAsMicrophone()
2682{
2683 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2684 "Channel::StopPlayingFileAsMicrophone()");
2685
2686 if (!_inputFilePlaying)
2687 {
2688 _engineStatisticsPtr->SetLastError(
2689 VE_INVALID_OPERATION, kTraceWarning,
2690            "StopPlayingFileAsMicrophone() is not playing");
2691 return 0;
2692 }
2693
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002694 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002695 if (_inputFilePlayerPtr->StopPlayingFile() != 0)
2696 {
2697 _engineStatisticsPtr->SetLastError(
2698 VE_STOP_RECORDING_FAILED, kTraceError,
2699 "StopPlayingFile() could not stop playing");
2700 return -1;
2701 }
2702 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2703 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2704 _inputFilePlayerPtr = NULL;
2705 _inputFilePlaying = false;
2706
2707 return 0;
2708}
2709
2710int Channel::IsPlayingFileAsMicrophone() const
2711{
2712 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2713 "Channel::IsPlayingFileAsMicrophone()");
2714
2715 return _inputFilePlaying;
2716}
2717
pbos@webrtc.org92135212013-05-14 08:31:39 +00002718int Channel::ScaleFileAsMicrophonePlayout(float scale)
niklase@google.com470e71d2011-07-07 08:21:25 +00002719{
2720 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2721 "Channel::ScaleFileAsMicrophonePlayout(scale=%5.3f)", scale);
2722
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002723 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002724
2725 if (!_inputFilePlaying)
2726 {
2727 _engineStatisticsPtr->SetLastError(
2728 VE_INVALID_OPERATION, kTraceError,
2729            "ScaleFileAsMicrophonePlayout() is not playing");
2730 return -1;
2731 }
2732
2733 if ((_inputFilePlayerPtr == NULL) ||
2734 (_inputFilePlayerPtr->SetAudioScaling(scale) != 0))
2735 {
2736 _engineStatisticsPtr->SetLastError(
2737 VE_BAD_ARGUMENT, kTraceError,
2738 "SetAudioScaling() failed to scale playout");
2739 return -1;
2740 }
2741
2742 return 0;
2743}
2744
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00002745int Channel::StartRecordingPlayout(const char* fileName,
niklase@google.com470e71d2011-07-07 08:21:25 +00002746 const CodecInst* codecInst)
2747{
2748 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2749 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
2750
2751 if (_outputFileRecording)
2752 {
2753 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2754 "StartRecordingPlayout() is already recording");
2755 return 0;
2756 }
2757
2758 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002759 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +00002760    CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2761
niklas.enbom@webrtc.org40197d72012-03-26 08:45:47 +00002762 if ((codecInst != NULL) &&
2763 ((codecInst->channels < 1) || (codecInst->channels > 2)))
niklase@google.com470e71d2011-07-07 08:21:25 +00002764 {
2765 _engineStatisticsPtr->SetLastError(
2766 VE_BAD_ARGUMENT, kTraceError,
2767 "StartRecordingPlayout() invalid compression");
2768 return(-1);
2769 }
2770 if(codecInst == NULL)
2771 {
2772 format = kFileFormatPcm16kHzFile;
2773 codecInst=&dummyCodec;
2774 }
2775 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2776 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2777 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2778 {
2779 format = kFileFormatWavFile;
2780 }
2781 else
2782 {
2783 format = kFileFormatCompressedFile;
2784 }
2785
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002786 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002787
2788 // Destroy the old instance
2789 if (_outputFileRecorderPtr)
2790 {
2791 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2792 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2793 _outputFileRecorderPtr = NULL;
2794 }
2795
2796 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2797 _outputFileRecorderId, (const FileFormats)format);
2798 if (_outputFileRecorderPtr == NULL)
2799 {
2800 _engineStatisticsPtr->SetLastError(
2801 VE_INVALID_ARGUMENT, kTraceError,
2802        "StartRecordingPlayout() fileRecorder format is not correct");
2803 return -1;
2804 }
2805
2806 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2807 fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
2808 {
2809 _engineStatisticsPtr->SetLastError(
2810 VE_BAD_FILE, kTraceError,
2811 "StartRecordingAudioFile() failed to start file recording");
2812 _outputFileRecorderPtr->StopRecording();
2813 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2814 _outputFileRecorderPtr = NULL;
2815 return -1;
2816 }
2817 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2818 _outputFileRecording = true;
2819
2820 return 0;
2821}
2822
2823int Channel::StartRecordingPlayout(OutStream* stream,
2824 const CodecInst* codecInst)
2825{
2826 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2827 "Channel::StartRecordingPlayout()");
2828
2829 if (_outputFileRecording)
2830 {
2831 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2832 "StartRecordingPlayout() is already recording");
2833 return 0;
2834 }
2835
2836 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002837 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +00002838    CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2839
2840 if (codecInst != NULL && codecInst->channels != 1)
2841 {
2842 _engineStatisticsPtr->SetLastError(
2843 VE_BAD_ARGUMENT, kTraceError,
2844 "StartRecordingPlayout() invalid compression");
2845 return(-1);
2846 }
2847 if(codecInst == NULL)
2848 {
2849 format = kFileFormatPcm16kHzFile;
2850 codecInst=&dummyCodec;
2851 }
2852 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2853 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2854 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2855 {
2856 format = kFileFormatWavFile;
2857 }
2858 else
2859 {
2860 format = kFileFormatCompressedFile;
2861 }
2862
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002863 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002864
2865 // Destroy the old instance
2866 if (_outputFileRecorderPtr)
2867 {
2868 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2869 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2870 _outputFileRecorderPtr = NULL;
2871 }
2872
2873 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2874 _outputFileRecorderId, (const FileFormats)format);
2875 if (_outputFileRecorderPtr == NULL)
2876 {
2877 _engineStatisticsPtr->SetLastError(
2878 VE_INVALID_ARGUMENT, kTraceError,
2879        "StartRecordingPlayout() fileRecorder format is not correct");
2880 return -1;
2881 }
2882
2883 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2884 notificationTime) != 0)
2885 {
2886 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2887 "StartRecordingPlayout() failed to "
2888 "start file recording");
2889 _outputFileRecorderPtr->StopRecording();
2890 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2891 _outputFileRecorderPtr = NULL;
2892 return -1;
2893 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002894
niklase@google.com470e71d2011-07-07 08:21:25 +00002895 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2896 _outputFileRecording = true;
2897
2898 return 0;
2899}
2900
2901int Channel::StopRecordingPlayout()
2902{
2903 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
2904 "Channel::StopRecordingPlayout()");
2905
2906 if (!_outputFileRecording)
2907 {
2908 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
2909                     "StopRecordingPlayout() is not recording");
2910 return -1;
2911 }
2912
2913
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002914 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002915
2916 if (_outputFileRecorderPtr->StopRecording() != 0)
2917 {
2918 _engineStatisticsPtr->SetLastError(
2919 VE_STOP_RECORDING_FAILED, kTraceError,
2920 "StopRecording() could not stop recording");
2921 return(-1);
2922 }
2923 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2924 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2925 _outputFileRecorderPtr = NULL;
2926 _outputFileRecording = false;
2927
2928 return 0;
2929}
2930
2931void
2932Channel::SetMixWithMicStatus(bool mix)
2933{
2934 _mixFileWithMicrophone=mix;
2935}
2936
2937int
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002938Channel::GetSpeechOutputLevel(uint32_t& level) const
niklase@google.com470e71d2011-07-07 08:21:25 +00002939{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002940 int8_t currentLevel = _outputAudioLevel.Level();
2941    level = static_cast<uint32_t>(currentLevel);
niklase@google.com470e71d2011-07-07 08:21:25 +00002942 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2943 VoEId(_instanceId,_channelId),
2944 "GetSpeechOutputLevel() => level=%u", level);
2945 return 0;
2946}
2947
2948int
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002949Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const
niklase@google.com470e71d2011-07-07 08:21:25 +00002950{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002951 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2952    level = static_cast<uint32_t>(currentLevel);
niklase@google.com470e71d2011-07-07 08:21:25 +00002953 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2954 VoEId(_instanceId,_channelId),
2955 "GetSpeechOutputLevelFullRange() => level=%u", level);
2956 return 0;
2957}
2958
2959int
2960Channel::SetMute(bool enable)
2961{
2962 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2963 "Channel::SetMute(enable=%d)", enable);
2964 _mute = enable;
2965 return 0;
2966}
2967
2968bool
2969Channel::Mute() const
2970{
2971 return _mute;
2972}
2973
2974int
2975Channel::SetOutputVolumePan(float left, float right)
2976{
2977 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2978 "Channel::SetOutputVolumePan()");
2979 _panLeft = left;
2980 _panRight = right;
2981 return 0;
2982}
2983
2984int
2985Channel::GetOutputVolumePan(float& left, float& right) const
2986{
2987 left = _panLeft;
2988 right = _panRight;
2989 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2990 VoEId(_instanceId,_channelId),
2991 "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
2992 return 0;
2993}
2994
2995int
2996Channel::SetChannelOutputVolumeScaling(float scaling)
2997{
2998 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2999 "Channel::SetChannelOutputVolumeScaling()");
3000 _outputGain = scaling;
3001 return 0;
3002}
3003
3004int
3005Channel::GetChannelOutputVolumeScaling(float& scaling) const
3006{
3007 scaling = _outputGain;
3008 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3009 VoEId(_instanceId,_channelId),
3010 "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
3011 return 0;
3012}
3013
niklase@google.com470e71d2011-07-07 08:21:25 +00003014int
3015Channel::RegisterExternalEncryption(Encryption& encryption)
3016{
3017 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3018 "Channel::RegisterExternalEncryption()");
3019
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003020 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003021
3022 if (_encryptionPtr)
3023 {
3024 _engineStatisticsPtr->SetLastError(
3025 VE_INVALID_OPERATION, kTraceError,
3026 "RegisterExternalEncryption() encryption already enabled");
3027 return -1;
3028 }
3029
3030 _encryptionPtr = &encryption;
3031
3032 _decrypting = true;
3033 _encrypting = true;
3034
3035 return 0;
3036}
3037
3038int
3039Channel::DeRegisterExternalEncryption()
3040{
3041 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3042 "Channel::DeRegisterExternalEncryption()");
3043
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003044 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003045
3046 if (!_encryptionPtr)
3047 {
3048 _engineStatisticsPtr->SetLastError(
3049 VE_INVALID_OPERATION, kTraceWarning,
3050 "DeRegisterExternalEncryption() encryption already disabled");
3051 return 0;
3052 }
3053
3054 _decrypting = false;
3055 _encrypting = false;
3056
3057 _encryptionPtr = NULL;
3058
3059 return 0;
3060}
3061
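// Sends a DTMF event out-of-band as an RTP telephone-event packet via the
// RTP/RTCP module; |playDtmfEvent| controls whether a local feedback tone is
// also played. A minimal usage sketch (hypothetical values, assuming a valid
// Channel* channel): send digit 5 for 160 ms at 10 dB attenuation without
// local feedback:
//
//   channel->SendTelephoneEventOutband(5, 160, 10, false);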
3062int Channel::SendTelephoneEventOutband(unsigned char eventCode,
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00003063 int lengthMs, int attenuationDb,
3064 bool playDtmfEvent)
niklase@google.com470e71d2011-07-07 08:21:25 +00003065{
3066 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3067 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
3068 playDtmfEvent);
3069
3070 _playOutbandDtmfEvent = playDtmfEvent;
3071
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003072 if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
niklase@google.com470e71d2011-07-07 08:21:25 +00003073 attenuationDb) != 0)
3074 {
3075 _engineStatisticsPtr->SetLastError(
3076 VE_SEND_DTMF_FAILED,
3077 kTraceWarning,
3078 "SendTelephoneEventOutband() failed to send event");
3079 return -1;
3080 }
3081 return 0;
3082}
3083
3084int Channel::SendTelephoneEventInband(unsigned char eventCode,
3085 int lengthMs,
3086 int attenuationDb,
3087 bool playDtmfEvent)
3088{
3089 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3090 "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
3091 playDtmfEvent);
3092
3093 _playInbandDtmfEvent = playDtmfEvent;
3094 _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
3095
3096 return 0;
3097}
3098
3099int
3100Channel::SetDtmfPlayoutStatus(bool enable)
3101{
3102 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3103 "Channel::SetDtmfPlayoutStatus()");
3104 if (_audioCodingModule.SetDtmfPlayoutStatus(enable) != 0)
3105 {
3106 _engineStatisticsPtr->SetLastError(
3107 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
3108 "SetDtmfPlayoutStatus() failed to set Dtmf playout");
3109 return -1;
3110 }
3111 return 0;
3112}
3113
3114bool
3115Channel::DtmfPlayoutStatus() const
3116{
3117 return _audioCodingModule.DtmfPlayoutStatus();
3118}
3119
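// Registers the dynamic RTP payload type used for outgoing telephone-event
// (DTMF) packets with the RTP/RTCP module. Types above 127 are rejected; if
// the first registration attempt fails, the payload type is de-registered
// and registered again. Sketch (assuming a valid Channel* channel; 106 is an
// arbitrary dynamic payload type):
//
//   channel->SetSendTelephoneEventPayloadType(106);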
3120int
3121Channel::SetSendTelephoneEventPayloadType(unsigned char type)
3122{
3123 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3124 "Channel::SetSendTelephoneEventPayloadType()");
andrew@webrtc.orgf81f9f82011-08-19 22:56:22 +00003125 if (type > 127)
niklase@google.com470e71d2011-07-07 08:21:25 +00003126 {
3127 _engineStatisticsPtr->SetLastError(
3128 VE_INVALID_ARGUMENT, kTraceError,
3129 "SetSendTelephoneEventPayloadType() invalid type");
3130 return -1;
3131 }
pwestin@webrtc.org1da1ce02011-10-13 15:19:55 +00003132 CodecInst codec;
3133 codec.plfreq = 8000;
3134 codec.pltype = type;
3135 memcpy(codec.plname, "telephone-event", 16);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003136 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003137 {
henrika@webrtc.org4392d5f2013-04-17 07:34:25 +00003138 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
3139 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
3140 _engineStatisticsPtr->SetLastError(
3141 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3142            "SetSendTelephoneEventPayloadType() failed to register send "
3143            "payload type");
3144 return -1;
3145 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003146 }
3147 _sendTelephoneEventPayloadType = type;
3148 return 0;
3149}
3150
3151int
3152Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
3153{
3154 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3155 "Channel::GetSendTelephoneEventPayloadType()");
3156 type = _sendTelephoneEventPayloadType;
3157 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3158 VoEId(_instanceId,_channelId),
3159 "GetSendTelephoneEventPayloadType() => type=%u", type);
3160 return 0;
3161}
3162
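// Derives a binary voice-activity decision from the decoded frame's
// vad_activity_ flag and notifies the registered VoERxVadCallback observer
// whenever the decision changes.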
niklase@google.com470e71d2011-07-07 08:21:25 +00003163int
3164Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
3165{
3166 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3167 "Channel::UpdateRxVadDetection()");
3168
3169 int vadDecision = 1;
3170
andrew@webrtc.org63a50982012-05-02 23:56:37 +00003171 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive)? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003172
3173 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
3174 {
3175 OnRxVadDetected(vadDecision);
3176 _oldVadDecision = vadDecision;
3177 }
3178
3179 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3180 "Channel::UpdateRxVadDetection() => vadDecision=%d",
3181 vadDecision);
3182 return 0;
3183}
3184
3185int
3186Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
3187{
3188 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3189 "Channel::RegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003190 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003191
3192 if (_rxVadObserverPtr)
3193 {
3194 _engineStatisticsPtr->SetLastError(
3195 VE_INVALID_OPERATION, kTraceError,
3196 "RegisterRxVadObserver() observer already enabled");
3197 return -1;
3198 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003199 _rxVadObserverPtr = &observer;
3200 _RxVadDetection = true;
3201 return 0;
3202}
3203
3204int
3205Channel::DeRegisterRxVadObserver()
3206{
3207 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3208 "Channel::DeRegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003209 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003210
3211 if (!_rxVadObserverPtr)
3212 {
3213 _engineStatisticsPtr->SetLastError(
3214 VE_INVALID_OPERATION, kTraceWarning,
3215 "DeRegisterRxVadObserver() observer already disabled");
3216 return 0;
3217 }
3218 _rxVadObserverPtr = NULL;
3219 _RxVadDetection = false;
3220 return 0;
3221}
3222
3223int
3224Channel::VoiceActivityIndicator(int &activity)
3225{
3226 activity = _sendFrameType;
3227
3228 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3229 "Channel::VoiceActivityIndicator(indicator=%d)", activity);
3230 return 0;
3231}
3232
3233#ifdef WEBRTC_VOICE_ENGINE_AGC
3234
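// Enables or disables receive-side AGC on this channel by mapping the VoE
// AgcModes value onto the AudioProcessing GainControl mode (kAgcDefault and
// kAgcAdaptiveDigital map to kAdaptiveDigital, kAgcFixedDigital to
// kFixedDigital, kAgcUnchanged keeps the current mode). A minimal usage
// sketch (assuming a valid Channel* channel):
//
//   channel->SetRxAgcStatus(true, kAgcAdaptiveDigital);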
3235int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003236Channel::SetRxAgcStatus(bool enable, AgcModes mode)
niklase@google.com470e71d2011-07-07 08:21:25 +00003237{
3238 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3239 "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
3240 (int)enable, (int)mode);
3241
3242 GainControl::Mode agcMode(GainControl::kFixedDigital);
3243 switch (mode)
3244 {
3245 case kAgcDefault:
3246 agcMode = GainControl::kAdaptiveDigital;
3247 break;
3248 case kAgcUnchanged:
3249 agcMode = _rxAudioProcessingModulePtr->gain_control()->mode();
3250 break;
3251 case kAgcFixedDigital:
3252 agcMode = GainControl::kFixedDigital;
3253 break;
3254 case kAgcAdaptiveDigital:
3255            agcMode = GainControl::kAdaptiveDigital;
3256 break;
3257 default:
3258 _engineStatisticsPtr->SetLastError(
3259 VE_INVALID_ARGUMENT, kTraceError,
3260 "SetRxAgcStatus() invalid Agc mode");
3261 return -1;
3262 }
3263
3264 if (_rxAudioProcessingModulePtr->gain_control()->set_mode(agcMode) != 0)
3265 {
3266 _engineStatisticsPtr->SetLastError(
3267 VE_APM_ERROR, kTraceError,
3268 "SetRxAgcStatus() failed to set Agc mode");
3269 return -1;
3270 }
3271 if (_rxAudioProcessingModulePtr->gain_control()->Enable(enable) != 0)
3272 {
3273 _engineStatisticsPtr->SetLastError(
3274 VE_APM_ERROR, kTraceError,
3275 "SetRxAgcStatus() failed to set Agc state");
3276 return -1;
3277 }
3278
3279 _rxAgcIsEnabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00003280 _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
3281
3282 return 0;
3283}
3284
3285int
3286Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
3287{
3288 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3289 "Channel::GetRxAgcStatus(enable=?, mode=?)");
3290
3291 bool enable = _rxAudioProcessingModulePtr->gain_control()->is_enabled();
3292 GainControl::Mode agcMode =
3293 _rxAudioProcessingModulePtr->gain_control()->mode();
3294
3295 enabled = enable;
3296
3297 switch (agcMode)
3298 {
3299 case GainControl::kFixedDigital:
3300 mode = kAgcFixedDigital;
3301 break;
3302 case GainControl::kAdaptiveDigital:
3303 mode = kAgcAdaptiveDigital;
3304 break;
3305 default:
3306 _engineStatisticsPtr->SetLastError(
3307 VE_APM_ERROR, kTraceError,
3308 "GetRxAgcStatus() invalid Agc mode");
3309 return -1;
3310 }
3311
3312 return 0;
3313}
3314
3315int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003316Channel::SetRxAgcConfig(AgcConfig config)
niklase@google.com470e71d2011-07-07 08:21:25 +00003317{
3318 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3319 "Channel::SetRxAgcConfig()");
3320
3321 if (_rxAudioProcessingModulePtr->gain_control()->set_target_level_dbfs(
3322 config.targetLeveldBOv) != 0)
3323 {
3324 _engineStatisticsPtr->SetLastError(
3325 VE_APM_ERROR, kTraceError,
3326            "SetRxAgcConfig() failed to set target peak |level| "
3327            "(or envelope) of the Agc");
3328 return -1;
3329 }
3330 if (_rxAudioProcessingModulePtr->gain_control()->set_compression_gain_db(
3331 config.digitalCompressionGaindB) != 0)
3332 {
3333 _engineStatisticsPtr->SetLastError(
3334 VE_APM_ERROR, kTraceError,
3335 "SetRxAgcConfig() failed to set the range in |gain| the"
3336 " digital compression stage may apply");
3337 return -1;
3338 }
3339 if (_rxAudioProcessingModulePtr->gain_control()->enable_limiter(
3340 config.limiterEnable) != 0)
3341 {
3342 _engineStatisticsPtr->SetLastError(
3343 VE_APM_ERROR, kTraceError,
3344 "SetRxAgcConfig() failed to set hard limiter to the signal");
3345 return -1;
3346 }
3347
3348 return 0;
3349}
3350
3351int
3352Channel::GetRxAgcConfig(AgcConfig& config)
3353{
3354 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3355                 "Channel::GetRxAgcConfig(config=?)");
3356
3357 config.targetLeveldBOv =
3358 _rxAudioProcessingModulePtr->gain_control()->target_level_dbfs();
3359 config.digitalCompressionGaindB =
3360 _rxAudioProcessingModulePtr->gain_control()->compression_gain_db();
3361 config.limiterEnable =
3362 _rxAudioProcessingModulePtr->gain_control()->is_limiter_enabled();
3363
3364 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3365 VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
3366 "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
3367 " limiterEnable=%d",
3368 config.targetLeveldBOv,
3369 config.digitalCompressionGaindB,
3370 config.limiterEnable);
3371
3372 return 0;
3373}
3374
3375#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
3376
3377#ifdef WEBRTC_VOICE_ENGINE_NR
3378
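// Enables or disables receive-side noise suppression by mapping the VoE
// NsModes value onto a NoiseSuppression level (for example kNsConference and
// kNsHighSuppression map to kHigh, kNsLowSuppression to kLow). Sketch
// (assuming a valid Channel* channel):
//
//   channel->SetRxNsStatus(true, kNsModerateSuppression);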
3379int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003380Channel::SetRxNsStatus(bool enable, NsModes mode)
niklase@google.com470e71d2011-07-07 08:21:25 +00003381{
3382 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3383 "Channel::SetRxNsStatus(enable=%d, mode=%d)",
3384 (int)enable, (int)mode);
3385
3386 NoiseSuppression::Level nsLevel(
3387 (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE);
3388 switch (mode)
3389 {
3390
3391 case kNsDefault:
3392 nsLevel = (NoiseSuppression::Level)
3393 WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE;
3394 break;
3395 case kNsUnchanged:
3396 nsLevel = _rxAudioProcessingModulePtr->noise_suppression()->level();
3397 break;
3398 case kNsConference:
3399 nsLevel = NoiseSuppression::kHigh;
3400 break;
3401 case kNsLowSuppression:
3402 nsLevel = NoiseSuppression::kLow;
3403 break;
3404 case kNsModerateSuppression:
3405 nsLevel = NoiseSuppression::kModerate;
3406 break;
3407 case kNsHighSuppression:
3408 nsLevel = NoiseSuppression::kHigh;
3409 break;
3410 case kNsVeryHighSuppression:
3411 nsLevel = NoiseSuppression::kVeryHigh;
3412 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003413 }
3414
3415 if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(nsLevel)
3416 != 0)
3417 {
3418 _engineStatisticsPtr->SetLastError(
3419 VE_APM_ERROR, kTraceError,
3420            "SetRxNsStatus() failed to set NS level");
3421 return -1;
3422 }
3423 if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(enable) != 0)
3424 {
3425 _engineStatisticsPtr->SetLastError(
3426 VE_APM_ERROR, kTraceError,
3427            "SetRxNsStatus() failed to set NS state");
3428 return -1;
3429 }
3430
3431 _rxNsIsEnabled = enable;
3432 _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
3433
3434 return 0;
3435}
3436
3437int
3438Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
3439{
3440 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3441 "Channel::GetRxNsStatus(enable=?, mode=?)");
3442
3443 bool enable =
3444 _rxAudioProcessingModulePtr->noise_suppression()->is_enabled();
3445 NoiseSuppression::Level ncLevel =
3446 _rxAudioProcessingModulePtr->noise_suppression()->level();
3447
3448 enabled = enable;
3449
3450 switch (ncLevel)
3451 {
3452 case NoiseSuppression::kLow:
3453 mode = kNsLowSuppression;
3454 break;
3455 case NoiseSuppression::kModerate:
3456 mode = kNsModerateSuppression;
3457 break;
3458 case NoiseSuppression::kHigh:
3459 mode = kNsHighSuppression;
3460 break;
3461 case NoiseSuppression::kVeryHigh:
3462 mode = kNsVeryHighSuppression;
3463 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003464 }
3465
3466 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3467 VoEId(_instanceId,_channelId),
3468 "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
3469 return 0;
3470}
3471
3472#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
3473
3474int
3475Channel::RegisterRTPObserver(VoERTPObserver& observer)
3476{
3477 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3478 "Channel::RegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003479 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003480
3481 if (_rtpObserverPtr)
3482 {
3483 _engineStatisticsPtr->SetLastError(
3484 VE_INVALID_OPERATION, kTraceError,
3485 "RegisterRTPObserver() observer already enabled");
3486 return -1;
3487 }
3488
3489 _rtpObserverPtr = &observer;
3490 _rtpObserver = true;
3491
3492 return 0;
3493}
3494
3495int
3496Channel::DeRegisterRTPObserver()
3497{
3498 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3499 "Channel::DeRegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003500 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003501
3502 if (!_rtpObserverPtr)
3503 {
3504 _engineStatisticsPtr->SetLastError(
3505 VE_INVALID_OPERATION, kTraceWarning,
3506 "DeRegisterRTPObserver() observer already disabled");
3507 return 0;
3508 }
3509
3510 _rtpObserver = false;
3511 _rtpObserverPtr = NULL;
3512
3513 return 0;
3514}
3515
3516int
3517Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
3518{
3519 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3520 "Channel::RegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003521 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003522
3523 if (_rtcpObserverPtr)
3524 {
3525 _engineStatisticsPtr->SetLastError(
3526 VE_INVALID_OPERATION, kTraceError,
3527 "RegisterRTCPObserver() observer already enabled");
3528 return -1;
3529 }
3530
3531 _rtcpObserverPtr = &observer;
3532 _rtcpObserver = true;
3533
3534 return 0;
3535}
3536
3537int
3538Channel::DeRegisterRTCPObserver()
3539{
3540 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3541 "Channel::DeRegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003542 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003543
3544 if (!_rtcpObserverPtr)
3545 {
3546 _engineStatisticsPtr->SetLastError(
3547 VE_INVALID_OPERATION, kTraceWarning,
3548 "DeRegisterRTCPObserver() observer already disabled");
3549 return 0;
3550 }
3551
3552 _rtcpObserver = false;
3553 _rtcpObserverPtr = NULL;
3554
3555 return 0;
3556}
3557
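// Sets the SSRC used for outgoing RTP/RTCP packets on this channel. The SSRC
// can only be changed while the channel is not sending; otherwise
// VE_ALREADY_SENDING is reported.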
3558int
3559Channel::SetLocalSSRC(unsigned int ssrc)
3560{
3561 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3562 "Channel::SetLocalSSRC()");
3563 if (_sending)
3564 {
3565 _engineStatisticsPtr->SetLastError(
3566 VE_ALREADY_SENDING, kTraceError,
3567 "SetLocalSSRC() already sending");
3568 return -1;
3569 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003570 if (_rtpRtcpModule->SetSSRC(ssrc) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003571 {
3572 _engineStatisticsPtr->SetLastError(
3573 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3574 "SetLocalSSRC() failed to set SSRC");
3575 return -1;
3576 }
3577 return 0;
3578}
3579
3580int
3581Channel::GetLocalSSRC(unsigned int& ssrc)
3582{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003583 ssrc = _rtpRtcpModule->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003584 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3585 VoEId(_instanceId,_channelId),
3586 "GetLocalSSRC() => ssrc=%lu", ssrc);
3587 return 0;
3588}
3589
3590int
3591Channel::GetRemoteSSRC(unsigned int& ssrc)
3592{
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00003593 ssrc = rtp_receiver_->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003594 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3595 VoEId(_instanceId,_channelId),
3596 "GetRemoteSSRC() => ssrc=%lu", ssrc);
3597 return 0;
3598}
3599
3600int
3601Channel::GetRemoteCSRCs(unsigned int arrCSRC[15])
3602{
3603 if (arrCSRC == NULL)
3604 {
3605 _engineStatisticsPtr->SetLastError(
3606 VE_INVALID_ARGUMENT, kTraceError,
3607 "GetRemoteCSRCs() invalid array argument");
3608 return -1;
3609 }
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003610 uint32_t arrOfCSRC[kRtpCsrcSize];
3611 int32_t CSRCs(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003612 CSRCs = _rtpRtcpModule->CSRCs(arrOfCSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00003613 if (CSRCs > 0)
3614 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003615 memcpy(arrCSRC, arrOfCSRC, CSRCs * sizeof(uint32_t));
niklase@google.com470e71d2011-07-07 08:21:25 +00003616 for (int i = 0; i < (int) CSRCs; i++)
3617 {
3618 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3619 VoEId(_instanceId, _channelId),
3620 "GetRemoteCSRCs() => arrCSRC[%d]=%lu", i, arrCSRC[i]);
3621 }
3622 } else
3623 {
3624 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3625 VoEId(_instanceId, _channelId),
3626 "GetRemoteCSRCs() => list is empty!");
3627 }
3628 return CSRCs;
3629}
3630
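// Enables or disables the RTP audio-level header extension on outgoing
// packets. On first use this lazily creates a dedicated AudioProcessing
// instance (_rtpAudioProc) whose level estimator supplies the reported
// values; the extension ID is then (de)registered with the RTP header parser
// and the RTP/RTCP module.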
3631int
3632Channel::SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID)
3633{
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00003634 if (_rtpAudioProc.get() == NULL)
3635 {
3636 _rtpAudioProc.reset(AudioProcessing::Create(VoEModuleId(_instanceId,
3637 _channelId)));
3638 if (_rtpAudioProc.get() == NULL)
3639 {
3640 _engineStatisticsPtr->SetLastError(VE_NO_MEMORY, kTraceCritical,
3641 "Failed to create AudioProcessing");
3642 return -1;
3643 }
3644 }
3645
3646 if (_rtpAudioProc->level_estimator()->Enable(enable) !=
3647 AudioProcessing::kNoError)
3648 {
3649 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceWarning,
3650 "Failed to enable AudioProcessing::level_estimator()");
3651 }
3652
niklase@google.com470e71d2011-07-07 08:21:25 +00003653 _includeAudioLevelIndication = enable;
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00003654 if (enable) {
3655 rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
3656 ID);
3657 } else {
3658 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
3659 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003660 return _rtpRtcpModule->SetRTPAudioLevelIndicationStatus(enable, ID);
niklase@google.com470e71d2011-07-07 08:21:25 +00003661}
3662int
3663Channel::GetRTPAudioLevelIndicationStatus(bool& enabled, unsigned char& ID)
3664{
3665 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3666 VoEId(_instanceId,_channelId),
3667 "GetRTPAudioLevelIndicationStatus() => enabled=%d, ID=%u",
3668 enabled, ID);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003669 return _rtpRtcpModule->GetRTPAudioLevelIndicationStatus(enabled, ID);
niklase@google.com470e71d2011-07-07 08:21:25 +00003670}
3671
3672int
3673Channel::SetRTCPStatus(bool enable)
3674{
3675 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3676 "Channel::SetRTCPStatus()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003677 if (_rtpRtcpModule->SetRTCPStatus(enable ?
niklase@google.com470e71d2011-07-07 08:21:25 +00003678 kRtcpCompound : kRtcpOff) != 0)
3679 {
3680 _engineStatisticsPtr->SetLastError(
3681 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3682 "SetRTCPStatus() failed to set RTCP status");
3683 return -1;
3684 }
3685 return 0;
3686}
3687
3688int
3689Channel::GetRTCPStatus(bool& enabled)
3690{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003691 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003692 enabled = (method != kRtcpOff);
3693 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3694 VoEId(_instanceId,_channelId),
3695 "GetRTCPStatus() => enabled=%d", enabled);
3696 return 0;
3697}
3698
3699int
3700Channel::SetRTCP_CNAME(const char cName[256])
3701{
3702 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3703 "Channel::SetRTCP_CNAME()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003704 if (_rtpRtcpModule->SetCNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003705 {
3706 _engineStatisticsPtr->SetLastError(
3707 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3708 "SetRTCP_CNAME() failed to set RTCP CNAME");
3709 return -1;
3710 }
3711 return 0;
3712}
3713
3714int
3715Channel::GetRTCP_CNAME(char cName[256])
3716{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003717 if (_rtpRtcpModule->CNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003718 {
3719 _engineStatisticsPtr->SetLastError(
3720 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3721 "GetRTCP_CNAME() failed to retrieve RTCP CNAME");
3722 return -1;
3723 }
3724 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3725 VoEId(_instanceId, _channelId),
3726 "GetRTCP_CNAME() => cName=%s", cName);
3727 return 0;
3728}
3729
3730int
3731Channel::GetRemoteRTCP_CNAME(char cName[256])
3732{
3733 if (cName == NULL)
3734 {
3735 _engineStatisticsPtr->SetLastError(
3736 VE_INVALID_ARGUMENT, kTraceError,
3737 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
3738 return -1;
3739 }
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00003740 char cname[RTCP_CNAME_SIZE];
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00003741 const uint32_t remoteSSRC = rtp_receiver_->SSRC();
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003742 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003743 {
3744 _engineStatisticsPtr->SetLastError(
3745 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
3746 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
3747 return -1;
3748 }
3749 strcpy(cName, cname);
3750 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3751 VoEId(_instanceId, _channelId),
3752 "GetRemoteRTCP_CNAME() => cName=%s", cName);
3753 return 0;
3754}
3755
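// Collects remote RTCP information: NTP time and RTP timestamp from the most
// recent Sender Report, the locally stored RTCP playout timestamp, and, when
// requested, jitter and fraction-lost taken from the report block matching
// the remote SSRC (falling back to the first report block).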
3756int
3757Channel::GetRemoteRTCPData(
3758 unsigned int& NTPHigh,
3759 unsigned int& NTPLow,
3760 unsigned int& timestamp,
3761 unsigned int& playoutTimestamp,
3762 unsigned int* jitter,
3763 unsigned short* fractionLost)
3764{
3765 // --- Information from sender info in received Sender Reports
3766
3767 RTCPSenderInfo senderInfo;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003768 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003769 {
3770 _engineStatisticsPtr->SetLastError(
3771 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003772 "GetRemoteRTCPData() failed to retrieve sender info for remote "
niklase@google.com470e71d2011-07-07 08:21:25 +00003773 "side");
3774 return -1;
3775 }
3776
3777 // We only utilize 12 out of 20 bytes in the sender info (ignores packet
3778 // and octet count)
3779 NTPHigh = senderInfo.NTPseconds;
3780 NTPLow = senderInfo.NTPfraction;
3781 timestamp = senderInfo.RTPtimeStamp;
3782
3783 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3784 VoEId(_instanceId, _channelId),
3785 "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
3786 "timestamp=%lu",
3787 NTPHigh, NTPLow, timestamp);
3788
3789 // --- Locally derived information
3790
3791 // This value is updated on each incoming RTCP packet (0 when no packet
3792 // has been received)
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003793 playoutTimestamp = playout_timestamp_rtcp_;
niklase@google.com470e71d2011-07-07 08:21:25 +00003794
3795 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3796 VoEId(_instanceId, _channelId),
3797 "GetRemoteRTCPData() => playoutTimestamp=%lu",
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003798 playout_timestamp_rtcp_);
niklase@google.com470e71d2011-07-07 08:21:25 +00003799
3800 if (NULL != jitter || NULL != fractionLost)
3801 {
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003802 // Get all RTCP receiver report blocks that have been received on this
3803 // channel. If we receive RTP packets from a remote source we know the
3804        // remote SSRC and use the report block from that source.
3805 // Otherwise use the first report block.
3806 std::vector<RTCPReportBlock> remote_stats;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003807 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003808 remote_stats.empty()) {
3809 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3810 VoEId(_instanceId, _channelId),
3811 "GetRemoteRTCPData() failed to measure statistics due"
3812 " to lack of received RTP and/or RTCP packets");
3813 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003814 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003815
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00003816 uint32_t remoteSSRC = rtp_receiver_->SSRC();
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003817 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
3818 for (; it != remote_stats.end(); ++it) {
3819 if (it->remoteSSRC == remoteSSRC)
3820 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003821 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003822
3823 if (it == remote_stats.end()) {
3824 // If we have not received any RTCP packets from this SSRC it probably
3825 // means that we have not received any RTP packets.
3826 // Use the first received report block instead.
3827 it = remote_stats.begin();
3828 remoteSSRC = it->remoteSSRC;
niklase@google.com470e71d2011-07-07 08:21:25 +00003829 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003830
xians@webrtc.org79af7342012-01-31 12:22:14 +00003831 if (jitter) {
3832 *jitter = it->jitter;
3833 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3834 VoEId(_instanceId, _channelId),
3835 "GetRemoteRTCPData() => jitter = %lu", *jitter);
3836 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003837
xians@webrtc.org79af7342012-01-31 12:22:14 +00003838 if (fractionLost) {
3839 *fractionLost = it->fractionLost;
3840 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3841 VoEId(_instanceId, _channelId),
3842 "GetRemoteRTCPData() => fractionLost = %lu",
3843 *fractionLost);
3844 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003845 }
3846 return 0;
3847}
3848
3849int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003850Channel::SendApplicationDefinedRTCPPacket(unsigned char subType,
niklase@google.com470e71d2011-07-07 08:21:25 +00003851 unsigned int name,
3852 const char* data,
3853 unsigned short dataLengthInBytes)
3854{
3855 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3856 "Channel::SendApplicationDefinedRTCPPacket()");
3857 if (!_sending)
3858 {
3859 _engineStatisticsPtr->SetLastError(
3860 VE_NOT_SENDING, kTraceError,
3861 "SendApplicationDefinedRTCPPacket() not sending");
3862 return -1;
3863 }
3864 if (NULL == data)
3865 {
3866 _engineStatisticsPtr->SetLastError(
3867 VE_INVALID_ARGUMENT, kTraceError,
3868 "SendApplicationDefinedRTCPPacket() invalid data value");
3869 return -1;
3870 }
3871 if (dataLengthInBytes % 4 != 0)
3872 {
3873 _engineStatisticsPtr->SetLastError(
3874 VE_INVALID_ARGUMENT, kTraceError,
3875 "SendApplicationDefinedRTCPPacket() invalid length value");
3876 return -1;
3877 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003878 RTCPMethod status = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003879 if (status == kRtcpOff)
3880 {
3881 _engineStatisticsPtr->SetLastError(
3882 VE_RTCP_ERROR, kTraceError,
3883 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
3884 return -1;
3885 }
3886
3887 // Create and schedule the RTCP APP packet for transmission
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003888 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
niklase@google.com470e71d2011-07-07 08:21:25 +00003889 subType,
3890 name,
3891 (const unsigned char*) data,
3892 dataLengthInBytes) != 0)
3893 {
3894 _engineStatisticsPtr->SetLastError(
3895 VE_SEND_ERROR, kTraceError,
3896 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
3897 return -1;
3898 }
3899 return 0;
3900}
3901
3902int
3903Channel::GetRTPStatistics(
3904 unsigned int& averageJitterMs,
3905 unsigned int& maxJitterMs,
3906 unsigned int& discardedPackets)
3907{
niklase@google.com470e71d2011-07-07 08:21:25 +00003908    // The jitter statistics are updated for each received RTP packet and are
3909 // based on received packets.
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00003910 ReceiveStatistics::RtpReceiveStatistics statistics;
3911 if (!rtp_receive_statistics_->Statistics(
3912 &statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
3913 _engineStatisticsPtr->SetLastError(
3914 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
3915 "GetRTPStatistics() failed to read RTP statistics from the "
3916 "RTP/RTCP module");
niklase@google.com470e71d2011-07-07 08:21:25 +00003917 }
3918
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003919 const int32_t playoutFrequency =
niklase@google.com470e71d2011-07-07 08:21:25 +00003920 _audioCodingModule.PlayoutFrequency();
3921 if (playoutFrequency > 0)
3922 {
3923 // Scale RTP statistics given the current playout frequency
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00003924 maxJitterMs = statistics.max_jitter / (playoutFrequency / 1000);
3925 averageJitterMs = statistics.jitter / (playoutFrequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00003926 }
3927
3928 discardedPackets = _numberOfDiscardedPackets;
3929
3930 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3931 VoEId(_instanceId, _channelId),
3932 "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003933 " discardedPackets = %lu)",
niklase@google.com470e71d2011-07-07 08:21:25 +00003934 averageJitterMs, maxJitterMs, discardedPackets);
3935 return 0;
3936}
3937
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00003938int Channel::GetRemoteRTCPSenderInfo(SenderInfo* sender_info) {
3939 if (sender_info == NULL) {
3940 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
3941 "GetRemoteRTCPSenderInfo() invalid sender_info.");
3942 return -1;
3943 }
3944
3945 // Get the sender info from the latest received RTCP Sender Report.
3946 RTCPSenderInfo rtcp_sender_info;
3947 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_sender_info) != 0) {
3948 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3949 "GetRemoteRTCPSenderInfo() failed to read RTCP SR sender info.");
3950 return -1;
3951 }
3952
3953 sender_info->NTP_timestamp_high = rtcp_sender_info.NTPseconds;
3954 sender_info->NTP_timestamp_low = rtcp_sender_info.NTPfraction;
3955 sender_info->RTP_timestamp = rtcp_sender_info.RTPtimeStamp;
3956 sender_info->sender_packet_count = rtcp_sender_info.sendPacketCount;
3957 sender_info->sender_octet_count = rtcp_sender_info.sendOctetCount;
3958 return 0;
3959}
3960
3961int Channel::GetRemoteRTCPReportBlocks(
3962 std::vector<ReportBlock>* report_blocks) {
3963 if (report_blocks == NULL) {
3964 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
3965 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
3966 return -1;
3967 }
3968
3969 // Get the report blocks from the latest received RTCP Sender or Receiver
3970 // Report. Each element in the vector contains the sender's SSRC and a
3971 // report block according to RFC 3550.
3972 std::vector<RTCPReportBlock> rtcp_report_blocks;
3973 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
3974 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3975 "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
3976 return -1;
3977 }
3978
3979 if (rtcp_report_blocks.empty())
3980 return 0;
3981
3982 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
3983 for (; it != rtcp_report_blocks.end(); ++it) {
3984 ReportBlock report_block;
3985 report_block.sender_SSRC = it->remoteSSRC;
3986 report_block.source_SSRC = it->sourceSSRC;
3987 report_block.fraction_lost = it->fractionLost;
3988 report_block.cumulative_num_packets_lost = it->cumulativeLost;
3989 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
3990 report_block.interarrival_jitter = it->jitter;
3991 report_block.last_SR_timestamp = it->lastSR;
3992 report_block.delay_since_last_SR = it->delaySinceLastSR;
3993 report_blocks->push_back(report_block);
3994 }
3995 return 0;
3996}
3997
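// Fills a CallStatistics struct in three parts: receive-side statistics
// (fraction lost, cumulative lost, extended max sequence number, jitter),
// the round-trip time estimated via RTCP (0 if RTCP is off or no RTP has
// been received yet), and the RTP data counters for both directions.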
niklase@google.com470e71d2011-07-07 08:21:25 +00003998int
3999Channel::GetRTPStatistics(CallStatistics& stats)
4000{
niklase@google.com470e71d2011-07-07 08:21:25 +00004001 // --- Part one of the final structure (four values)
4002
4003    // The jitter statistics are updated for each received RTP packet and are
4004 // based on received packets.
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004005 ReceiveStatistics::RtpReceiveStatistics statistics;
4006 if (!rtp_receive_statistics_->Statistics(
4007 &statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
4008 _engineStatisticsPtr->SetLastError(
4009 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
4010 "GetRTPStatistics() failed to read RTP statistics from the "
4011 "RTP/RTCP module");
niklase@google.com470e71d2011-07-07 08:21:25 +00004012 }
4013
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004014 stats.fractionLost = statistics.fraction_lost;
4015 stats.cumulativeLost = statistics.cumulative_lost;
4016 stats.extendedMax = statistics.extended_max_sequence_number;
4017 stats.jitterSamples = statistics.jitter;
niklase@google.com470e71d2011-07-07 08:21:25 +00004018
4019 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4020 VoEId(_instanceId, _channelId),
4021 "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004022 " extendedMax=%lu, jitterSamples=%li)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004023 stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
4024 stats.jitterSamples);
4025
4026 // --- Part two of the final structure (one value)
4027
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004028 uint16_t RTT(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004029 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00004030 if (method == kRtcpOff)
4031 {
4032 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4033 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004034 "GetRTPStatistics() RTCP is disabled => valid RTT "
niklase@google.com470e71d2011-07-07 08:21:25 +00004035 "measurements cannot be retrieved");
4036 } else
4037 {
4038 // The remote SSRC will be zero if no RTP packet has been received.
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004039 uint32_t remoteSSRC = rtp_receiver_->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004040 if (remoteSSRC > 0)
4041 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004042 uint16_t avgRTT(0);
4043 uint16_t maxRTT(0);
4044 uint16_t minRTT(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004045
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004046 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT)
niklase@google.com470e71d2011-07-07 08:21:25 +00004047 != 0)
4048 {
4049 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4050 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004051 "GetRTPStatistics() failed to retrieve RTT from "
niklase@google.com470e71d2011-07-07 08:21:25 +00004052 "the RTP/RTCP module");
4053 }
4054 } else
4055 {
4056 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4057 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004058 "GetRTPStatistics() failed to measure RTT since no "
niklase@google.com470e71d2011-07-07 08:21:25 +00004059 "RTP packets have been received yet");
4060 }
4061 }
4062
4063 stats.rttMs = static_cast<int> (RTT);
4064
4065 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4066 VoEId(_instanceId, _channelId),
4067 "GetRTPStatistics() => rttMs=%d", stats.rttMs);
4068
4069 // --- Part three of the final structure (four values)
4070
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004071 uint32_t bytesSent(0);
4072 uint32_t packetsSent(0);
4073 uint32_t bytesReceived(0);
4074 uint32_t packetsReceived(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004075
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004076 rtp_receive_statistics_->GetDataCounters(&bytesReceived, &packetsReceived);
4077
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004078 if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004079 &packetsSent) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004080 {
4081 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4082 VoEId(_instanceId, _channelId),
4083 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004084 " output will not be complete");
niklase@google.com470e71d2011-07-07 08:21:25 +00004085 }
4086
4087 stats.bytesSent = bytesSent;
4088 stats.packetsSent = packetsSent;
4089 stats.bytesReceived = bytesReceived;
4090 stats.packetsReceived = packetsReceived;
4091
4092 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4093 VoEId(_instanceId, _channelId),
4094 "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004095 " bytesReceived=%d, packetsReceived=%d)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004096 stats.bytesSent, stats.packetsSent, stats.bytesReceived,
4097 stats.packetsReceived);
4098
4099 return 0;
4100}
4101
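// Enables or disables RED-based forward error correction. When enabling,
// the RED payload type is validated and registered with the ACM (via
// SetRedPayloadType()) before FEC is switched on in the ACM. Sketch
// (assuming a valid Channel* channel; 127 is an arbitrary RED payload type):
//
//   channel->SetFECStatus(true, 127);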
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004102int Channel::SetFECStatus(bool enable, int redPayloadtype) {
4103 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4104 "Channel::SetFECStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00004105
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00004106 if (enable) {
4107 if (redPayloadtype < 0 || redPayloadtype > 127) {
4108 _engineStatisticsPtr->SetLastError(
4109 VE_PLTYPE_ERROR, kTraceError,
4110 "SetFECStatus() invalid RED payload type");
4111 return -1;
4112 }
4113
4114 if (SetRedPayloadType(redPayloadtype) < 0) {
4115 _engineStatisticsPtr->SetLastError(
4116 VE_CODEC_ERROR, kTraceError,
4117          "SetFECStatus() failed to register the RED payload type in the ACM");
4118 return -1;
4119 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004120 }
niklase@google.com470e71d2011-07-07 08:21:25 +00004121
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004122 if (_audioCodingModule.SetFECStatus(enable) != 0) {
4123 _engineStatisticsPtr->SetLastError(
4124 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4125 "SetFECStatus() failed to set FEC state in the ACM");
4126 return -1;
4127 }
4128 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004129}
4130
4131int
4132Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
4133{
4134 enabled = _audioCodingModule.FECStatus();
4135 if (enabled)
4136 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004137 int8_t payloadType(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004138 if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004139 {
4140 _engineStatisticsPtr->SetLastError(
4141 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4142 "GetFECStatus() failed to retrieve RED PT from RTP/RTCP "
4143 "module");
4144 return -1;
4145        }
        redPayloadtype = payloadType;  // Report back the registered RED payload type.
4146 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4147 VoEId(_instanceId, _channelId),
4148 "GetFECStatus() => enabled=%d, redPayloadtype=%d",
4149 enabled, redPayloadtype);
4150 return 0;
4151 }
4152 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4153 VoEId(_instanceId, _channelId),
4154 "GetFECStatus() => enabled=%d", enabled);
4155 return 0;
4156}
4157
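// Enables or disables NACK-based retransmission: the send side starts
// storing up to |maxNumberOfPackets| packets for retransmission, the RTP
// receiver is switched to RTCP-based NACK, and the ACM NACK list is enabled
// with the same size. Sketch (assuming a valid Channel* channel; 250 is an
// arbitrary history size):
//
//   channel->SetNACKStatus(true, 250);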
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004158void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
4159 // None of these functions can fail.
4160 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004161 rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff,
4162 maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00004163 if (enable)
4164 _audioCodingModule.EnableNack(maxNumberOfPackets);
4165 else
4166 _audioCodingModule.DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004167}
4168
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00004169// Called when we are missing one or more packets.
4170int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004171 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
4172}
4173
niklase@google.com470e71d2011-07-07 08:21:25 +00004174int
niklase@google.com470e71d2011-07-07 08:21:25 +00004175Channel::StartRTPDump(const char fileNameUTF8[1024],
4176 RTPDirections direction)
4177{
4178 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4179 "Channel::StartRTPDump()");
4180 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4181 {
4182 _engineStatisticsPtr->SetLastError(
4183 VE_INVALID_ARGUMENT, kTraceError,
4184 "StartRTPDump() invalid RTP direction");
4185 return -1;
4186 }
4187 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4188 &_rtpDumpIn : &_rtpDumpOut;
4189 if (rtpDumpPtr == NULL)
4190 {
4191 assert(false);
4192 return -1;
4193 }
4194 if (rtpDumpPtr->IsActive())
4195 {
4196 rtpDumpPtr->Stop();
4197 }
4198 if (rtpDumpPtr->Start(fileNameUTF8) != 0)
4199 {
4200 _engineStatisticsPtr->SetLastError(
4201 VE_BAD_FILE, kTraceError,
4202 "StartRTPDump() failed to create file");
4203 return -1;
4204 }
4205 return 0;
4206}
4207
4208int
4209Channel::StopRTPDump(RTPDirections direction)
4210{
4211 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4212 "Channel::StopRTPDump()");
4213 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4214 {
4215 _engineStatisticsPtr->SetLastError(
4216 VE_INVALID_ARGUMENT, kTraceError,
4217 "StopRTPDump() invalid RTP direction");
4218 return -1;
4219 }
4220 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4221 &_rtpDumpIn : &_rtpDumpOut;
4222 if (rtpDumpPtr == NULL)
4223 {
4224 assert(false);
4225 return -1;
4226 }
4227 if (!rtpDumpPtr->IsActive())
4228 {
4229 return 0;
4230 }
4231 return rtpDumpPtr->Stop();
4232}
4233
4234bool
4235Channel::RTPDumpIsActive(RTPDirections direction)
4236{
4237 if ((direction != kRtpIncoming) &&
4238 (direction != kRtpOutgoing))
4239 {
4240 _engineStatisticsPtr->SetLastError(
4241 VE_INVALID_ARGUMENT, kTraceError,
4242 "RTPDumpIsActive() invalid RTP direction");
4243 return false;
4244 }
4245 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4246 &_rtpDumpIn : &_rtpDumpOut;
4247 return rtpDumpPtr->IsActive();
4248}
4249
4250int
4251Channel::InsertExtraRTPPacket(unsigned char payloadType,
4252 bool markerBit,
4253 const char* payloadData,
4254 unsigned short payloadSize)
4255{
4256 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4257 "Channel::InsertExtraRTPPacket()");
4258 if (payloadType > 127)
4259 {
4260 _engineStatisticsPtr->SetLastError(
4261 VE_INVALID_PLTYPE, kTraceError,
4262 "InsertExtraRTPPacket() invalid payload type");
4263 return -1;
4264 }
4265 if (payloadData == NULL)
4266 {
4267 _engineStatisticsPtr->SetLastError(
4268 VE_INVALID_ARGUMENT, kTraceError,
4269 "InsertExtraRTPPacket() invalid payload data");
4270 return -1;
4271 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004272 if (payloadSize > _rtpRtcpModule->MaxDataPayloadLength())
niklase@google.com470e71d2011-07-07 08:21:25 +00004273 {
4274 _engineStatisticsPtr->SetLastError(
4275 VE_INVALID_ARGUMENT, kTraceError,
4276 "InsertExtraRTPPacket() invalid payload size");
4277 return -1;
4278 }
4279 if (!_sending)
4280 {
4281 _engineStatisticsPtr->SetLastError(
4282 VE_NOT_SENDING, kTraceError,
4283 "InsertExtraRTPPacket() not sending");
4284 return -1;
4285 }
4286
4287 // Create extra RTP packet by calling RtpRtcp::SendOutgoingData().
4288 // Transport::SendPacket() will be called by the module when the RTP packet
4289 // is created.
4290 // The call to SendOutgoingData() does *not* modify the timestamp and
4291 // payloadtype to ensure that the RTP module generates a valid RTP packet
4292 // (user might utilize a non-registered payload type).
4293 // The marker bit and payload type will be replaced just before the actual
4294 // transmission, i.e., the actual modification is done *after* the RTP
4295 // module has delivered its RTP packet back to the VoE.
4296 // We will use the stored values above when the packet is modified
4297 // (see Channel::SendPacket()).
4298
4299 _extraPayloadType = payloadType;
4300 _extraMarkerBit = markerBit;
4301 _insertExtraRTPPacket = true;
4302
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004303 if (_rtpRtcpModule->SendOutgoingData(kAudioFrameSpeech,
niklase@google.com470e71d2011-07-07 08:21:25 +00004304 _lastPayloadType,
4305 _lastLocalTimeStamp,
stefan@webrtc.orgddfdfed2012-07-03 13:21:22 +00004306 // Leaving the time when this frame was
4307 // received from the capture device as
4308 // undefined for voice for now.
4309 -1,
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004310 (const uint8_t*) payloadData,
niklase@google.com470e71d2011-07-07 08:21:25 +00004311 payloadSize) != 0)
4312 {
4313 _engineStatisticsPtr->SetLastError(
4314 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4315 "InsertExtraRTPPacket() failed to send extra RTP packet");
4316 return -1;
4317 }
4318
4319 return 0;
4320}
4321
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004322uint32_t
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004323Channel::Demultiplex(const AudioFrame& audioFrame)
niklase@google.com470e71d2011-07-07 08:21:25 +00004324{
4325 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004326 "Channel::Demultiplex()");
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004327 _audioFrame.CopyFrom(audioFrame);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004328 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004329 return 0;
4330}
4331
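// Runs the per-channel send-side processing chain on the frame delivered by
// Demultiplex(): optional file mixing, muting, the external media callback,
// in-band DTMF insertion and, when audio-level indication is enabled, level
// analysis through _rtpAudioProc.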
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004332uint32_t
xians@google.com0b0665a2011-08-08 08:18:44 +00004333Channel::PrepareEncodeAndSend(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004334{
4335 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4336 "Channel::PrepareEncodeAndSend()");
4337
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004338 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004339 {
4340 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4341 "Channel::PrepareEncodeAndSend() invalid audio frame");
4342 return -1;
4343 }
4344
4345 if (_inputFilePlaying)
4346 {
4347 MixOrReplaceAudioWithFile(mixingFrequency);
4348 }
4349
4350 if (_mute)
4351 {
4352 AudioFrameOperations::Mute(_audioFrame);
4353 }
4354
4355 if (_inputExternalMedia)
4356 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004357 CriticalSectionScoped cs(&_callbackCritSect);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004358 const bool isStereo = (_audioFrame.num_channels_ == 2);
niklase@google.com470e71d2011-07-07 08:21:25 +00004359 if (_inputExternalMediaCallbackPtr)
4360 {
4361 _inputExternalMediaCallbackPtr->Process(
4362 _channelId,
4363 kRecordingPerChannel,
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004364 (int16_t*)_audioFrame.data_,
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004365 _audioFrame.samples_per_channel_,
4366 _audioFrame.sample_rate_hz_,
niklase@google.com470e71d2011-07-07 08:21:25 +00004367 isStereo);
4368 }
4369 }
4370
4371 InsertInbandDtmfTone();
4372
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004373 if (_includeAudioLevelIndication)
4374 {
4375 assert(_rtpAudioProc.get() != NULL);
4376
4377 // Check if settings need to be updated.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004378 if (_rtpAudioProc->sample_rate_hz() != _audioFrame.sample_rate_hz_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004379 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004380 if (_rtpAudioProc->set_sample_rate_hz(_audioFrame.sample_rate_hz_) !=
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004381 AudioProcessing::kNoError)
4382 {
4383 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4384 VoEId(_instanceId, _channelId),
4385 "Error setting AudioProcessing sample rate");
4386 return -1;
4387 }
4388 }
4389
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004390 if (_rtpAudioProc->num_input_channels() != _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004391 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004392 if (_rtpAudioProc->set_num_channels(_audioFrame.num_channels_,
4393 _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004394 != AudioProcessing::kNoError)
4395 {
4396 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4397 VoEId(_instanceId, _channelId),
4398 "Error setting AudioProcessing channels");
4399 return -1;
4400 }
4401 }
4402
4403 // Performs level analysis only; does not affect the signal.
4404 _rtpAudioProc->ProcessStream(&_audioFrame);
4405 }
4406
niklase@google.com470e71d2011-07-07 08:21:25 +00004407 return 0;
4408}
4409
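// Feeds the prepared 10 ms frame to the ACM and advances the RTP timestamp.
// _audioCodingModule.Process() triggers AudioPacketizationCallback::SendData
// (see Channel::SendData) once a complete encoded frame is ready for
// packetization and transmission.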
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004410uint32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00004411Channel::EncodeAndSend()
4412{
4413 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4414 "Channel::EncodeAndSend()");
4415
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004416 assert(_audioFrame.num_channels_ <= 2);
4417 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004418 {
4419 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4420 "Channel::EncodeAndSend() invalid audio frame");
4421 return -1;
4422 }
4423
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004424 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004425
4426 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
4427
4428 // The ACM resamples internally.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004429 _audioFrame.timestamp_ = _timeStamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00004430 if (_audioCodingModule.Add10MsData((AudioFrame&)_audioFrame) != 0)
4431 {
4432 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
4433 "Channel::EncodeAndSend() ACM encoding failed");
4434 return -1;
4435 }
4436
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004437 _timeStamp += _audioFrame.samples_per_channel_;
niklase@google.com470e71d2011-07-07 08:21:25 +00004438
4439 // --- Encode if complete frame is ready
4440
4441 // This call will trigger AudioPacketizationCallback::SendData if encoding
4442 // is done and payload is ready for packetization and transmission.
4443 return _audioCodingModule.Process();
4444}
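
// Added note (not in the original source): the RTP timestamp above advances
// by samples_per_channel_ for each 10 ms frame, i.e. by sample rate / 100.
// A minimal sketch of that bookkeeping, with hypothetical local names:
//
//   uint32_t timestamp = 0;
//   const int sample_rate_hz = 16000;                      // wideband input
//   const int samples_per_channel = sample_rate_hz / 100;  // 160 per 10 ms
//   timestamp += samples_per_channel;                      // 160, 320, ...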
4445
4446int Channel::RegisterExternalMediaProcessing(
4447 ProcessingTypes type,
4448 VoEMediaProcess& processObject)
4449{
4450 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4451 "Channel::RegisterExternalMediaProcessing()");
4452
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004453 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004454
4455 if (kPlaybackPerChannel == type)
4456 {
4457 if (_outputExternalMediaCallbackPtr)
4458 {
4459 _engineStatisticsPtr->SetLastError(
4460 VE_INVALID_OPERATION, kTraceError,
4461 "Channel::RegisterExternalMediaProcessing() "
4462 "output external media already enabled");
4463 return -1;
4464 }
4465 _outputExternalMediaCallbackPtr = &processObject;
4466 _outputExternalMedia = true;
4467 }
4468 else if (kRecordingPerChannel == type)
4469 {
4470 if (_inputExternalMediaCallbackPtr)
4471 {
4472 _engineStatisticsPtr->SetLastError(
4473 VE_INVALID_OPERATION, kTraceError,
4474 "Channel::RegisterExternalMediaProcessing() "
4475                 "input external media already enabled");
4476 return -1;
4477 }
4478 _inputExternalMediaCallbackPtr = &processObject;
4479 _inputExternalMedia = true;
4480 }
4481 return 0;
4482}
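
// Usage sketch (added commentary, not in the original source): an application
// supplies a VoEMediaProcess implementation and registers it per channel. The
// Process() parameter list below mirrors the callback invocation earlier in
// this file; the exact declaration in voe_external_media.h may differ.
//
//   class HalveLevelCallback : public VoEMediaProcess {
//    public:
//     virtual void Process(int channel, ProcessingTypes type,
//                          int16_t audio10ms[], int length,
//                          int samplingFreq, bool isStereo) {
//       const int samples = isStereo ? 2 * length : length;
//       for (int i = 0; i < samples; ++i)
//         audio10ms[i] /= 2;  // attenuate the 10 ms block in place
//     }
//   };
//
//   // Hypothetical registration for the capture-side hook of this channel:
//   HalveLevelCallback callback;
//   channel->RegisterExternalMediaProcessing(kRecordingPerChannel, callback);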
4483
4484int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
4485{
4486 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4487 "Channel::DeRegisterExternalMediaProcessing()");
4488
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004489 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004490
4491 if (kPlaybackPerChannel == type)
4492 {
4493 if (!_outputExternalMediaCallbackPtr)
4494 {
4495 _engineStatisticsPtr->SetLastError(
4496 VE_INVALID_OPERATION, kTraceWarning,
4497 "Channel::DeRegisterExternalMediaProcessing() "
4498 "output external media already disabled");
4499 return 0;
4500 }
4501 _outputExternalMedia = false;
4502 _outputExternalMediaCallbackPtr = NULL;
4503 }
4504 else if (kRecordingPerChannel == type)
4505 {
4506 if (!_inputExternalMediaCallbackPtr)
4507 {
4508 _engineStatisticsPtr->SetLastError(
4509 VE_INVALID_OPERATION, kTraceWarning,
4510 "Channel::DeRegisterExternalMediaProcessing() "
4511 "input external media already disabled");
4512 return 0;
4513 }
4514 _inputExternalMedia = false;
4515 _inputExternalMediaCallbackPtr = NULL;
4516 }
4517
4518 return 0;
4519}
4520
roosa@google.com1b60ceb2012-12-12 23:00:29 +00004521int Channel::SetExternalMixing(bool enabled) {
4522 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4523 "Channel::SetExternalMixing(enabled=%d)", enabled);
4524
4525 if (_playing)
4526 {
4527 _engineStatisticsPtr->SetLastError(
4528 VE_INVALID_OPERATION, kTraceError,
4529 "Channel::SetExternalMixing() "
4530 "external mixing cannot be changed while playing.");
4531 return -1;
4532 }
4533
4534 _externalMixing = enabled;
4535
4536 return 0;
4537}
4538
niklase@google.com470e71d2011-07-07 08:21:25 +00004539int
4540Channel::ResetRTCPStatistics()
4541{
4542 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4543 "Channel::ResetRTCPStatistics()");
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004544 uint32_t remoteSSRC(0);
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004545 remoteSSRC = rtp_receiver_->SSRC();
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004546 return _rtpRtcpModule->ResetRTT(remoteSSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00004547}
4548
4549int
4550Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
4551{
4552 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4553 "Channel::GetRoundTripTimeSummary()");
4554 // Override default module outputs for the case when RTCP is disabled.
4555     // This is done to ensure backward compatibility with older VoiceEngine
4556     // versions that did not use the RTP/RTCP module.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004557 if (!_rtpRtcpModule->RTCP())
niklase@google.com470e71d2011-07-07 08:21:25 +00004558 {
4559 delaysMs.min = -1;
4560 delaysMs.max = -1;
4561 delaysMs.average = -1;
4562 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4563 "Channel::GetRoundTripTimeSummary() RTCP is disabled =>"
4564 " valid RTT measurements cannot be retrieved");
4565 return 0;
4566 }
4567
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004568 uint32_t remoteSSRC;
4569 uint16_t RTT;
4570 uint16_t avgRTT;
4571 uint16_t maxRTT;
4572 uint16_t minRTT;
niklase@google.com470e71d2011-07-07 08:21:25 +00004573 // The remote SSRC will be zero if no RTP packet has been received.
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004574 remoteSSRC = rtp_receiver_->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004575 if (remoteSSRC == 0)
4576 {
4577 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4578 "Channel::GetRoundTripTimeSummary() unable to measure RTT"
4579 " since no RTP packet has been received yet");
4580 }
4581
4582 // Retrieve RTT statistics from the RTP/RTCP module for the specified
4583 // channel and SSRC. The SSRC is required to parse out the correct source
4584 // in conference scenarios.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004585 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT,&maxRTT) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004586 {
4587 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4588 "GetRoundTripTimeSummary unable to retrieve RTT values"
4589 " from the RTCP layer");
4590 delaysMs.min = -1; delaysMs.max = -1; delaysMs.average = -1;
4591 }
4592 else
4593 {
4594 delaysMs.min = minRTT;
4595 delaysMs.max = maxRTT;
4596 delaysMs.average = avgRTT;
4597 }
4598 return 0;
4599}
4600
4601int
4602Channel::GetNetworkStatistics(NetworkStatistics& stats)
4603{
4604 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4605 "Channel::GetNetworkStatistics()");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00004606 ACMNetworkStatistics acm_stats;
4607 int return_value = _audioCodingModule.NetworkStatistics(&acm_stats);
4608 if (return_value >= 0) {
4609 memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
4610 }
4611 return return_value;
niklase@google.com470e71d2011-07-07 08:21:25 +00004612}
4613
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004614bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
4615 int* playout_buffer_delay_ms) const {
4616 if (_average_jitter_buffer_delay_us == 0) {
niklase@google.com470e71d2011-07-07 08:21:25 +00004617 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004618 "Channel::GetDelayEstimate() no valid estimate.");
4619 return false;
4620 }
4621 *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 +
4622 _recPacketDelayMs;
4623 *playout_buffer_delay_ms = playout_delay_ms_;
4624 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4625 "Channel::GetDelayEstimate()");
4626 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00004627}
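
// Worked example (added commentary, not in the original source): the filtered
// jitter-buffer delay is stored in microseconds, so the "+ 500" above rounds
// to the nearest millisecond before the per-packet delay is added.
//
//   int average_jitter_buffer_delay_us = 46700;  // hypothetical filtered value
//   int rec_packet_delay_ms = 20;
//   int jitter_buffer_delay_ms =
//       (average_jitter_buffer_delay_us + 500) / 1000 + rec_packet_delay_ms;
//   // (46700 + 500) / 1000 + 20 = 47 + 20 = 67 ms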
4628
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +00004629int Channel::SetInitialPlayoutDelay(int delay_ms)
4630{
4631 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4632 "Channel::SetInitialPlayoutDelay()");
4633 if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
4634 (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
4635 {
4636 _engineStatisticsPtr->SetLastError(
4637 VE_INVALID_ARGUMENT, kTraceError,
4638 "SetInitialPlayoutDelay() invalid min delay");
4639 return -1;
4640 }
4641 if (_audioCodingModule.SetInitialPlayoutDelay(delay_ms) != 0)
4642 {
4643 _engineStatisticsPtr->SetLastError(
4644 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4645 "SetInitialPlayoutDelay() failed to set min playout delay");
4646 return -1;
4647 }
4648 return 0;
4649}
4650
4651
niklase@google.com470e71d2011-07-07 08:21:25 +00004652int
4653Channel::SetMinimumPlayoutDelay(int delayMs)
4654{
4655 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4656 "Channel::SetMinimumPlayoutDelay()");
4657 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
4658 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
4659 {
4660 _engineStatisticsPtr->SetLastError(
4661 VE_INVALID_ARGUMENT, kTraceError,
4662 "SetMinimumPlayoutDelay() invalid min delay");
4663 return -1;
4664 }
4665 if (_audioCodingModule.SetMinimumPlayoutDelay(delayMs) != 0)
4666 {
4667 _engineStatisticsPtr->SetLastError(
4668 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4669 "SetMinimumPlayoutDelay() failed to set min playout delay");
4670 return -1;
4671 }
4672 return 0;
4673}
4674
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004675void Channel::UpdatePlayoutTimestamp(bool rtcp) {
4676 uint32_t playout_timestamp = 0;
4677
4678 if (_audioCodingModule.PlayoutTimestamp(&playout_timestamp) == -1) {
4679 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4680 "Channel::UpdatePlayoutTimestamp() failed to read playout"
4681 " timestamp from the ACM");
4682 _engineStatisticsPtr->SetLastError(
4683 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4684 "UpdatePlayoutTimestamp() failed to retrieve timestamp");
4685 return;
4686 }
4687
4688 uint16_t delay_ms = 0;
4689 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
4690 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4691 "Channel::UpdatePlayoutTimestamp() failed to read playout"
4692 " delay from the ADM");
4693 _engineStatisticsPtr->SetLastError(
4694 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4695 "UpdatePlayoutTimestamp() failed to retrieve playout delay");
4696 return;
4697 }
4698
4699 int32_t playout_frequency = _audioCodingModule.PlayoutFrequency();
4700   CodecInst current_receive_codec;
4701   if (_audioCodingModule.ReceiveCodec(&current_receive_codec) == 0) {
4702     if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
4703       playout_frequency = 8000;
4704     } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
4705 playout_frequency = 48000;
niklase@google.com470e71d2011-07-07 08:21:25 +00004706 }
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004707 }
4708
4709 // Remove the playout delay.
4710 playout_timestamp -= (delay_ms * (playout_frequency / 1000));
4711
4712 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4713 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
4714 playout_timestamp);
4715
4716 if (rtcp) {
4717 playout_timestamp_rtcp_ = playout_timestamp;
4718 } else {
4719 playout_timestamp_rtp_ = playout_timestamp;
4720 }
4721 playout_delay_ms_ = delay_ms;
4722}
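
// Worked example (added commentary, not in the original source): the playout
// delay reported by the ADM in milliseconds is converted into RTP ticks with
// the payload clock rate before being subtracted. With Opus the RTP clock is
// 48 kHz, so a 60 ms playout delay corresponds to 60 * (48000 / 1000) = 2880
// ticks:
//
//   uint32_t playout_timestamp = 960000;
//   uint16_t delay_ms = 60;
//   int32_t playout_frequency = 48000;
//   playout_timestamp -= delay_ms * (playout_frequency / 1000);  // 957120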
4723
4724int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
4725 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4726 "Channel::GetPlayoutTimestamp()");
4727 if (playout_timestamp_rtp_ == 0) {
4728 _engineStatisticsPtr->SetLastError(
4729 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4730 "GetPlayoutTimestamp() failed to retrieve timestamp");
4731 return -1;
4732 }
4733 timestamp = playout_timestamp_rtp_;
4734 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4735 VoEId(_instanceId,_channelId),
4736 "GetPlayoutTimestamp() => timestamp=%u", timestamp);
4737 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004738}
4739
4740int
4741Channel::SetInitTimestamp(unsigned int timestamp)
4742{
4743 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4744 "Channel::SetInitTimestamp()");
4745 if (_sending)
4746 {
4747 _engineStatisticsPtr->SetLastError(
4748 VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
4749 return -1;
4750 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004751 if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004752 {
4753 _engineStatisticsPtr->SetLastError(
4754 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4755 "SetInitTimestamp() failed to set timestamp");
4756 return -1;
4757 }
4758 return 0;
4759}
4760
4761int
4762Channel::SetInitSequenceNumber(short sequenceNumber)
4763{
4764 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4765 "Channel::SetInitSequenceNumber()");
4766 if (_sending)
4767 {
4768 _engineStatisticsPtr->SetLastError(
4769 VE_SENDING, kTraceError,
4770 "SetInitSequenceNumber() already sending");
4771 return -1;
4772 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004773 if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004774 {
4775 _engineStatisticsPtr->SetLastError(
4776 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4777 "SetInitSequenceNumber() failed to set sequence number");
4778 return -1;
4779 }
4780 return 0;
4781}
4782
4783int
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004784Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const
niklase@google.com470e71d2011-07-07 08:21:25 +00004785{
4786 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4787 "Channel::GetRtpRtcp()");
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004788 *rtpRtcpModule = _rtpRtcpModule.get();
4789 *rtp_receiver = rtp_receiver_.get();
niklase@google.com470e71d2011-07-07 08:21:25 +00004790 return 0;
4791}
4792
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004793// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
4794// a shared helper.
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004795int32_t
pbos@webrtc.org92135212013-05-14 08:31:39 +00004796Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004797{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004798 scoped_array<int16_t> fileBuffer(new int16_t[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004799 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004800
4801 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004802 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004803
4804 if (_inputFilePlayerPtr == NULL)
4805 {
4806 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4807 VoEId(_instanceId, _channelId),
4808 "Channel::MixOrReplaceAudioWithFile() fileplayer"
4809                          " doesn't exist");
4810 return -1;
4811 }
4812
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004813 if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00004814 fileSamples,
4815 mixingFrequency) == -1)
4816 {
4817 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4818 VoEId(_instanceId, _channelId),
4819 "Channel::MixOrReplaceAudioWithFile() file mixing "
4820 "failed");
4821 return -1;
4822 }
4823 if (fileSamples == 0)
4824 {
4825 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4826 VoEId(_instanceId, _channelId),
4827                          "Channel::MixOrReplaceAudioWithFile() file has ended");
4828 return 0;
4829 }
4830 }
4831
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004832 assert(_audioFrame.samples_per_channel_ == fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004833
4834 if (_mixFileWithMicrophone)
4835 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004836 // Currently file stream is always mono.
4837 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004838 Utility::MixWithSat(_audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004839 _audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004840 fileBuffer.get(),
4841 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004842 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004843 }
4844 else
4845 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004846 // Replace ACM audio with file.
4847 // Currently file stream is always mono.
4848 // TODO(xians): Change the code when FilePlayer supports real stereo.
niklase@google.com470e71d2011-07-07 08:21:25 +00004849 _audioFrame.UpdateFrame(_channelId,
4850 -1,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004851 fileBuffer.get(),
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004852 fileSamples,
niklase@google.com470e71d2011-07-07 08:21:25 +00004853 mixingFrequency,
4854 AudioFrame::kNormalSpeech,
4855 AudioFrame::kVadUnknown,
4856 1);
4857
4858 }
4859 return 0;
4860}
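
// Illustrative sketch (added commentary, not in the original source) of a
// saturating mono-into-interleaved mix, i.e. the kind of operation that
// Utility::MixWithSat() performs above. The real helper in
// webrtc/voice_engine/utility.cc may differ in its details.
//
//   static void MixMonoWithSat(int16_t* target, int target_channels,
//                              const int16_t* mono, int samples) {
//     for (int i = 0; i < samples; ++i) {
//       for (int ch = 0; ch < target_channels; ++ch) {
//         int32_t sum = target[i * target_channels + ch] + mono[i];
//         if (sum > 32767) sum = 32767;
//         if (sum < -32768) sum = -32768;
//         target[i * target_channels + ch] = static_cast<int16_t>(sum);
//       }
//     }
//   }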
4861
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004862int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00004863Channel::MixAudioWithFile(AudioFrame& audioFrame,
pbos@webrtc.org92135212013-05-14 08:31:39 +00004864 int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004865{
4866 assert(mixingFrequency <= 32000);
4867
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004868 scoped_array<int16_t> fileBuffer(new int16_t[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004869 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004870
4871 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004872 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004873
4874 if (_outputFilePlayerPtr == NULL)
4875 {
4876 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4877 VoEId(_instanceId, _channelId),
4878 "Channel::MixAudioWithFile() file mixing failed");
4879 return -1;
4880 }
4881
4882 // We should get the frequency we ask for.
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004883 if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00004884 fileSamples,
4885 mixingFrequency) == -1)
4886 {
4887 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4888 VoEId(_instanceId, _channelId),
4889 "Channel::MixAudioWithFile() file mixing failed");
4890 return -1;
4891 }
4892 }
4893
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004894 if (audioFrame.samples_per_channel_ == fileSamples)
niklase@google.com470e71d2011-07-07 08:21:25 +00004895 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004896 // Currently file stream is always mono.
4897 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004898 Utility::MixWithSat(audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004899 audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004900 fileBuffer.get(),
4901 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004902 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004903 }
4904 else
4905 {
4906 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004907 "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
niklase@google.com470e71d2011-07-07 08:21:25 +00004908 "fileSamples(%d)",
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004909 audioFrame.samples_per_channel_, fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004910 return -1;
4911 }
4912
4913 return 0;
4914}
4915
4916int
4917Channel::InsertInbandDtmfTone()
4918{
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00004919 // Check if we should start a new tone.
niklase@google.com470e71d2011-07-07 08:21:25 +00004920 if (_inbandDtmfQueue.PendingDtmf() &&
4921 !_inbandDtmfGenerator.IsAddingTone() &&
4922 _inbandDtmfGenerator.DelaySinceLastTone() >
4923 kMinTelephoneEventSeparationMs)
4924 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004925 int8_t eventCode(0);
4926 uint16_t lengthMs(0);
4927 uint8_t attenuationDb(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004928
4929 eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
4930 _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
4931 if (_playInbandDtmfEvent)
4932 {
4933 // Add tone to output mixer using a reduced length to minimize
4934 // risk of echo.
4935 _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
4936 attenuationDb);
4937 }
4938 }
4939
4940 if (_inbandDtmfGenerator.IsAddingTone())
4941 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004942 uint16_t frequency(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004943 _inbandDtmfGenerator.GetSampleRate(frequency);
4944
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004945 if (frequency != _audioFrame.sample_rate_hz_)
niklase@google.com470e71d2011-07-07 08:21:25 +00004946 {
4947 // Update sample rate of Dtmf tone since the mixing frequency
4948 // has changed.
4949 _inbandDtmfGenerator.SetSampleRate(
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004950 (uint16_t) (_audioFrame.sample_rate_hz_));
niklase@google.com470e71d2011-07-07 08:21:25 +00004951 // Reset the tone to be added taking the new sample rate into
4952 // account.
4953 _inbandDtmfGenerator.ResetTone();
4954 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004955
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004956 int16_t toneBuffer[320];
4957 uint16_t toneSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004958 // Get 10ms tone segment and set time since last tone to zero
4959 if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
4960 {
4961 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4962 VoEId(_instanceId, _channelId),
4963 "Channel::EncodeAndSend() inserting Dtmf failed");
4964 return -1;
4965 }
4966
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00004967 // Replace mixed audio with DTMF tone.
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004968 for (int sample = 0;
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004969 sample < _audioFrame.samples_per_channel_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00004970 sample++)
4971 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004972 for (int channel = 0;
4973 channel < _audioFrame.num_channels_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00004974 channel++)
4975 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004976 const int index = sample * _audioFrame.num_channels_ + channel;
4977 _audioFrame.data_[index] = toneBuffer[sample];
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00004978 }
4979 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004980
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004981 assert(_audioFrame.samples_per_channel_ == toneSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004982 } else
4983 {
4984 // Add 10ms to "delay-since-last-tone" counter
4985 _inbandDtmfGenerator.UpdateDelaySinceLastTone();
4986 }
4987 return 0;
4988}
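
// Added note (not in the original source): the replacement loop above writes
// the mono DTMF tone into every channel of the interleaved frame using
// index = sample * num_channels_ + channel. For a stereo frame:
//
//   // sample 0 -> data_[0] (left), data_[1] (right)
//   // sample 1 -> data_[2] (left), data_[3] (right)
//   // sample s -> data_[2 * s], data_[2 * s + 1]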
4989
niklase@google.com470e71d2011-07-07 08:21:25 +00004990void
4991Channel::ResetDeadOrAliveCounters()
4992{
4993 _countDeadDetections = 0;
4994 _countAliveDetections = 0;
4995}
4996
4997void
4998Channel::UpdateDeadOrAliveCounters(bool alive)
4999{
5000 if (alive)
5001 _countAliveDetections++;
5002 else
5003 _countDeadDetections++;
5004}
5005
5006int
5007Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
5008{
    // Report the counters accumulated by UpdateDeadOrAliveCounters().
    countDead = _countDeadDetections;
    countAlive = _countAliveDetections;
niklase@google.com470e71d2011-07-07 08:21:25 +00005009    return 0;
5010}
5011
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005012int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00005013Channel::SendPacketRaw(const void *data, int len, bool RTCP)
5014{
5015 if (_transportPtr == NULL)
5016 {
5017 return -1;
5018 }
5019 if (!RTCP)
5020 {
5021 return _transportPtr->SendPacket(_channelId, data, len);
5022 }
5023 else
5024 {
5025 return _transportPtr->SendRTCPPacket(_channelId, data, len);
5026 }
5027}
5028
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005029// Called for incoming RTP packets after successful RTP header parsing.
5030void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
5031 uint16_t sequence_number) {
5032 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
5033 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
5034 rtp_timestamp, sequence_number);
niklase@google.com470e71d2011-07-07 08:21:25 +00005035
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005036 // Get frequency of last received payload
5037 int rtp_receive_frequency = _audioCodingModule.ReceiveFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +00005038
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005039 CodecInst current_receive_codec;
5040 if (_audioCodingModule.ReceiveCodec(&current_receive_codec) != 0) {
5041 return;
5042 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005043
turaj@webrtc.orge46c8d32013-05-22 20:39:43 +00005044 // Update the least required delay.
5045 least_required_delay_ms_ = _audioCodingModule.LeastRequiredDelayMs();
5046
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005047 if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
5048 // Even though the actual sampling rate for G.722 audio is
5049 // 16,000 Hz, the RTP clock rate for the G722 payload format is
5050 // 8,000 Hz because that value was erroneously assigned in
5051 // RFC 1890 and must remain unchanged for backward compatibility.
5052 rtp_receive_frequency = 8000;
5053 } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
5054 // We are resampling Opus internally to 32,000 Hz until all our
5055 // DSP routines can operate at 48,000 Hz, but the RTP clock
5056 // rate for the Opus payload format is standardized to 48,000 Hz,
5057 // because that is the maximum supported decoding sampling rate.
5058 rtp_receive_frequency = 48000;
5059 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005060
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005061  // playout_timestamp_rtp_ is updated in UpdatePlayoutTimestamp() for every
5062  // incoming RTP packet.
5063 uint32_t timestamp_diff_ms = (rtp_timestamp - playout_timestamp_rtp_) /
5064 (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00005065
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005066 uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) /
5067 (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00005068
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005069 _previousTimestamp = rtp_timestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00005070
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005071 if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
5072 timestamp_diff_ms = 0;
5073 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005074
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005075 if (timestamp_diff_ms == 0) return;
niklase@google.com470e71d2011-07-07 08:21:25 +00005076
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005077 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
5078 _recPacketDelayMs = packet_delay_ms;
5079 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005080
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005081 if (_average_jitter_buffer_delay_us == 0) {
5082 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
5083 return;
5084 }
5085
5086   // Filter the average delay with an exponential filter (alpha = 7/8).
5087   // The state is kept as 1000 * delay_ms (i.e. in microseconds) to
5088   // reduce rounding error; GetDelayEstimate() converts it back to
5089   // milliseconds.
5090 _average_jitter_buffer_delay_us = (_average_jitter_buffer_delay_us * 7 +
5091 1000 * timestamp_diff_ms + 500) / 8;
niklase@google.com470e71d2011-07-07 08:21:25 +00005092}
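
// Worked example (added commentary, not in the original source): one update
// of the exponential filter above, with alpha = 7/8 and the state kept in
// microseconds (1000 * ms) to limit rounding error.
//
//   int average_jitter_buffer_delay_us = 40000;  // previous estimate: 40 ms
//   uint32_t timestamp_diff_ms = 48;             // new delay sample
//   average_jitter_buffer_delay_us =
//       (average_jitter_buffer_delay_us * 7 + 1000 * timestamp_diff_ms + 500) / 8;
//   // (280000 + 48000 + 500) / 8 = 41062 us, i.e. about 41 ms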
5093
5094void
5095Channel::RegisterReceiveCodecsToRTPModule()
5096{
5097 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
5098 "Channel::RegisterReceiveCodecsToRTPModule()");
5099
5101 CodecInst codec;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005102 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00005103
5104 for (int idx = 0; idx < nSupportedCodecs; idx++)
5105 {
5106 // Open up the RTP/RTCP receiver for all supported codecs
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005107 if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00005108 (rtp_receiver_->RegisterReceivePayload(
5109 codec.plname,
5110 codec.pltype,
5111 codec.plfreq,
5112 codec.channels,
5113 (codec.rate < 0) ? 0 : codec.rate) == -1))
niklase@google.com470e71d2011-07-07 08:21:25 +00005114 {
5115 WEBRTC_TRACE(
5116 kTraceWarning,
5117 kTraceVoice,
5118 VoEId(_instanceId, _channelId),
5119 "Channel::RegisterReceiveCodecsToRTPModule() unable"
5120 " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
5121 codec.plname, codec.pltype, codec.plfreq,
5122 codec.channels, codec.rate);
5123 }
5124 else
5125 {
5126 WEBRTC_TRACE(
5127 kTraceInfo,
5128 kTraceVoice,
5129 VoEId(_instanceId, _channelId),
5130 "Channel::RegisterReceiveCodecsToRTPModule() %s "
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00005131 "(%d/%d/%d/%d) has been added to the RTP/RTCP "
niklase@google.com470e71d2011-07-07 08:21:25 +00005132 "receiver",
5133 codec.plname, codec.pltype, codec.plfreq,
5134 codec.channels, codec.rate);
5135 }
5136 }
5137}
5138
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005139int Channel::ApmProcessRx(AudioFrame& frame) {
5140 AudioProcessing* audioproc = _rxAudioProcessingModulePtr;
5141 // Register the (possibly new) frame parameters.
5142 if (audioproc->set_sample_rate_hz(frame.sample_rate_hz_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005143 LOG_FERR1(LS_WARNING, set_sample_rate_hz, frame.sample_rate_hz_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005144 }
5145 if (audioproc->set_num_channels(frame.num_channels_,
5146 frame.num_channels_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005147 LOG_FERR1(LS_WARNING, set_num_channels, frame.num_channels_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005148 }
5149 if (audioproc->ProcessStream(&frame) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005150 LOG_FERR0(LS_WARNING, ProcessStream);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005151 }
5152 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00005153}
5154
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005155int Channel::SetSecondarySendCodec(const CodecInst& codec,
5156 int red_payload_type) {
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00005157 // Sanity check for payload type.
5158 if (red_payload_type < 0 || red_payload_type > 127) {
5159 _engineStatisticsPtr->SetLastError(
5160 VE_PLTYPE_ERROR, kTraceError,
5161 "SetRedPayloadType() invalid RED payload type");
5162 return -1;
5163 }
5164
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005165 if (SetRedPayloadType(red_payload_type) < 0) {
5166 _engineStatisticsPtr->SetLastError(
5167 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5168 "SetSecondarySendCodec() Failed to register RED ACM");
5169 return -1;
5170 }
5171 if (_audioCodingModule.RegisterSecondarySendCodec(codec) < 0) {
5172 _engineStatisticsPtr->SetLastError(
5173 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5174 "SetSecondarySendCodec() Failed to register secondary send codec in "
5175 "ACM");
5176 return -1;
5177 }
5178
5179 return 0;
5180}
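
// Usage sketch (added commentary, not in the original source): enabling a
// secondary (dual-stream) send codec carried inside RED. All field values
// below are hypothetical and must match the negotiated payload types; the
// "channel" pointer stands in for this Channel instance.
//
//   CodecInst secondary;
//   memset(&secondary, 0, sizeof(secondary));
//   strncpy(secondary.plname, "ISAC", sizeof(secondary.plname) - 1);
//   secondary.pltype = 103;
//   secondary.plfreq = 16000;
//   secondary.pacsize = 480;   // 30 ms at 16 kHz
//   secondary.channels = 1;
//   secondary.rate = 32000;
//   channel->SetSecondarySendCodec(secondary, 127 /* RED payload type */);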
5181
5182void Channel::RemoveSecondarySendCodec() {
5183 _audioCodingModule.UnregisterSecondarySendCodec();
5184}
5185
5186int Channel::GetSecondarySendCodec(CodecInst* codec) {
5187 if (_audioCodingModule.SecondarySendCodec(codec) < 0) {
5188 _engineStatisticsPtr->SetLastError(
5189 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5190         "GetSecondarySendCodec() Failed to get secondary send codec from ACM");
5191 return -1;
5192 }
5193 return 0;
5194}
5195
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00005196// Assuming this method is called with valid payload type.
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005197int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005198 CodecInst codec;
5199 bool found_red = false;
5200
5201 // Get default RED settings from the ACM database
5202 const int num_codecs = AudioCodingModule::NumberOfCodecs();
5203 for (int idx = 0; idx < num_codecs; idx++) {
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005204 _audioCodingModule.Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005205 if (!STR_CASE_CMP(codec.plname, "RED")) {
5206 found_red = true;
5207 break;
5208 }
5209 }
5210
5211 if (!found_red) {
5212 _engineStatisticsPtr->SetLastError(
5213 VE_CODEC_ERROR, kTraceError,
5214 "SetRedPayloadType() RED is not supported");
5215 return -1;
5216 }
5217
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00005218 codec.pltype = red_payload_type;
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005219 if (_audioCodingModule.RegisterSendCodec(codec) < 0) {
5220 _engineStatisticsPtr->SetLastError(
5221 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5222 "SetRedPayloadType() RED registration in ACM module failed");
5223 return -1;
5224 }
5225
5226 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
5227 _engineStatisticsPtr->SetLastError(
5228 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
5229 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
5230 return -1;
5231 }
5232 return 0;
5233}
5234
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00005235} // namespace voe
5236} // namespace webrtc