/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/channel.h"

#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/utility/interface/audio_frame_operations.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/utility.h"

#if defined(_WIN32)
#include <Qos.h>
#endif

namespace webrtc {
namespace voe {

int32_t
Channel::SendData(FrameType frameType,
                  uint8_t payloadType,
                  uint32_t timeStamp,
                  const uint8_t* payloadData,
                  uint16_t payloadSize,
                  const RTPFragmentationHeader* fragmentation)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
                 " payloadSize=%u, fragmentation=0x%x)",
                 frameType, payloadType, timeStamp, payloadSize, fragmentation);

    if (_includeAudioLevelIndication)
    {
        assert(_rtpAudioProc.get() != NULL);
        // Store current audio level in the RTP/RTCP module.
        // The level will be used in combination with voice-activity state
        // (frameType) to add an RTP header extension
        _rtpRtcpModule->SetAudioLevel(_rtpAudioProc->level_estimator()->RMS());
    }

    // Push data from ACM to RTP/RTCP-module to deliver audio frame for
    // packetization.
    // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
    if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
                                         payloadType,
                                         timeStamp,
                                         // Leaving the time when this frame was
                                         // received from the capture device as
                                         // undefined for voice for now.
                                         -1,
                                         payloadData,
                                         payloadSize,
                                         fragmentation) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
            "Channel::SendData() failed to send data to RTP/RTCP module");
        return -1;
    }

    _lastLocalTimeStamp = timeStamp;
    _lastPayloadType = payloadType;

    return 0;
}

int32_t
Channel::InFrameType(int16_t frameType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::InFrameType(frameType=%d)", frameType);

    CriticalSectionScoped cs(&_callbackCritSect);
    // 1 indicates speech
    _sendFrameType = (frameType == 1) ? 1 : 0;
    return 0;
}

int32_t
Channel::OnRxVadDetected(int vadDecision)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);

    CriticalSectionScoped cs(&_callbackCritSect);
    if (_rxVadObserverPtr)
    {
        _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
    }

    return 0;
}

int
Channel::SendPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendPacket(channel=%d, len=%d)", channel, len);

    if (_transportPtr == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() failed to send RTP packet due to"
                     " invalid transport object");
        return -1;
    }

    // Insert an extra RTP packet if the user has called the
    // InsertExtraRTPPacket API.
    if (_insertExtraRTPPacket)
    {
        uint8_t* rtpHdr = (uint8_t*)data;
        uint8_t M_PT(0);
        if (_extraMarkerBit)
        {
            M_PT = 0x80;            // set the M-bit
        }
        M_PT += _extraPayloadType;  // set the payload type
        *(++rtpHdr) = M_PT;         // modify the M|PT-byte within the RTP header
        _insertExtraRTPPacket = false;  // insert one packet only
    }
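    // Note: in a standard RTP header (RFC 3550) the second byte carries the
    // marker bit in its most significant bit and the 7-bit payload type in
    // the remaining bits, which is why the code above ORs in 0x80 and writes
    // M_PT into rtpHdr[1].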

    uint8_t* bufferToSendPtr = (uint8_t*)data;
    int32_t bufferLength = len;

    // Dump the RTP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() RTP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTPBufferPtr =
                    new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
                memset(_encryptionRTPBufferPtr, 0,
                       kVoiceEngineMaxIpPacketSizeBytes);
            }

            // Perform encryption (SRTP or external)
            int32_t encryptedBufferLength = 0;
            _encryptionPtr->encrypt(_channelId,
                                    bufferToSendPtr,
                                    _encryptionRTPBufferPtr,
                                    bufferLength,
                                    (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED,
                    kTraceError, "Channel::SendPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        int n = _transportPtr->SendPacket(channel,
                                          bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }
}

int
Channel::SendRTCPPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() failed to send RTCP packet"
                         " due to invalid transport object");
            return -1;
        }
    }

    uint8_t* bufferToSendPtr = (uint8_t*)data;
    int32_t bufferLength = len;

    // Dump the RTCP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
255 "Channel::SendPacket() RTCP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTCPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTCPBufferPtr =
                    new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
            }

            // Perform encryption (SRTP or external).
            int32_t encryptedBufferLength = 0;
            _encryptionPtr->encrypt_rtcp(_channelId,
                                         bufferToSendPtr,
                                         _encryptionRTCPBufferPtr,
                                         bufferLength,
                                         (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED, kTraceError,
                    "Channel::SendRTCPPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTCPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            return -1;
        }
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }

    return len;
}

void
Channel::OnPlayTelephoneEvent(int32_t id,
                              uint8_t event,
                              uint16_t lengthMs,
                              uint8_t volume)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
                 " volume=%u)", id, event, lengthMs, volume);

    if (!_playOutbandDtmfEvent || (event > 15))
    {
        // Ignore callback since feedback is disabled or event is not a
        // Dtmf tone event.
        return;
    }

    assert(_outputMixerPtr != NULL);

    // Start playing out the Dtmf tone (if playout is enabled).
    // Reduce the tone length by 80 ms to reduce the risk of echo.
    _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
}

void
Channel::OnIncomingSSRCChanged(int32_t id,
                               uint32_t SSRC)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
                 id, SSRC);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Reset RTP-module counters since a new incoming RTP stream is detected
    _rtpRtcpModule->ResetReceiveDataCountersRTP();
    _rtpRtcpModule->ResetStatisticsRTP();

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            // Send new SSRC to registered observer using callback
            _rtpObserverPtr->OnIncomingSSRCChanged(channel, SSRC);
        }
    }
}

void Channel::OnIncomingCSRCChanged(int32_t id,
                                    uint32_t CSRC,
                                    bool added)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
                 id, CSRC, added);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            _rtpObserverPtr->OnIncomingCSRCChanged(channel, CSRC, added);
        }
    }
}

void
Channel::OnApplicationDataReceived(int32_t id,
                                   uint8_t subType,
                                   uint32_t name,
                                   uint16_t length,
                                   const uint8_t* data)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
                 " name=%u, length=%u)",
                 id, subType, name, length);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtcpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtcpObserverPtr)
        {
            _rtcpObserverPtr->OnApplicationDataReceived(channel,
                                                        subType,
                                                        name,
                                                        data,
                                                        length);
        }
    }
}

int32_t
Channel::OnInitializeDecoder(
    int32_t id,
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    uint8_t channels,
    uint32_t rate)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
                 "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
                 id, payloadType, payloadName, frequency, channels, rate);

    assert(VoEChannelId(id) == _channelId);

    CodecInst receiveCodec = {0};
    CodecInst dummyCodec = {0};

    receiveCodec.pltype = payloadType;
    receiveCodec.plfreq = frequency;
    receiveCodec.channels = channels;
    receiveCodec.rate = rate;
    strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

    _audioCodingModule.Codec(payloadName, &dummyCodec, frequency, channels);
    receiveCodec.pacsize = dummyCodec.pacsize;

    // Register the new codec to the ACM
    if (_audioCodingModule.RegisterReceiveCodec(receiveCodec) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "Channel::OnInitializeDecoder() invalid codec ("
                     "pt=%d, name=%s) received - 1", payloadType, payloadName);
        _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
        return -1;
    }

    return 0;
}

void
Channel::OnPacketTimeout(int32_t id)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPacketTimeout(id=%d)", id);

    CriticalSectionScoped cs(_callbackCritSectPtr);
    if (_voiceEngineObserverPtr)
    {
        if (_receiving || _externalTransport)
        {
            int32_t channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Ensure that next OnReceivedPacket() callback will trigger
            // a VE_PACKET_RECEIPT_RESTARTED callback.
            _rtpPacketTimedOut = true;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnPacketTimeout() => "
                         "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
            _voiceEngineObserverPtr->CallbackOnError(channel,
                                                     VE_RECEIVE_PACKET_TIMEOUT);
        }
    }
}

void
Channel::OnReceivedPacket(int32_t id,
                          RtpRtcpPacketType packetType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPacket(id=%d, packetType=%d)",
                 id, packetType);

    assert(VoEChannelId(id) == _channelId);

    // Notify only for the case when we have restarted an RTP session.
    if (_rtpPacketTimedOut && (kPacketRtp == packetType))
    {
        CriticalSectionScoped cs(_callbackCritSectPtr);
        if (_voiceEngineObserverPtr)
        {
            int32_t channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Reset timeout mechanism
            _rtpPacketTimedOut = false;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
529 "Channel::OnPacketTimeout() =>"
530 " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
531 _voiceEngineObserverPtr->CallbackOnError(
532 channel,
533 VE_PACKET_RECEIPT_RESTARTED);
534 }
535 }
536}
537
538void
pbos@webrtc.org92135212013-05-14 08:31:39 +0000539Channel::OnPeriodicDeadOrAlive(int32_t id,
540 RTPAliveType alive)
niklase@google.com470e71d2011-07-07 08:21:25 +0000541{
542 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
543 "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);
544
henrika@webrtc.org19da7192013-04-05 14:34:57 +0000545 {
546 CriticalSectionScoped cs(&_callbackCritSect);
547 if (!_connectionObserver)
548 return;
549 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000550
pbos@webrtc.org6141e132013-04-09 10:09:10 +0000551 int32_t channel = VoEChannelId(id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000552 assert(channel == _channelId);
553
554 // Use Alive as default to limit risk of false Dead detections
555 bool isAlive(true);
556
557 // Always mark the connection as Dead when the module reports kRtpDead
558 if (kRtpDead == alive)
559 {
560 isAlive = false;
561 }
562
563 // It is possible that the connection is alive even if no RTP packet has
564 // been received for a long time since the other side might use VAD/DTX
565 // and a low SID-packet update rate.
566 if ((kRtpNoRtp == alive) && _playing)
567 {
568 // Detect Alive for all NetEQ states except for the case when we are
569 // in PLC_CNG state.
570 // PLC_CNG <=> background noise only due to long expand or error.
        // Note that the case where the other side stops sending during CNG
        // state will be detected as Alive. Dead is not set until RTCP packets
        // have been missing for at least twelve seconds (handled internally
        // by the RTP/RTCP module).
        isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
    }

    UpdateDeadOrAliveCounters(isAlive);

    // Send callback to the registered observer
    if (_connectionObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_connectionObserverPtr)
        {
            _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
        }
    }
}

int32_t
Channel::OnReceivedPayloadData(const uint8_t* payloadData,
                               uint16_t payloadSize,
                               const WebRtcRTPHeader* rtpHeader)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPayloadData(payloadSize=%d,"
                 " payloadType=%u, audioChannel=%u)",
                 payloadSize,
                 rtpHeader->header.payloadType,
                 rtpHeader->type.Audio.channel);

    _lastRemoteTimeStamp = rtpHeader->header.timestamp;

    if (!_playing)
    {
        // Avoid inserting into NetEQ when we are not playing. Count the
        // packet as discarded.
        WEBRTC_TRACE(kTraceStream, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "received packet is discarded since playing is not"
                     " activated");
        _numberOfDiscardedPackets++;
        return 0;
    }

    // Push the incoming payload (parsed and ready for decoding) into the ACM
    if (_audioCodingModule.IncomingPacket(payloadData,
                                          payloadSize,
                                          *rtpHeader) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
            "Channel::OnReceivedPayloadData() unable to push data to the ACM");
        return -1;
    }

    // Update the packet delay
    UpdatePacketDelay(rtpHeader->header.timestamp,
                      rtpHeader->header.sequenceNumber);
    return 0;
}

int32_t Channel::GetAudioFrame(int32_t id, AudioFrame& audioFrame)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetAudioFrame(id=%d)", id);

    // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
    if (_audioCodingModule.PlayoutData10Ms(audioFrame.sample_rate_hz_,
                                           &audioFrame) == -1)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
        // In all likelihood, the audio in this frame is garbage. We return an
        // error so that the audio mixer module doesn't add it to the mix. As
        // a result, it won't be played out and the actions skipped here are
        // irrelevant.
        return -1;
    }

    if (_RxVadDetection)
    {
        UpdateRxVadDetection(audioFrame);
    }

    // Convert module ID to internal VoE channel ID
    audioFrame.id_ = VoEChannelId(audioFrame.id_);
    // Store speech type for dead-or-alive detection
    _outputSpeechType = audioFrame.speech_type_;

    // Perform far-end AudioProcessing module processing on the received signal
    if (_rxApmIsEnabled)
    {
        ApmProcessRx(audioFrame);
    }

    // Output volume scaling
    if (_outputGain < 0.99f || _outputGain > 1.01f)
    {
        AudioFrameOperations::ScaleWithSat(_outputGain, audioFrame);
    }

    // Scale left and/or right channel(s) if stereo and master balance is
    // active

    if (_panLeft != 1.0f || _panRight != 1.0f)
    {
        if (audioFrame.num_channels_ == 1)
        {
            // Emulate stereo mode since panning is active.
            // The mono signal is copied to both left and right channels here.
            AudioFrameOperations::MonoToStereo(&audioFrame);
        }
        // For true stereo mode (when we are receiving a stereo signal), no
        // action is needed.

        // Do the panning operation (the audio frame contains stereo at this
        // stage)
        AudioFrameOperations::Scale(_panLeft, _panRight, audioFrame);
    }

    // Mix decoded PCM output with file if file mixing is enabled
    if (_outputFilePlaying)
    {
        MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
    }

    // Place channel in on-hold state (~muted) if on-hold is activated
    if (_outputIsOnHold)
    {
        AudioFrameOperations::Mute(audioFrame);
    }

    // External media
    if (_outputExternalMedia)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        const bool isStereo = (audioFrame.num_channels_ == 2);
        if (_outputExternalMediaCallbackPtr)
        {
            _outputExternalMediaCallbackPtr->Process(
                _channelId,
                kPlaybackPerChannel,
                (int16_t*)audioFrame.data_,
                audioFrame.samples_per_channel_,
                audioFrame.sample_rate_hz_,
                isStereo);
        }
    }

    // Record playout if enabled
    {
        CriticalSectionScoped cs(&_fileCritSect);

        if (_outputFileRecording && _outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
        }
    }

    // Measure audio level (0-9)
    _outputAudioLevel.ComputeLevel(audioFrame);

    return 0;
}

int32_t
Channel::NeededFrequency(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::NeededFrequency(id=%d)", id);

    int highestNeeded = 0;

    // Determine highest needed receive frequency
    int32_t receiveFrequency = _audioCodingModule.ReceiveFrequency();

    // Return the bigger of playout and receive frequency in the ACM.
    if (_audioCodingModule.PlayoutFrequency() > receiveFrequency)
    {
        highestNeeded = _audioCodingModule.PlayoutFrequency();
    }
    else
    {
        highestNeeded = receiveFrequency;
    }

    // Special case, if we're playing a file on the playout side
    // we take that frequency into consideration as well
    // This is not needed on sending side, since the codec will
    // limit the spectrum anyway.
    if (_outputFilePlaying)
    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_outputFilePlayerPtr && _outputFilePlaying)
        {
            if (_outputFilePlayerPtr->Frequency() > highestNeeded)
            {
                highestNeeded = _outputFilePlayerPtr->Frequency();
            }
        }
    }

    return highestNeeded;
}

int32_t
Channel::CreateChannel(Channel*& channel,
                       int32_t channelId,
                       uint32_t instanceId)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
                 "Channel::CreateChannel(channelId=%d, instanceId=%d)",
                 channelId, instanceId);

    channel = new Channel(channelId, instanceId);
    if (channel == NULL)
    {
        WEBRTC_TRACE(kTraceMemory, kTraceVoice,
                     VoEId(instanceId,channelId),
                     "Channel::CreateChannel() unable to allocate memory for"
                     " channel");
        return -1;
    }
    return 0;
}

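// Note: a channel created by CreateChannel() is not ready for use on its own;
// judging from the sanity check at the top of Init(), callers are expected to
// invoke SetEngineInformation() and then Init() before using the channel.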
void
Channel::PlayNotification(int32_t id, uint32_t durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet.
}

void
Channel::RecordNotification(int32_t id, uint32_t durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet.
}

void
Channel::PlayFileEnded(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayFileEnded(id=%d)", id);

    if (id == _inputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _inputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => input file player module is"
                     " shutdown");
    }
    else if (id == _outputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _outputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => output file player module is"
                     " shutdown");
    }
}

void
Channel::RecordFileEnded(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded(id=%d)", id);

    assert(id == _outputFileRecorderId);

    CriticalSectionScoped cs(&_fileCritSect);

    _outputFileRecording = false;
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                 VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded() => output file recorder module is"
                 " shutdown");
}

Channel::Channel(int32_t channelId,
                 uint32_t instanceId) :
    _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _instanceId(instanceId),
    _channelId(channelId),
    _audioCodingModule(*AudioCodingModule::Create(
        VoEModuleId(instanceId, channelId))),
    _rtpDumpIn(*RtpDump::CreateRtpDump()),
    _rtpDumpOut(*RtpDump::CreateRtpDump()),
    _outputAudioLevel(),
    _externalTransport(false),
    _inputFilePlayerPtr(NULL),
    _outputFilePlayerPtr(NULL),
    _outputFileRecorderPtr(NULL),
    // Avoid conflicts with other channels by adding 1024 - 1026;
    // we will never use as many as 1024 channels.
    _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
    _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
    _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
    _inputFilePlaying(false),
    _outputFilePlaying(false),
    _outputFileRecording(false),
    _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
    _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
    _inputExternalMedia(false),
    _outputExternalMedia(false),
    _inputExternalMediaCallbackPtr(NULL),
    _outputExternalMediaCallbackPtr(NULL),
    _encryptionRTPBufferPtr(NULL),
    _decryptionRTPBufferPtr(NULL),
    _encryptionRTCPBufferPtr(NULL),
    _decryptionRTCPBufferPtr(NULL),
    _timeStamp(0), // This is just an offset; the RTP module will add its own random offset.
    _sendTelephoneEventPayloadType(106),
    playout_timestamp_rtp_(0),
    playout_timestamp_rtcp_(0),
    _numberOfDiscardedPackets(0),
    _engineStatisticsPtr(NULL),
    _outputMixerPtr(NULL),
    _transmitMixerPtr(NULL),
    _moduleProcessThreadPtr(NULL),
    _audioDeviceModulePtr(NULL),
    _voiceEngineObserverPtr(NULL),
    _callbackCritSectPtr(NULL),
    _transportPtr(NULL),
    _encryptionPtr(NULL),
    _rtpAudioProc(NULL),
    _rxAudioProcessingModulePtr(NULL),
    _rxVadObserverPtr(NULL),
    _oldVadDecision(-1),
    _sendFrameType(0),
    _rtpObserverPtr(NULL),
    _rtcpObserverPtr(NULL),
    _outputIsOnHold(false),
    _externalPlayout(false),
    _externalMixing(false),
    _inputIsOnHold(false),
    _playing(false),
    _sending(false),
    _receiving(false),
    _mixFileWithMicrophone(false),
    _rtpObserver(false),
    _rtcpObserver(false),
    _mute(false),
    _panLeft(1.0f),
    _panRight(1.0f),
    _outputGain(1.0f),
    _encrypting(false),
    _decrypting(false),
    _playOutbandDtmfEvent(false),
    _playInbandDtmfEvent(false),
    _extraPayloadType(0),
    _insertExtraRTPPacket(false),
    _extraMarkerBit(false),
    _lastLocalTimeStamp(0),
    _lastRemoteTimeStamp(0),
    _lastPayloadType(0),
    _includeAudioLevelIndication(false),
    _rtpPacketTimedOut(false),
    _rtpPacketTimeOutIsEnabled(false),
    _rtpTimeOutSeconds(0),
    _connectionObserver(false),
    _connectionObserverPtr(NULL),
    _countAliveDetections(0),
    _countDeadDetections(0),
    _outputSpeechType(AudioFrame::kNormalSpeech),
    _average_jitter_buffer_delay_us(0),
    least_required_delay_ms_(0),
    _previousTimestamp(0),
    _recPacketDelayMs(20),
    _RxVadDetection(false),
    _rxApmIsEnabled(false),
    _rxAgcIsEnabled(false),
    _rxNsIsEnabled(false)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Channel() - ctor");
    _inbandDtmfQueue.ResetDtmf();
    _inbandDtmfGenerator.Init();
    _outputAudioLevel.Clear();

    RtpRtcp::Configuration configuration;
    configuration.id = VoEModuleId(instanceId, channelId);
    configuration.audio = true;
    configuration.incoming_data = this;
    configuration.incoming_messages = this;
    configuration.outgoing_transport = this;
    configuration.rtcp_feedback = this;
    configuration.audio_messages = this;

    _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));

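    // Because |this| is installed above as the configuration's incoming
    // data/message handler, outgoing transport, RTCP feedback target and
    // audio-message receiver, the RTP/RTCP module created here calls back
    // into the Channel methods defined earlier in this file (for example
    // SendPacket(), SendRTCPPacket(), OnInitializeDecoder() and
    // OnPlayTelephoneEvent()).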
    // Create far end AudioProcessing Module
    _rxAudioProcessingModulePtr = AudioProcessing::Create(
        VoEModuleId(instanceId, channelId));
}

Channel::~Channel()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::~Channel() - dtor");

    if (_outputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
    }
    if (_inputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kRecordingPerChannel);
    }
    StopSend();
    StopPlayout();

    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_inputFilePlayerPtr)
        {
            _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _inputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
            _inputFilePlayerPtr = NULL;
        }
        if (_outputFilePlayerPtr)
        {
            _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _outputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
            _outputFilePlayerPtr = NULL;
        }
        if (_outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
            _outputFileRecorderPtr->StopRecording();
            FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
            _outputFileRecorderPtr = NULL;
        }
    }

    // The order in which to safely shut down the modules in a channel is:
    // 1. De-register callbacks in modules
    // 2. De-register modules in process thread
    // 3. Destroy modules
    if (_audioCodingModule.RegisterTransportCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register transport callback"
                     " (Audio coding module)");
    }
    if (_audioCodingModule.RegisterVADCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register VAD callback"
                     " (Audio coding module)");
    }
    // De-register modules in process thread
    if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to deregister RTP/RTCP module");
    }

    // Destroy modules
    AudioCodingModule::Destroy(&_audioCodingModule);
    if (_rxAudioProcessingModulePtr != NULL)
    {
        AudioProcessing::Destroy(_rxAudioProcessingModulePtr); // far end APM
        _rxAudioProcessingModulePtr = NULL;
    }

    // End of modules shutdown

    // Delete other objects
    RtpDump::DestroyRtpDump(&_rtpDumpIn);
    RtpDump::DestroyRtpDump(&_rtpDumpOut);
    delete [] _encryptionRTPBufferPtr;
    delete [] _decryptionRTPBufferPtr;
    delete [] _encryptionRTCPBufferPtr;
    delete [] _decryptionRTCPBufferPtr;
    delete &_callbackCritSect;
    delete &_fileCritSect;
}

int32_t
Channel::Init()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Init()");

    // --- Initial sanity

    if ((_engineStatisticsPtr == NULL) ||
        (_moduleProcessThreadPtr == NULL))
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::Init() must call SetEngineInformation() first");
        return -1;
    }

    // --- Add modules to process thread (for periodic scheduling)

    const bool processThreadFail =
        ((_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0) ||
         false);
    if (processThreadFail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() modules not registered");
        return -1;
    }
    // --- ACM initialization

    if ((_audioCodingModule.InitializeReceiver() == -1) ||
#ifdef WEBRTC_CODEC_AVT
        // out-of-band Dtmf tones are played out by default
        (_audioCodingModule.SetDtmfPlayoutStatus(true) == -1) ||
#endif
        (_audioCodingModule.InitializeSender() == -1))
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "Channel::Init() unable to initialize the ACM - 1");
        return -1;
    }

    // --- RTP/RTCP module initialization

    // Ensure that RTCP is enabled by default for the created channel.
    // Note that the module will keep generating RTCP until it is explicitly
    // disabled by the user.
    // After StopListen() (when no sockets exist), RTCP packets will no longer
    // be transmitted since the Transport object will then be invalid.

    const bool rtpRtcpFail =
        ((_rtpRtcpModule->SetTelephoneEventForwardToDecoder(true) == -1) ||
         // RTCP is enabled by default
         (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1));
    if (rtpRtcpFail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
            "Channel::Init() RTP/RTCP module not initialized");
        return -1;
    }

    // --- Register all permanent callbacks
    const bool fail =
        (_audioCodingModule.RegisterTransportCallback(this) == -1) ||
        (_audioCodingModule.RegisterVADCallback(this) == -1);

    if (fail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() callbacks not registered");
        return -1;
    }

    // --- Register all supported codecs to the receiving side of the
    //     RTP/RTCP module

    CodecInst codec;
    const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

    for (int idx = 0; idx < nSupportedCodecs; idx++)
    {
        // Open up the RTP/RTCP receiver for all supported codecs
        if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
            (_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() unable to register %s (%d/%d/%d/%d) "
                         "to RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }
        else
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() %s (%d/%d/%d/%d) has been added to "
                         "the RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }

        // Ensure that PCMU is used as default codec on the sending side
        if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
        {
            SetSendCodec(codec);
        }

        // Register default PT for outband 'telephone-event'
        if (!STR_CASE_CMP(codec.plname, "telephone-event"))
        {
            if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register outband "
                             "'telephone-event' (%d/%d) correctly",
                             codec.pltype, codec.plfreq);
            }
        }

        if (!STR_CASE_CMP(codec.plname, "CN"))
        {
            if ((_audioCodingModule.RegisterSendCodec(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1) ||
                (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register CN (%d/%d) "
                             "correctly - 1",
                             codec.pltype, codec.plfreq);
            }
        }
#ifdef WEBRTC_CODEC_RED
        // Register RED to the receiving side of the ACM.
        // We will not receive an OnInitializeDecoder() callback for RED.
        if (!STR_CASE_CMP(codec.plname, "RED"))
        {
            if (_audioCodingModule.RegisterReceiveCodec(codec) == -1)
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register RED (%d/%d) "
                             "correctly",
                             codec.pltype, codec.plfreq);
            }
        }
#endif
    }

    // Initialize the far end AP module
    // Using 8 kHz as initial Fs, the same as in transmission. Might be
    // changed at the first receiving audio.
    if (_rxAudioProcessingModulePtr == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_NO_MEMORY, kTraceCritical,
            "Channel::Init() failed to create the far-end AudioProcessing"
            " module");
        return -1;
    }

    if (_rxAudioProcessingModulePtr->set_sample_rate_hz(8000))
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the sample rate to 8K for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->set_num_channels(1, 1) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_SOUNDCARD_ERROR, kTraceWarning,
            "Init() failed to set channels for the primary audio stream");
    }

    if (_rxAudioProcessingModulePtr->high_pass_filter()->Enable(
        WEBRTC_VOICE_ENGINE_RX_HP_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the high-pass filter for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(
        (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction level for far-end"
            " AP module");
    }
    if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(
        WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction state for far-end"
            " AP module");
    }

    if (_rxAudioProcessingModulePtr->gain_control()->set_mode(
        (GainControl::Mode)WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC mode for far-end AP module");
    }
    if (_rxAudioProcessingModulePtr->gain_control()->Enable(
        WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC state for far-end AP module");
    }

    return 0;
}

int32_t
Channel::SetEngineInformation(Statistics& engineStatistics,
                              OutputMixer& outputMixer,
                              voe::TransmitMixer& transmitMixer,
                              ProcessThread& moduleProcessThread,
                              AudioDeviceModule& audioDeviceModule,
                              VoiceEngineObserver* voiceEngineObserver,
                              CriticalSectionWrapper* callbackCritSect)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetEngineInformation()");
    _engineStatisticsPtr = &engineStatistics;
    _outputMixerPtr = &outputMixer;
    _transmitMixerPtr = &transmitMixer;
    _moduleProcessThreadPtr = &moduleProcessThread;
    _audioDeviceModulePtr = &audioDeviceModule;
    _voiceEngineObserverPtr = voiceEngineObserver;
    _callbackCritSectPtr = callbackCritSect;
    return 0;
}

int32_t
Channel::UpdateLocalTimeStamp()
{

    _timeStamp += _audioFrame.samples_per_channel_;
    return 0;
}

int32_t
Channel::StartPlayout()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartPlayout()");
    if (_playing)
    {
        return 0;
    }

    if (!_externalMixing) {
        // Add participant as candidates for mixing.
        if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
                "StartPlayout() failed to add participant to mixer");
            return -1;
        }
    }

    _playing = true;

    if (RegisterFilePlayingToMixer() != 0)
        return -1;

    return 0;
}

int32_t
Channel::StopPlayout()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StopPlayout()");
    if (!_playing)
    {
        return 0;
    }

    if (!_externalMixing) {
        // Remove participant as candidates for mixing
        if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
                "StopPlayout() failed to remove participant from mixer");
            return -1;
        }
    }

    _playing = false;
    _outputAudioLevel.Clear();

    return 0;
}

int32_t
Channel::StartSend()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartSend()");
    {
        // A lock is needed because |_sending| can be accessed or modified by
        // another thread at the same time.
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_sending)
        {
            return 0;
        }
        _sending = true;
    }

    if (_rtpRtcpModule->SetSendingStatus(true) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
            "StartSend() RTP/RTCP failed to start sending");
        CriticalSectionScoped cs(&_callbackCritSect);
        _sending = false;
        return -1;
    }

    return 0;
}

int32_t
Channel::StopSend()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StopSend()");
    {
        // A lock is needed because |_sending| can be accessed or modified by
        // another thread at the same time.
        CriticalSectionScoped cs(&_callbackCritSect);

        if (!_sending)
        {
            return 0;
        }
        _sending = false;
    }

niklase@google.com470e71d2011-07-07 08:21:25 +00001430    // Reset the sending SSRC and sequence number and trigger direct transmission
1431 // of RTCP BYE
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001432 if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
1433 _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001434 {
1435 _engineStatisticsPtr->SetLastError(
1436 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1437            "StopSend() RTP/RTCP failed to stop sending");
1438 }
1439
niklase@google.com470e71d2011-07-07 08:21:25 +00001440 return 0;
1441}
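// Usage sketch (illustrative only; assumes a send codec and transport are
// already registered on the hypothetical |channel|). |_sending| is guarded
// internally, so no external lock is needed around these calls.
//
//   if (channel->StartSend() != 0) {
//     // RTP/RTCP module refused to start; VE_RTP_RTCP_MODULE_ERROR was set.
//   }
//   ...
//   channel->StopSend();  // resets SSRC/sequence number and sends RTCP BYE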
1442
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001443int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001444Channel::StartReceiving()
1445{
1446 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1447 "Channel::StartReceiving()");
1448 if (_receiving)
1449 {
1450 return 0;
1451 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001452 _receiving = true;
1453 _numberOfDiscardedPackets = 0;
1454 return 0;
1455}
1456
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001457int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001458Channel::StopReceiving()
1459{
1460 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1461 "Channel::StopReceiving()");
1462 if (!_receiving)
1463 {
1464 return 0;
1465 }
pwestin@webrtc.org684f0572013-03-13 23:20:57 +00001466
henrika@webrtc.orgaf71f0e2011-12-05 07:02:22 +00001467 // Recover DTMF detection status.
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001468 int32_t ret = _rtpRtcpModule->SetTelephoneEventForwardToDecoder(true);
henrika@webrtc.orgaf71f0e2011-12-05 07:02:22 +00001469 if (ret != 0) {
1470 _engineStatisticsPtr->SetLastError(
1471 VE_INVALID_OPERATION, kTraceWarning,
1472 "StopReceiving() failed to restore telephone-event status.");
1473 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001474 RegisterReceiveCodecsToRTPModule();
1475 _receiving = false;
1476 return 0;
1477}
1478
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001479int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001480Channel::SetNetEQPlayoutMode(NetEqModes mode)
1481{
1482 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1483 "Channel::SetNetEQPlayoutMode()");
1484 AudioPlayoutMode playoutMode(voice);
1485 switch (mode)
1486 {
1487 case kNetEqDefault:
1488 playoutMode = voice;
1489 break;
1490 case kNetEqStreaming:
1491 playoutMode = streaming;
1492 break;
1493 case kNetEqFax:
1494 playoutMode = fax;
1495 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001496 case kNetEqOff:
1497 playoutMode = off;
1498 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00001499 }
1500 if (_audioCodingModule.SetPlayoutMode(playoutMode) != 0)
1501 {
1502 _engineStatisticsPtr->SetLastError(
1503 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1504 "SetNetEQPlayoutMode() failed to set playout mode");
1505 return -1;
1506 }
1507 return 0;
1508}
1509
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001510int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001511Channel::GetNetEQPlayoutMode(NetEqModes& mode)
1512{
1513 const AudioPlayoutMode playoutMode = _audioCodingModule.PlayoutMode();
1514 switch (playoutMode)
1515 {
1516 case voice:
1517 mode = kNetEqDefault;
1518 break;
1519 case streaming:
1520 mode = kNetEqStreaming;
1521 break;
1522 case fax:
1523 mode = kNetEqFax;
1524 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001525 case off:
1526 mode = kNetEqOff;
niklase@google.com470e71d2011-07-07 08:21:25 +00001527 }
1528 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
1529 VoEId(_instanceId,_channelId),
1530 "Channel::GetNetEQPlayoutMode() => mode=%u", mode);
1531 return 0;
1532}
1533
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001534int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001535Channel::SetOnHoldStatus(bool enable, OnHoldModes mode)
1536{
1537 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1538 "Channel::SetOnHoldStatus()");
1539 if (mode == kHoldSendAndPlay)
1540 {
1541 _outputIsOnHold = enable;
1542 _inputIsOnHold = enable;
1543 }
1544 else if (mode == kHoldPlayOnly)
1545 {
1546 _outputIsOnHold = enable;
1547 }
1548    else if (mode == kHoldSendOnly)
1549 {
1550 _inputIsOnHold = enable;
1551 }
1552 return 0;
1553}
1554
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001555int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001556Channel::GetOnHoldStatus(bool& enabled, OnHoldModes& mode)
1557{
1558 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1559 "Channel::GetOnHoldStatus()");
1560 enabled = (_outputIsOnHold || _inputIsOnHold);
1561 if (_outputIsOnHold && _inputIsOnHold)
1562 {
1563 mode = kHoldSendAndPlay;
1564 }
1565 else if (_outputIsOnHold && !_inputIsOnHold)
1566 {
1567 mode = kHoldPlayOnly;
1568 }
1569 else if (!_outputIsOnHold && _inputIsOnHold)
1570 {
1571 mode = kHoldSendOnly;
1572 }
1573 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1574 "Channel::GetOnHoldStatus() => enabled=%d, mode=%d",
1575 enabled, mode);
1576 return 0;
1577}
1578
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001579int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001580Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
1581{
1582 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1583 "Channel::RegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001584 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001585
1586 if (_voiceEngineObserverPtr)
1587 {
1588 _engineStatisticsPtr->SetLastError(
1589 VE_INVALID_OPERATION, kTraceError,
1590 "RegisterVoiceEngineObserver() observer already enabled");
1591 return -1;
1592 }
1593 _voiceEngineObserverPtr = &observer;
1594 return 0;
1595}
1596
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001597int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001598Channel::DeRegisterVoiceEngineObserver()
1599{
1600 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1601 "Channel::DeRegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001602 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001603
1604 if (!_voiceEngineObserverPtr)
1605 {
1606 _engineStatisticsPtr->SetLastError(
1607 VE_INVALID_OPERATION, kTraceWarning,
1608 "DeRegisterVoiceEngineObserver() observer already disabled");
1609 return 0;
1610 }
1611 _voiceEngineObserverPtr = NULL;
1612 return 0;
1613}
1614
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001615int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001616Channel::GetSendCodec(CodecInst& codec)
1617{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001618 return (_audioCodingModule.SendCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001619}
1620
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001621int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001622Channel::GetRecCodec(CodecInst& codec)
1623{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001624 return (_audioCodingModule.ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001625}
1626
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001627int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001628Channel::SetSendCodec(const CodecInst& codec)
1629{
1630 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1631 "Channel::SetSendCodec()");
1632
1633 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1634 {
1635 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1636 "SetSendCodec() failed to register codec to ACM");
1637 return -1;
1638 }
1639
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001640 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001641 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001642 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1643 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001644 {
1645 WEBRTC_TRACE(
1646 kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1647 "SetSendCodec() failed to register codec to"
1648 " RTP/RTCP module");
1649 return -1;
1650 }
1651 }
1652
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001653 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001654 {
1655 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1656 "SetSendCodec() failed to set audio packet size");
1657 return -1;
1658 }
1659
1660 return 0;
1661}
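// Usage sketch (illustrative only; the payload type and packet size are
// example values, not recommendations). SetSendCodec() registers the codec in
// both the ACM and the RTP/RTCP module in one call.
//
//   webrtc::CodecInst isac = {103, "ISAC", 16000, 480, 1, 32000};
//   if (channel->SetSendCodec(isac) != 0) {
//     // Registration failed in the ACM or in the RTP/RTCP module.
//   }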
1662
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001663int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001664Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
1665{
1666 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1667 "Channel::SetVADStatus(mode=%d)", mode);
1668 // To disable VAD, DTX must be disabled too
1669 disableDTX = ((enableVAD == false) ? true : disableDTX);
1670 if (_audioCodingModule.SetVAD(!disableDTX, enableVAD, mode) != 0)
1671 {
1672 _engineStatisticsPtr->SetLastError(
1673 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1674 "SetVADStatus() failed to set VAD");
1675 return -1;
1676 }
1677 return 0;
1678}
1679
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001680int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001681Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
1682{
1683 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1684 "Channel::GetVADStatus");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001685 if (_audioCodingModule.VAD(&disabledDTX, &enabledVAD, &mode) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001686 {
1687 _engineStatisticsPtr->SetLastError(
1688 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1689 "GetVADStatus() failed to get VAD status");
1690 return -1;
1691 }
1692 disabledDTX = !disabledDTX;
1693 return 0;
1694}
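// Usage sketch (illustrative only; VADNormal is assumed to be one of the
// ACMVADMode values). Disabling VAD forces DTX off as well, per the logic in
// SetVADStatus() above.
//
//   channel->SetVADStatus(true, webrtc::VADNormal, false);  // VAD + DTX on
//   bool vadEnabled = false;
//   bool dtxDisabled = false;
//   webrtc::ACMVADMode vadMode;
//   channel->GetVADStatus(vadEnabled, vadMode, dtxDisabled);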
1695
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001696int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001697Channel::SetRecPayloadType(const CodecInst& codec)
1698{
1699 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1700 "Channel::SetRecPayloadType()");
1701
1702 if (_playing)
1703 {
1704 _engineStatisticsPtr->SetLastError(
1705 VE_ALREADY_PLAYING, kTraceError,
1706 "SetRecPayloadType() unable to set PT while playing");
1707 return -1;
1708 }
1709 if (_receiving)
1710 {
1711 _engineStatisticsPtr->SetLastError(
1712 VE_ALREADY_LISTENING, kTraceError,
1713 "SetRecPayloadType() unable to set PT while listening");
1714 return -1;
1715 }
1716
1717 if (codec.pltype == -1)
1718 {
1719 // De-register the selected codec (RTP/RTCP module and ACM)
1720
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001721 int8_t pltype(-1);
niklase@google.com470e71d2011-07-07 08:21:25 +00001722 CodecInst rxCodec = codec;
1723
1724 // Get payload type for the given codec
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001725 _rtpRtcpModule->ReceivePayloadType(rxCodec, &pltype);
niklase@google.com470e71d2011-07-07 08:21:25 +00001726 rxCodec.pltype = pltype;
1727
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001728 if (_rtpRtcpModule->DeRegisterReceivePayload(pltype) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001729 {
1730 _engineStatisticsPtr->SetLastError(
1731 VE_RTP_RTCP_MODULE_ERROR,
1732 kTraceError,
1733 "SetRecPayloadType() RTP/RTCP-module deregistration "
1734 "failed");
1735 return -1;
1736 }
1737 if (_audioCodingModule.UnregisterReceiveCodec(rxCodec.pltype) != 0)
1738 {
1739 _engineStatisticsPtr->SetLastError(
1740 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1741 "SetRecPayloadType() ACM deregistration failed - 1");
1742 return -1;
1743 }
1744 return 0;
1745 }
1746
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001747 if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001748 {
1749 // First attempt to register failed => de-register and try again
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001750 _rtpRtcpModule->DeRegisterReceivePayload(codec.pltype);
1751 if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001752 {
1753 _engineStatisticsPtr->SetLastError(
1754 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1755 "SetRecPayloadType() RTP/RTCP-module registration failed");
1756 return -1;
1757 }
1758 }
1759 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1760 {
1761 _audioCodingModule.UnregisterReceiveCodec(codec.pltype);
1762 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1763 {
1764 _engineStatisticsPtr->SetLastError(
1765 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1766 "SetRecPayloadType() ACM registration failed - 1");
1767 return -1;
1768 }
1769 }
1770 return 0;
1771}
1772
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001773int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001774Channel::GetRecPayloadType(CodecInst& codec)
1775{
1776 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1777 "Channel::GetRecPayloadType()");
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001778 int8_t payloadType(-1);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001779 if (_rtpRtcpModule->ReceivePayloadType(codec, &payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001780 {
1781 _engineStatisticsPtr->SetLastError(
henrika@webrtc.org37198002012-06-18 11:00:12 +00001782 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
niklase@google.com470e71d2011-07-07 08:21:25 +00001783 "GetRecPayloadType() failed to retrieve RX payload type");
1784 return -1;
1785 }
1786 codec.pltype = payloadType;
1787 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1788 "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
1789 return 0;
1790}
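// Usage sketch (illustrative only; payload type 104 is an example dynamic
// value). Setting pltype to -1 de-registers the mapping, as handled above.
// The channel must be neither playing nor receiving when this is called.
//
//   webrtc::CodecInst isacSwb = {104, "ISAC", 32000, 960, 1, 56000};
//   channel->SetRecPayloadType(isacSwb);  // register receive payload type
//   isacSwb.pltype = -1;
//   channel->SetRecPayloadType(isacSwb);  // de-register it again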
1791
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001792int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001793Channel::SetAMREncFormat(AmrMode mode)
1794{
1795 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1796 "Channel::SetAMREncFormat()");
1797
1798 // ACM doesn't support AMR
1799 return -1;
1800}
1801
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001802int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001803Channel::SetAMRDecFormat(AmrMode mode)
1804{
1805 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1806 "Channel::SetAMRDecFormat()");
1807
1808 // ACM doesn't support AMR
1809 return -1;
1810}
1811
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001812int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001813Channel::SetAMRWbEncFormat(AmrMode mode)
1814{
1815 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1816 "Channel::SetAMRWbEncFormat()");
1817
1818 // ACM doesn't support AMR
1819 return -1;
1820
1821}
1822
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001823int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001824Channel::SetAMRWbDecFormat(AmrMode mode)
1825{
1826 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1827 "Channel::SetAMRWbDecFormat()");
1828
1829 // ACM doesn't support AMR
1830 return -1;
1831}
1832
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001833int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001834Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
1835{
1836 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1837 "Channel::SetSendCNPayloadType()");
1838
1839 CodecInst codec;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001840 int32_t samplingFreqHz(-1);
tina.legrand@webrtc.org45175852012-06-01 09:27:35 +00001841 const int kMono = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001842 if (frequency == kFreq32000Hz)
1843 samplingFreqHz = 32000;
1844 else if (frequency == kFreq16000Hz)
1845 samplingFreqHz = 16000;
1846
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001847 if (_audioCodingModule.Codec("CN", &codec, samplingFreqHz, kMono) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001848 {
1849 _engineStatisticsPtr->SetLastError(
1850 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1851 "SetSendCNPayloadType() failed to retrieve default CN codec "
1852 "settings");
1853 return -1;
1854 }
1855
1856 // Modify the payload type (must be set to dynamic range)
1857 codec.pltype = type;
1858
1859 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1860 {
1861 _engineStatisticsPtr->SetLastError(
1862 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1863 "SetSendCNPayloadType() failed to register CN to ACM");
1864 return -1;
1865 }
1866
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001867 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001868 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001869 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1870 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001871 {
1872 _engineStatisticsPtr->SetLastError(
1873 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1874 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1875 "module");
1876 return -1;
1877 }
1878 }
1879 return 0;
1880}
1881
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001882int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001883Channel::SetISACInitTargetRate(int rateBps, bool useFixedFrameSize)
1884{
1885 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1886 "Channel::SetISACInitTargetRate()");
1887
1888 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001889 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001890 {
1891 _engineStatisticsPtr->SetLastError(
1892 VE_CODEC_ERROR, kTraceError,
1893 "SetISACInitTargetRate() failed to retrieve send codec");
1894 return -1;
1895 }
1896 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1897 {
1898        // This API is only valid if iSAC is set up to run in channel-adaptive
1899 // mode.
1900 // We do not validate the adaptive mode here. It is done later in the
1901 // ConfigISACBandwidthEstimator() API.
1902 _engineStatisticsPtr->SetLastError(
1903 VE_CODEC_ERROR, kTraceError,
1904 "SetISACInitTargetRate() send codec is not iSAC");
1905 return -1;
1906 }
1907
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001908 uint8_t initFrameSizeMsec(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00001909 if (16000 == sendCodec.plfreq)
1910 {
1911        // Note that 0 is a valid rate and corresponds to "use default".
1912 if ((rateBps != 0 &&
1913 rateBps < kVoiceEngineMinIsacInitTargetRateBpsWb) ||
1914 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsWb))
1915 {
1916 _engineStatisticsPtr->SetLastError(
1917 VE_INVALID_ARGUMENT, kTraceError,
1918 "SetISACInitTargetRate() invalid target rate - 1");
1919 return -1;
1920 }
1921 // 30 or 60ms
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001922 initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 16);
niklase@google.com470e71d2011-07-07 08:21:25 +00001923 }
1924 else if (32000 == sendCodec.plfreq)
1925 {
1926 if ((rateBps != 0 &&
1927 rateBps < kVoiceEngineMinIsacInitTargetRateBpsSwb) ||
1928 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsSwb))
1929 {
1930 _engineStatisticsPtr->SetLastError(
1931 VE_INVALID_ARGUMENT, kTraceError,
1932 "SetISACInitTargetRate() invalid target rate - 2");
1933 return -1;
1934 }
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001935 initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 32); // 30ms
niklase@google.com470e71d2011-07-07 08:21:25 +00001936 }
1937
1938 if (_audioCodingModule.ConfigISACBandwidthEstimator(
1939 initFrameSizeMsec, rateBps, useFixedFrameSize) == -1)
1940 {
1941 _engineStatisticsPtr->SetLastError(
1942 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1943 "SetISACInitTargetRate() iSAC BWE config failed");
1944 return -1;
1945 }
1946
1947 return 0;
1948}
1949
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001950int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001951Channel::SetISACMaxRate(int rateBps)
1952{
1953 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1954 "Channel::SetISACMaxRate()");
1955
1956 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001957 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001958 {
1959 _engineStatisticsPtr->SetLastError(
1960 VE_CODEC_ERROR, kTraceError,
1961 "SetISACMaxRate() failed to retrieve send codec");
1962 return -1;
1963 }
1964 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1965 {
1966 // This API is only valid if iSAC is selected as sending codec.
1967 _engineStatisticsPtr->SetLastError(
1968 VE_CODEC_ERROR, kTraceError,
1969 "SetISACMaxRate() send codec is not iSAC");
1970 return -1;
1971 }
1972 if (16000 == sendCodec.plfreq)
1973 {
1974 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsWb) ||
1975 (rateBps > kVoiceEngineMaxIsacMaxRateBpsWb))
1976 {
1977 _engineStatisticsPtr->SetLastError(
1978 VE_INVALID_ARGUMENT, kTraceError,
1979 "SetISACMaxRate() invalid max rate - 1");
1980 return -1;
1981 }
1982 }
1983 else if (32000 == sendCodec.plfreq)
1984 {
1985 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsSwb) ||
1986 (rateBps > kVoiceEngineMaxIsacMaxRateBpsSwb))
1987 {
1988 _engineStatisticsPtr->SetLastError(
1989 VE_INVALID_ARGUMENT, kTraceError,
1990 "SetISACMaxRate() invalid max rate - 2");
1991 return -1;
1992 }
1993 }
1994 if (_sending)
1995 {
1996 _engineStatisticsPtr->SetLastError(
1997 VE_SENDING, kTraceError,
1998 "SetISACMaxRate() unable to set max rate while sending");
1999 return -1;
2000 }
2001
2002 // Set the maximum instantaneous rate of iSAC (works for both adaptive
2003 // and non-adaptive mode)
2004 if (_audioCodingModule.SetISACMaxRate(rateBps) == -1)
2005 {
2006 _engineStatisticsPtr->SetLastError(
2007 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2008 "SetISACMaxRate() failed to set max rate");
2009 return -1;
2010 }
2011
2012 return 0;
2013}
2014
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002015int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002016Channel::SetISACMaxPayloadSize(int sizeBytes)
2017{
2018 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2019 "Channel::SetISACMaxPayloadSize()");
2020 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00002021 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00002022 {
2023 _engineStatisticsPtr->SetLastError(
2024 VE_CODEC_ERROR, kTraceError,
2025 "SetISACMaxPayloadSize() failed to retrieve send codec");
2026 return -1;
2027 }
2028 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
2029 {
2030 _engineStatisticsPtr->SetLastError(
2031 VE_CODEC_ERROR, kTraceError,
2032 "SetISACMaxPayloadSize() send codec is not iSAC");
2033 return -1;
2034 }
2035 if (16000 == sendCodec.plfreq)
2036 {
2037 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesWb) ||
2038 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesWb))
2039 {
2040 _engineStatisticsPtr->SetLastError(
2041 VE_INVALID_ARGUMENT, kTraceError,
2042 "SetISACMaxPayloadSize() invalid max payload - 1");
2043 return -1;
2044 }
2045 }
2046 else if (32000 == sendCodec.plfreq)
2047 {
2048 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesSwb) ||
2049 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb))
2050 {
2051 _engineStatisticsPtr->SetLastError(
2052 VE_INVALID_ARGUMENT, kTraceError,
2053 "SetISACMaxPayloadSize() invalid max payload - 2");
2054 return -1;
2055 }
2056 }
2057 if (_sending)
2058 {
2059 _engineStatisticsPtr->SetLastError(
2060 VE_SENDING, kTraceError,
2061            "SetISACMaxPayloadSize() unable to set max payload size while sending");
2062 return -1;
2063 }
2064
2065 if (_audioCodingModule.SetISACMaxPayloadSize(sizeBytes) == -1)
2066 {
2067 _engineStatisticsPtr->SetLastError(
2068 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2069 "SetISACMaxPayloadSize() failed to set max payload size");
2070 return -1;
2071 }
2072 return 0;
2073}
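// Usage sketch (illustrative only; the numbers are example values inside the
// wideband ranges checked above, not tuned settings). All three calls require
// iSAC to be the active send codec, and the two max-limit calls must be made
// before sending starts.
//
//   channel->SetISACInitTargetRate(20000, false);  // initial BWE target, bps
//   channel->SetISACMaxRate(32000);                // max instantaneous rate, bps
//   channel->SetISACMaxPayloadSize(400);           // max payload size, bytes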
2074
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002075int32_t Channel::RegisterExternalTransport(Transport& transport)
niklase@google.com470e71d2011-07-07 08:21:25 +00002076{
2077 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2078 "Channel::RegisterExternalTransport()");
2079
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002080 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002081
niklase@google.com470e71d2011-07-07 08:21:25 +00002082 if (_externalTransport)
2083 {
2084 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
2085 kTraceError,
2086 "RegisterExternalTransport() external transport already enabled");
2087 return -1;
2088 }
2089 _externalTransport = true;
2090 _transportPtr = &transport;
2091 return 0;
2092}
2093
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002094int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002095Channel::DeRegisterExternalTransport()
2096{
2097 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2098 "Channel::DeRegisterExternalTransport()");
2099
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002100 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.org83661f52011-11-25 10:58:15 +00002101
niklase@google.com470e71d2011-07-07 08:21:25 +00002102 if (!_transportPtr)
2103 {
2104 _engineStatisticsPtr->SetLastError(
2105 VE_INVALID_OPERATION, kTraceWarning,
2106 "DeRegisterExternalTransport() external transport already "
2107 "disabled");
2108 return 0;
2109 }
2110 _externalTransport = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002111 _transportPtr = NULL;
2112 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2113                 "DeRegisterExternalTransport() external transport is now disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002114 return 0;
2115}
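// Usage sketch (illustrative only). An external transport implements
// webrtc::Transport; the SendPacket()/SendRTCPPacket() signatures below are
// assumed to match common_types.h of this code base, and SendOverMySocket()
// is a hypothetical helper.
//
//   class MyTransport : public webrtc::Transport {
//    public:
//     virtual int SendPacket(int channel, const void* data, int len) {
//       return SendOverMySocket(data, len);
//     }
//     virtual int SendRTCPPacket(int channel, const void* data, int len) {
//       return SendOverMySocket(data, len);
//     }
//   };
//
//   MyTransport transport;
//   channel->RegisterExternalTransport(transport);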
2116
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002117int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002118 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2119 "Channel::ReceivedRTPPacket()");
2120
2121 // Store playout timestamp for the received RTP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002122 UpdatePlayoutTimestamp(false);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002123
2124 // Dump the RTP packet to a file (if RTP dump is enabled).
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002125 if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2126 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002127 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2128 VoEId(_instanceId,_channelId),
2129                 "Channel::ReceivedRTPPacket() RTP dump to input file failed");
2130 }
2131
2132 // Deliver RTP packet to RTP/RTCP module for parsing
2133 // The packet will be pushed back to the channel thru the
2134 // OnReceivedPayloadData callback so we don't push it to the ACM here
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002135 if (_rtpRtcpModule->IncomingPacket((const uint8_t*)data,
2136 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002137 _engineStatisticsPtr->SetLastError(
2138 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2139        "Channel::ReceivedRTPPacket() RTP packet is invalid");
2140 }
2141 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002142}
2143
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002144int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002145 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2146 "Channel::ReceivedRTCPPacket()");
2147 // Store playout timestamp for the received RTCP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002148 UpdatePlayoutTimestamp(true);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002149
2150 // Dump the RTCP packet to a file (if RTP dump is enabled).
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002151 if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2152 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002153 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2154 VoEId(_instanceId,_channelId),
2155                 "Channel::ReceivedRTCPPacket() RTCP dump to input file failed");
2156 }
2157
2158 // Deliver RTCP packet to RTP/RTCP module for parsing
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002159 if (_rtpRtcpModule->IncomingPacket((const uint8_t*)data,
2160 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002161 _engineStatisticsPtr->SetLastError(
2162 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2163        "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
2164 }
2165 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002166}
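// Usage sketch (illustrative only; the buffer, length and RTCP detection come
// from the application's own socket code). Incoming packets are handed to the
// channel, which forwards them to the RTP/RTCP module; decoded payload comes
// back through OnReceivedPayloadData().
//
//   const int8_t* packet = ...;   // hypothetical received datagram
//   int32_t packetLength = ...;
//   if (looksLikeRtcp) {
//     channel->ReceivedRTCPPacket(packet, packetLength);
//   } else {
//     channel->ReceivedRTPPacket(packet, packetLength);
//   }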
2167
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002168int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002169Channel::SetPacketTimeoutNotification(bool enable, int timeoutSeconds)
2170{
2171 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2172 "Channel::SetPacketTimeoutNotification()");
2173 if (enable)
2174 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002175 const uint32_t RTPtimeoutMS = 1000*timeoutSeconds;
2176 const uint32_t RTCPtimeoutMS = 0;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002177 _rtpRtcpModule->SetPacketTimeout(RTPtimeoutMS, RTCPtimeoutMS);
niklase@google.com470e71d2011-07-07 08:21:25 +00002178 _rtpPacketTimeOutIsEnabled = true;
2179 _rtpTimeOutSeconds = timeoutSeconds;
2180 }
2181 else
2182 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002183 _rtpRtcpModule->SetPacketTimeout(0, 0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002184 _rtpPacketTimeOutIsEnabled = false;
2185 _rtpTimeOutSeconds = 0;
2186 }
2187 return 0;
2188}
2189
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002190int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002191Channel::GetPacketTimeoutNotification(bool& enabled, int& timeoutSeconds)
2192{
2193 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2194 "Channel::GetPacketTimeoutNotification()");
2195 enabled = _rtpPacketTimeOutIsEnabled;
2196 if (enabled)
2197 {
2198 timeoutSeconds = _rtpTimeOutSeconds;
2199 }
2200 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
2201 "GetPacketTimeoutNotification() => enabled=%d,"
2202 " timeoutSeconds=%d",
2203 enabled, timeoutSeconds);
2204 return 0;
2205}
2206
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002207int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002208Channel::RegisterDeadOrAliveObserver(VoEConnectionObserver& observer)
2209{
2210 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2211 "Channel::RegisterDeadOrAliveObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002212 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002213
2214 if (_connectionObserverPtr)
2215 {
2216 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION, kTraceError,
2217 "RegisterDeadOrAliveObserver() observer already enabled");
2218 return -1;
2219 }
2220
2221 _connectionObserverPtr = &observer;
2222 _connectionObserver = true;
2223
2224 return 0;
2225}
2226
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002227int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002228Channel::DeRegisterDeadOrAliveObserver()
2229{
2230 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2231 "Channel::DeRegisterDeadOrAliveObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002232 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002233
2234 if (!_connectionObserverPtr)
2235 {
2236 _engineStatisticsPtr->SetLastError(
2237 VE_INVALID_OPERATION, kTraceWarning,
2238 "DeRegisterDeadOrAliveObserver() observer already disabled");
2239 return 0;
2240 }
2241
2242 _connectionObserver = false;
2243 _connectionObserverPtr = NULL;
2244
2245 return 0;
2246}
2247
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002248int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002249Channel::SetPeriodicDeadOrAliveStatus(bool enable, int sampleTimeSeconds)
2250{
2251 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2252 "Channel::SetPeriodicDeadOrAliveStatus()");
2253 if (!_connectionObserverPtr)
2254 {
2255 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
2256 "SetPeriodicDeadOrAliveStatus() connection observer has"
2257 " not been registered");
2258 }
2259 if (enable)
2260 {
2261 ResetDeadOrAliveCounters();
2262 }
2263 bool enabled(false);
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002264 uint8_t currentSampleTimeSec(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002265 // Store last state (will be used later if dead-or-alive is disabled).
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002266 _rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, currentSampleTimeSec);
niklase@google.com470e71d2011-07-07 08:21:25 +00002267 // Update the dead-or-alive state.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002268 if (_rtpRtcpModule->SetPeriodicDeadOrAliveStatus(
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002269 enable, (uint8_t)sampleTimeSeconds) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00002270 {
2271 _engineStatisticsPtr->SetLastError(
2272 VE_RTP_RTCP_MODULE_ERROR,
2273 kTraceError,
2274 "SetPeriodicDeadOrAliveStatus() failed to set dead-or-alive "
2275 "status");
2276 return -1;
2277 }
2278 if (!enable)
2279 {
2280 // Restore last utilized sample time.
2281 // Without this, the sample time would always be reset to default
2282        // (2 sec) each time dead-or-alive was disabled without a sample-time
2283 // parameter.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002284 _rtpRtcpModule->SetPeriodicDeadOrAliveStatus(enable,
niklase@google.com470e71d2011-07-07 08:21:25 +00002285 currentSampleTimeSec);
2286 }
2287 return 0;
2288}
2289
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002290int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002291Channel::GetPeriodicDeadOrAliveStatus(bool& enabled, int& sampleTimeSeconds)
2292{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002293 _rtpRtcpModule->PeriodicDeadOrAliveStatus(
niklase@google.com470e71d2011-07-07 08:21:25 +00002294 enabled,
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002295 (uint8_t&)sampleTimeSeconds);
niklase@google.com470e71d2011-07-07 08:21:25 +00002296 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
2297 "GetPeriodicDeadOrAliveStatus() => enabled=%d,"
2298 " sampleTimeSeconds=%d",
2299 enabled, sampleTimeSeconds);
2300 return 0;
2301}
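// Usage sketch (illustrative only). The observer callback is assumed to match
// VoEConnectionObserver as declared in voe_network.h; the two-second sample
// time is just an example.
//
//   class MyConnectionObserver : public webrtc::VoEConnectionObserver {
//    public:
//     virtual void OnPeriodicDeadOrAlive(int channel, bool alive) {
//       // React to the periodic dead/alive report here.
//     }
//   };
//
//   MyConnectionObserver observer;
//   channel->RegisterDeadOrAliveObserver(observer);
//   channel->SetPeriodicDeadOrAliveStatus(true, 2);  // sample every 2 seconds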
2302
niklase@google.com470e71d2011-07-07 08:21:25 +00002303int Channel::StartPlayingFileLocally(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002304 bool loop,
2305 FileFormats format,
2306 int startPosition,
2307 float volumeScaling,
2308 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002309 const CodecInst* codecInst)
2310{
2311 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2312 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
2313 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
2314 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2315 startPosition, stopPosition);
2316
2317 if (_outputFilePlaying)
2318 {
2319 _engineStatisticsPtr->SetLastError(
2320 VE_ALREADY_PLAYING, kTraceError,
2321 "StartPlayingFileLocally() is already playing");
2322 return -1;
2323 }
2324
niklase@google.com470e71d2011-07-07 08:21:25 +00002325 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002326 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002327
2328 if (_outputFilePlayerPtr)
2329 {
2330 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2331 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2332 _outputFilePlayerPtr = NULL;
2333 }
2334
2335 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2336 _outputFilePlayerId, (const FileFormats)format);
2337
2338 if (_outputFilePlayerPtr == NULL)
2339 {
2340 _engineStatisticsPtr->SetLastError(
2341 VE_INVALID_ARGUMENT, kTraceError,
henrike@webrtc.org31d30702011-11-18 19:59:32 +00002342 "StartPlayingFileLocally() filePlayer format is not correct");
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002343 return -1;
2344 }
2345
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002346 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002347
2348 if (_outputFilePlayerPtr->StartPlayingFile(
2349 fileName,
2350 loop,
2351 startPosition,
2352 volumeScaling,
2353 notificationTime,
2354 stopPosition,
2355 (const CodecInst*)codecInst) != 0)
2356 {
2357 _engineStatisticsPtr->SetLastError(
2358 VE_BAD_FILE, kTraceError,
2359 "StartPlayingFile() failed to start file playout");
2360 _outputFilePlayerPtr->StopPlayingFile();
2361 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2362 _outputFilePlayerPtr = NULL;
2363 return -1;
2364 }
2365 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2366 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002367 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002368
2369 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002370 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002371
2372 return 0;
2373}
2374
2375int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002376 FileFormats format,
2377 int startPosition,
2378 float volumeScaling,
2379 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002380 const CodecInst* codecInst)
2381{
2382 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2383 "Channel::StartPlayingFileLocally(format=%d,"
2384 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2385 format, volumeScaling, startPosition, stopPosition);
2386
2387 if(stream == NULL)
2388 {
2389 _engineStatisticsPtr->SetLastError(
2390 VE_BAD_FILE, kTraceError,
2391 "StartPlayingFileLocally() NULL as input stream");
2392 return -1;
2393 }
2394
2395
2396 if (_outputFilePlaying)
2397 {
2398 _engineStatisticsPtr->SetLastError(
2399 VE_ALREADY_PLAYING, kTraceError,
2400 "StartPlayingFileLocally() is already playing");
2401 return -1;
2402 }
2403
niklase@google.com470e71d2011-07-07 08:21:25 +00002404 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002405 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002406
2407 // Destroy the old instance
2408 if (_outputFilePlayerPtr)
2409 {
2410 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2411 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2412 _outputFilePlayerPtr = NULL;
2413 }
2414
2415 // Create the instance
2416 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2417 _outputFilePlayerId,
2418 (const FileFormats)format);
2419
2420 if (_outputFilePlayerPtr == NULL)
2421 {
2422 _engineStatisticsPtr->SetLastError(
2423 VE_INVALID_ARGUMENT, kTraceError,
2424            "StartPlayingFileLocally() filePlayer format is not correct");
2425 return -1;
2426 }
2427
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002428 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002429
2430 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2431 volumeScaling,
2432 notificationTime,
2433 stopPosition, codecInst) != 0)
2434 {
2435 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2436 "StartPlayingFile() failed to "
2437 "start file playout");
2438 _outputFilePlayerPtr->StopPlayingFile();
2439 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2440 _outputFilePlayerPtr = NULL;
2441 return -1;
2442 }
2443 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2444 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002445 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002446
2447 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002448 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002449
niklase@google.com470e71d2011-07-07 08:21:25 +00002450 return 0;
2451}
2452
2453int Channel::StopPlayingFileLocally()
2454{
2455 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2456 "Channel::StopPlayingFileLocally()");
2457
2458 if (!_outputFilePlaying)
2459 {
2460 _engineStatisticsPtr->SetLastError(
2461 VE_INVALID_OPERATION, kTraceWarning,
2462            "StopPlayingFileLocally() is not playing");
2463 return 0;
2464 }
2465
niklase@google.com470e71d2011-07-07 08:21:25 +00002466 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002467 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002468
2469 if (_outputFilePlayerPtr->StopPlayingFile() != 0)
2470 {
2471 _engineStatisticsPtr->SetLastError(
2472 VE_STOP_RECORDING_FAILED, kTraceError,
2473 "StopPlayingFile() could not stop playing");
2474 return -1;
2475 }
2476 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2477 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2478 _outputFilePlayerPtr = NULL;
2479 _outputFilePlaying = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002480 }
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002481 // _fileCritSect cannot be taken while calling
2482    // SetAnonymousMixabilityStatus(). Refer to comments in
2483 // StartPlayingFileLocally(const char* ...) for more details.
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002484 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
2485 {
2486 _engineStatisticsPtr->SetLastError(
2487 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002488 "StopPlayingFile() failed to stop participant from playing as"
2489            " a file in the mixer");
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002490 return -1;
2491 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002492
2493 return 0;
2494}
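// Usage sketch (illustrative only; the file path is hypothetical and the
// trailing CodecInst* is left at NULL). The file is mixed anonymously into
// this channel's playout once playout is running.
//
//   channel->StartPlayout();
//   channel->StartPlayingFileLocally("/tmp/prompt.wav", false,
//                                    webrtc::kFileFormatWavFile,
//                                    0, 1.0f, 0, NULL);
//   ...
//   channel->StopPlayingFileLocally();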
2495
2496int Channel::IsPlayingFileLocally() const
2497{
2498 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2499 "Channel::IsPlayingFileLocally()");
2500
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002501 return (int32_t)_outputFilePlaying;
niklase@google.com470e71d2011-07-07 08:21:25 +00002502}
2503
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002504int Channel::RegisterFilePlayingToMixer()
2505{
2506    // Return success without registering the file playing to the mixer if:
2507 // 1. playing file before playout is started on that channel.
2508 // 2. starting playout without file playing on that channel.
2509 if (!_playing || !_outputFilePlaying)
2510 {
2511 return 0;
2512 }
2513
2514 // |_fileCritSect| cannot be taken while calling
2515 // SetAnonymousMixabilityStatus() since as soon as the participant is added
2516 // frames can be pulled by the mixer. Since the frames are generated from
2517 // the file, _fileCritSect will be taken. This would result in a deadlock.
2518 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
2519 {
2520 CriticalSectionScoped cs(&_fileCritSect);
2521 _outputFilePlaying = false;
2522 _engineStatisticsPtr->SetLastError(
2523 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2524 "StartPlayingFile() failed to add participant as file to mixer");
2525 _outputFilePlayerPtr->StopPlayingFile();
2526 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2527 _outputFilePlayerPtr = NULL;
2528 return -1;
2529 }
2530
2531 return 0;
2532}
2533
pbos@webrtc.org92135212013-05-14 08:31:39 +00002534int Channel::ScaleLocalFilePlayout(float scale)
niklase@google.com470e71d2011-07-07 08:21:25 +00002535{
2536 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2537 "Channel::ScaleLocalFilePlayout(scale=%5.3f)", scale);
2538
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002539 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002540
2541 if (!_outputFilePlaying)
2542 {
2543 _engineStatisticsPtr->SetLastError(
2544 VE_INVALID_OPERATION, kTraceError,
2545            "ScaleLocalFilePlayout() is not playing");
2546 return -1;
2547 }
2548 if ((_outputFilePlayerPtr == NULL) ||
2549 (_outputFilePlayerPtr->SetAudioScaling(scale) != 0))
2550 {
2551 _engineStatisticsPtr->SetLastError(
2552 VE_BAD_ARGUMENT, kTraceError,
2553 "SetAudioScaling() failed to scale the playout");
2554 return -1;
2555 }
2556
2557 return 0;
2558}
2559
2560int Channel::GetLocalPlayoutPosition(int& positionMs)
2561{
2562 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2563 "Channel::GetLocalPlayoutPosition(position=?)");
2564
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002565 uint32_t position;
niklase@google.com470e71d2011-07-07 08:21:25 +00002566
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002567 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002568
2569 if (_outputFilePlayerPtr == NULL)
2570 {
2571 _engineStatisticsPtr->SetLastError(
2572 VE_INVALID_OPERATION, kTraceError,
2573            "GetLocalPlayoutPosition() filePlayer instance does not exist");
2574 return -1;
2575 }
2576
2577 if (_outputFilePlayerPtr->GetPlayoutPosition(position) != 0)
2578 {
2579 _engineStatisticsPtr->SetLastError(
2580 VE_BAD_FILE, kTraceError,
2581 "GetLocalPlayoutPosition() failed");
2582 return -1;
2583 }
2584 positionMs = position;
2585
2586 return 0;
2587}
2588
2589int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002590 bool loop,
2591 FileFormats format,
2592 int startPosition,
2593 float volumeScaling,
2594 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002595 const CodecInst* codecInst)
2596{
2597 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2598 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
2599 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
2600 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2601 startPosition, stopPosition);
2602
2603 if (_inputFilePlaying)
2604 {
2605 _engineStatisticsPtr->SetLastError(
2606 VE_ALREADY_PLAYING, kTraceWarning,
2607 "StartPlayingFileAsMicrophone() filePlayer is playing");
2608 return 0;
2609 }
2610
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002611 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002612
2613 // Destroy the old instance
2614 if (_inputFilePlayerPtr)
2615 {
2616 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2617 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2618 _inputFilePlayerPtr = NULL;
2619 }
2620
2621 // Create the instance
2622 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2623 _inputFilePlayerId, (const FileFormats)format);
2624
2625 if (_inputFilePlayerPtr == NULL)
2626 {
2627 _engineStatisticsPtr->SetLastError(
2628 VE_INVALID_ARGUMENT, kTraceError,
2629            "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2630 return -1;
2631 }
2632
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002633 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002634
2635 if (_inputFilePlayerPtr->StartPlayingFile(
2636 fileName,
2637 loop,
2638 startPosition,
2639 volumeScaling,
2640 notificationTime,
2641 stopPosition,
2642 (const CodecInst*)codecInst) != 0)
2643 {
2644 _engineStatisticsPtr->SetLastError(
2645 VE_BAD_FILE, kTraceError,
2646 "StartPlayingFile() failed to start file playout");
2647 _inputFilePlayerPtr->StopPlayingFile();
2648 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2649 _inputFilePlayerPtr = NULL;
2650 return -1;
2651 }
2652 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2653 _inputFilePlaying = true;
2654
2655 return 0;
2656}
2657
2658int Channel::StartPlayingFileAsMicrophone(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002659 FileFormats format,
2660 int startPosition,
2661 float volumeScaling,
2662 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002663 const CodecInst* codecInst)
2664{
2665 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2666 "Channel::StartPlayingFileAsMicrophone(format=%d, "
2667 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2668 format, volumeScaling, startPosition, stopPosition);
2669
2670 if(stream == NULL)
2671 {
2672 _engineStatisticsPtr->SetLastError(
2673 VE_BAD_FILE, kTraceError,
2674 "StartPlayingFileAsMicrophone NULL as input stream");
2675 return -1;
2676 }
2677
2678 if (_inputFilePlaying)
2679 {
2680 _engineStatisticsPtr->SetLastError(
2681 VE_ALREADY_PLAYING, kTraceWarning,
2682 "StartPlayingFileAsMicrophone() is playing");
2683 return 0;
2684 }
2685
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002686 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002687
2688 // Destroy the old instance
2689 if (_inputFilePlayerPtr)
2690 {
2691 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2692 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2693 _inputFilePlayerPtr = NULL;
2694 }
2695
2696 // Create the instance
2697 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2698 _inputFilePlayerId, (const FileFormats)format);
2699
2700 if (_inputFilePlayerPtr == NULL)
2701 {
2702 _engineStatisticsPtr->SetLastError(
2703 VE_INVALID_ARGUMENT, kTraceError,
2704            "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2705 return -1;
2706 }
2707
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002708 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002709
2710 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2711 volumeScaling, notificationTime,
2712 stopPosition, codecInst) != 0)
2713 {
2714 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2715 "StartPlayingFile() failed to start "
2716 "file playout");
2717 _inputFilePlayerPtr->StopPlayingFile();
2718 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2719 _inputFilePlayerPtr = NULL;
2720 return -1;
2721 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002722
niklase@google.com470e71d2011-07-07 08:21:25 +00002723 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2724 _inputFilePlaying = true;
2725
2726 return 0;
2727}
2728
2729int Channel::StopPlayingFileAsMicrophone()
2730{
2731 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2732 "Channel::StopPlayingFileAsMicrophone()");
2733
2734 if (!_inputFilePlaying)
2735 {
2736 _engineStatisticsPtr->SetLastError(
2737 VE_INVALID_OPERATION, kTraceWarning,
2738            "StopPlayingFileAsMicrophone() is not playing");
2739 return 0;
2740 }
2741
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002742 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002743 if (_inputFilePlayerPtr->StopPlayingFile() != 0)
2744 {
2745 _engineStatisticsPtr->SetLastError(
2746 VE_STOP_RECORDING_FAILED, kTraceError,
2747 "StopPlayingFile() could not stop playing");
2748 return -1;
2749 }
2750 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2751 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2752 _inputFilePlayerPtr = NULL;
2753 _inputFilePlaying = false;
2754
2755 return 0;
2756}
2757
2758int Channel::IsPlayingFileAsMicrophone() const
2759{
2760 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2761 "Channel::IsPlayingFileAsMicrophone()");
2762
2763 return _inputFilePlaying;
2764}
2765
pbos@webrtc.org92135212013-05-14 08:31:39 +00002766int Channel::ScaleFileAsMicrophonePlayout(float scale)
niklase@google.com470e71d2011-07-07 08:21:25 +00002767{
2768 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2769 "Channel::ScaleFileAsMicrophonePlayout(scale=%5.3f)", scale);
2770
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002771 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002772
2773 if (!_inputFilePlaying)
2774 {
2775 _engineStatisticsPtr->SetLastError(
2776 VE_INVALID_OPERATION, kTraceError,
2777            "ScaleFileAsMicrophonePlayout() is not playing");
2778 return -1;
2779 }
2780
2781 if ((_inputFilePlayerPtr == NULL) ||
2782 (_inputFilePlayerPtr->SetAudioScaling(scale) != 0))
2783 {
2784 _engineStatisticsPtr->SetLastError(
2785 VE_BAD_ARGUMENT, kTraceError,
2786 "SetAudioScaling() failed to scale playout");
2787 return -1;
2788 }
2789
2790 return 0;
2791}
2792
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00002793int Channel::StartRecordingPlayout(const char* fileName,
niklase@google.com470e71d2011-07-07 08:21:25 +00002794 const CodecInst* codecInst)
2795{
2796 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2797 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
2798
2799 if (_outputFileRecording)
2800 {
2801 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2802 "StartRecordingPlayout() is already recording");
2803 return 0;
2804 }
2805
2806 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002807 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +00002808 CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2809
niklas.enbom@webrtc.org40197d72012-03-26 08:45:47 +00002810 if ((codecInst != NULL) &&
2811 ((codecInst->channels < 1) || (codecInst->channels > 2)))
niklase@google.com470e71d2011-07-07 08:21:25 +00002812 {
2813 _engineStatisticsPtr->SetLastError(
2814 VE_BAD_ARGUMENT, kTraceError,
2815 "StartRecordingPlayout() invalid compression");
2816 return(-1);
2817 }
2818 if(codecInst == NULL)
2819 {
2820 format = kFileFormatPcm16kHzFile;
2821 codecInst=&dummyCodec;
2822 }
2823 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2824 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2825 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2826 {
2827 format = kFileFormatWavFile;
2828 }
2829 else
2830 {
2831 format = kFileFormatCompressedFile;
2832 }
2833
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002834 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002835
2836 // Destroy the old instance
2837 if (_outputFileRecorderPtr)
2838 {
2839 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2840 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2841 _outputFileRecorderPtr = NULL;
2842 }
2843
2844 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2845 _outputFileRecorderId, (const FileFormats)format);
2846 if (_outputFileRecorderPtr == NULL)
2847 {
2848 _engineStatisticsPtr->SetLastError(
2849 VE_INVALID_ARGUMENT, kTraceError,
2850            "StartRecordingPlayout() fileRecorder format is not correct");
2851 return -1;
2852 }
2853
2854 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2855 fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
2856 {
2857 _engineStatisticsPtr->SetLastError(
2858 VE_BAD_FILE, kTraceError,
2859 "StartRecordingAudioFile() failed to start file recording");
2860 _outputFileRecorderPtr->StopRecording();
2861 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2862 _outputFileRecorderPtr = NULL;
2863 return -1;
2864 }
2865 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2866 _outputFileRecording = true;
2867
2868 return 0;
2869}
2870
2871int Channel::StartRecordingPlayout(OutStream* stream,
2872 const CodecInst* codecInst)
2873{
2874 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2875 "Channel::StartRecordingPlayout()");
2876
2877 if (_outputFileRecording)
2878 {
2879 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2880 "StartRecordingPlayout() is already recording");
2881 return 0;
2882 }
2883
2884 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002885 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +00002886    CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2887
2888 if (codecInst != NULL && codecInst->channels != 1)
2889 {
2890 _engineStatisticsPtr->SetLastError(
2891 VE_BAD_ARGUMENT, kTraceError,
2892 "StartRecordingPlayout() invalid compression");
2893 return(-1);
2894 }
2895    if (codecInst == NULL)
2896    {
2897        format = kFileFormatPcm16kHzFile;
2898        codecInst = &dummyCodec;
2899 }
2900 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2901 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2902 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2903 {
2904 format = kFileFormatWavFile;
2905 }
2906 else
2907 {
2908 format = kFileFormatCompressedFile;
2909 }
2910
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002911 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002912
2913 // Destroy the old instance
2914 if (_outputFileRecorderPtr)
2915 {
2916 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2917 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2918 _outputFileRecorderPtr = NULL;
2919 }
2920
2921 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2922 _outputFileRecorderId, (const FileFormats)format);
2923 if (_outputFileRecorderPtr == NULL)
2924 {
2925 _engineStatisticsPtr->SetLastError(
2926 VE_INVALID_ARGUMENT, kTraceError,
2927             "StartRecordingPlayout() fileRecorder format is not correct");
2928 return -1;
2929 }
2930
2931 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2932 notificationTime) != 0)
2933 {
2934 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2935 "StartRecordingPlayout() failed to "
2936 "start file recording");
2937 _outputFileRecorderPtr->StopRecording();
2938 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2939 _outputFileRecorderPtr = NULL;
2940 return -1;
2941 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002942
niklase@google.com470e71d2011-07-07 08:21:25 +00002943 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2944 _outputFileRecording = true;
2945
2946 return 0;
2947}
2948
2949int Channel::StopRecordingPlayout()
2950{
2951 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
2952 "Channel::StopRecordingPlayout()");
2953
2954 if (!_outputFileRecording)
2955 {
2956 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
2957                    "StopRecordingPlayout() is not recording");
2958 return -1;
2959 }
2960
2961
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002962 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002963
2964 if (_outputFileRecorderPtr->StopRecording() != 0)
2965 {
2966 _engineStatisticsPtr->SetLastError(
2967 VE_STOP_RECORDING_FAILED, kTraceError,
2968 "StopRecording() could not stop recording");
2969 return(-1);
2970 }
2971 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2972 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2973 _outputFileRecorderPtr = NULL;
2974 _outputFileRecording = false;
2975
2976 return 0;
2977}
2978
2979void
2980Channel::SetMixWithMicStatus(bool mix)
2981{
2982    _mixFileWithMicrophone = mix;
2983}
2984
2985int
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002986Channel::GetSpeechOutputLevel(uint32_t& level) const
niklase@google.com470e71d2011-07-07 08:21:25 +00002987{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002988 int8_t currentLevel = _outputAudioLevel.Level();
2989 level = static_cast<int32_t> (currentLevel);
niklase@google.com470e71d2011-07-07 08:21:25 +00002990 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2991 VoEId(_instanceId,_channelId),
2992 "GetSpeechOutputLevel() => level=%u", level);
2993 return 0;
2994}
2995
2996int
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002997Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const
niklase@google.com470e71d2011-07-07 08:21:25 +00002998{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002999 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
3000 level = static_cast<int32_t> (currentLevel);
niklase@google.com470e71d2011-07-07 08:21:25 +00003001 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3002 VoEId(_instanceId,_channelId),
3003 "GetSpeechOutputLevelFullRange() => level=%u", level);
3004 return 0;
3005}
3006
3007int
3008Channel::SetMute(bool enable)
3009{
3010 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3011 "Channel::SetMute(enable=%d)", enable);
3012 _mute = enable;
3013 return 0;
3014}
3015
3016bool
3017Channel::Mute() const
3018{
3019 return _mute;
3020}
3021
3022int
3023Channel::SetOutputVolumePan(float left, float right)
3024{
3025 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3026 "Channel::SetOutputVolumePan()");
3027 _panLeft = left;
3028 _panRight = right;
3029 return 0;
3030}
3031
3032int
3033Channel::GetOutputVolumePan(float& left, float& right) const
3034{
3035 left = _panLeft;
3036 right = _panRight;
3037 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3038 VoEId(_instanceId,_channelId),
3039 "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
3040 return 0;
3041}
3042
3043int
3044Channel::SetChannelOutputVolumeScaling(float scaling)
3045{
3046 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3047 "Channel::SetChannelOutputVolumeScaling()");
3048 _outputGain = scaling;
3049 return 0;
3050}
3051
3052int
3053Channel::GetChannelOutputVolumeScaling(float& scaling) const
3054{
3055 scaling = _outputGain;
3056 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3057 VoEId(_instanceId,_channelId),
3058 "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
3059 return 0;
3060}
3061
niklase@google.com470e71d2011-07-07 08:21:25 +00003062int
3063Channel::RegisterExternalEncryption(Encryption& encryption)
3064{
3065 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3066 "Channel::RegisterExternalEncryption()");
3067
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003068 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003069
3070 if (_encryptionPtr)
3071 {
3072 _engineStatisticsPtr->SetLastError(
3073 VE_INVALID_OPERATION, kTraceError,
3074 "RegisterExternalEncryption() encryption already enabled");
3075 return -1;
3076 }
3077
3078 _encryptionPtr = &encryption;
3079
3080 _decrypting = true;
3081 _encrypting = true;
3082
3083 return 0;
3084}
3085
3086int
3087Channel::DeRegisterExternalEncryption()
3088{
3089 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3090 "Channel::DeRegisterExternalEncryption()");
3091
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003092 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003093
3094 if (!_encryptionPtr)
3095 {
3096 _engineStatisticsPtr->SetLastError(
3097 VE_INVALID_OPERATION, kTraceWarning,
3098 "DeRegisterExternalEncryption() encryption already disabled");
3099 return 0;
3100 }
3101
3102 _decrypting = false;
3103 _encrypting = false;
3104
3105 _encryptionPtr = NULL;
3106
3107 return 0;
3108}
3109
3110int Channel::SendTelephoneEventOutband(unsigned char eventCode,
3111 int lengthMs, int attenuationDb,
3112 bool playDtmfEvent)
3113{
3114 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3115 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
3116 playDtmfEvent);
3117
3118 _playOutbandDtmfEvent = playDtmfEvent;
3119
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003120 if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
niklase@google.com470e71d2011-07-07 08:21:25 +00003121 attenuationDb) != 0)
3122 {
3123 _engineStatisticsPtr->SetLastError(
3124 VE_SEND_DTMF_FAILED,
3125 kTraceWarning,
3126 "SendTelephoneEventOutband() failed to send event");
3127 return -1;
3128 }
3129 return 0;
3130}
3131
3132int Channel::SendTelephoneEventInband(unsigned char eventCode,
3133 int lengthMs,
3134 int attenuationDb,
3135 bool playDtmfEvent)
3136{
3137 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3138 "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
3139 playDtmfEvent);
3140
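        // The tone is queued here and generated by InsertInbandDtmfTone()
        // during PrepareEncodeAndSend(), i.e. it is mixed into the encoded
        // audio rather than sent as an RTP telephone-event packet.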
3141 _playInbandDtmfEvent = playDtmfEvent;
3142 _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
3143
3144 return 0;
3145}
3146
3147int
3148Channel::SetDtmfPlayoutStatus(bool enable)
3149{
3150 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3151 "Channel::SetDtmfPlayoutStatus()");
3152 if (_audioCodingModule.SetDtmfPlayoutStatus(enable) != 0)
3153 {
3154 _engineStatisticsPtr->SetLastError(
3155 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
3156 "SetDtmfPlayoutStatus() failed to set Dtmf playout");
3157 return -1;
3158 }
3159 return 0;
3160}
3161
3162bool
3163Channel::DtmfPlayoutStatus() const
3164{
3165 return _audioCodingModule.DtmfPlayoutStatus();
3166}
3167
3168int
3169Channel::SetSendTelephoneEventPayloadType(unsigned char type)
3170{
3171 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3172 "Channel::SetSendTelephoneEventPayloadType()");
andrew@webrtc.orgf81f9f82011-08-19 22:56:22 +00003173 if (type > 127)
niklase@google.com470e71d2011-07-07 08:21:25 +00003174 {
3175 _engineStatisticsPtr->SetLastError(
3176 VE_INVALID_ARGUMENT, kTraceError,
3177 "SetSendTelephoneEventPayloadType() invalid type");
3178 return -1;
3179 }
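        // Build a minimal CodecInst for the telephone-event payload (RFC 4733)
        // and register it with the RTP/RTCP module under the requested type.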
pwestin@webrtc.org1da1ce02011-10-13 15:19:55 +00003180 CodecInst codec;
3181 codec.plfreq = 8000;
3182 codec.pltype = type;
3183 memcpy(codec.plname, "telephone-event", 16);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003184 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003185 {
henrika@webrtc.org4392d5f2013-04-17 07:34:25 +00003186 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
3187 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
3188 _engineStatisticsPtr->SetLastError(
3189 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3190            "SetSendTelephoneEventPayloadType() failed to register send "
3191 "payload type");
3192 return -1;
3193 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003194 }
3195 _sendTelephoneEventPayloadType = type;
3196 return 0;
3197}
3198
3199int
3200Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
3201{
3202 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3203 "Channel::GetSendTelephoneEventPayloadType()");
3204 type = _sendTelephoneEventPayloadType;
3205 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3206 VoEId(_instanceId,_channelId),
3207 "GetSendTelephoneEventPayloadType() => type=%u", type);
3208 return 0;
3209}
3210
niklase@google.com470e71d2011-07-07 08:21:25 +00003211int
3212Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
3213{
3214 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3215 "Channel::UpdateRxVadDetection()");
3216
3217 int vadDecision = 1;
3218
andrew@webrtc.org63a50982012-05-02 23:56:37 +00003219    vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003220
3221 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
3222 {
3223 OnRxVadDetected(vadDecision);
3224 _oldVadDecision = vadDecision;
3225 }
3226
3227 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3228 "Channel::UpdateRxVadDetection() => vadDecision=%d",
3229 vadDecision);
3230 return 0;
3231}
3232
3233int
3234Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
3235{
3236 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3237 "Channel::RegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003238 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003239
3240 if (_rxVadObserverPtr)
3241 {
3242 _engineStatisticsPtr->SetLastError(
3243 VE_INVALID_OPERATION, kTraceError,
3244 "RegisterRxVadObserver() observer already enabled");
3245 return -1;
3246 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003247 _rxVadObserverPtr = &observer;
3248 _RxVadDetection = true;
3249 return 0;
3250}
3251
3252int
3253Channel::DeRegisterRxVadObserver()
3254{
3255 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3256 "Channel::DeRegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003257 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003258
3259 if (!_rxVadObserverPtr)
3260 {
3261 _engineStatisticsPtr->SetLastError(
3262 VE_INVALID_OPERATION, kTraceWarning,
3263 "DeRegisterRxVadObserver() observer already disabled");
3264 return 0;
3265 }
3266 _rxVadObserverPtr = NULL;
3267 _RxVadDetection = false;
3268 return 0;
3269}
3270
3271int
3272Channel::VoiceActivityIndicator(int &activity)
3273{
3274 activity = _sendFrameType;
3275
3276 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3277 "Channel::VoiceActivityIndicator(indicator=%d)", activity);
3278 return 0;
3279}
3280
3281#ifdef WEBRTC_VOICE_ENGINE_AGC
3282
3283int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003284Channel::SetRxAgcStatus(bool enable, AgcModes mode)
niklase@google.com470e71d2011-07-07 08:21:25 +00003285{
3286 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3287 "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
3288 (int)enable, (int)mode);
3289
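        // Map the VoE AGC mode onto the AudioProcessing GainControl mode used
        // for receive-side processing; kAgcDefault maps to adaptive digital.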
3290 GainControl::Mode agcMode(GainControl::kFixedDigital);
3291 switch (mode)
3292 {
3293 case kAgcDefault:
3294 agcMode = GainControl::kAdaptiveDigital;
3295 break;
3296 case kAgcUnchanged:
3297 agcMode = _rxAudioProcessingModulePtr->gain_control()->mode();
3298 break;
3299 case kAgcFixedDigital:
3300 agcMode = GainControl::kFixedDigital;
3301 break;
3302 case kAgcAdaptiveDigital:
3303            agcMode = GainControl::kAdaptiveDigital;
3304 break;
3305 default:
3306 _engineStatisticsPtr->SetLastError(
3307 VE_INVALID_ARGUMENT, kTraceError,
3308 "SetRxAgcStatus() invalid Agc mode");
3309 return -1;
3310 }
3311
3312 if (_rxAudioProcessingModulePtr->gain_control()->set_mode(agcMode) != 0)
3313 {
3314 _engineStatisticsPtr->SetLastError(
3315 VE_APM_ERROR, kTraceError,
3316 "SetRxAgcStatus() failed to set Agc mode");
3317 return -1;
3318 }
3319 if (_rxAudioProcessingModulePtr->gain_control()->Enable(enable) != 0)
3320 {
3321 _engineStatisticsPtr->SetLastError(
3322 VE_APM_ERROR, kTraceError,
3323 "SetRxAgcStatus() failed to set Agc state");
3324 return -1;
3325 }
3326
3327 _rxAgcIsEnabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00003328 _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
3329
3330 return 0;
3331}
3332
3333int
3334Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
3335{
3336 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3337 "Channel::GetRxAgcStatus(enable=?, mode=?)");
3338
3339 bool enable = _rxAudioProcessingModulePtr->gain_control()->is_enabled();
3340 GainControl::Mode agcMode =
3341 _rxAudioProcessingModulePtr->gain_control()->mode();
3342
3343 enabled = enable;
3344
3345 switch (agcMode)
3346 {
3347 case GainControl::kFixedDigital:
3348 mode = kAgcFixedDigital;
3349 break;
3350 case GainControl::kAdaptiveDigital:
3351 mode = kAgcAdaptiveDigital;
3352 break;
3353 default:
3354 _engineStatisticsPtr->SetLastError(
3355 VE_APM_ERROR, kTraceError,
3356 "GetRxAgcStatus() invalid Agc mode");
3357 return -1;
3358 }
3359
3360 return 0;
3361}
3362
3363int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003364Channel::SetRxAgcConfig(AgcConfig config)
niklase@google.com470e71d2011-07-07 08:21:25 +00003365{
3366 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3367 "Channel::SetRxAgcConfig()");
3368
3369 if (_rxAudioProcessingModulePtr->gain_control()->set_target_level_dbfs(
3370 config.targetLeveldBOv) != 0)
3371 {
3372 _engineStatisticsPtr->SetLastError(
3373 VE_APM_ERROR, kTraceError,
3374            "SetRxAgcConfig() failed to set target peak |level| "
3375 "(or envelope) of the Agc");
3376 return -1;
3377 }
3378 if (_rxAudioProcessingModulePtr->gain_control()->set_compression_gain_db(
3379 config.digitalCompressionGaindB) != 0)
3380 {
3381 _engineStatisticsPtr->SetLastError(
3382 VE_APM_ERROR, kTraceError,
3383 "SetRxAgcConfig() failed to set the range in |gain| the"
3384 " digital compression stage may apply");
3385 return -1;
3386 }
3387 if (_rxAudioProcessingModulePtr->gain_control()->enable_limiter(
3388 config.limiterEnable) != 0)
3389 {
3390 _engineStatisticsPtr->SetLastError(
3391 VE_APM_ERROR, kTraceError,
3392 "SetRxAgcConfig() failed to set hard limiter to the signal");
3393 return -1;
3394 }
3395
3396 return 0;
3397}
3398
3399int
3400Channel::GetRxAgcConfig(AgcConfig& config)
3401{
3402 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3403                 "Channel::GetRxAgcConfig(config=?)");
3404
3405 config.targetLeveldBOv =
3406 _rxAudioProcessingModulePtr->gain_control()->target_level_dbfs();
3407 config.digitalCompressionGaindB =
3408 _rxAudioProcessingModulePtr->gain_control()->compression_gain_db();
3409 config.limiterEnable =
3410 _rxAudioProcessingModulePtr->gain_control()->is_limiter_enabled();
3411
3412 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3413 VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
3414 "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
3415 " limiterEnable=%d",
3416 config.targetLeveldBOv,
3417 config.digitalCompressionGaindB,
3418 config.limiterEnable);
3419
3420 return 0;
3421}
3422
3423#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
3424
3425#ifdef WEBRTC_VOICE_ENGINE_NR
3426
3427int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003428Channel::SetRxNsStatus(bool enable, NsModes mode)
niklase@google.com470e71d2011-07-07 08:21:25 +00003429{
3430 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3431 "Channel::SetRxNsStatus(enable=%d, mode=%d)",
3432 (int)enable, (int)mode);
3433
3434 NoiseSuppression::Level nsLevel(
3435 (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE);
3436 switch (mode)
3437 {
3438
3439 case kNsDefault:
3440 nsLevel = (NoiseSuppression::Level)
3441 WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE;
3442 break;
3443 case kNsUnchanged:
3444 nsLevel = _rxAudioProcessingModulePtr->noise_suppression()->level();
3445 break;
3446 case kNsConference:
3447 nsLevel = NoiseSuppression::kHigh;
3448 break;
3449 case kNsLowSuppression:
3450 nsLevel = NoiseSuppression::kLow;
3451 break;
3452 case kNsModerateSuppression:
3453 nsLevel = NoiseSuppression::kModerate;
3454 break;
3455 case kNsHighSuppression:
3456 nsLevel = NoiseSuppression::kHigh;
3457 break;
3458 case kNsVeryHighSuppression:
3459 nsLevel = NoiseSuppression::kVeryHigh;
3460 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003461 }
3462
3463 if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(nsLevel)
3464 != 0)
3465 {
3466 _engineStatisticsPtr->SetLastError(
3467 VE_APM_ERROR, kTraceError,
3468            "SetRxNsStatus() failed to set NS level");
3469 return -1;
3470 }
3471 if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(enable) != 0)
3472 {
3473 _engineStatisticsPtr->SetLastError(
3474 VE_APM_ERROR, kTraceError,
3475            "SetRxNsStatus() failed to set NS state");
3476 return -1;
3477 }
3478
3479 _rxNsIsEnabled = enable;
3480 _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
3481
3482 return 0;
3483}
3484
3485int
3486Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
3487{
3488 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3489 "Channel::GetRxNsStatus(enable=?, mode=?)");
3490
3491 bool enable =
3492 _rxAudioProcessingModulePtr->noise_suppression()->is_enabled();
3493 NoiseSuppression::Level ncLevel =
3494 _rxAudioProcessingModulePtr->noise_suppression()->level();
3495
3496 enabled = enable;
3497
3498 switch (ncLevel)
3499 {
3500 case NoiseSuppression::kLow:
3501 mode = kNsLowSuppression;
3502 break;
3503 case NoiseSuppression::kModerate:
3504 mode = kNsModerateSuppression;
3505 break;
3506 case NoiseSuppression::kHigh:
3507 mode = kNsHighSuppression;
3508 break;
3509 case NoiseSuppression::kVeryHigh:
3510 mode = kNsVeryHighSuppression;
3511 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003512 }
3513
3514 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3515 VoEId(_instanceId,_channelId),
3516 "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
3517 return 0;
3518}
3519
3520#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
3521
3522int
3523Channel::RegisterRTPObserver(VoERTPObserver& observer)
3524{
3525 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3526 "Channel::RegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003527 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003528
3529 if (_rtpObserverPtr)
3530 {
3531 _engineStatisticsPtr->SetLastError(
3532 VE_INVALID_OPERATION, kTraceError,
3533 "RegisterRTPObserver() observer already enabled");
3534 return -1;
3535 }
3536
3537 _rtpObserverPtr = &observer;
3538 _rtpObserver = true;
3539
3540 return 0;
3541}
3542
3543int
3544Channel::DeRegisterRTPObserver()
3545{
3546 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3547 "Channel::DeRegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003548 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003549
3550 if (!_rtpObserverPtr)
3551 {
3552 _engineStatisticsPtr->SetLastError(
3553 VE_INVALID_OPERATION, kTraceWarning,
3554 "DeRegisterRTPObserver() observer already disabled");
3555 return 0;
3556 }
3557
3558 _rtpObserver = false;
3559 _rtpObserverPtr = NULL;
3560
3561 return 0;
3562}
3563
3564int
3565Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
3566{
3567 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3568 "Channel::RegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003569 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003570
3571 if (_rtcpObserverPtr)
3572 {
3573 _engineStatisticsPtr->SetLastError(
3574 VE_INVALID_OPERATION, kTraceError,
3575 "RegisterRTCPObserver() observer already enabled");
3576 return -1;
3577 }
3578
3579 _rtcpObserverPtr = &observer;
3580 _rtcpObserver = true;
3581
3582 return 0;
3583}
3584
3585int
3586Channel::DeRegisterRTCPObserver()
3587{
3588 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3589 "Channel::DeRegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003590 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003591
3592 if (!_rtcpObserverPtr)
3593 {
3594 _engineStatisticsPtr->SetLastError(
3595 VE_INVALID_OPERATION, kTraceWarning,
3596 "DeRegisterRTCPObserver() observer already disabled");
3597 return 0;
3598 }
3599
3600 _rtcpObserver = false;
3601 _rtcpObserverPtr = NULL;
3602
3603 return 0;
3604}
3605
3606int
3607Channel::SetLocalSSRC(unsigned int ssrc)
3608{
3609 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3610 "Channel::SetLocalSSRC()");
3611 if (_sending)
3612 {
3613 _engineStatisticsPtr->SetLastError(
3614 VE_ALREADY_SENDING, kTraceError,
3615 "SetLocalSSRC() already sending");
3616 return -1;
3617 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003618 if (_rtpRtcpModule->SetSSRC(ssrc) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003619 {
3620 _engineStatisticsPtr->SetLastError(
3621 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3622 "SetLocalSSRC() failed to set SSRC");
3623 return -1;
3624 }
3625 return 0;
3626}
3627
3628int
3629Channel::GetLocalSSRC(unsigned int& ssrc)
3630{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003631 ssrc = _rtpRtcpModule->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003632 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3633 VoEId(_instanceId,_channelId),
3634 "GetLocalSSRC() => ssrc=%lu", ssrc);
3635 return 0;
3636}
3637
3638int
3639Channel::GetRemoteSSRC(unsigned int& ssrc)
3640{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003641 ssrc = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003642 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3643 VoEId(_instanceId,_channelId),
3644 "GetRemoteSSRC() => ssrc=%lu", ssrc);
3645 return 0;
3646}
3647
3648int
3649Channel::GetRemoteCSRCs(unsigned int arrCSRC[15])
3650{
3651 if (arrCSRC == NULL)
3652 {
3653 _engineStatisticsPtr->SetLastError(
3654 VE_INVALID_ARGUMENT, kTraceError,
3655 "GetRemoteCSRCs() invalid array argument");
3656 return -1;
3657 }
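        // The RTP/RTCP module reports at most kRtpCsrcSize CSRCs, assumed to
        // match the fixed size (15) of the caller-provided array.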
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003658 uint32_t arrOfCSRC[kRtpCsrcSize];
3659 int32_t CSRCs(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003660 CSRCs = _rtpRtcpModule->CSRCs(arrOfCSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00003661 if (CSRCs > 0)
3662 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003663 memcpy(arrCSRC, arrOfCSRC, CSRCs * sizeof(uint32_t));
niklase@google.com470e71d2011-07-07 08:21:25 +00003664 for (int i = 0; i < (int) CSRCs; i++)
3665 {
3666 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3667 VoEId(_instanceId, _channelId),
3668 "GetRemoteCSRCs() => arrCSRC[%d]=%lu", i, arrCSRC[i]);
3669 }
3670 } else
3671 {
3672 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3673 VoEId(_instanceId, _channelId),
3674 "GetRemoteCSRCs() => list is empty!");
3675 }
3676 return CSRCs;
3677}
3678
3679int
3680Channel::SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID)
3681{
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00003682 if (_rtpAudioProc.get() == NULL)
3683 {
3684 _rtpAudioProc.reset(AudioProcessing::Create(VoEModuleId(_instanceId,
3685 _channelId)));
3686 if (_rtpAudioProc.get() == NULL)
3687 {
3688 _engineStatisticsPtr->SetLastError(VE_NO_MEMORY, kTraceCritical,
3689 "Failed to create AudioProcessing");
3690 return -1;
3691 }
3692 }
3693
3694 if (_rtpAudioProc->level_estimator()->Enable(enable) !=
3695 AudioProcessing::kNoError)
3696 {
3697 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceWarning,
3698 "Failed to enable AudioProcessing::level_estimator()");
3699 }
3700
niklase@google.com470e71d2011-07-07 08:21:25 +00003701 _includeAudioLevelIndication = enable;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003702 return _rtpRtcpModule->SetRTPAudioLevelIndicationStatus(enable, ID);
niklase@google.com470e71d2011-07-07 08:21:25 +00003703}

3704int
3705Channel::GetRTPAudioLevelIndicationStatus(bool& enabled, unsigned char& ID)
3706{
3707 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3708 VoEId(_instanceId,_channelId),
3709 "GetRTPAudioLevelIndicationStatus() => enabled=%d, ID=%u",
3710 enabled, ID);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003711 return _rtpRtcpModule->GetRTPAudioLevelIndicationStatus(enabled, ID);
niklase@google.com470e71d2011-07-07 08:21:25 +00003712}
3713
3714int
3715Channel::SetRTCPStatus(bool enable)
3716{
3717 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3718 "Channel::SetRTCPStatus()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003719 if (_rtpRtcpModule->SetRTCPStatus(enable ?
niklase@google.com470e71d2011-07-07 08:21:25 +00003720 kRtcpCompound : kRtcpOff) != 0)
3721 {
3722 _engineStatisticsPtr->SetLastError(
3723 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3724 "SetRTCPStatus() failed to set RTCP status");
3725 return -1;
3726 }
3727 return 0;
3728}
3729
3730int
3731Channel::GetRTCPStatus(bool& enabled)
3732{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003733 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003734 enabled = (method != kRtcpOff);
3735 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3736 VoEId(_instanceId,_channelId),
3737 "GetRTCPStatus() => enabled=%d", enabled);
3738 return 0;
3739}
3740
3741int
3742Channel::SetRTCP_CNAME(const char cName[256])
3743{
3744 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3745 "Channel::SetRTCP_CNAME()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003746 if (_rtpRtcpModule->SetCNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003747 {
3748 _engineStatisticsPtr->SetLastError(
3749 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3750 "SetRTCP_CNAME() failed to set RTCP CNAME");
3751 return -1;
3752 }
3753 return 0;
3754}
3755
3756int
3757Channel::GetRTCP_CNAME(char cName[256])
3758{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003759 if (_rtpRtcpModule->CNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003760 {
3761 _engineStatisticsPtr->SetLastError(
3762 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3763 "GetRTCP_CNAME() failed to retrieve RTCP CNAME");
3764 return -1;
3765 }
3766 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3767 VoEId(_instanceId, _channelId),
3768 "GetRTCP_CNAME() => cName=%s", cName);
3769 return 0;
3770}
3771
3772int
3773Channel::GetRemoteRTCP_CNAME(char cName[256])
3774{
3775 if (cName == NULL)
3776 {
3777 _engineStatisticsPtr->SetLastError(
3778 VE_INVALID_ARGUMENT, kTraceError,
3779 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
3780 return -1;
3781 }
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00003782 char cname[RTCP_CNAME_SIZE];
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003783 const uint32_t remoteSSRC = _rtpRtcpModule->RemoteSSRC();
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003784 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003785 {
3786 _engineStatisticsPtr->SetLastError(
3787 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
3788 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
3789 return -1;
3790 }
3791 strcpy(cName, cname);
3792 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3793 VoEId(_instanceId, _channelId),
3794 "GetRemoteRTCP_CNAME() => cName=%s", cName);
3795 return 0;
3796}
3797
3798int
3799Channel::GetRemoteRTCPData(
3800 unsigned int& NTPHigh,
3801 unsigned int& NTPLow,
3802 unsigned int& timestamp,
3803 unsigned int& playoutTimestamp,
3804 unsigned int* jitter,
3805 unsigned short* fractionLost)
3806{
3807 // --- Information from sender info in received Sender Reports
3808
3809 RTCPSenderInfo senderInfo;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003810 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003811 {
3812 _engineStatisticsPtr->SetLastError(
3813 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003814 "GetRemoteRTCPData() failed to retrieve sender info for remote "
niklase@google.com470e71d2011-07-07 08:21:25 +00003815 "side");
3816 return -1;
3817 }
3818
3819    // We only utilize 12 out of the 20 bytes in the sender info (the packet
3820    // and octet counts are ignored).
3821 NTPHigh = senderInfo.NTPseconds;
3822 NTPLow = senderInfo.NTPfraction;
3823 timestamp = senderInfo.RTPtimeStamp;
3824
3825 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3826 VoEId(_instanceId, _channelId),
3827 "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
3828 "timestamp=%lu",
3829 NTPHigh, NTPLow, timestamp);
3830
3831 // --- Locally derived information
3832
3833 // This value is updated on each incoming RTCP packet (0 when no packet
3834 // has been received)
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003835 playoutTimestamp = playout_timestamp_rtcp_;
niklase@google.com470e71d2011-07-07 08:21:25 +00003836
3837 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3838 VoEId(_instanceId, _channelId),
3839 "GetRemoteRTCPData() => playoutTimestamp=%lu",
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003840 playout_timestamp_rtcp_);
niklase@google.com470e71d2011-07-07 08:21:25 +00003841
3842 if (NULL != jitter || NULL != fractionLost)
3843 {
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003844        // Get all RTCP receiver report blocks that have been received on this
3845        // channel. If we have received RTP packets from a remote source, we
3846        // know its SSRC and use the report block from that source.
3847        // Otherwise, use the first report block.
3848 std::vector<RTCPReportBlock> remote_stats;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003849 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003850 remote_stats.empty()) {
3851 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3852 VoEId(_instanceId, _channelId),
3853 "GetRemoteRTCPData() failed to measure statistics due"
3854 " to lack of received RTP and/or RTCP packets");
3855 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003856 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003857
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003858 uint32_t remoteSSRC = _rtpRtcpModule->RemoteSSRC();
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003859 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
3860 for (; it != remote_stats.end(); ++it) {
3861 if (it->remoteSSRC == remoteSSRC)
3862 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003863 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003864
3865 if (it == remote_stats.end()) {
3866 // If we have not received any RTCP packets from this SSRC it probably
3867 // means that we have not received any RTP packets.
3868 // Use the first received report block instead.
3869 it = remote_stats.begin();
3870 remoteSSRC = it->remoteSSRC;
niklase@google.com470e71d2011-07-07 08:21:25 +00003871 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003872
xians@webrtc.org79af7342012-01-31 12:22:14 +00003873 if (jitter) {
3874 *jitter = it->jitter;
3875 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3876 VoEId(_instanceId, _channelId),
3877 "GetRemoteRTCPData() => jitter = %lu", *jitter);
3878 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003879
xians@webrtc.org79af7342012-01-31 12:22:14 +00003880 if (fractionLost) {
3881 *fractionLost = it->fractionLost;
3882 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3883 VoEId(_instanceId, _channelId),
3884 "GetRemoteRTCPData() => fractionLost = %lu",
3885 *fractionLost);
3886 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003887 }
3888 return 0;
3889}
3890
3891int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003892Channel::SendApplicationDefinedRTCPPacket(unsigned char subType,
niklase@google.com470e71d2011-07-07 08:21:25 +00003893 unsigned int name,
3894 const char* data,
3895 unsigned short dataLengthInBytes)
3896{
3897 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3898 "Channel::SendApplicationDefinedRTCPPacket()");
3899 if (!_sending)
3900 {
3901 _engineStatisticsPtr->SetLastError(
3902 VE_NOT_SENDING, kTraceError,
3903 "SendApplicationDefinedRTCPPacket() not sending");
3904 return -1;
3905 }
3906 if (NULL == data)
3907 {
3908 _engineStatisticsPtr->SetLastError(
3909 VE_INVALID_ARGUMENT, kTraceError,
3910 "SendApplicationDefinedRTCPPacket() invalid data value");
3911 return -1;
3912 }
3913 if (dataLengthInBytes % 4 != 0)
3914 {
3915 _engineStatisticsPtr->SetLastError(
3916 VE_INVALID_ARGUMENT, kTraceError,
3917 "SendApplicationDefinedRTCPPacket() invalid length value");
3918 return -1;
3919 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003920 RTCPMethod status = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003921 if (status == kRtcpOff)
3922 {
3923 _engineStatisticsPtr->SetLastError(
3924 VE_RTCP_ERROR, kTraceError,
3925 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
3926 return -1;
3927 }
3928
3929 // Create and schedule the RTCP APP packet for transmission
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003930 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
niklase@google.com470e71d2011-07-07 08:21:25 +00003931 subType,
3932 name,
3933 (const unsigned char*) data,
3934 dataLengthInBytes) != 0)
3935 {
3936 _engineStatisticsPtr->SetLastError(
3937 VE_SEND_ERROR, kTraceError,
3938 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
3939 return -1;
3940 }
3941 return 0;
3942}
3943
3944int
3945Channel::GetRTPStatistics(
3946 unsigned int& averageJitterMs,
3947 unsigned int& maxJitterMs,
3948 unsigned int& discardedPackets)
3949{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003950 uint8_t fraction_lost(0);
3951 uint32_t cum_lost(0);
3952 uint32_t ext_max(0);
3953 uint32_t jitter(0);
3954 uint32_t max_jitter(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00003955
3956    // The jitter statistics are updated for each received RTP packet and are
3957    // based on received packets only.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003958 if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
niklase@google.com470e71d2011-07-07 08:21:25 +00003959 &cum_lost,
3960 &ext_max,
3961 &jitter,
3962 &max_jitter) != 0)
3963 {
3964 _engineStatisticsPtr->SetLastError(
3965 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003966 "GetRTPStatistics() failed to read RTP statistics from the "
niklase@google.com470e71d2011-07-07 08:21:25 +00003967 "RTP/RTCP module");
3968 }
3969
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003970 const int32_t playoutFrequency =
niklase@google.com470e71d2011-07-07 08:21:25 +00003971 _audioCodingModule.PlayoutFrequency();
3972 if (playoutFrequency > 0)
3973 {
3974 // Scale RTP statistics given the current playout frequency
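            // (the RTP module reports jitter in RTP timestamp units, i.e.
            // samples, so dividing by samples-per-millisecond yields ms).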
3975 maxJitterMs = max_jitter / (playoutFrequency / 1000);
3976 averageJitterMs = jitter / (playoutFrequency / 1000);
3977 }
3978
3979 discardedPackets = _numberOfDiscardedPackets;
3980
3981 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3982 VoEId(_instanceId, _channelId),
3983 "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003984 " discardedPackets = %lu)",
niklase@google.com470e71d2011-07-07 08:21:25 +00003985 averageJitterMs, maxJitterMs, discardedPackets);
3986 return 0;
3987}
3988
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00003989int Channel::GetRemoteRTCPSenderInfo(SenderInfo* sender_info) {
3990 if (sender_info == NULL) {
3991 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
3992 "GetRemoteRTCPSenderInfo() invalid sender_info.");
3993 return -1;
3994 }
3995
3996 // Get the sender info from the latest received RTCP Sender Report.
3997 RTCPSenderInfo rtcp_sender_info;
3998 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_sender_info) != 0) {
3999 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4000 "GetRemoteRTCPSenderInfo() failed to read RTCP SR sender info.");
4001 return -1;
4002 }
4003
4004 sender_info->NTP_timestamp_high = rtcp_sender_info.NTPseconds;
4005 sender_info->NTP_timestamp_low = rtcp_sender_info.NTPfraction;
4006 sender_info->RTP_timestamp = rtcp_sender_info.RTPtimeStamp;
4007 sender_info->sender_packet_count = rtcp_sender_info.sendPacketCount;
4008 sender_info->sender_octet_count = rtcp_sender_info.sendOctetCount;
4009 return 0;
4010}
4011
4012int Channel::GetRemoteRTCPReportBlocks(
4013 std::vector<ReportBlock>* report_blocks) {
4014 if (report_blocks == NULL) {
4015 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
4016        "GetRemoteRTCPReportBlocks() invalid report_blocks.");
4017 return -1;
4018 }
4019
4020 // Get the report blocks from the latest received RTCP Sender or Receiver
4021 // Report. Each element in the vector contains the sender's SSRC and a
4022 // report block according to RFC 3550.
4023 std::vector<RTCPReportBlock> rtcp_report_blocks;
4024 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
4025 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4026 "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
4027 return -1;
4028 }
4029
4030 if (rtcp_report_blocks.empty())
4031 return 0;
4032
4033 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
4034 for (; it != rtcp_report_blocks.end(); ++it) {
4035 ReportBlock report_block;
4036 report_block.sender_SSRC = it->remoteSSRC;
4037 report_block.source_SSRC = it->sourceSSRC;
4038 report_block.fraction_lost = it->fractionLost;
4039 report_block.cumulative_num_packets_lost = it->cumulativeLost;
4040 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
4041 report_block.interarrival_jitter = it->jitter;
4042 report_block.last_SR_timestamp = it->lastSR;
4043 report_block.delay_since_last_SR = it->delaySinceLastSR;
4044 report_blocks->push_back(report_block);
4045 }
4046 return 0;
4047}
4048
niklase@google.com470e71d2011-07-07 08:21:25 +00004049int
4050Channel::GetRTPStatistics(CallStatistics& stats)
4051{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004052 uint8_t fraction_lost(0);
4053 uint32_t cum_lost(0);
4054 uint32_t ext_max(0);
4055 uint32_t jitter(0);
4056 uint32_t max_jitter(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004057
4058 // --- Part one of the final structure (four values)
4059
4060    // The jitter statistics are updated for each received RTP packet and are
4061    // based on received packets only.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004062 if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
niklase@google.com470e71d2011-07-07 08:21:25 +00004063 &cum_lost,
4064 &ext_max,
4065 &jitter,
4066 &max_jitter) != 0)
4067 {
4068 _engineStatisticsPtr->SetLastError(
4069 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
4070 "GetRTPStatistics() failed to read RTP statistics from the "
4071 "RTP/RTCP module");
4072 }
4073
4074 stats.fractionLost = fraction_lost;
4075 stats.cumulativeLost = cum_lost;
4076 stats.extendedMax = ext_max;
4077 stats.jitterSamples = jitter;
4078
4079 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4080 VoEId(_instanceId, _channelId),
4081 "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004082 " extendedMax=%lu, jitterSamples=%li)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004083 stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
4084 stats.jitterSamples);
4085
4086 // --- Part two of the final structure (one value)
4087
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004088 uint16_t RTT(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004089 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00004090 if (method == kRtcpOff)
4091 {
4092 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4093 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004094 "GetRTPStatistics() RTCP is disabled => valid RTT "
niklase@google.com470e71d2011-07-07 08:21:25 +00004095 "measurements cannot be retrieved");
4096 } else
4097 {
4098 // The remote SSRC will be zero if no RTP packet has been received.
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004099 uint32_t remoteSSRC = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004100 if (remoteSSRC > 0)
4101 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004102 uint16_t avgRTT(0);
4103 uint16_t maxRTT(0);
4104 uint16_t minRTT(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004105
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004106 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT)
niklase@google.com470e71d2011-07-07 08:21:25 +00004107 != 0)
4108 {
4109 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4110 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004111 "GetRTPStatistics() failed to retrieve RTT from "
niklase@google.com470e71d2011-07-07 08:21:25 +00004112 "the RTP/RTCP module");
4113 }
4114 } else
4115 {
4116 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4117 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004118 "GetRTPStatistics() failed to measure RTT since no "
niklase@google.com470e71d2011-07-07 08:21:25 +00004119 "RTP packets have been received yet");
4120 }
4121 }
4122
4123 stats.rttMs = static_cast<int> (RTT);
4124
4125 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4126 VoEId(_instanceId, _channelId),
4127 "GetRTPStatistics() => rttMs=%d", stats.rttMs);
4128
4129 // --- Part three of the final structure (four values)
4130
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004131 uint32_t bytesSent(0);
4132 uint32_t packetsSent(0);
4133 uint32_t bytesReceived(0);
4134 uint32_t packetsReceived(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004135
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004136 if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
niklase@google.com470e71d2011-07-07 08:21:25 +00004137 &packetsSent,
4138 &bytesReceived,
4139 &packetsReceived) != 0)
4140 {
4141 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4142 VoEId(_instanceId, _channelId),
4143                     "GetRTPStatistics() failed to retrieve RTP data counters =>"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004144 " output will not be complete");
niklase@google.com470e71d2011-07-07 08:21:25 +00004145 }
4146
4147 stats.bytesSent = bytesSent;
4148 stats.packetsSent = packetsSent;
4149 stats.bytesReceived = bytesReceived;
4150 stats.packetsReceived = packetsReceived;
4151
4152 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4153 VoEId(_instanceId, _channelId),
4154 "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004155 " bytesReceived=%d, packetsReceived=%d)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004156 stats.bytesSent, stats.packetsSent, stats.bytesReceived,
4157 stats.packetsReceived);
4158
4159 return 0;
4160}
4161
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004162int Channel::SetFECStatus(bool enable, int redPayloadtype) {
4163 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4164 "Channel::SetFECStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00004165
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00004166 if (enable) {
4167 if (redPayloadtype < 0 || redPayloadtype > 127) {
4168 _engineStatisticsPtr->SetLastError(
4169 VE_PLTYPE_ERROR, kTraceError,
4170 "SetFECStatus() invalid RED payload type");
4171 return -1;
4172 }
4173
4174 if (SetRedPayloadType(redPayloadtype) < 0) {
4175 _engineStatisticsPtr->SetLastError(
4176 VE_CODEC_ERROR, kTraceError,
4177            "SetFECStatus() failed to register RED payload type in the ACM");
4178 return -1;
4179 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004180 }
niklase@google.com470e71d2011-07-07 08:21:25 +00004181
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004182 if (_audioCodingModule.SetFECStatus(enable) != 0) {
4183 _engineStatisticsPtr->SetLastError(
4184 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4185 "SetFECStatus() failed to set FEC state in the ACM");
4186 return -1;
4187 }
4188 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004189}
4190
4191int
4192Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
4193{
4194 enabled = _audioCodingModule.FECStatus();
4195 if (enabled)
4196 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004197 int8_t payloadType(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004198 if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004199 {
4200 _engineStatisticsPtr->SetLastError(
4201 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4202 "GetFECStatus() failed to retrieve RED PT from RTP/RTCP "
4203 "module");
4204 return -1;
4205        }
            // Report the payload type retrieved from the RTP/RTCP module.
            redPayloadtype = payloadType;
4206        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4207 VoEId(_instanceId, _channelId),
4208 "GetFECStatus() => enabled=%d, redPayloadtype=%d",
4209 enabled, redPayloadtype);
4210 return 0;
4211 }
4212 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4213 VoEId(_instanceId, _channelId),
4214 "GetFECStatus() => enabled=%d", enabled);
4215 return 0;
4216}
4217
4218int
niklase@google.com470e71d2011-07-07 08:21:25 +00004219Channel::StartRTPDump(const char fileNameUTF8[1024],
4220 RTPDirections direction)
4221{
4222 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4223 "Channel::StartRTPDump()");
4224 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4225 {
4226 _engineStatisticsPtr->SetLastError(
4227 VE_INVALID_ARGUMENT, kTraceError,
4228 "StartRTPDump() invalid RTP direction");
4229 return -1;
4230 }
4231 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4232 &_rtpDumpIn : &_rtpDumpOut;
4233 if (rtpDumpPtr == NULL)
4234 {
4235 assert(false);
4236 return -1;
4237 }
4238 if (rtpDumpPtr->IsActive())
4239 {
4240 rtpDumpPtr->Stop();
4241 }
4242 if (rtpDumpPtr->Start(fileNameUTF8) != 0)
4243 {
4244 _engineStatisticsPtr->SetLastError(
4245 VE_BAD_FILE, kTraceError,
4246 "StartRTPDump() failed to create file");
4247 return -1;
4248 }
4249 return 0;
4250}
4251
4252int
4253Channel::StopRTPDump(RTPDirections direction)
4254{
4255 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4256 "Channel::StopRTPDump()");
4257 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4258 {
4259 _engineStatisticsPtr->SetLastError(
4260 VE_INVALID_ARGUMENT, kTraceError,
4261 "StopRTPDump() invalid RTP direction");
4262 return -1;
4263 }
4264 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4265 &_rtpDumpIn : &_rtpDumpOut;
4266 if (rtpDumpPtr == NULL)
4267 {
4268 assert(false);
4269 return -1;
4270 }
4271 if (!rtpDumpPtr->IsActive())
4272 {
4273 return 0;
4274 }
4275 return rtpDumpPtr->Stop();
4276}
4277
4278bool
4279Channel::RTPDumpIsActive(RTPDirections direction)
4280{
4281 if ((direction != kRtpIncoming) &&
4282 (direction != kRtpOutgoing))
4283 {
4284 _engineStatisticsPtr->SetLastError(
4285 VE_INVALID_ARGUMENT, kTraceError,
4286 "RTPDumpIsActive() invalid RTP direction");
4287 return false;
4288 }
4289 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4290 &_rtpDumpIn : &_rtpDumpOut;
4291 return rtpDumpPtr->IsActive();
4292}
4293
4294int
4295Channel::InsertExtraRTPPacket(unsigned char payloadType,
4296 bool markerBit,
4297 const char* payloadData,
4298 unsigned short payloadSize)
4299{
4300 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4301 "Channel::InsertExtraRTPPacket()");
4302 if (payloadType > 127)
4303 {
4304 _engineStatisticsPtr->SetLastError(
4305 VE_INVALID_PLTYPE, kTraceError,
4306 "InsertExtraRTPPacket() invalid payload type");
4307 return -1;
4308 }
4309 if (payloadData == NULL)
4310 {
4311 _engineStatisticsPtr->SetLastError(
4312 VE_INVALID_ARGUMENT, kTraceError,
4313 "InsertExtraRTPPacket() invalid payload data");
4314 return -1;
4315 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004316 if (payloadSize > _rtpRtcpModule->MaxDataPayloadLength())
niklase@google.com470e71d2011-07-07 08:21:25 +00004317 {
4318 _engineStatisticsPtr->SetLastError(
4319 VE_INVALID_ARGUMENT, kTraceError,
4320 "InsertExtraRTPPacket() invalid payload size");
4321 return -1;
4322 }
4323 if (!_sending)
4324 {
4325 _engineStatisticsPtr->SetLastError(
4326 VE_NOT_SENDING, kTraceError,
4327 "InsertExtraRTPPacket() not sending");
4328 return -1;
4329 }
4330
4331 // Create extra RTP packet by calling RtpRtcp::SendOutgoingData().
4332 // Transport::SendPacket() will be called by the module when the RTP packet
4333 // is created.
4334 // The call to SendOutgoingData() does *not* modify the timestamp and
4335 // payloadtype to ensure that the RTP module generates a valid RTP packet
4336 // (user might utilize a non-registered payload type).
4337 // The marker bit and payload type will be replaced just before the actual
4338 // transmission, i.e., the actual modification is done *after* the RTP
4339 // module has delivered its RTP packet back to the VoE.
4340 // We will use the stored values above when the packet is modified
4341 // (see Channel::SendPacket()).
4342
4343 _extraPayloadType = payloadType;
4344 _extraMarkerBit = markerBit;
4345 _insertExtraRTPPacket = true;
4346
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004347 if (_rtpRtcpModule->SendOutgoingData(kAudioFrameSpeech,
niklase@google.com470e71d2011-07-07 08:21:25 +00004348 _lastPayloadType,
4349 _lastLocalTimeStamp,
stefan@webrtc.orgddfdfed2012-07-03 13:21:22 +00004350 // Leaving the time when this frame was
4351 // received from the capture device as
4352 // undefined for voice for now.
4353 -1,
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004354 (const uint8_t*) payloadData,
niklase@google.com470e71d2011-07-07 08:21:25 +00004355 payloadSize) != 0)
4356 {
4357 _engineStatisticsPtr->SetLastError(
4358 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4359 "InsertExtraRTPPacket() failed to send extra RTP packet");
4360 return -1;
4361 }
4362
4363 return 0;
4364}
4365
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004366uint32_t
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004367Channel::Demultiplex(const AudioFrame& audioFrame)
niklase@google.com470e71d2011-07-07 08:21:25 +00004368{
4369 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004370 "Channel::Demultiplex()");
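        // Take a private copy of the shared capture frame and tag it with this
        // channel's id so that per-channel processing can proceed independently.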
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004371 _audioFrame.CopyFrom(audioFrame);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004372 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004373 return 0;
4374}
4375
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004376uint32_t
xians@google.com0b0665a2011-08-08 08:18:44 +00004377Channel::PrepareEncodeAndSend(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004378{
4379 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4380 "Channel::PrepareEncodeAndSend()");
4381
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004382 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004383 {
4384 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4385 "Channel::PrepareEncodeAndSend() invalid audio frame");
4386 return -1;
4387 }
4388
4389 if (_inputFilePlaying)
4390 {
4391 MixOrReplaceAudioWithFile(mixingFrequency);
4392 }
4393
4394 if (_mute)
4395 {
4396 AudioFrameOperations::Mute(_audioFrame);
4397 }
4398
4399 if (_inputExternalMedia)
4400 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004401 CriticalSectionScoped cs(&_callbackCritSect);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004402 const bool isStereo = (_audioFrame.num_channels_ == 2);
niklase@google.com470e71d2011-07-07 08:21:25 +00004403 if (_inputExternalMediaCallbackPtr)
4404 {
4405 _inputExternalMediaCallbackPtr->Process(
4406 _channelId,
4407 kRecordingPerChannel,
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004408 (int16_t*)_audioFrame.data_,
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004409 _audioFrame.samples_per_channel_,
4410 _audioFrame.sample_rate_hz_,
niklase@google.com470e71d2011-07-07 08:21:25 +00004411 isStereo);
4412 }
4413 }
4414
4415 InsertInbandDtmfTone();
4416
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004417 if (_includeAudioLevelIndication)
4418 {
4419 assert(_rtpAudioProc.get() != NULL);
4420
4421 // Check if settings need to be updated.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004422 if (_rtpAudioProc->sample_rate_hz() != _audioFrame.sample_rate_hz_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004423 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004424 if (_rtpAudioProc->set_sample_rate_hz(_audioFrame.sample_rate_hz_) !=
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004425 AudioProcessing::kNoError)
4426 {
4427 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4428 VoEId(_instanceId, _channelId),
4429 "Error setting AudioProcessing sample rate");
4430 return -1;
4431 }
4432 }
4433
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004434 if (_rtpAudioProc->num_input_channels() != _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004435 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004436 if (_rtpAudioProc->set_num_channels(_audioFrame.num_channels_,
4437 _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004438 != AudioProcessing::kNoError)
4439 {
4440 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4441 VoEId(_instanceId, _channelId),
4442 "Error setting AudioProcessing channels");
4443 return -1;
4444 }
4445 }
4446
4447 // Performs level analysis only; does not affect the signal.
4448 _rtpAudioProc->ProcessStream(&_audioFrame);
4449 }
4450
niklase@google.com470e71d2011-07-07 08:21:25 +00004451 return 0;
4452}
4453
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004454uint32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00004455Channel::EncodeAndSend()
4456{
4457 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4458 "Channel::EncodeAndSend()");
4459
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004460 assert(_audioFrame.num_channels_ <= 2);
4461 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004462 {
4463 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4464 "Channel::EncodeAndSend() invalid audio frame");
4465 return -1;
4466 }
4467
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004468 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004469
4470 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
4471
4472 // The ACM resamples internally.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004473 _audioFrame.timestamp_ = _timeStamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00004474 if (_audioCodingModule.Add10MsData((AudioFrame&)_audioFrame) != 0)
4475 {
4476 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
4477 "Channel::EncodeAndSend() ACM encoding failed");
4478 return -1;
4479 }
4480
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004481 _timeStamp += _audioFrame.samples_per_channel_;
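    // For example, a 10 ms frame at 16 kHz carries 160 samples per channel,
    // so the RTP timestamp advances by 160 for every frame handed to the ACM.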
niklase@google.com470e71d2011-07-07 08:21:25 +00004482
4483 // --- Encode if complete frame is ready
4484
4485 // This call will trigger AudioPacketizationCallback::SendData if encoding
4486 // is done and payload is ready for packetization and transmission.
4487 return _audioCodingModule.Process();
4488}
4489
4490int Channel::RegisterExternalMediaProcessing(
4491 ProcessingTypes type,
4492 VoEMediaProcess& processObject)
4493{
4494 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4495 "Channel::RegisterExternalMediaProcessing()");
4496
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004497 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004498
4499 if (kPlaybackPerChannel == type)
4500 {
4501 if (_outputExternalMediaCallbackPtr)
4502 {
4503 _engineStatisticsPtr->SetLastError(
4504 VE_INVALID_OPERATION, kTraceError,
4505 "Channel::RegisterExternalMediaProcessing() "
4506 "output external media already enabled");
4507 return -1;
4508 }
4509 _outputExternalMediaCallbackPtr = &processObject;
4510 _outputExternalMedia = true;
4511 }
4512 else if (kRecordingPerChannel == type)
4513 {
4514 if (_inputExternalMediaCallbackPtr)
4515 {
4516 _engineStatisticsPtr->SetLastError(
4517 VE_INVALID_OPERATION, kTraceError,
4518 "Channel::RegisterExternalMediaProcessing() "
4519                "input external media already enabled");
4520 return -1;
4521 }
4522 _inputExternalMediaCallbackPtr = &processObject;
4523 _inputExternalMedia = true;
4524 }
4525 return 0;
4526}
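// Illustrative sketch only (not part of the original file): a client-supplied
// VoEMediaProcess implementation as it would be driven by the callback that
// RegisterExternalMediaProcessing() installs. The Process() signature is
// inferred from the call earlier in this file; the class name and the gain
// value are hypothetical, and samples are assumed to be interleaved when
// isStereo is true.
//
//   class HalfGainProcessor : public VoEMediaProcess {
//    public:
//     virtual void Process(const int channel, const ProcessingTypes type,
//                          int16_t audio10ms[], const int length,
//                          const int samplingFreq, const bool isStereo) {
//       const int num_samples = isStereo ? 2 * length : length;
//       for (int i = 0; i < num_samples; ++i) {
//         audio10ms[i] /= 2;  // Attenuate the 10 ms block by roughly 6 dB.
//       }
//     }
//   };
//
//   // Usage per channel (error handling omitted):
//   //   channel->RegisterExternalMediaProcessing(kRecordingPerChannel,
//   //                                            half_gain_processor);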
4527
4528int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
4529{
4530 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4531 "Channel::DeRegisterExternalMediaProcessing()");
4532
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004533 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004534
4535 if (kPlaybackPerChannel == type)
4536 {
4537 if (!_outputExternalMediaCallbackPtr)
4538 {
4539 _engineStatisticsPtr->SetLastError(
4540 VE_INVALID_OPERATION, kTraceWarning,
4541 "Channel::DeRegisterExternalMediaProcessing() "
4542 "output external media already disabled");
4543 return 0;
4544 }
4545 _outputExternalMedia = false;
4546 _outputExternalMediaCallbackPtr = NULL;
4547 }
4548 else if (kRecordingPerChannel == type)
4549 {
4550 if (!_inputExternalMediaCallbackPtr)
4551 {
4552 _engineStatisticsPtr->SetLastError(
4553 VE_INVALID_OPERATION, kTraceWarning,
4554 "Channel::DeRegisterExternalMediaProcessing() "
4555 "input external media already disabled");
4556 return 0;
4557 }
4558 _inputExternalMedia = false;
4559 _inputExternalMediaCallbackPtr = NULL;
4560 }
4561
4562 return 0;
4563}
4564
roosa@google.com1b60ceb2012-12-12 23:00:29 +00004565int Channel::SetExternalMixing(bool enabled) {
4566 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4567 "Channel::SetExternalMixing(enabled=%d)", enabled);
4568
4569 if (_playing)
4570 {
4571 _engineStatisticsPtr->SetLastError(
4572 VE_INVALID_OPERATION, kTraceError,
4573 "Channel::SetExternalMixing() "
4574 "external mixing cannot be changed while playing.");
4575 return -1;
4576 }
4577
4578 _externalMixing = enabled;
4579
4580 return 0;
4581}
4582
niklase@google.com470e71d2011-07-07 08:21:25 +00004583int
4584Channel::ResetRTCPStatistics()
4585{
4586 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4587 "Channel::ResetRTCPStatistics()");
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004588 uint32_t remoteSSRC(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004589 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
4590 return _rtpRtcpModule->ResetRTT(remoteSSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00004591}
4592
4593int
4594Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
4595{
4596 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4597 "Channel::GetRoundTripTimeSummary()");
4598    // Override the default module outputs when RTCP is disabled. This keeps
4599    // the behavior backward compatible with VoiceEngine versions that did
4600    // not use the RTP/RTCP module.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004601 if (!_rtpRtcpModule->RTCP())
niklase@google.com470e71d2011-07-07 08:21:25 +00004602 {
4603 delaysMs.min = -1;
4604 delaysMs.max = -1;
4605 delaysMs.average = -1;
4606 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4607 "Channel::GetRoundTripTimeSummary() RTCP is disabled =>"
4608 " valid RTT measurements cannot be retrieved");
4609 return 0;
4610 }
4611
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004612 uint32_t remoteSSRC;
4613 uint16_t RTT;
4614 uint16_t avgRTT;
4615 uint16_t maxRTT;
4616 uint16_t minRTT;
niklase@google.com470e71d2011-07-07 08:21:25 +00004617 // The remote SSRC will be zero if no RTP packet has been received.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004618 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004619 if (remoteSSRC == 0)
4620 {
4621 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4622 "Channel::GetRoundTripTimeSummary() unable to measure RTT"
4623 " since no RTP packet has been received yet");
4624 }
4625
4626 // Retrieve RTT statistics from the RTP/RTCP module for the specified
4627 // channel and SSRC. The SSRC is required to parse out the correct source
4628 // in conference scenarios.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004629 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT,&maxRTT) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004630 {
4631 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4632 "GetRoundTripTimeSummary unable to retrieve RTT values"
4633 " from the RTCP layer");
4634 delaysMs.min = -1; delaysMs.max = -1; delaysMs.average = -1;
4635 }
4636 else
4637 {
4638 delaysMs.min = minRTT;
4639 delaysMs.max = maxRTT;
4640 delaysMs.average = avgRTT;
4641 }
4642 return 0;
4643}
4644
4645int
4646Channel::GetNetworkStatistics(NetworkStatistics& stats)
4647{
4648 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4649 "Channel::GetNetworkStatistics()");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00004650 ACMNetworkStatistics acm_stats;
4651 int return_value = _audioCodingModule.NetworkStatistics(&acm_stats);
4652 if (return_value >= 0) {
4653 memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
4654 }
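  // Note: the memcpy above assumes that NetworkStatistics and
  // ACMNetworkStatistics share the same memory layout for the copied
  // sizeof(NetworkStatistics) bytes.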
4655 return return_value;
niklase@google.com470e71d2011-07-07 08:21:25 +00004656}
4657
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004658bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
4659 int* playout_buffer_delay_ms) const {
4660 if (_average_jitter_buffer_delay_us == 0) {
niklase@google.com470e71d2011-07-07 08:21:25 +00004661 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004662 "Channel::GetDelayEstimate() no valid estimate.");
4663 return false;
4664 }
4665 *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 +
4666 _recPacketDelayMs;
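  // Worked example with illustrative numbers: if
  // _average_jitter_buffer_delay_us is 57300 and _recPacketDelayMs is 20, the
  // rounded conversion gives (57300 + 500) / 1000 + 20 = 57 + 20 = 77 ms.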
4667 *playout_buffer_delay_ms = playout_delay_ms_;
4668 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4669 "Channel::GetDelayEstimate()");
4670 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00004671}
4672
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +00004673int Channel::SetInitialPlayoutDelay(int delay_ms)
4674{
4675 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4676 "Channel::SetInitialPlayoutDelay()");
4677 if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
4678 (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
4679 {
4680 _engineStatisticsPtr->SetLastError(
4681 VE_INVALID_ARGUMENT, kTraceError,
4682 "SetInitialPlayoutDelay() invalid min delay");
4683 return -1;
4684 }
4685 if (_audioCodingModule.SetInitialPlayoutDelay(delay_ms) != 0)
4686 {
4687 _engineStatisticsPtr->SetLastError(
4688 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4689 "SetInitialPlayoutDelay() failed to set min playout delay");
4690 return -1;
4691 }
4692 return 0;
4693}
4694
4695
niklase@google.com470e71d2011-07-07 08:21:25 +00004696int
4697Channel::SetMinimumPlayoutDelay(int delayMs)
4698{
4699 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4700 "Channel::SetMinimumPlayoutDelay()");
4701 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
4702 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
4703 {
4704 _engineStatisticsPtr->SetLastError(
4705 VE_INVALID_ARGUMENT, kTraceError,
4706 "SetMinimumPlayoutDelay() invalid min delay");
4707 return -1;
4708 }
4709 if (_audioCodingModule.SetMinimumPlayoutDelay(delayMs) != 0)
4710 {
4711 _engineStatisticsPtr->SetLastError(
4712 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4713 "SetMinimumPlayoutDelay() failed to set min playout delay");
4714 return -1;
4715 }
4716 return 0;
4717}
4718
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004719void Channel::UpdatePlayoutTimestamp(bool rtcp) {
4720 uint32_t playout_timestamp = 0;
4721
4722 if (_audioCodingModule.PlayoutTimestamp(&playout_timestamp) == -1) {
4723 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4724 "Channel::UpdatePlayoutTimestamp() failed to read playout"
4725 " timestamp from the ACM");
4726 _engineStatisticsPtr->SetLastError(
4727 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4728 "UpdatePlayoutTimestamp() failed to retrieve timestamp");
4729 return;
4730 }
4731
4732 uint16_t delay_ms = 0;
4733 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
4734 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4735 "Channel::UpdatePlayoutTimestamp() failed to read playout"
4736 " delay from the ADM");
4737 _engineStatisticsPtr->SetLastError(
4738 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4739 "UpdatePlayoutTimestamp() failed to retrieve playout delay");
4740 return;
4741 }
4742
4743 int32_t playout_frequency = _audioCodingModule.PlayoutFrequency();
4744  CodecInst current_receive_codec;
4745  if (_audioCodingModule.ReceiveCodec(&current_receive_codec) == 0) {
4746    if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
4747      playout_frequency = 8000;
4748    } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
4749 playout_frequency = 48000;
niklase@google.com470e71d2011-07-07 08:21:25 +00004750 }
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004751 }
4752
4753 // Remove the playout delay.
4754 playout_timestamp -= (delay_ms * (playout_frequency / 1000));
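  // For illustration (hypothetical numbers): at a 16000 Hz playout frequency,
  // a 60 ms device delay corresponds to 60 * (16000 / 1000) = 960 timestamp
  // ticks, which are subtracted from the ACM playout timestamp above.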
4755
4756 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4757                 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %u",
4758 playout_timestamp);
4759
4760 if (rtcp) {
4761 playout_timestamp_rtcp_ = playout_timestamp;
4762 } else {
4763 playout_timestamp_rtp_ = playout_timestamp;
4764 }
4765 playout_delay_ms_ = delay_ms;
4766}
4767
4768int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
4769 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4770 "Channel::GetPlayoutTimestamp()");
4771 if (playout_timestamp_rtp_ == 0) {
4772 _engineStatisticsPtr->SetLastError(
4773 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4774 "GetPlayoutTimestamp() failed to retrieve timestamp");
4775 return -1;
4776 }
4777 timestamp = playout_timestamp_rtp_;
4778 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4779 VoEId(_instanceId,_channelId),
4780 "GetPlayoutTimestamp() => timestamp=%u", timestamp);
4781 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004782}
4783
4784int
4785Channel::SetInitTimestamp(unsigned int timestamp)
4786{
4787 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4788 "Channel::SetInitTimestamp()");
4789 if (_sending)
4790 {
4791 _engineStatisticsPtr->SetLastError(
4792 VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
4793 return -1;
4794 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004795 if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004796 {
4797 _engineStatisticsPtr->SetLastError(
4798 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4799 "SetInitTimestamp() failed to set timestamp");
4800 return -1;
4801 }
4802 return 0;
4803}
4804
4805int
4806Channel::SetInitSequenceNumber(short sequenceNumber)
4807{
4808 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4809 "Channel::SetInitSequenceNumber()");
4810 if (_sending)
4811 {
4812 _engineStatisticsPtr->SetLastError(
4813 VE_SENDING, kTraceError,
4814 "SetInitSequenceNumber() already sending");
4815 return -1;
4816 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004817 if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004818 {
4819 _engineStatisticsPtr->SetLastError(
4820 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4821 "SetInitSequenceNumber() failed to set sequence number");
4822 return -1;
4823 }
4824 return 0;
4825}
4826
4827int
4828Channel::GetRtpRtcp(RtpRtcp* &rtpRtcpModule) const
4829{
4830 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4831 "Channel::GetRtpRtcp()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004832 rtpRtcpModule = _rtpRtcpModule.get();
niklase@google.com470e71d2011-07-07 08:21:25 +00004833 return 0;
4834}
4835
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004836// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
4837// a shared helper.
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004838int32_t
pbos@webrtc.org92135212013-05-14 08:31:39 +00004839Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004840{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004841 scoped_array<int16_t> fileBuffer(new int16_t[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004842 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004843
4844 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004845 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004846
4847 if (_inputFilePlayerPtr == NULL)
4848 {
4849 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4850 VoEId(_instanceId, _channelId),
4851                         "Channel::MixOrReplaceAudioWithFile() file player"
4852                         " does not exist");
4853 return -1;
4854 }
4855
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004856 if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00004857 fileSamples,
4858 mixingFrequency) == -1)
4859 {
4860 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4861 VoEId(_instanceId, _channelId),
4862 "Channel::MixOrReplaceAudioWithFile() file mixing "
4863 "failed");
4864 return -1;
4865 }
4866 if (fileSamples == 0)
4867 {
4868 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4869 VoEId(_instanceId, _channelId),
4870                         "Channel::MixOrReplaceAudioWithFile() end of file reached");
4871 return 0;
4872 }
4873 }
4874
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004875 assert(_audioFrame.samples_per_channel_ == fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004876
4877 if (_mixFileWithMicrophone)
4878 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004879 // Currently file stream is always mono.
4880 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004881 Utility::MixWithSat(_audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004882 _audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004883 fileBuffer.get(),
4884 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004885 fileSamples);
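        // MixWithSat() mixes the mono file buffer (passed with channel count
        // 1) into the microphone frame with saturation, so the summed samples
        // clamp at the int16_t limits rather than wrapping around.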
niklase@google.com470e71d2011-07-07 08:21:25 +00004886 }
4887 else
4888 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004889 // Replace ACM audio with file.
4890 // Currently file stream is always mono.
4891 // TODO(xians): Change the code when FilePlayer supports real stereo.
niklase@google.com470e71d2011-07-07 08:21:25 +00004892 _audioFrame.UpdateFrame(_channelId,
4893 -1,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004894 fileBuffer.get(),
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004895 fileSamples,
niklase@google.com470e71d2011-07-07 08:21:25 +00004896 mixingFrequency,
4897 AudioFrame::kNormalSpeech,
4898 AudioFrame::kVadUnknown,
4899 1);
4900
4901 }
4902 return 0;
4903}
4904
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004905int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00004906Channel::MixAudioWithFile(AudioFrame& audioFrame,
pbos@webrtc.org92135212013-05-14 08:31:39 +00004907 int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004908{
4909 assert(mixingFrequency <= 32000);
4910
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004911 scoped_array<int16_t> fileBuffer(new int16_t[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004912 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004913
4914 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004915 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004916
4917 if (_outputFilePlayerPtr == NULL)
4918 {
4919 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4920 VoEId(_instanceId, _channelId),
4921 "Channel::MixAudioWithFile() file mixing failed");
4922 return -1;
4923 }
4924
4925 // We should get the frequency we ask for.
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004926 if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00004927 fileSamples,
4928 mixingFrequency) == -1)
4929 {
4930 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4931 VoEId(_instanceId, _channelId),
4932 "Channel::MixAudioWithFile() file mixing failed");
4933 return -1;
4934 }
4935 }
4936
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004937 if (audioFrame.samples_per_channel_ == fileSamples)
niklase@google.com470e71d2011-07-07 08:21:25 +00004938 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004939 // Currently file stream is always mono.
4940 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004941 Utility::MixWithSat(audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004942 audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004943 fileBuffer.get(),
4944 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004945 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004946 }
4947 else
4948 {
4949 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004950 "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
niklase@google.com470e71d2011-07-07 08:21:25 +00004951 "fileSamples(%d)",
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004952 audioFrame.samples_per_channel_, fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004953 return -1;
4954 }
4955
4956 return 0;
4957}
4958
4959int
4960Channel::InsertInbandDtmfTone()
4961{
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00004962 // Check if we should start a new tone.
niklase@google.com470e71d2011-07-07 08:21:25 +00004963 if (_inbandDtmfQueue.PendingDtmf() &&
4964 !_inbandDtmfGenerator.IsAddingTone() &&
4965 _inbandDtmfGenerator.DelaySinceLastTone() >
4966 kMinTelephoneEventSeparationMs)
4967 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004968 int8_t eventCode(0);
4969 uint16_t lengthMs(0);
4970 uint8_t attenuationDb(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004971
4972 eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
4973 _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
4974 if (_playInbandDtmfEvent)
4975 {
4976 // Add tone to output mixer using a reduced length to minimize
4977 // risk of echo.
4978 _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
4979 attenuationDb);
4980 }
4981 }
4982
4983 if (_inbandDtmfGenerator.IsAddingTone())
4984 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004985 uint16_t frequency(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004986 _inbandDtmfGenerator.GetSampleRate(frequency);
4987
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004988 if (frequency != _audioFrame.sample_rate_hz_)
niklase@google.com470e71d2011-07-07 08:21:25 +00004989 {
4990 // Update sample rate of Dtmf tone since the mixing frequency
4991 // has changed.
4992 _inbandDtmfGenerator.SetSampleRate(
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004993 (uint16_t) (_audioFrame.sample_rate_hz_));
niklase@google.com470e71d2011-07-07 08:21:25 +00004994 // Reset the tone to be added taking the new sample rate into
4995 // account.
4996 _inbandDtmfGenerator.ResetTone();
4997 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004998
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004999 int16_t toneBuffer[320];
5000 uint16_t toneSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005001 // Get 10ms tone segment and set time since last tone to zero
5002 if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
5003 {
5004 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5005 VoEId(_instanceId, _channelId),
5006                         "Channel::InsertInbandDtmfTone() inserting DTMF tone failed");
5007 return -1;
5008 }
5009
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005010 // Replace mixed audio with DTMF tone.
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005011 for (int sample = 0;
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005012 sample < _audioFrame.samples_per_channel_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005013 sample++)
5014 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005015 for (int channel = 0;
5016 channel < _audioFrame.num_channels_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005017 channel++)
5018 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005019 const int index = sample * _audioFrame.num_channels_ + channel;
5020 _audioFrame.data_[index] = toneBuffer[sample];
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005021 }
5022 }
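        // The frame data is interleaved, so for a stereo frame the mono tone
        // sample is written to every channel slot: indices 0 and 1 hold
        // toneBuffer[0], indices 2 and 3 hold toneBuffer[1], and so on.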
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005023
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005024 assert(_audioFrame.samples_per_channel_ == toneSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005025 } else
5026 {
5027 // Add 10ms to "delay-since-last-tone" counter
5028 _inbandDtmfGenerator.UpdateDelaySinceLastTone();
5029 }
5030 return 0;
5031}
5032
niklase@google.com470e71d2011-07-07 08:21:25 +00005033void
5034Channel::ResetDeadOrAliveCounters()
5035{
5036 _countDeadDetections = 0;
5037 _countAliveDetections = 0;
5038}
5039
5040void
5041Channel::UpdateDeadOrAliveCounters(bool alive)
5042{
5043 if (alive)
5044 _countAliveDetections++;
5045 else
5046 _countDeadDetections++;
5047}
5048
5049int
5050Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
5051{
5052 bool enabled;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005053 uint8_t timeSec;
niklase@google.com470e71d2011-07-07 08:21:25 +00005054
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00005055 _rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, timeSec);
niklase@google.com470e71d2011-07-07 08:21:25 +00005056 if (!enabled)
5057 return (-1);
5058
5059 countDead = static_cast<int> (_countDeadDetections);
5060 countAlive = static_cast<int> (_countAliveDetections);
5061 return 0;
5062}
5063
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005064int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00005065Channel::SendPacketRaw(const void *data, int len, bool RTCP)
5066{
5067 if (_transportPtr == NULL)
5068 {
5069 return -1;
5070 }
5071 if (!RTCP)
5072 {
5073 return _transportPtr->SendPacket(_channelId, data, len);
5074 }
5075 else
5076 {
5077 return _transportPtr->SendRTCPPacket(_channelId, data, len);
5078 }
5079}
5080
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005081// Called for incoming RTP packets after successful RTP header parsing.
5082void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
5083 uint16_t sequence_number) {
5084 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
5085               "Channel::UpdatePacketDelay(timestamp=%u, sequenceNumber=%u)",
5086 rtp_timestamp, sequence_number);
niklase@google.com470e71d2011-07-07 08:21:25 +00005087
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005088 // Get frequency of last received payload
5089 int rtp_receive_frequency = _audioCodingModule.ReceiveFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +00005090
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005091 CodecInst current_receive_codec;
5092 if (_audioCodingModule.ReceiveCodec(&current_receive_codec) != 0) {
5093 return;
5094 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005095
turaj@webrtc.orge46c8d32013-05-22 20:39:43 +00005096 // Update the least required delay.
5097 least_required_delay_ms_ = _audioCodingModule.LeastRequiredDelayMs();
5098
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005099 if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
5100 // Even though the actual sampling rate for G.722 audio is
5101 // 16,000 Hz, the RTP clock rate for the G722 payload format is
5102 // 8,000 Hz because that value was erroneously assigned in
5103 // RFC 1890 and must remain unchanged for backward compatibility.
5104 rtp_receive_frequency = 8000;
5105 } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
5106 // We are resampling Opus internally to 32,000 Hz until all our
5107 // DSP routines can operate at 48,000 Hz, but the RTP clock
5108 // rate for the Opus payload format is standardized to 48,000 Hz,
5109 // because that is the maximum supported decoding sampling rate.
5110 rtp_receive_frequency = 48000;
5111 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005112
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005113  // playout_timestamp_rtp_ is updated in UpdatePlayoutTimestamp for every
5114  // incoming packet.
5115 uint32_t timestamp_diff_ms = (rtp_timestamp - playout_timestamp_rtp_) /
5116 (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00005117
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005118 uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) /
5119 (rtp_receive_frequency / 1000);
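  // Example with hypothetical values: two consecutive 20 ms packets of a
  // 16000 Hz stream are 320 RTP ticks apart, so packet_delay_ms becomes
  // 320 / (16000 / 1000) = 20 ms; timestamp_diff_ms is derived the same way,
  // but relative to the last played-out RTP timestamp.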
niklase@google.com470e71d2011-07-07 08:21:25 +00005120
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005121 _previousTimestamp = rtp_timestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00005122
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005123 if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
5124 timestamp_diff_ms = 0;
5125 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005126
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005127 if (timestamp_diff_ms == 0) return;
niklase@google.com470e71d2011-07-07 08:21:25 +00005128
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005129 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
5130 _recPacketDelayMs = packet_delay_ms;
5131 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005132
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005133 if (_average_jitter_buffer_delay_us == 0) {
5134 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
5135 return;
5136 }
5137
5138  // Filter the average delay with an exponential filter (alpha = 7/8).
5139  // The value is kept as 1000 * delay-in-ms (i.e. in microseconds) to
5140  // reduce the risk of rounding error; GetDelayEstimate() compensates
5141  // for the scaling later.
5142 _average_jitter_buffer_delay_us = (_average_jitter_buffer_delay_us * 7 +
5143 1000 * timestamp_diff_ms + 500) / 8;
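  // Illustration with hypothetical values: with a previous average of
  // 50000 us and timestamp_diff_ms == 70, the filter yields
  // (50000 * 7 + 1000 * 70 + 500) / 8 = 420500 / 8 = 52562 us, i.e. the
  // average moves roughly 1/8 of the way toward the new sample.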
niklase@google.com470e71d2011-07-07 08:21:25 +00005144}
5145
5146void
5147Channel::RegisterReceiveCodecsToRTPModule()
5148{
5149 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
5150 "Channel::RegisterReceiveCodecsToRTPModule()");
5151
5152
5153 CodecInst codec;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005154 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00005155
5156 for (int idx = 0; idx < nSupportedCodecs; idx++)
5157 {
5158 // Open up the RTP/RTCP receiver for all supported codecs
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005159 if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00005160 (_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
niklase@google.com470e71d2011-07-07 08:21:25 +00005161 {
5162 WEBRTC_TRACE(
5163 kTraceWarning,
5164 kTraceVoice,
5165 VoEId(_instanceId, _channelId),
5166 "Channel::RegisterReceiveCodecsToRTPModule() unable"
5167 " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
5168 codec.plname, codec.pltype, codec.plfreq,
5169 codec.channels, codec.rate);
5170 }
5171 else
5172 {
5173 WEBRTC_TRACE(
5174 kTraceInfo,
5175 kTraceVoice,
5176 VoEId(_instanceId, _channelId),
5177 "Channel::RegisterReceiveCodecsToRTPModule() %s "
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00005178 "(%d/%d/%d/%d) has been added to the RTP/RTCP "
niklase@google.com470e71d2011-07-07 08:21:25 +00005179 "receiver",
5180 codec.plname, codec.pltype, codec.plfreq,
5181 codec.channels, codec.rate);
5182 }
5183 }
5184}
5185
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005186int Channel::ApmProcessRx(AudioFrame& frame) {
5187 AudioProcessing* audioproc = _rxAudioProcessingModulePtr;
5188 // Register the (possibly new) frame parameters.
5189 if (audioproc->set_sample_rate_hz(frame.sample_rate_hz_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005190 LOG_FERR1(LS_WARNING, set_sample_rate_hz, frame.sample_rate_hz_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005191 }
5192 if (audioproc->set_num_channels(frame.num_channels_,
5193 frame.num_channels_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005194 LOG_FERR1(LS_WARNING, set_num_channels, frame.num_channels_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005195 }
5196 if (audioproc->ProcessStream(&frame) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005197 LOG_FERR0(LS_WARNING, ProcessStream);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005198 }
5199 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00005200}
5201
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005202int Channel::SetSecondarySendCodec(const CodecInst& codec,
5203 int red_payload_type) {
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00005204 // Sanity check for payload type.
5205 if (red_payload_type < 0 || red_payload_type > 127) {
5206 _engineStatisticsPtr->SetLastError(
5207 VE_PLTYPE_ERROR, kTraceError,
5208 "SetRedPayloadType() invalid RED payload type");
5209 return -1;
5210 }
5211
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005212 if (SetRedPayloadType(red_payload_type) < 0) {
5213 _engineStatisticsPtr->SetLastError(
5214 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5215 "SetSecondarySendCodec() Failed to register RED ACM");
5216 return -1;
5217 }
5218 if (_audioCodingModule.RegisterSecondarySendCodec(codec) < 0) {
5219 _engineStatisticsPtr->SetLastError(
5220 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5221 "SetSecondarySendCodec() Failed to register secondary send codec in "
5222 "ACM");
5223 return -1;
5224 }
5225
5226 return 0;
5227}
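// Illustrative usage only (the CodecInst values are hypothetical, not taken
// from this file): registering PCMU as the secondary (redundant) send codec
// with RED carried on payload type 127.
//
//   CodecInst secondary = {0, "PCMU", 8000, 160, 1, 64000};
//   if (channel->SetSecondarySendCodec(secondary, 127) != 0) {
//     // RED is unsupported or registration failed; continue without
//     // redundant encoding.
//   }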
5228
5229void Channel::RemoveSecondarySendCodec() {
5230 _audioCodingModule.UnregisterSecondarySendCodec();
5231}
5232
5233int Channel::GetSecondarySendCodec(CodecInst* codec) {
5234 if (_audioCodingModule.SecondarySendCodec(codec) < 0) {
5235 _engineStatisticsPtr->SetLastError(
5236 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5237 "GetSecondarySendCodec() Failed to get secondary sent codec from ACM");
5238 return -1;
5239 }
5240 return 0;
5241}
5242
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00005243// Assuming this method is called with valid payload type.
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005244int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005245 CodecInst codec;
5246 bool found_red = false;
5247
5248 // Get default RED settings from the ACM database
5249 const int num_codecs = AudioCodingModule::NumberOfCodecs();
5250 for (int idx = 0; idx < num_codecs; idx++) {
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005251 _audioCodingModule.Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005252 if (!STR_CASE_CMP(codec.plname, "RED")) {
5253 found_red = true;
5254 break;
5255 }
5256 }
5257
5258 if (!found_red) {
5259 _engineStatisticsPtr->SetLastError(
5260 VE_CODEC_ERROR, kTraceError,
5261 "SetRedPayloadType() RED is not supported");
5262 return -1;
5263 }
5264
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00005265 codec.pltype = red_payload_type;
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005266 if (_audioCodingModule.RegisterSendCodec(codec) < 0) {
5267 _engineStatisticsPtr->SetLastError(
5268 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5269 "SetRedPayloadType() RED registration in ACM module failed");
5270 return -1;
5271 }
5272
5273 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
5274 _engineStatisticsPtr->SetLastError(
5275 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
5276 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
5277 return -1;
5278 }
5279 return 0;
5280}
5281
niklase@google.com470e71d2011-07-07 08:21:25 +00005282} // namespace voe
niklase@google.com470e71d2011-07-07 08:21:25 +00005283} // namespace webrtc