/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/channel.h"

#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/utility/interface/audio_frame_operations.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/utility.h"

#if defined(_WIN32)
#include <Qos.h>
#endif

namespace webrtc {
namespace voe {

int32_t
Channel::SendData(FrameType frameType,
                  uint8_t payloadType,
                  uint32_t timeStamp,
                  const uint8_t* payloadData,
                  uint16_t payloadSize,
                  const RTPFragmentationHeader* fragmentation)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
                 " payloadSize=%u, fragmentation=0x%x)",
                 frameType, payloadType, timeStamp, payloadSize, fragmentation);

    if (_includeAudioLevelIndication)
    {
        assert(_rtpAudioProc.get() != NULL);
        // Store current audio level in the RTP/RTCP module.
        // The level will be used in combination with voice-activity state
        // (frameType) to add an RTP header extension
        _rtpRtcpModule->SetAudioLevel(_rtpAudioProc->level_estimator()->RMS());
    }

    // Push data from ACM to RTP/RTCP-module to deliver audio frame for
    // packetization.
    // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
    if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
                                         payloadType,
                                         timeStamp,
                                         // Leaving the time when this frame was
                                         // received from the capture device as
                                         // undefined for voice for now.
                                         -1,
                                         payloadData,
                                         payloadSize,
                                         fragmentation) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
            "Channel::SendData() failed to send data to RTP/RTCP module");
        return -1;
    }

    _lastLocalTimeStamp = timeStamp;
    _lastPayloadType = payloadType;

    return 0;
}

int32_t
Channel::InFrameType(int16_t frameType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::InFrameType(frameType=%d)", frameType);

    CriticalSectionScoped cs(&_callbackCritSect);
    // 1 indicates speech
    _sendFrameType = (frameType == 1) ? 1 : 0;
    return 0;
}

int32_t
Channel::OnRxVadDetected(int vadDecision)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);

    CriticalSectionScoped cs(&_callbackCritSect);
    if (_rxVadObserverPtr)
    {
        _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
    }

    return 0;
}

int
Channel::SendPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendPacket(channel=%d, len=%d)", channel, len);

    if (_transportPtr == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() failed to send RTP packet due to"
                     " invalid transport object");
        return -1;
    }

    // Insert an extra RTP packet if the user has called the
    // InsertExtraRTPPacket API.
    if (_insertExtraRTPPacket)
    {
        uint8_t* rtpHdr = (uint8_t*)data;
        uint8_t M_PT(0);
        if (_extraMarkerBit)
        {
            M_PT = 0x80;            // set the M-bit
        }
        M_PT += _extraPayloadType;  // set the payload type
        *(++rtpHdr) = M_PT;         // modify the M|PT-byte within the RTP header
        _insertExtraRTPPacket = false;  // insert one packet only
    }

    uint8_t* bufferToSendPtr = (uint8_t*)data;
    int32_t bufferLength = len;

    // Dump the RTP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() RTP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTPBufferPtr =
                    new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
                memset(_encryptionRTPBufferPtr, 0,
                       kVoiceEngineMaxIpPacketSizeBytes);
            }

            // Perform encryption (SRTP or external)
            int32_t encryptedBufferLength = 0;
            _encryptionPtr->encrypt(_channelId,
                                    bufferToSendPtr,
                                    _encryptionRTPBufferPtr,
                                    bufferLength,
                                    (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED,
                    kTraceError, "Channel::SendPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        int n = _transportPtr->SendPacket(channel,
                                          bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }
}

int
Channel::SendRTCPPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() failed to send RTCP packet"
                         " due to invalid transport object");
            return -1;
        }
    }

    uint8_t* bufferToSendPtr = (uint8_t*)data;
    int32_t bufferLength = len;

    // Dump the RTCP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() RTCP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTCPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTCPBufferPtr =
                    new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
            }

            // Perform encryption (SRTP or external).
            int32_t encryptedBufferLength = 0;
            _encryptionPtr->encrypt_rtcp(_channelId,
                                         bufferToSendPtr,
                                         _encryptionRTCPBufferPtr,
                                         bufferLength,
                                         (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED, kTraceError,
                    "Channel::SendRTCPPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTCPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);
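        // Descriptive note: the transport may have been cleared by another
        // thread while this thread was not holding the lock, so re-check it
        // here before sending.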
        if (_transportPtr == NULL)
        {
            return -1;
        }
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }

    return len;
}

void
Channel::OnPlayTelephoneEvent(int32_t id,
                              uint8_t event,
                              uint16_t lengthMs,
                              uint8_t volume)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
                 " volume=%u)", id, event, lengthMs, volume);

    if (!_playOutbandDtmfEvent || (event > 15))
    {
        // Ignore callback since feedback is disabled or event is not a
        // Dtmf tone event.
        return;
    }

    assert(_outputMixerPtr != NULL);

    // Start playing out the Dtmf tone (if playout is enabled).
    // Reduce the tone length by 80 ms to reduce the risk of echo.
    _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
}

void
Channel::OnIncomingSSRCChanged(int32_t id,
                               uint32_t SSRC)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
                 id, SSRC);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Reset RTP-module counters since a new incoming RTP stream is detected
    _rtpRtcpModule->ResetReceiveDataCountersRTP();
    _rtpRtcpModule->ResetStatisticsRTP();

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            // Send new SSRC to registered observer using callback
            _rtpObserverPtr->OnIncomingSSRCChanged(channel, SSRC);
        }
    }
}

void Channel::OnIncomingCSRCChanged(int32_t id,
                                    uint32_t CSRC,
                                    bool added)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
                 id, CSRC, added);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            _rtpObserverPtr->OnIncomingCSRCChanged(channel, CSRC, added);
        }
    }
}

void
Channel::OnApplicationDataReceived(int32_t id,
                                   uint8_t subType,
                                   uint32_t name,
                                   uint16_t length,
                                   const uint8_t* data)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
                 " name=%u, length=%u)",
                 id, subType, name, length);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtcpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtcpObserverPtr)
        {
            _rtcpObserverPtr->OnApplicationDataReceived(channel,
                                                        subType,
                                                        name,
                                                        data,
                                                        length);
        }
    }
}

int32_t
Channel::OnInitializeDecoder(
    int32_t id,
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    uint8_t channels,
    uint32_t rate)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
                 "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
                 id, payloadType, payloadName, frequency, channels, rate);

    assert(VoEChannelId(id) == _channelId);

    CodecInst receiveCodec = {0};
    CodecInst dummyCodec = {0};

    receiveCodec.pltype = payloadType;
    receiveCodec.plfreq = frequency;
    receiveCodec.channels = channels;
    receiveCodec.rate = rate;
    strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

    _audioCodingModule.Codec(payloadName, &dummyCodec, frequency, channels);
    receiveCodec.pacsize = dummyCodec.pacsize;

    // Register the new codec to the ACM
    if (_audioCodingModule.RegisterReceiveCodec(receiveCodec) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "Channel::OnInitializeDecoder() invalid codec ("
                     "pt=%d, name=%s) received - 1", payloadType, payloadName);
        _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
        return -1;
    }

    return 0;
}

void
Channel::OnPacketTimeout(int32_t id)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPacketTimeout(id=%d)", id);

    CriticalSectionScoped cs(_callbackCritSectPtr);
    if (_voiceEngineObserverPtr)
    {
        if (_receiving || _externalTransport)
        {
            int32_t channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Ensure that next OnReceivedPacket() callback will trigger
            // a VE_PACKET_RECEIPT_RESTARTED callback.
            _rtpPacketTimedOut = true;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnPacketTimeout() => "
                         "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
            _voiceEngineObserverPtr->CallbackOnError(channel,
                                                     VE_RECEIVE_PACKET_TIMEOUT);
        }
    }
}

void
Channel::OnReceivedPacket(int32_t id,
                          RtpRtcpPacketType packetType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPacket(id=%d, packetType=%d)",
                 id, packetType);

    assert(VoEChannelId(id) == _channelId);

    // Notify only for the case when we have restarted an RTP session.
    if (_rtpPacketTimedOut && (kPacketRtp == packetType))
    {
        CriticalSectionScoped cs(_callbackCritSectPtr);
        if (_voiceEngineObserverPtr)
        {
            int32_t channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Reset timeout mechanism
            _rtpPacketTimedOut = false;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnPacketTimeout() =>"
                         " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
            _voiceEngineObserverPtr->CallbackOnError(
                channel,
                VE_PACKET_RECEIPT_RESTARTED);
        }
    }
}

void
Channel::OnPeriodicDeadOrAlive(int32_t id,
                               RTPAliveType alive)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (!_connectionObserver)
            return;
    }

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Use Alive as default to limit risk of false Dead detections
    bool isAlive(true);

    // Always mark the connection as Dead when the module reports kRtpDead
    if (kRtpDead == alive)
    {
        isAlive = false;
    }

    // It is possible that the connection is alive even if no RTP packet has
    // been received for a long time since the other side might use VAD/DTX
    // and a low SID-packet update rate.
    if ((kRtpNoRtp == alive) && _playing)
    {
        // Detect Alive for all NetEQ states except for the case when we are
        // in PLC_CNG state.
        // PLC_CNG <=> background noise only due to long expand or error.
        // Note that the case where the other side stops sending during CNG
        // state will be detected as Alive. Dead is not set until RTCP packets
        // have been missing for at least twelve seconds (handled internally
        // by the RTP/RTCP module).
        isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
    }

    UpdateDeadOrAliveCounters(isAlive);

    // Send callback to the registered observer
    if (_connectionObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_connectionObserverPtr)
        {
            _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
        }
    }
}

int32_t
Channel::OnReceivedPayloadData(const uint8_t* payloadData,
                               uint16_t payloadSize,
                               const WebRtcRTPHeader* rtpHeader)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPayloadData(payloadSize=%d,"
                 " payloadType=%u, audioChannel=%u)",
                 payloadSize,
                 rtpHeader->header.payloadType,
                 rtpHeader->type.Audio.channel);

    _lastRemoteTimeStamp = rtpHeader->header.timestamp;

    if (!_playing)
    {
        // Avoid inserting into NetEQ when we are not playing. Count the
        // packet as discarded.
        WEBRTC_TRACE(kTraceStream, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "received packet is discarded since playing is not"
                     " activated");
        _numberOfDiscardedPackets++;
        return 0;
    }

    // Push the incoming payload (parsed and ready for decoding) into the ACM
    if (_audioCodingModule.IncomingPacket(payloadData,
                                          payloadSize,
                                          *rtpHeader) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
            "Channel::OnReceivedPayloadData() unable to push data to the ACM");
        return -1;
    }

    // Update the packet delay.
    UpdatePacketDelay(rtpHeader->header.timestamp,
                      rtpHeader->header.sequenceNumber);

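    // Descriptive note: when NACK is enabled, ask the ACM which sequence
    // numbers are still missing (given the current round-trip time) and
    // request retransmission of them.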
    if (kNackOff != _rtpRtcpModule->NACK()) {  // Is NACK on?
        uint16_t round_trip_time = 0;
        _rtpRtcpModule->RTT(_rtpRtcpModule->RemoteSSRC(), &round_trip_time,
                            NULL, NULL, NULL);

        std::vector<uint16_t> nack_list = _audioCodingModule.GetNackList(
            round_trip_time);
        if (!nack_list.empty()) {
            // Can't use nack_list.data() since it's not supported by all
            // compilers.
            ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
        }
    }
    return 0;
}

int32_t Channel::GetAudioFrame(int32_t id, AudioFrame& audioFrame)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetAudioFrame(id=%d)", id);

    // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
    if (_audioCodingModule.PlayoutData10Ms(audioFrame.sample_rate_hz_,
                                           &audioFrame) == -1)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
        // In all likelihood, the audio in this frame is garbage. We return an
        // error so that the audio mixer module doesn't add it to the mix. As
        // a result, it won't be played out and the actions skipped here are
        // irrelevant.
        return -1;
    }

    if (_RxVadDetection)
    {
        UpdateRxVadDetection(audioFrame);
    }

    // Convert module ID to internal VoE channel ID
    audioFrame.id_ = VoEChannelId(audioFrame.id_);
    // Store speech type for dead-or-alive detection
    _outputSpeechType = audioFrame.speech_type_;

    // Perform far-end AudioProcessing module processing on the received signal
    if (_rxApmIsEnabled)
    {
        ApmProcessRx(audioFrame);
    }

    // Output volume scaling
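    // (scaling is skipped when the gain is within roughly 1% of unity)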
    if (_outputGain < 0.99f || _outputGain > 1.01f)
    {
        AudioFrameOperations::ScaleWithSat(_outputGain, audioFrame);
    }

    // Scale left and/or right channel(s) if stereo and master balance is
    // active

    if (_panLeft != 1.0f || _panRight != 1.0f)
    {
        if (audioFrame.num_channels_ == 1)
        {
            // Emulate stereo mode since panning is active.
            // The mono signal is copied to both left and right channels here.
            AudioFrameOperations::MonoToStereo(&audioFrame);
        }
        // For true stereo mode (when we are receiving a stereo signal), no
        // action is needed.

        // Do the panning operation (the audio frame contains stereo at this
        // stage)
        AudioFrameOperations::Scale(_panLeft, _panRight, audioFrame);
    }

    // Mix decoded PCM output with file if file mixing is enabled
    if (_outputFilePlaying)
    {
        MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
    }

    // Place channel in on-hold state (~muted) if on-hold is activated
    if (_outputIsOnHold)
    {
        AudioFrameOperations::Mute(audioFrame);
    }

    // External media
    if (_outputExternalMedia)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        const bool isStereo = (audioFrame.num_channels_ == 2);
        if (_outputExternalMediaCallbackPtr)
        {
            _outputExternalMediaCallbackPtr->Process(
                _channelId,
                kPlaybackPerChannel,
                (int16_t*)audioFrame.data_,
                audioFrame.samples_per_channel_,
                audioFrame.sample_rate_hz_,
                isStereo);
        }
    }

    // Record playout if enabled
    {
        CriticalSectionScoped cs(&_fileCritSect);

        if (_outputFileRecording && _outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
        }
    }

    // Measure audio level (0-9)
    _outputAudioLevel.ComputeLevel(audioFrame);

    return 0;
}

int32_t
Channel::NeededFrequency(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::NeededFrequency(id=%d)", id);

    int highestNeeded = 0;

    // Determine highest needed receive frequency
    int32_t receiveFrequency = _audioCodingModule.ReceiveFrequency();

    // Return the bigger of playout and receive frequency in the ACM.
    if (_audioCodingModule.PlayoutFrequency() > receiveFrequency)
    {
        highestNeeded = _audioCodingModule.PlayoutFrequency();
    }
    else
    {
        highestNeeded = receiveFrequency;
    }

    // Special case, if we're playing a file on the playout side
    // we take that frequency into consideration as well
    // This is not needed on sending side, since the codec will
    // limit the spectrum anyway.
    if (_outputFilePlaying)
    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_outputFilePlayerPtr && _outputFilePlaying)
        {
            if (_outputFilePlayerPtr->Frequency() > highestNeeded)
            {
                highestNeeded = _outputFilePlayerPtr->Frequency();
            }
        }
    }

    return highestNeeded;
}

int32_t
Channel::CreateChannel(Channel*& channel,
                       int32_t channelId,
                       uint32_t instanceId)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
                 "Channel::CreateChannel(channelId=%d, instanceId=%d)",
                 channelId, instanceId);

    channel = new Channel(channelId, instanceId);
    if (channel == NULL)
    {
        WEBRTC_TRACE(kTraceMemory, kTraceVoice,
                     VoEId(instanceId,channelId),
                     "Channel::CreateChannel() unable to allocate memory for"
                     " channel");
        return -1;
    }
    return 0;
}

void
Channel::PlayNotification(int32_t id, uint32_t durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet
}

void
Channel::RecordNotification(int32_t id, uint32_t durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet
}

void
Channel::PlayFileEnded(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayFileEnded(id=%d)", id);

    if (id == _inputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _inputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => input file player module is"
                     " shutdown");
    }
    else if (id == _outputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _outputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => output file player module is"
                     " shutdown");
    }
}

void
Channel::RecordFileEnded(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded(id=%d)", id);

    assert(id == _outputFileRecorderId);

    CriticalSectionScoped cs(&_fileCritSect);

    _outputFileRecording = false;
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                 VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded() => output file recorder module is"
                 " shutdown");
}

Channel::Channel(int32_t channelId,
                 uint32_t instanceId) :
    _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _instanceId(instanceId),
    _channelId(channelId),
    rtp_header_parser_(RtpHeaderParser::Create()),
    _audioCodingModule(*AudioCodingModule::Create(
        VoEModuleId(instanceId, channelId))),
    _rtpDumpIn(*RtpDump::CreateRtpDump()),
    _rtpDumpOut(*RtpDump::CreateRtpDump()),
    _outputAudioLevel(),
    _externalTransport(false),
    _inputFilePlayerPtr(NULL),
    _outputFilePlayerPtr(NULL),
    _outputFileRecorderPtr(NULL),
    // Avoid conflicts with other channels by adding 1024 - 1026;
    // we will never use as many as 1024 channels.
    _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
    _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
    _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
    _inputFilePlaying(false),
    _outputFilePlaying(false),
    _outputFileRecording(false),
    _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
    _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
    _inputExternalMedia(false),
    _outputExternalMedia(false),
    _inputExternalMediaCallbackPtr(NULL),
    _outputExternalMediaCallbackPtr(NULL),
    _encryptionRTPBufferPtr(NULL),
    _decryptionRTPBufferPtr(NULL),
    _encryptionRTCPBufferPtr(NULL),
    _decryptionRTCPBufferPtr(NULL),
    _timeStamp(0),  // This is just an offset; the RTP module will add its own
                    // random offset.
    _sendTelephoneEventPayloadType(106),
    playout_timestamp_rtp_(0),
    playout_timestamp_rtcp_(0),
    _numberOfDiscardedPackets(0),
    send_sequence_number_(0),
    _engineStatisticsPtr(NULL),
    _outputMixerPtr(NULL),
    _transmitMixerPtr(NULL),
    _moduleProcessThreadPtr(NULL),
    _audioDeviceModulePtr(NULL),
    _voiceEngineObserverPtr(NULL),
    _callbackCritSectPtr(NULL),
    _transportPtr(NULL),
    _encryptionPtr(NULL),
    _rtpAudioProc(NULL),
    _rxAudioProcessingModulePtr(NULL),
    _rxVadObserverPtr(NULL),
    _oldVadDecision(-1),
    _sendFrameType(0),
    _rtpObserverPtr(NULL),
    _rtcpObserverPtr(NULL),
    _outputIsOnHold(false),
    _externalPlayout(false),
    _externalMixing(false),
    _inputIsOnHold(false),
    _playing(false),
    _sending(false),
    _receiving(false),
    _mixFileWithMicrophone(false),
    _rtpObserver(false),
    _rtcpObserver(false),
    _mute(false),
    _panLeft(1.0f),
    _panRight(1.0f),
    _outputGain(1.0f),
    _encrypting(false),
    _decrypting(false),
    _playOutbandDtmfEvent(false),
    _playInbandDtmfEvent(false),
    _extraPayloadType(0),
    _insertExtraRTPPacket(false),
    _extraMarkerBit(false),
    _lastLocalTimeStamp(0),
    _lastRemoteTimeStamp(0),
    _lastPayloadType(0),
    _includeAudioLevelIndication(false),
    _rtpPacketTimedOut(false),
    _rtpPacketTimeOutIsEnabled(false),
    _rtpTimeOutSeconds(0),
    _connectionObserver(false),
    _connectionObserverPtr(NULL),
    _countAliveDetections(0),
    _countDeadDetections(0),
    _outputSpeechType(AudioFrame::kNormalSpeech),
    _average_jitter_buffer_delay_us(0),
    least_required_delay_ms_(0),
    _previousTimestamp(0),
    _recPacketDelayMs(20),
    _RxVadDetection(false),
    _rxApmIsEnabled(false),
    _rxAgcIsEnabled(false),
    _rxNsIsEnabled(false)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Channel() - ctor");
    _inbandDtmfQueue.ResetDtmf();
    _inbandDtmfGenerator.Init();
    _outputAudioLevel.Clear();

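    // Descriptive note: the configuration below registers this Channel with
    // the RTP/RTCP module as its incoming data/message handler, outgoing
    // transport, RTCP feedback sink and audio (DTMF/level) callback.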
    RtpRtcp::Configuration configuration;
    configuration.id = VoEModuleId(instanceId, channelId);
    configuration.audio = true;
    configuration.incoming_data = this;
    configuration.incoming_messages = this;
    configuration.outgoing_transport = this;
    configuration.rtcp_feedback = this;
    configuration.audio_messages = this;

    _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));

    // Create far end AudioProcessing Module
    _rxAudioProcessingModulePtr = AudioProcessing::Create(
        VoEModuleId(instanceId, channelId));
}

Channel::~Channel()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::~Channel() - dtor");

    if (_outputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
    }
    if (_inputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kRecordingPerChannel);
    }
    StopSend();
    StopPlayout();

    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_inputFilePlayerPtr)
        {
            _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _inputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
            _inputFilePlayerPtr = NULL;
        }
        if (_outputFilePlayerPtr)
        {
            _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _outputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
            _outputFilePlayerPtr = NULL;
        }
        if (_outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
            _outputFileRecorderPtr->StopRecording();
            FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
            _outputFileRecorderPtr = NULL;
        }
    }

    // The order to safely shut down modules in a channel is:
    // 1. De-register callbacks in modules
    // 2. De-register modules in process thread
    // 3. Destroy modules
    if (_audioCodingModule.RegisterTransportCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register transport callback"
                     " (Audio coding module)");
    }
    if (_audioCodingModule.RegisterVADCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register VAD callback"
                     " (Audio coding module)");
    }
    // De-register modules in process thread
    if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to deregister RTP/RTCP module");
    }

    // Destroy modules
    AudioCodingModule::Destroy(&_audioCodingModule);
    if (_rxAudioProcessingModulePtr != NULL)
    {
        AudioProcessing::Destroy(_rxAudioProcessingModulePtr);  // far end APM
        _rxAudioProcessingModulePtr = NULL;
    }

    // End of modules shutdown

    // Delete other objects
    RtpDump::DestroyRtpDump(&_rtpDumpIn);
    RtpDump::DestroyRtpDump(&_rtpDumpOut);
    delete [] _encryptionRTPBufferPtr;
    delete [] _decryptionRTPBufferPtr;
    delete [] _encryptionRTCPBufferPtr;
    delete [] _decryptionRTCPBufferPtr;
    delete &_callbackCritSect;
    delete &_fileCritSect;
}

int32_t
Channel::Init()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Init()");

    // --- Initial sanity

    if ((_engineStatisticsPtr == NULL) ||
        (_moduleProcessThreadPtr == NULL))
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::Init() must call SetEngineInformation() first");
        return -1;
    }

    // --- Add modules to process thread (for periodic scheduling)

    const bool processThreadFail =
        ((_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0) ||
        false);
    if (processThreadFail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() modules not registered");
        return -1;
    }
    // --- ACM initialization

    if ((_audioCodingModule.InitializeReceiver() == -1) ||
#ifdef WEBRTC_CODEC_AVT
        // out-of-band Dtmf tones are played out by default
        (_audioCodingModule.SetDtmfPlayoutStatus(true) == -1) ||
#endif
        (_audioCodingModule.InitializeSender() == -1))
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "Channel::Init() unable to initialize the ACM - 1");
        return -1;
    }

    // --- RTP/RTCP module initialization

    // Ensure that RTCP is enabled by default for the created channel.
    // Note that the module will keep generating RTCP until it is explicitly
    // disabled by the user.
    // After StopListen (when no sockets exist), RTCP packets will no longer
    // be transmitted since the Transport object will then be invalid.

    const bool rtpRtcpFail =
        ((_rtpRtcpModule->SetTelephoneEventForwardToDecoder(true) == -1) ||
        // RTCP is enabled by default
        (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1));
    if (rtpRtcpFail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
            "Channel::Init() RTP/RTCP module not initialized");
        return -1;
    }

    // --- Register all permanent callbacks
    const bool fail =
        (_audioCodingModule.RegisterTransportCallback(this) == -1) ||
        (_audioCodingModule.RegisterVADCallback(this) == -1);

    if (fail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() callbacks not registered");
        return -1;
    }

    // --- Register all supported codecs to the receiving side of the
    //     RTP/RTCP module

    CodecInst codec;
    const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

    for (int idx = 0; idx < nSupportedCodecs; idx++)
    {
        // Open up the RTP/RTCP receiver for all supported codecs
        if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
            (_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() unable to register %s (%d/%d/%d/%d) "
                         "to RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }
        else
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() %s (%d/%d/%d/%d) has been added to "
                         "the RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }

        // Ensure that PCMU is used as default codec on the sending side
        if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
        {
            SetSendCodec(codec);
        }

        // Register default PT for outband 'telephone-event'
        if (!STR_CASE_CMP(codec.plname, "telephone-event"))
        {
            if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register outband "
                             "'telephone-event' (%d/%d) correctly",
                             codec.pltype, codec.plfreq);
            }
        }

        if (!STR_CASE_CMP(codec.plname, "CN"))
        {
            if ((_audioCodingModule.RegisterSendCodec(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1) ||
                (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register CN (%d/%d) "
                             "correctly - 1",
                             codec.pltype, codec.plfreq);
            }
        }
#ifdef WEBRTC_CODEC_RED
        // Register RED to the receiving side of the ACM.
        // We will not receive an OnInitializeDecoder() callback for RED.
        if (!STR_CASE_CMP(codec.plname, "RED"))
        {
            if (_audioCodingModule.RegisterReceiveCodec(codec) == -1)
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register RED (%d/%d) "
                             "correctly",
                             codec.pltype, codec.plfreq);
            }
        }
#endif
    }

    // Initialize the far end AP module
    // Using 8 kHz as initial Fs, the same as in transmission. Might be
    // changed at the first receiving audio.
    if (_rxAudioProcessingModulePtr == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_NO_MEMORY, kTraceCritical,
            "Channel::Init() failed to create the far-end AudioProcessing"
            " module");
        return -1;
    }

    if (_rxAudioProcessingModulePtr->set_sample_rate_hz(8000))
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the sample rate to 8K for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->set_num_channels(1, 1) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_SOUNDCARD_ERROR, kTraceWarning,
            "Init() failed to set channels for the primary audio stream");
    }

    if (_rxAudioProcessingModulePtr->high_pass_filter()->Enable(
        WEBRTC_VOICE_ENGINE_RX_HP_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the high-pass filter for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(
        (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction level for far-end"
            " AP module");
    }
    if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(
        WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction state for far-end"
            " AP module");
    }

    if (_rxAudioProcessingModulePtr->gain_control()->set_mode(
        (GainControl::Mode)WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC mode for far-end AP module");
    }
    if (_rxAudioProcessingModulePtr->gain_control()->Enable(
        WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC state for far-end AP module");
    }

    return 0;
}

int32_t
Channel::SetEngineInformation(Statistics& engineStatistics,
                              OutputMixer& outputMixer,
                              voe::TransmitMixer& transmitMixer,
                              ProcessThread& moduleProcessThread,
                              AudioDeviceModule& audioDeviceModule,
                              VoiceEngineObserver* voiceEngineObserver,
                              CriticalSectionWrapper* callbackCritSect)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetEngineInformation()");
    _engineStatisticsPtr = &engineStatistics;
    _outputMixerPtr = &outputMixer;
    _transmitMixerPtr = &transmitMixer;
    _moduleProcessThreadPtr = &moduleProcessThread;
    _audioDeviceModulePtr = &audioDeviceModule;
    _voiceEngineObserverPtr = voiceEngineObserver;
    _callbackCritSectPtr = callbackCritSect;
    return 0;
}

int32_t
Channel::UpdateLocalTimeStamp()
{
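    // Descriptive note: advance the local RTP timestamp by one frame's worth
    // of samples; the RTP module adds its own random offset on top of this
    // (see the _timeStamp initializer in the constructor).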
    _timeStamp += _audioFrame.samples_per_channel_;
    return 0;
}

int32_t
Channel::StartPlayout()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartPlayout()");
    if (_playing)
    {
        return 0;
    }

    if (!_externalMixing) {
        // Add participant as a candidate for mixing.
        if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
                "StartPlayout() failed to add participant to mixer");
            return -1;
        }
    }

    _playing = true;

    if (RegisterFilePlayingToMixer() != 0)
        return -1;

    return 0;
}

pbos@webrtc.org6141e132013-04-09 10:09:10 +00001372int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001373Channel::StopPlayout()
1374{
1375 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1376 "Channel::StopPlayout()");
1377 if (!_playing)
1378 {
1379 return 0;
1380 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00001381
1382 if (!_externalMixing) {
1383 // Remove participant as candidates for mixing
1384 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
1385 {
1386 _engineStatisticsPtr->SetLastError(
1387 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1388 "StopPlayout() failed to remove participant from mixer");
1389 return -1;
1390 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001391 }
1392
1393 _playing = false;
1394 _outputAudioLevel.Clear();
1395
1396 return 0;
1397}
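
// Usage sketch for the playout pair above (hypothetical caller code; assumes
// an initialized channel and output mixer):
//
//   if (channel->StartPlayout() == 0)   // adds the channel to the mixer
//   {
//       // ... decoded audio is rendered ...
//       channel->StopPlayout();         // removes it and clears output levels
//   }
//
// When |_externalMixing| is set, the mixer registration is skipped and the
// application is assumed to mix this channel's audio itself.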
1398
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001399int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001400Channel::StartSend()
1401{
1402 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1403 "Channel::StartSend()");
xians@webrtc.org09e8c472013-07-31 16:30:19 +00001404 // Resume the previous sequence number which was reset by StopSend().
1405 // This needs to be done before |_sending| is set to true.
1406 if (send_sequence_number_)
1407 SetInitSequenceNumber(send_sequence_number_);
1408
niklase@google.com470e71d2011-07-07 08:21:25 +00001409 {
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001410 // A lock is needed because |_sending| can be accessed or modified by
1411 // another thread at the same time.
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001412 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001413
1414 if (_sending)
1415 {
1416 return 0;
1417 }
1418 _sending = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00001419 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001420
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001421 if (_rtpRtcpModule->SetSendingStatus(true) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001422 {
1423 _engineStatisticsPtr->SetLastError(
1424 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1425 "StartSend() RTP/RTCP failed to start sending");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001426 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001427 _sending = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00001428 return -1;
1429 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001430
niklase@google.com470e71d2011-07-07 08:21:25 +00001431 return 0;
1432}
1433
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001434int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001435Channel::StopSend()
1436{
1437 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1438 "Channel::StopSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001439 {
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001440 // A lock is needed because |_sending| can be accessed or modified by
1441 // another thread at the same time.
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001442 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001443
1444 if (!_sending)
1445 {
1446 return 0;
1447 }
1448 _sending = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00001449 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001450
xians@webrtc.org09e8c472013-07-31 16:30:19 +00001451 // Store the sequence number to be able to pick up the same sequence for
1452    // the next StartSend(). This is needed when restarting the device;
1453    // otherwise libSRTP might complain about packets being replayed.
1454 // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1455 // CL is landed. See issue
1456 // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1457 send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1458
niklase@google.com470e71d2011-07-07 08:21:25 +00001459    // Reset the sending SSRC and sequence number and trigger direct transmission
1460 // of RTCP BYE
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001461 if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
1462 _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001463 {
1464 _engineStatisticsPtr->SetLastError(
1465 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1466            "StopSend() RTP/RTCP failed to stop sending");
1467 }
1468
niklase@google.com470e71d2011-07-07 08:21:25 +00001469 return 0;
1470}
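
// Sketch of the stop/restart pattern that the sequence-number bookkeeping in
// StartSend()/StopSend() above supports (hypothetical caller code):
//
//   channel->StopSend();    // stores _rtpRtcpModule->SequenceNumber()
//   // ... e.g. switch the audio device ...
//   channel->StartSend();   // resumes from |send_sequence_number_| so that
//                           // libSRTP does not treat packets as replayed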
1471
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001472int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001473Channel::StartReceiving()
1474{
1475 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1476 "Channel::StartReceiving()");
1477 if (_receiving)
1478 {
1479 return 0;
1480 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001481 _receiving = true;
1482 _numberOfDiscardedPackets = 0;
1483 return 0;
1484}
1485
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001486int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001487Channel::StopReceiving()
1488{
1489 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1490 "Channel::StopReceiving()");
1491 if (!_receiving)
1492 {
1493 return 0;
1494 }
pwestin@webrtc.org684f0572013-03-13 23:20:57 +00001495
henrika@webrtc.orgaf71f0e2011-12-05 07:02:22 +00001496 // Recover DTMF detection status.
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00001497 int32_t ret = _rtpRtcpModule->SetTelephoneEventForwardToDecoder(true);
1498 if (ret != 0) {
1499 _engineStatisticsPtr->SetLastError(
1500 VE_INVALID_OPERATION, kTraceWarning,
1501 "StopReceiving() failed to restore telephone-event status.");
1502 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001503 RegisterReceiveCodecsToRTPModule();
1504 _receiving = false;
1505 return 0;
1506}
1507
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001508int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001509Channel::SetNetEQPlayoutMode(NetEqModes mode)
1510{
1511 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1512 "Channel::SetNetEQPlayoutMode()");
1513 AudioPlayoutMode playoutMode(voice);
1514 switch (mode)
1515 {
1516 case kNetEqDefault:
1517 playoutMode = voice;
1518 break;
1519 case kNetEqStreaming:
1520 playoutMode = streaming;
1521 break;
1522 case kNetEqFax:
1523 playoutMode = fax;
1524 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001525 case kNetEqOff:
1526 playoutMode = off;
1527 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00001528 }
1529 if (_audioCodingModule.SetPlayoutMode(playoutMode) != 0)
1530 {
1531 _engineStatisticsPtr->SetLastError(
1532 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1533 "SetNetEQPlayoutMode() failed to set playout mode");
1534 return -1;
1535 }
1536 return 0;
1537}
1538
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001539int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001540Channel::GetNetEQPlayoutMode(NetEqModes& mode)
1541{
1542 const AudioPlayoutMode playoutMode = _audioCodingModule.PlayoutMode();
1543 switch (playoutMode)
1544 {
1545 case voice:
1546 mode = kNetEqDefault;
1547 break;
1548 case streaming:
1549 mode = kNetEqStreaming;
1550 break;
1551 case fax:
1552 mode = kNetEqFax;
1553 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001554 case off:
1555 mode = kNetEqOff;
niklase@google.com470e71d2011-07-07 08:21:25 +00001556 }
1557 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
1558 VoEId(_instanceId,_channelId),
1559 "Channel::GetNetEQPlayoutMode() => mode=%u", mode);
1560 return 0;
1561}
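
// Mapping used by the two NetEQ playout-mode functions above (reference):
//
//   kNetEqDefault   <-> voice
//   kNetEqStreaming <-> streaming
//   kNetEqFax       <-> fax
//   kNetEqOff       <-> off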
1562
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001563int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001564Channel::SetOnHoldStatus(bool enable, OnHoldModes mode)
1565{
1566 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1567 "Channel::SetOnHoldStatus()");
1568 if (mode == kHoldSendAndPlay)
1569 {
1570 _outputIsOnHold = enable;
1571 _inputIsOnHold = enable;
1572 }
1573 else if (mode == kHoldPlayOnly)
1574 {
1575 _outputIsOnHold = enable;
1576 }
1577    else if (mode == kHoldSendOnly)
1578 {
1579 _inputIsOnHold = enable;
1580 }
1581 return 0;
1582}
1583
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001584int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001585Channel::GetOnHoldStatus(bool& enabled, OnHoldModes& mode)
1586{
1587 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1588 "Channel::GetOnHoldStatus()");
1589 enabled = (_outputIsOnHold || _inputIsOnHold);
1590 if (_outputIsOnHold && _inputIsOnHold)
1591 {
1592 mode = kHoldSendAndPlay;
1593 }
1594 else if (_outputIsOnHold && !_inputIsOnHold)
1595 {
1596 mode = kHoldPlayOnly;
1597 }
1598 else if (!_outputIsOnHold && _inputIsOnHold)
1599 {
1600 mode = kHoldSendOnly;
1601 }
1602 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1603 "Channel::GetOnHoldStatus() => enabled=%d, mode=%d",
1604 enabled, mode);
1605 return 0;
1606}
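
// Summary of the hold-flag mapping implemented above (reference only):
//
//   _outputIsOnHold  _inputIsOnHold  reported mode
//   true             true            kHoldSendAndPlay
//   true             false           kHoldPlayOnly
//   false            true            kHoldSendOnly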
1607
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001608int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001609Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
1610{
1611 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1612 "Channel::RegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001613 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001614
1615 if (_voiceEngineObserverPtr)
1616 {
1617 _engineStatisticsPtr->SetLastError(
1618 VE_INVALID_OPERATION, kTraceError,
1619 "RegisterVoiceEngineObserver() observer already enabled");
1620 return -1;
1621 }
1622 _voiceEngineObserverPtr = &observer;
1623 return 0;
1624}
1625
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001626int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001627Channel::DeRegisterVoiceEngineObserver()
1628{
1629 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1630 "Channel::DeRegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001631 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001632
1633 if (!_voiceEngineObserverPtr)
1634 {
1635 _engineStatisticsPtr->SetLastError(
1636 VE_INVALID_OPERATION, kTraceWarning,
1637 "DeRegisterVoiceEngineObserver() observer already disabled");
1638 return 0;
1639 }
1640 _voiceEngineObserverPtr = NULL;
1641 return 0;
1642}
1643
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001644int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001645Channel::GetSendCodec(CodecInst& codec)
1646{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001647 return (_audioCodingModule.SendCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001648}
1649
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001650int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001651Channel::GetRecCodec(CodecInst& codec)
1652{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001653 return (_audioCodingModule.ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001654}
1655
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001656int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001657Channel::SetSendCodec(const CodecInst& codec)
1658{
1659 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1660 "Channel::SetSendCodec()");
1661
1662 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1663 {
1664 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1665 "SetSendCodec() failed to register codec to ACM");
1666 return -1;
1667 }
1668
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001669 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001670 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001671 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1672 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001673 {
1674 WEBRTC_TRACE(
1675 kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1676 "SetSendCodec() failed to register codec to"
1677 " RTP/RTCP module");
1678 return -1;
1679 }
1680 }
1681
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001682 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001683 {
1684 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1685 "SetSendCodec() failed to set audio packet size");
1686 return -1;
1687 }
1688
1689 return 0;
1690}
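
// Hypothetical usage sketch for SetSendCodec() above. The CodecInst values
// are examples only and would normally come from the ACM codec database:
//
//   CodecInst codec = {103, "ISAC", 16000, 480, 1, 32000};  // example values
//   if (channel->SetSendCodec(codec) != 0)
//   {
//       // rejected by the ACM or by the RTP/RTCP module
//   }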
1691
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001692int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001693Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
1694{
1695 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1696 "Channel::SetVADStatus(mode=%d)", mode);
1697 // To disable VAD, DTX must be disabled too
1698 disableDTX = ((enableVAD == false) ? true : disableDTX);
1699 if (_audioCodingModule.SetVAD(!disableDTX, enableVAD, mode) != 0)
1700 {
1701 _engineStatisticsPtr->SetLastError(
1702 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1703 "SetVADStatus() failed to set VAD");
1704 return -1;
1705 }
1706 return 0;
1707}
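
// Note on the VAD/DTX coupling enforced above (hypothetical call sequence,
// assuming the ACMVADMode value VADNormal):
//
//   channel->SetVADStatus(true,  VADNormal, false);  // VAD and DTX enabled
//   channel->SetVADStatus(false, VADNormal, false);  // VAD off forces DTX off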
1708
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001709int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001710Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
1711{
1712 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1713 "Channel::GetVADStatus");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001714 if (_audioCodingModule.VAD(&disabledDTX, &enabledVAD, &mode) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001715 {
1716 _engineStatisticsPtr->SetLastError(
1717 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1718 "GetVADStatus() failed to get VAD status");
1719 return -1;
1720 }
1721 disabledDTX = !disabledDTX;
1722 return 0;
1723}
1724
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001725int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001726Channel::SetRecPayloadType(const CodecInst& codec)
1727{
1728 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1729 "Channel::SetRecPayloadType()");
1730
1731 if (_playing)
1732 {
1733 _engineStatisticsPtr->SetLastError(
1734 VE_ALREADY_PLAYING, kTraceError,
1735 "SetRecPayloadType() unable to set PT while playing");
1736 return -1;
1737 }
1738 if (_receiving)
1739 {
1740 _engineStatisticsPtr->SetLastError(
1741 VE_ALREADY_LISTENING, kTraceError,
1742 "SetRecPayloadType() unable to set PT while listening");
1743 return -1;
1744 }
1745
1746 if (codec.pltype == -1)
1747 {
1748 // De-register the selected codec (RTP/RTCP module and ACM)
1749
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001750 int8_t pltype(-1);
niklase@google.com470e71d2011-07-07 08:21:25 +00001751 CodecInst rxCodec = codec;
1752
1753 // Get payload type for the given codec
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00001754 _rtpRtcpModule->ReceivePayloadType(rxCodec, &pltype);
niklase@google.com470e71d2011-07-07 08:21:25 +00001755 rxCodec.pltype = pltype;
1756
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00001757 if (_rtpRtcpModule->DeRegisterReceivePayload(pltype) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001758 {
1759 _engineStatisticsPtr->SetLastError(
1760 VE_RTP_RTCP_MODULE_ERROR,
1761 kTraceError,
1762 "SetRecPayloadType() RTP/RTCP-module deregistration "
1763 "failed");
1764 return -1;
1765 }
1766 if (_audioCodingModule.UnregisterReceiveCodec(rxCodec.pltype) != 0)
1767 {
1768 _engineStatisticsPtr->SetLastError(
1769 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1770 "SetRecPayloadType() ACM deregistration failed - 1");
1771 return -1;
1772 }
1773 return 0;
1774 }
1775
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00001776 if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001777 {
1778 // First attempt to register failed => de-register and try again
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00001779 _rtpRtcpModule->DeRegisterReceivePayload(codec.pltype);
1780 if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001781 {
1782 _engineStatisticsPtr->SetLastError(
1783 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1784 "SetRecPayloadType() RTP/RTCP-module registration failed");
1785 return -1;
1786 }
1787 }
1788 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1789 {
1790 _audioCodingModule.UnregisterReceiveCodec(codec.pltype);
1791 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1792 {
1793 _engineStatisticsPtr->SetLastError(
1794 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1795 "SetRecPayloadType() ACM registration failed - 1");
1796 return -1;
1797 }
1798 }
1799 return 0;
1800}
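
// Sketch of the two paths handled by SetRecPayloadType() above (hypothetical
// CodecInst values, for illustration only):
//
//   CodecInst inst = {96, "L16", 16000, 320, 1, 256000};
//   channel->SetRecPayloadType(inst);   // registers PT 96 for reception
//   inst.pltype = -1;
//   channel->SetRecPayloadType(inst);   // de-registers the codec again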
1801
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001802int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001803Channel::GetRecPayloadType(CodecInst& codec)
1804{
1805 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1806 "Channel::GetRecPayloadType()");
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001807 int8_t payloadType(-1);
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00001808 if (_rtpRtcpModule->ReceivePayloadType(codec, &payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001809 {
1810 _engineStatisticsPtr->SetLastError(
henrika@webrtc.org37198002012-06-18 11:00:12 +00001811 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
niklase@google.com470e71d2011-07-07 08:21:25 +00001812 "GetRecPayloadType() failed to retrieve RX payload type");
1813 return -1;
1814 }
1815 codec.pltype = payloadType;
1816 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1817 "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
1818 return 0;
1819}
1820
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001821int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001822Channel::SetAMREncFormat(AmrMode mode)
1823{
1824 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1825 "Channel::SetAMREncFormat()");
1826
1827 // ACM doesn't support AMR
1828 return -1;
1829}
1830
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001831int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001832Channel::SetAMRDecFormat(AmrMode mode)
1833{
1834 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1835 "Channel::SetAMRDecFormat()");
1836
1837 // ACM doesn't support AMR
1838 return -1;
1839}
1840
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001841int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001842Channel::SetAMRWbEncFormat(AmrMode mode)
1843{
1844 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1845 "Channel::SetAMRWbEncFormat()");
1846
1847 // ACM doesn't support AMR
1848 return -1;
1849
1850}
1851
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001852int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001853Channel::SetAMRWbDecFormat(AmrMode mode)
1854{
1855 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1856 "Channel::SetAMRWbDecFormat()");
1857
1858 // ACM doesn't support AMR
1859 return -1;
1860}
1861
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001862int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001863Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
1864{
1865 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1866 "Channel::SetSendCNPayloadType()");
1867
1868 CodecInst codec;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001869 int32_t samplingFreqHz(-1);
tina.legrand@webrtc.org45175852012-06-01 09:27:35 +00001870 const int kMono = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001871 if (frequency == kFreq32000Hz)
1872 samplingFreqHz = 32000;
1873 else if (frequency == kFreq16000Hz)
1874 samplingFreqHz = 16000;
1875
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001876 if (_audioCodingModule.Codec("CN", &codec, samplingFreqHz, kMono) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001877 {
1878 _engineStatisticsPtr->SetLastError(
1879 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1880 "SetSendCNPayloadType() failed to retrieve default CN codec "
1881 "settings");
1882 return -1;
1883 }
1884
1885 // Modify the payload type (must be set to dynamic range)
1886 codec.pltype = type;
1887
1888 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1889 {
1890 _engineStatisticsPtr->SetLastError(
1891 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1892 "SetSendCNPayloadType() failed to register CN to ACM");
1893 return -1;
1894 }
1895
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001896 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001897 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001898 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1899 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001900 {
1901 _engineStatisticsPtr->SetLastError(
1902 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1903 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1904 "module");
1905 return -1;
1906 }
1907 }
1908 return 0;
1909}
1910
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001911int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001912Channel::SetISACInitTargetRate(int rateBps, bool useFixedFrameSize)
1913{
1914 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1915 "Channel::SetISACInitTargetRate()");
1916
1917 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001918 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001919 {
1920 _engineStatisticsPtr->SetLastError(
1921 VE_CODEC_ERROR, kTraceError,
1922 "SetISACInitTargetRate() failed to retrieve send codec");
1923 return -1;
1924 }
1925 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1926 {
1927 // This API is only valid if iSAC is setup to run in channel-adaptive
1928        // This API is only valid if iSAC is set up to run in channel-adaptive
1929 // We do not validate the adaptive mode here. It is done later in the
1930 // ConfigISACBandwidthEstimator() API.
1931 _engineStatisticsPtr->SetLastError(
1932 VE_CODEC_ERROR, kTraceError,
1933 "SetISACInitTargetRate() send codec is not iSAC");
1934 return -1;
1935 }
1936
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001937 uint8_t initFrameSizeMsec(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00001938 if (16000 == sendCodec.plfreq)
1939 {
1940        // Note that 0 is valid and corresponds to "use default".
1941 if ((rateBps != 0 &&
1942 rateBps < kVoiceEngineMinIsacInitTargetRateBpsWb) ||
1943 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsWb))
1944 {
1945 _engineStatisticsPtr->SetLastError(
1946 VE_INVALID_ARGUMENT, kTraceError,
1947 "SetISACInitTargetRate() invalid target rate - 1");
1948 return -1;
1949 }
1950 // 30 or 60ms
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001951 initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 16);
niklase@google.com470e71d2011-07-07 08:21:25 +00001952 }
1953 else if (32000 == sendCodec.plfreq)
1954 {
1955 if ((rateBps != 0 &&
1956 rateBps < kVoiceEngineMinIsacInitTargetRateBpsSwb) ||
1957 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsSwb))
1958 {
1959 _engineStatisticsPtr->SetLastError(
1960 VE_INVALID_ARGUMENT, kTraceError,
1961 "SetISACInitTargetRate() invalid target rate - 2");
1962 return -1;
1963 }
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001964 initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 32); // 30ms
niklase@google.com470e71d2011-07-07 08:21:25 +00001965 }
1966
1967 if (_audioCodingModule.ConfigISACBandwidthEstimator(
1968 initFrameSizeMsec, rateBps, useFixedFrameSize) == -1)
1969 {
1970 _engineStatisticsPtr->SetLastError(
1971 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1972 "SetISACInitTargetRate() iSAC BWE config failed");
1973 return -1;
1974 }
1975
1976 return 0;
1977}
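
// The initial frame size passed to the bandwidth estimator above is derived
// from the packet size in samples, e.g.:
//
//   pacsize 480 at 16 kHz -> 480 / 16 = 30 ms
//   pacsize 960 at 16 kHz -> 960 / 16 = 60 ms
//   pacsize 960 at 32 kHz -> 960 / 32 = 30 ms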
1978
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001979int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001980Channel::SetISACMaxRate(int rateBps)
1981{
1982 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1983 "Channel::SetISACMaxRate()");
1984
1985 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001986 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001987 {
1988 _engineStatisticsPtr->SetLastError(
1989 VE_CODEC_ERROR, kTraceError,
1990 "SetISACMaxRate() failed to retrieve send codec");
1991 return -1;
1992 }
1993 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1994 {
1995 // This API is only valid if iSAC is selected as sending codec.
1996 _engineStatisticsPtr->SetLastError(
1997 VE_CODEC_ERROR, kTraceError,
1998 "SetISACMaxRate() send codec is not iSAC");
1999 return -1;
2000 }
2001 if (16000 == sendCodec.plfreq)
2002 {
2003 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsWb) ||
2004 (rateBps > kVoiceEngineMaxIsacMaxRateBpsWb))
2005 {
2006 _engineStatisticsPtr->SetLastError(
2007 VE_INVALID_ARGUMENT, kTraceError,
2008 "SetISACMaxRate() invalid max rate - 1");
2009 return -1;
2010 }
2011 }
2012 else if (32000 == sendCodec.plfreq)
2013 {
2014 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsSwb) ||
2015 (rateBps > kVoiceEngineMaxIsacMaxRateBpsSwb))
2016 {
2017 _engineStatisticsPtr->SetLastError(
2018 VE_INVALID_ARGUMENT, kTraceError,
2019 "SetISACMaxRate() invalid max rate - 2");
2020 return -1;
2021 }
2022 }
2023 if (_sending)
2024 {
2025 _engineStatisticsPtr->SetLastError(
2026 VE_SENDING, kTraceError,
2027 "SetISACMaxRate() unable to set max rate while sending");
2028 return -1;
2029 }
2030
2031 // Set the maximum instantaneous rate of iSAC (works for both adaptive
2032 // and non-adaptive mode)
2033 if (_audioCodingModule.SetISACMaxRate(rateBps) == -1)
2034 {
2035 _engineStatisticsPtr->SetLastError(
2036 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2037 "SetISACMaxRate() failed to set max rate");
2038 return -1;
2039 }
2040
2041 return 0;
2042}
2043
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002044int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002045Channel::SetISACMaxPayloadSize(int sizeBytes)
2046{
2047 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2048 "Channel::SetISACMaxPayloadSize()");
2049 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00002050 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00002051 {
2052 _engineStatisticsPtr->SetLastError(
2053 VE_CODEC_ERROR, kTraceError,
2054 "SetISACMaxPayloadSize() failed to retrieve send codec");
2055 return -1;
2056 }
2057 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
2058 {
2059 _engineStatisticsPtr->SetLastError(
2060 VE_CODEC_ERROR, kTraceError,
2061 "SetISACMaxPayloadSize() send codec is not iSAC");
2062 return -1;
2063 }
2064 if (16000 == sendCodec.plfreq)
2065 {
2066 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesWb) ||
2067 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesWb))
2068 {
2069 _engineStatisticsPtr->SetLastError(
2070 VE_INVALID_ARGUMENT, kTraceError,
2071 "SetISACMaxPayloadSize() invalid max payload - 1");
2072 return -1;
2073 }
2074 }
2075 else if (32000 == sendCodec.plfreq)
2076 {
2077 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesSwb) ||
2078 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb))
2079 {
2080 _engineStatisticsPtr->SetLastError(
2081 VE_INVALID_ARGUMENT, kTraceError,
2082 "SetISACMaxPayloadSize() invalid max payload - 2");
2083 return -1;
2084 }
2085 }
2086 if (_sending)
2087 {
2088 _engineStatisticsPtr->SetLastError(
2089 VE_SENDING, kTraceError,
2090            "SetISACMaxPayloadSize() unable to set max payload size while sending");
2091 return -1;
2092 }
2093
2094 if (_audioCodingModule.SetISACMaxPayloadSize(sizeBytes) == -1)
2095 {
2096 _engineStatisticsPtr->SetLastError(
2097 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2098 "SetISACMaxPayloadSize() failed to set max payload size");
2099 return -1;
2100 }
2101 return 0;
2102}
2103
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002104int32_t Channel::RegisterExternalTransport(Transport& transport)
niklase@google.com470e71d2011-07-07 08:21:25 +00002105{
2106 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2107 "Channel::RegisterExternalTransport()");
2108
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002109 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002110
niklase@google.com470e71d2011-07-07 08:21:25 +00002111 if (_externalTransport)
2112 {
2113 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
2114 kTraceError,
2115 "RegisterExternalTransport() external transport already enabled");
2116 return -1;
2117 }
2118 _externalTransport = true;
2119 _transportPtr = &transport;
2120 return 0;
2121}
2122
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002123int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002124Channel::DeRegisterExternalTransport()
2125{
2126 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2127 "Channel::DeRegisterExternalTransport()");
2128
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002129 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.org83661f52011-11-25 10:58:15 +00002130
niklase@google.com470e71d2011-07-07 08:21:25 +00002131 if (!_transportPtr)
2132 {
2133 _engineStatisticsPtr->SetLastError(
2134 VE_INVALID_OPERATION, kTraceWarning,
2135 "DeRegisterExternalTransport() external transport already "
2136 "disabled");
2137 return 0;
2138 }
2139 _externalTransport = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002140 _transportPtr = NULL;
2141 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2142                 "DeRegisterExternalTransport() all transports are disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002143 return 0;
2144}
2145
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002146int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002147 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2148 "Channel::ReceivedRTPPacket()");
2149
2150 // Store playout timestamp for the received RTP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002151 UpdatePlayoutTimestamp(false);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002152
2153 // Dump the RTP packet to a file (if RTP dump is enabled).
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002154 if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2155 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002156 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2157 VoEId(_instanceId,_channelId),
2158                 "Channel::ReceivedRTPPacket() RTP dump to input file failed");
2159 }
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002160 RTPHeader header;
2161 if (!rtp_header_parser_->Parse(reinterpret_cast<const uint8_t*>(data),
2162 static_cast<uint16_t>(length), &header)) {
2163    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice,
2164 VoEId(_instanceId,_channelId),
2165 "IncomingPacket invalid RTP header");
2166 return -1;
2167 }
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002168 // Deliver RTP packet to RTP/RTCP module for parsing
2169 // The packet will be pushed back to the channel thru the
2170 // OnReceivedPayloadData callback so we don't push it to the ACM here
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00002171 if (_rtpRtcpModule->IncomingRtpPacket(reinterpret_cast<const uint8_t*>(data),
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002172 static_cast<uint16_t>(length),
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00002173 header) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002174 _engineStatisticsPtr->SetLastError(
2175 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2176        "Channel::ReceivedRTPPacket() RTP packet is invalid");
2177 }
2178 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002179}
2180
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002181int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002182 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2183 "Channel::ReceivedRTCPPacket()");
2184 // Store playout timestamp for the received RTCP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002185 UpdatePlayoutTimestamp(true);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002186
2187 // Dump the RTCP packet to a file (if RTP dump is enabled).
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002188 if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2189 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002190 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2191 VoEId(_instanceId,_channelId),
2192                 "Channel::ReceivedRTCPPacket() RTCP dump to input file failed");
2193 }
2194
2195 // Deliver RTCP packet to RTP/RTCP module for parsing
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002196 if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data,
2197 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002198 _engineStatisticsPtr->SetLastError(
2199 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2200        "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
2201 }
2202 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002203}
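
// Sketch of how an external transport typically feeds received packets into
// the two entry points above (hypothetical caller code; |rtp_buf|, |rtcp_buf|
// and their lengths are assumed to come from the application's sockets):
//
//   channel->ReceivedRTPPacket(reinterpret_cast<const int8_t*>(rtp_buf),
//                              rtp_length);
//   channel->ReceivedRTCPPacket(reinterpret_cast<const int8_t*>(rtcp_buf),
//                               rtcp_length);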
2204
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00002205int32_t
2206Channel::SetPacketTimeoutNotification(bool enable, int timeoutSeconds)
2207{
2208 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2209 "Channel::SetPacketTimeoutNotification()");
2210 if (enable)
2211 {
2212 const uint32_t RTPtimeoutMS = 1000*timeoutSeconds;
2213 const uint32_t RTCPtimeoutMS = 0;
2214 _rtpRtcpModule->SetPacketTimeout(RTPtimeoutMS, RTCPtimeoutMS);
2215 _rtpPacketTimeOutIsEnabled = true;
2216 _rtpTimeOutSeconds = timeoutSeconds;
2217 }
2218 else
2219 {
2220 _rtpRtcpModule->SetPacketTimeout(0, 0);
2221 _rtpPacketTimeOutIsEnabled = false;
2222 _rtpTimeOutSeconds = 0;
2223 }
2224 return 0;
2225}
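
// Example of the conversion above: SetPacketTimeoutNotification(true, 10)
// configures a 10000 ms RTP timeout and leaves the RTCP timeout disabled (0).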
2226
2227int32_t
2228Channel::GetPacketTimeoutNotification(bool& enabled, int& timeoutSeconds)
2229{
2230 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2231 "Channel::GetPacketTimeoutNotification()");
2232 enabled = _rtpPacketTimeOutIsEnabled;
2233 if (enabled)
2234 {
2235 timeoutSeconds = _rtpTimeOutSeconds;
2236 }
2237 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
2238 "GetPacketTimeoutNotification() => enabled=%d,"
2239 " timeoutSeconds=%d",
2240 enabled, timeoutSeconds);
2241 return 0;
2242}
2243
2244int32_t
2245Channel::RegisterDeadOrAliveObserver(VoEConnectionObserver& observer)
2246{
2247 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2248 "Channel::RegisterDeadOrAliveObserver()");
2249 CriticalSectionScoped cs(&_callbackCritSect);
2250
2251 if (_connectionObserverPtr)
2252 {
2253 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION, kTraceError,
2254 "RegisterDeadOrAliveObserver() observer already enabled");
2255 return -1;
2256 }
2257
2258 _connectionObserverPtr = &observer;
2259 _connectionObserver = true;
2260
2261 return 0;
2262}
2263
2264int32_t
2265Channel::DeRegisterDeadOrAliveObserver()
2266{
2267 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2268 "Channel::DeRegisterDeadOrAliveObserver()");
2269 CriticalSectionScoped cs(&_callbackCritSect);
2270
2271 if (!_connectionObserverPtr)
2272 {
2273 _engineStatisticsPtr->SetLastError(
2274 VE_INVALID_OPERATION, kTraceWarning,
2275 "DeRegisterDeadOrAliveObserver() observer already disabled");
2276 return 0;
2277 }
2278
2279 _connectionObserver = false;
2280 _connectionObserverPtr = NULL;
2281
2282 return 0;
2283}
2284
2285int32_t
2286Channel::SetPeriodicDeadOrAliveStatus(bool enable, int sampleTimeSeconds)
2287{
2288 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2289 "Channel::SetPeriodicDeadOrAliveStatus()");
2290 if (!_connectionObserverPtr)
2291 {
2292 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
2293 "SetPeriodicDeadOrAliveStatus() connection observer has"
2294 " not been registered");
2295 }
2296 if (enable)
2297 {
2298 ResetDeadOrAliveCounters();
2299 }
2300 bool enabled(false);
2301 uint8_t currentSampleTimeSec(0);
2302 // Store last state (will be used later if dead-or-alive is disabled).
2303 _rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, currentSampleTimeSec);
2304 // Update the dead-or-alive state.
2305 if (_rtpRtcpModule->SetPeriodicDeadOrAliveStatus(
2306 enable, (uint8_t)sampleTimeSeconds) != 0)
2307 {
2308 _engineStatisticsPtr->SetLastError(
2309 VE_RTP_RTCP_MODULE_ERROR,
2310 kTraceError,
2311 "SetPeriodicDeadOrAliveStatus() failed to set dead-or-alive "
2312 "status");
2313 return -1;
2314 }
2315 if (!enable)
2316 {
2317 // Restore last utilized sample time.
2318 // Without this, the sample time would always be reset to default
2319        // (2 sec), each time dead-or-alive was disabled without a sample-time
2320 // parameter.
2321 _rtpRtcpModule->SetPeriodicDeadOrAliveStatus(enable,
2322 currentSampleTimeSec);
2323 }
2324 return 0;
2325}
2326
2327int32_t
2328Channel::GetPeriodicDeadOrAliveStatus(bool& enabled, int& sampleTimeSeconds)
2329{
2330 _rtpRtcpModule->PeriodicDeadOrAliveStatus(
2331 enabled,
2332 (uint8_t&)sampleTimeSeconds);
2333 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
2334 "GetPeriodicDeadOrAliveStatus() => enabled=%d,"
2335 " sampleTimeSeconds=%d",
2336 enabled, sampleTimeSeconds);
2337 return 0;
2338}
2339
niklase@google.com470e71d2011-07-07 08:21:25 +00002340int Channel::StartPlayingFileLocally(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002341 bool loop,
2342 FileFormats format,
2343 int startPosition,
2344 float volumeScaling,
2345 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002346 const CodecInst* codecInst)
2347{
2348 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2349 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
2350 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
2351 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2352 startPosition, stopPosition);
2353
2354 if (_outputFilePlaying)
2355 {
2356 _engineStatisticsPtr->SetLastError(
2357 VE_ALREADY_PLAYING, kTraceError,
2358 "StartPlayingFileLocally() is already playing");
2359 return -1;
2360 }
2361
niklase@google.com470e71d2011-07-07 08:21:25 +00002362 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002363 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002364
2365 if (_outputFilePlayerPtr)
2366 {
2367 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2368 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2369 _outputFilePlayerPtr = NULL;
2370 }
2371
2372 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2373 _outputFilePlayerId, (const FileFormats)format);
2374
2375 if (_outputFilePlayerPtr == NULL)
2376 {
2377 _engineStatisticsPtr->SetLastError(
2378 VE_INVALID_ARGUMENT, kTraceError,
henrike@webrtc.org31d30702011-11-18 19:59:32 +00002379 "StartPlayingFileLocally() filePlayer format is not correct");
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002380 return -1;
2381 }
2382
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002383 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002384
2385 if (_outputFilePlayerPtr->StartPlayingFile(
2386 fileName,
2387 loop,
2388 startPosition,
2389 volumeScaling,
2390 notificationTime,
2391 stopPosition,
2392 (const CodecInst*)codecInst) != 0)
2393 {
2394 _engineStatisticsPtr->SetLastError(
2395 VE_BAD_FILE, kTraceError,
2396 "StartPlayingFile() failed to start file playout");
2397 _outputFilePlayerPtr->StopPlayingFile();
2398 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2399 _outputFilePlayerPtr = NULL;
2400 return -1;
2401 }
2402 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2403 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002404 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002405
2406 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002407 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002408
2409 return 0;
2410}
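
// Hypothetical usage of the file playout API above (the file name and
// parameter values are examples only):
//
//   channel->StartPlayingFileLocally("/tmp/prompt.pcm", false,
//                                    kFileFormatPcm16kHzFile,
//                                    0, 1.0f, 0, NULL);
//   // ...
//   channel->StopPlayingFileLocally();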
2411
2412int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002413 FileFormats format,
2414 int startPosition,
2415 float volumeScaling,
2416 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002417 const CodecInst* codecInst)
2418{
2419 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2420 "Channel::StartPlayingFileLocally(format=%d,"
2421 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2422 format, volumeScaling, startPosition, stopPosition);
2423
2424 if(stream == NULL)
2425 {
2426 _engineStatisticsPtr->SetLastError(
2427 VE_BAD_FILE, kTraceError,
2428 "StartPlayingFileLocally() NULL as input stream");
2429 return -1;
2430 }
2431
2432
2433 if (_outputFilePlaying)
2434 {
2435 _engineStatisticsPtr->SetLastError(
2436 VE_ALREADY_PLAYING, kTraceError,
2437 "StartPlayingFileLocally() is already playing");
2438 return -1;
2439 }
2440
niklase@google.com470e71d2011-07-07 08:21:25 +00002441 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002442 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002443
2444 // Destroy the old instance
2445 if (_outputFilePlayerPtr)
2446 {
2447 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2448 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2449 _outputFilePlayerPtr = NULL;
2450 }
2451
2452 // Create the instance
2453 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2454 _outputFilePlayerId,
2455 (const FileFormats)format);
2456
2457 if (_outputFilePlayerPtr == NULL)
2458 {
2459 _engineStatisticsPtr->SetLastError(
2460 VE_INVALID_ARGUMENT, kTraceError,
2461                "StartPlayingFileLocally() filePlayer format is not correct");
2462 return -1;
2463 }
2464
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002465 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002466
2467 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2468 volumeScaling,
2469 notificationTime,
2470 stopPosition, codecInst) != 0)
2471 {
2472 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2473 "StartPlayingFile() failed to "
2474 "start file playout");
2475 _outputFilePlayerPtr->StopPlayingFile();
2476 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2477 _outputFilePlayerPtr = NULL;
2478 return -1;
2479 }
2480 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2481 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002482 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002483
2484 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002485 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002486
niklase@google.com470e71d2011-07-07 08:21:25 +00002487 return 0;
2488}
2489
2490int Channel::StopPlayingFileLocally()
2491{
2492 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2493 "Channel::StopPlayingFileLocally()");
2494
2495 if (!_outputFilePlaying)
2496 {
2497 _engineStatisticsPtr->SetLastError(
2498 VE_INVALID_OPERATION, kTraceWarning,
2499            "StopPlayingFileLocally() is not playing");
2500 return 0;
2501 }
2502
niklase@google.com470e71d2011-07-07 08:21:25 +00002503 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002504 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002505
2506 if (_outputFilePlayerPtr->StopPlayingFile() != 0)
2507 {
2508 _engineStatisticsPtr->SetLastError(
2509 VE_STOP_RECORDING_FAILED, kTraceError,
2510 "StopPlayingFile() could not stop playing");
2511 return -1;
2512 }
2513 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2514 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2515 _outputFilePlayerPtr = NULL;
2516 _outputFilePlaying = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002517 }
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002518 // _fileCritSect cannot be taken while calling
2519    // SetAnonymousMixabilityStatus. Refer to comments in
2520 // StartPlayingFileLocally(const char* ...) for more details.
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002521 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
2522 {
2523 _engineStatisticsPtr->SetLastError(
2524 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002525            "StopPlayingFile() failed to stop participant from playing as a"
2526            " file in the mixer");
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002527 return -1;
2528 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002529
2530 return 0;
2531}
2532
2533int Channel::IsPlayingFileLocally() const
2534{
2535 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2536 "Channel::IsPlayingFileLocally()");
2537
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002538 return (int32_t)_outputFilePlaying;
niklase@google.com470e71d2011-07-07 08:21:25 +00002539}
2540
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002541int Channel::RegisterFilePlayingToMixer()
2542{
2543    // Return success without registering file playing to the mixer if:
2544    // 1. the file starts playing before playout is started on this channel.
2545    // 2. playout is started without any file playing on this channel.
2546 if (!_playing || !_outputFilePlaying)
2547 {
2548 return 0;
2549 }
2550
2551 // |_fileCritSect| cannot be taken while calling
2552 // SetAnonymousMixabilityStatus() since as soon as the participant is added
2553 // frames can be pulled by the mixer. Since the frames are generated from
2554 // the file, _fileCritSect will be taken. This would result in a deadlock.
2555 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
2556 {
2557 CriticalSectionScoped cs(&_fileCritSect);
2558 _outputFilePlaying = false;
2559 _engineStatisticsPtr->SetLastError(
2560 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2561 "StartPlayingFile() failed to add participant as file to mixer");
2562 _outputFilePlayerPtr->StopPlayingFile();
2563 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2564 _outputFilePlayerPtr = NULL;
2565 return -1;
2566 }
2567
2568 return 0;
2569}
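
// RegisterFilePlayingToMixer() above is a no-op until both of the following
// have happened, in either order (each caller invokes it when its own
// precondition becomes true):
//
//   channel->StartPlayout();                 // sets |_playing|
//   channel->StartPlayingFileLocally(...);   // sets |_outputFilePlaying|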
2570
pbos@webrtc.org92135212013-05-14 08:31:39 +00002571int Channel::ScaleLocalFilePlayout(float scale)
niklase@google.com470e71d2011-07-07 08:21:25 +00002572{
2573 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2574 "Channel::ScaleLocalFilePlayout(scale=%5.3f)", scale);
2575
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002576 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002577
2578 if (!_outputFilePlaying)
2579 {
2580 _engineStatisticsPtr->SetLastError(
2581 VE_INVALID_OPERATION, kTraceError,
2582            "ScaleLocalFilePlayout() is not playing");
2583 return -1;
2584 }
2585 if ((_outputFilePlayerPtr == NULL) ||
2586 (_outputFilePlayerPtr->SetAudioScaling(scale) != 0))
2587 {
2588 _engineStatisticsPtr->SetLastError(
2589 VE_BAD_ARGUMENT, kTraceError,
2590 "SetAudioScaling() failed to scale the playout");
2591 return -1;
2592 }
2593
2594 return 0;
2595}
2596
2597int Channel::GetLocalPlayoutPosition(int& positionMs)
2598{
2599 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2600 "Channel::GetLocalPlayoutPosition(position=?)");
2601
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002602 uint32_t position;
niklase@google.com470e71d2011-07-07 08:21:25 +00002603
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002604 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002605
2606 if (_outputFilePlayerPtr == NULL)
2607 {
2608 _engineStatisticsPtr->SetLastError(
2609 VE_INVALID_OPERATION, kTraceError,
2610            "GetLocalPlayoutPosition() filePlayer instance does not exist");
2611 return -1;
2612 }
2613
2614 if (_outputFilePlayerPtr->GetPlayoutPosition(position) != 0)
2615 {
2616 _engineStatisticsPtr->SetLastError(
2617 VE_BAD_FILE, kTraceError,
2618 "GetLocalPlayoutPosition() failed");
2619 return -1;
2620 }
2621 positionMs = position;
2622
2623 return 0;
2624}
2625
2626int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002627 bool loop,
2628 FileFormats format,
2629 int startPosition,
2630 float volumeScaling,
2631 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002632 const CodecInst* codecInst)
2633{
2634 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2635 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
2636 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
2637 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2638 startPosition, stopPosition);
2639
2640 if (_inputFilePlaying)
2641 {
2642 _engineStatisticsPtr->SetLastError(
2643 VE_ALREADY_PLAYING, kTraceWarning,
2644 "StartPlayingFileAsMicrophone() filePlayer is playing");
2645 return 0;
2646 }
2647
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002648 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002649
2650 // Destroy the old instance
2651 if (_inputFilePlayerPtr)
2652 {
2653 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2654 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2655 _inputFilePlayerPtr = NULL;
2656 }
2657
2658 // Create the instance
2659 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2660 _inputFilePlayerId, (const FileFormats)format);
2661
2662 if (_inputFilePlayerPtr == NULL)
2663 {
2664 _engineStatisticsPtr->SetLastError(
2665 VE_INVALID_ARGUMENT, kTraceError,
2666            "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2667 return -1;
2668 }
2669
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002670 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002671
2672 if (_inputFilePlayerPtr->StartPlayingFile(
2673 fileName,
2674 loop,
2675 startPosition,
2676 volumeScaling,
2677 notificationTime,
2678 stopPosition,
2679 (const CodecInst*)codecInst) != 0)
2680 {
2681 _engineStatisticsPtr->SetLastError(
2682 VE_BAD_FILE, kTraceError,
2683 "StartPlayingFile() failed to start file playout");
2684 _inputFilePlayerPtr->StopPlayingFile();
2685 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2686 _inputFilePlayerPtr = NULL;
2687 return -1;
2688 }
2689 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2690 _inputFilePlaying = true;
2691
2692 return 0;
2693}
2694
2695int Channel::StartPlayingFileAsMicrophone(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002696 FileFormats format,
2697 int startPosition,
2698 float volumeScaling,
2699 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002700 const CodecInst* codecInst)
2701{
2702 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2703 "Channel::StartPlayingFileAsMicrophone(format=%d, "
2704 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2705 format, volumeScaling, startPosition, stopPosition);
2706
2707 if(stream == NULL)
2708 {
2709 _engineStatisticsPtr->SetLastError(
2710 VE_BAD_FILE, kTraceError,
2711 "StartPlayingFileAsMicrophone NULL as input stream");
2712 return -1;
2713 }
2714
2715 if (_inputFilePlaying)
2716 {
2717 _engineStatisticsPtr->SetLastError(
2718 VE_ALREADY_PLAYING, kTraceWarning,
2719 "StartPlayingFileAsMicrophone() is playing");
2720 return 0;
2721 }
2722
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002723 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002724
2725 // Destroy the old instance
2726 if (_inputFilePlayerPtr)
2727 {
2728 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2729 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2730 _inputFilePlayerPtr = NULL;
2731 }
2732
2733 // Create the instance
2734 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2735 _inputFilePlayerId, (const FileFormats)format);
2736
2737 if (_inputFilePlayerPtr == NULL)
2738 {
2739 _engineStatisticsPtr->SetLastError(
2740 VE_INVALID_ARGUMENT, kTraceError,
2741            "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2742 return -1;
2743 }
2744
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002745 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002746
2747 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2748 volumeScaling, notificationTime,
2749 stopPosition, codecInst) != 0)
2750 {
2751 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2752 "StartPlayingFile() failed to start "
2753 "file playout");
2754 _inputFilePlayerPtr->StopPlayingFile();
2755 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2756 _inputFilePlayerPtr = NULL;
2757 return -1;
2758 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002759
niklase@google.com470e71d2011-07-07 08:21:25 +00002760 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2761 _inputFilePlaying = true;
2762
2763 return 0;
2764}
2765
2766int Channel::StopPlayingFileAsMicrophone()
2767{
2768 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2769 "Channel::StopPlayingFileAsMicrophone()");
2770
2771 if (!_inputFilePlaying)
2772 {
2773 _engineStatisticsPtr->SetLastError(
2774 VE_INVALID_OPERATION, kTraceWarning,
2775            "StopPlayingFileAsMicrophone() is not playing");
2776 return 0;
2777 }
2778
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002779 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002780 if (_inputFilePlayerPtr->StopPlayingFile() != 0)
2781 {
2782 _engineStatisticsPtr->SetLastError(
2783 VE_STOP_RECORDING_FAILED, kTraceError,
2784 "StopPlayingFile() could not stop playing");
2785 return -1;
2786 }
2787 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2788 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2789 _inputFilePlayerPtr = NULL;
2790 _inputFilePlaying = false;
2791
2792 return 0;
2793}
2794
2795int Channel::IsPlayingFileAsMicrophone() const
2796{
2797 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2798 "Channel::IsPlayingFileAsMicrophone()");
2799
2800 return _inputFilePlaying;
2801}
2802
pbos@webrtc.org92135212013-05-14 08:31:39 +00002803int Channel::ScaleFileAsMicrophonePlayout(float scale)
niklase@google.com470e71d2011-07-07 08:21:25 +00002804{
2805 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2806 "Channel::ScaleFileAsMicrophonePlayout(scale=%5.3f)", scale);
2807
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002808 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002809
2810 if (!_inputFilePlaying)
2811 {
2812 _engineStatisticsPtr->SetLastError(
2813 VE_INVALID_OPERATION, kTraceError,
2814            "ScaleFileAsMicrophonePlayout() is not playing");
2815 return -1;
2816 }
2817
2818 if ((_inputFilePlayerPtr == NULL) ||
2819 (_inputFilePlayerPtr->SetAudioScaling(scale) != 0))
2820 {
2821 _engineStatisticsPtr->SetLastError(
2822 VE_BAD_ARGUMENT, kTraceError,
2823 "SetAudioScaling() failed to scale playout");
2824 return -1;
2825 }
2826
2827 return 0;
2828}
2829
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00002830int Channel::StartRecordingPlayout(const char* fileName,
niklase@google.com470e71d2011-07-07 08:21:25 +00002831 const CodecInst* codecInst)
2832{
2833 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2834 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
2835
2836 if (_outputFileRecording)
2837 {
2838 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2839 "StartRecordingPlayout() is already recording");
2840 return 0;
2841 }
2842
2843 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002844 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +00002845    CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2846
niklas.enbom@webrtc.org40197d72012-03-26 08:45:47 +00002847 if ((codecInst != NULL) &&
2848 ((codecInst->channels < 1) || (codecInst->channels > 2)))
niklase@google.com470e71d2011-07-07 08:21:25 +00002849 {
2850 _engineStatisticsPtr->SetLastError(
2851 VE_BAD_ARGUMENT, kTraceError,
2852 "StartRecordingPlayout() invalid compression");
2853 return(-1);
2854 }
2855 if(codecInst == NULL)
2856 {
2857 format = kFileFormatPcm16kHzFile;
2858 codecInst=&dummyCodec;
2859 }
2860 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2861 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2862 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2863 {
2864 format = kFileFormatWavFile;
2865 }
2866 else
2867 {
2868 format = kFileFormatCompressedFile;
2869 }
2870
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002871 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002872
2873 // Destroy the old instance
2874 if (_outputFileRecorderPtr)
2875 {
2876 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2877 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2878 _outputFileRecorderPtr = NULL;
2879 }
2880
2881 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2882 _outputFileRecorderId, (const FileFormats)format);
2883 if (_outputFileRecorderPtr == NULL)
2884 {
2885 _engineStatisticsPtr->SetLastError(
2886 VE_INVALID_ARGUMENT, kTraceError,
2887            "StartRecordingPlayout() fileRecorder format is not correct");
2888 return -1;
2889 }
2890
2891 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2892 fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
2893 {
2894 _engineStatisticsPtr->SetLastError(
2895 VE_BAD_FILE, kTraceError,
2896 "StartRecordingAudioFile() failed to start file recording");
2897 _outputFileRecorderPtr->StopRecording();
2898 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2899 _outputFileRecorderPtr = NULL;
2900 return -1;
2901 }
2902 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2903 _outputFileRecording = true;
2904
2905 return 0;
2906}
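// Usage sketch (illustrative only): a NULL codec records 16 kHz linear PCM,
// L16/PCMU/PCMA are written as WAV, and any other codec ends up in a
// compressed file. The `channel` pointer, file path and codec values below
// are assumptions of this sketch, not defined in this file:
//
//   webrtc::CodecInst wav16 = {107, "L16", 16000, 320, 1, 256000};
//   if (channel->StartRecordingPlayout("/tmp/playout.wav", &wav16) != 0) {
//       // Inspect the engine statistics for VE_BAD_ARGUMENT or VE_BAD_FILE.
//   }
//   // ... later ...
//   channel->StopRecordingPlayout();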
2907
2908int Channel::StartRecordingPlayout(OutStream* stream,
2909 const CodecInst* codecInst)
2910{
2911 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2912 "Channel::StartRecordingPlayout()");
2913
2914 if (_outputFileRecording)
2915 {
2916 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2917 "StartRecordingPlayout() is already recording");
2918 return 0;
2919 }
2920
2921 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002922 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +00002923 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2924
2925 if (codecInst != NULL && codecInst->channels != 1)
2926 {
2927 _engineStatisticsPtr->SetLastError(
2928 VE_BAD_ARGUMENT, kTraceError,
2929 "StartRecordingPlayout() invalid compression");
2930 return(-1);
2931 }
2932 if(codecInst == NULL)
2933 {
2934 format = kFileFormatPcm16kHzFile;
2935 codecInst = &dummyCodec;
2936 }
2937 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2938 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2939 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2940 {
2941 format = kFileFormatWavFile;
2942 }
2943 else
2944 {
2945 format = kFileFormatCompressedFile;
2946 }
2947
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002948 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002949
2950 // Destroy the old instance
2951 if (_outputFileRecorderPtr)
2952 {
2953 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2954 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2955 _outputFileRecorderPtr = NULL;
2956 }
2957
2958 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2959 _outputFileRecorderId, (const FileFormats)format);
2960 if (_outputFileRecorderPtr == NULL)
2961 {
2962 _engineStatisticsPtr->SetLastError(
2963 VE_INVALID_ARGUMENT, kTraceError,
2964 "StartRecordingPlayout() fileRecorder format is not correct");
2965 return -1;
2966 }
2967
2968 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2969 notificationTime) != 0)
2970 {
2971 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2972 "StartRecordingPlayout() failed to "
2973 "start file recording");
2974 _outputFileRecorderPtr->StopRecording();
2975 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2976 _outputFileRecorderPtr = NULL;
2977 return -1;
2978 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002979
niklase@google.com470e71d2011-07-07 08:21:25 +00002980 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2981 _outputFileRecording = true;
2982
2983 return 0;
2984}
2985
2986int Channel::StopRecordingPlayout()
2987{
2988 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
2989 "Channel::StopRecordingPlayout()");
2990
2991 if (!_outputFileRecording)
2992 {
2993 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
2994 "StopRecordingPlayout() is not recording");
2995 return -1;
2996 }
2997
2998
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002999 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003000
3001 if (_outputFileRecorderPtr->StopRecording() != 0)
3002 {
3003 _engineStatisticsPtr->SetLastError(
3004 VE_STOP_RECORDING_FAILED, kTraceError,
3005 "StopRecording() could not stop recording");
3006 return(-1);
3007 }
3008 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
3009 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
3010 _outputFileRecorderPtr = NULL;
3011 _outputFileRecording = false;
3012
3013 return 0;
3014}
3015
3016void
3017Channel::SetMixWithMicStatus(bool mix)
3018{
3019 _mixFileWithMicrophone=mix;
3020}
3021
3022int
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003023Channel::GetSpeechOutputLevel(uint32_t& level) const
niklase@google.com470e71d2011-07-07 08:21:25 +00003024{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003025 int8_t currentLevel = _outputAudioLevel.Level();
3026 level = static_cast<int32_t> (currentLevel);
niklase@google.com470e71d2011-07-07 08:21:25 +00003027 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3028 VoEId(_instanceId,_channelId),
3029 "GetSpeechOutputLevel() => level=%u", level);
3030 return 0;
3031}
3032
3033int
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003034Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const
niklase@google.com470e71d2011-07-07 08:21:25 +00003035{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003036 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
3037 level = static_cast<int32_t> (currentLevel);
niklase@google.com470e71d2011-07-07 08:21:25 +00003038 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3039 VoEId(_instanceId,_channelId),
3040 "GetSpeechOutputLevelFullRange() => level=%u", level);
3041 return 0;
3042}
3043
3044int
3045Channel::SetMute(bool enable)
3046{
3047 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3048 "Channel::SetMute(enable=%d)", enable);
3049 _mute = enable;
3050 return 0;
3051}
3052
3053bool
3054Channel::Mute() const
3055{
3056 return _mute;
3057}
3058
3059int
3060Channel::SetOutputVolumePan(float left, float right)
3061{
3062 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3063 "Channel::SetOutputVolumePan()");
3064 _panLeft = left;
3065 _panRight = right;
3066 return 0;
3067}
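// Usage sketch (illustrative only): the pan values are plain linear scale
// factors for the left and right playout channels; the mixing that applies
// them happens outside this excerpt. `channel` is an assumed, initialized
// Channel*:
//
//   channel->SetOutputVolumePan(1.0f, 0.0f);   // hard left
//   float left = 0.0f, right = 0.0f;
//   channel->GetOutputVolumePan(left, right);  // left == 1.0f, right == 0.0f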
3068
3069int
3070Channel::GetOutputVolumePan(float& left, float& right) const
3071{
3072 left = _panLeft;
3073 right = _panRight;
3074 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3075 VoEId(_instanceId,_channelId),
3076 "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
3077 return 0;
3078}
3079
3080int
3081Channel::SetChannelOutputVolumeScaling(float scaling)
3082{
3083 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3084 "Channel::SetChannelOutputVolumeScaling()");
3085 _outputGain = scaling;
3086 return 0;
3087}
3088
3089int
3090Channel::GetChannelOutputVolumeScaling(float& scaling) const
3091{
3092 scaling = _outputGain;
3093 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3094 VoEId(_instanceId,_channelId),
3095 "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
3096 return 0;
3097}
3098
niklase@google.com470e71d2011-07-07 08:21:25 +00003099int
3100Channel::RegisterExternalEncryption(Encryption& encryption)
3101{
3102 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3103 "Channel::RegisterExternalEncryption()");
3104
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003105 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003106
3107 if (_encryptionPtr)
3108 {
3109 _engineStatisticsPtr->SetLastError(
3110 VE_INVALID_OPERATION, kTraceError,
3111 "RegisterExternalEncryption() encryption already enabled");
3112 return -1;
3113 }
3114
3115 _encryptionPtr = &encryption;
3116
3117 _decrypting = true;
3118 _encrypting = true;
3119
3120 return 0;
3121}
3122
3123int
3124Channel::DeRegisterExternalEncryption()
3125{
3126 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3127 "Channel::DeRegisterExternalEncryption()");
3128
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003129 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003130
3131 if (!_encryptionPtr)
3132 {
3133 _engineStatisticsPtr->SetLastError(
3134 VE_INVALID_OPERATION, kTraceWarning,
3135 "DeRegisterExternalEncryption() encryption already disabled");
3136 return 0;
3137 }
3138
3139 _decrypting = false;
3140 _encrypting = false;
3141
3142 _encryptionPtr = NULL;
3143
3144 return 0;
3145}
3146
3147int Channel::SendTelephoneEventOutband(unsigned char eventCode,
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00003148 int lengthMs, int attenuationDb,
3149 bool playDtmfEvent)
niklase@google.com470e71d2011-07-07 08:21:25 +00003150{
3151 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3152 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
3153 playDtmfEvent);
3154
3155 _playOutbandDtmfEvent = playDtmfEvent;
3156
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003157 if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
niklase@google.com470e71d2011-07-07 08:21:25 +00003158 attenuationDb) != 0)
3159 {
3160 _engineStatisticsPtr->SetLastError(
3161 VE_SEND_DTMF_FAILED,
3162 kTraceWarning,
3163 "SendTelephoneEventOutband() failed to send event");
3164 return -1;
3165 }
3166 return 0;
3167}
3168
3169int Channel::SendTelephoneEventInband(unsigned char eventCode,
3170 int lengthMs,
3171 int attenuationDb,
3172 bool playDtmfEvent)
3173{
3174 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3175 "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
3176 playDtmfEvent);
3177
3178 _playInbandDtmfEvent = playDtmfEvent;
3179 _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
3180
3181 return 0;
3182}
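// Usage sketch (illustrative only): the out-of-band variant hands the event
// to the RTP/RTCP module as telephone-event packets, while the in-band
// variant queues a tone that is later inserted into the outgoing audio in
// PrepareEncodeAndSend() via InsertInbandDtmfTone(). The values below are
// examples; `channel` is an assumed, initialized Channel*:
//
//   // Digit "5", 160 ms long, 10 dB attenuation, no local playout of the tone.
//   channel->SendTelephoneEventOutband(5, 160, 10, false);
//   // The same digit sent as an in-band tone instead.
//   channel->SendTelephoneEventInband(5, 160, 10, false);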
3183
3184int
3185Channel::SetDtmfPlayoutStatus(bool enable)
3186{
3187 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3188 "Channel::SetDtmfPlayoutStatus()");
3189 if (_audioCodingModule.SetDtmfPlayoutStatus(enable) != 0)
3190 {
3191 _engineStatisticsPtr->SetLastError(
3192 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
3193 "SetDtmfPlayoutStatus() failed to set Dtmf playout");
3194 return -1;
3195 }
3196 return 0;
3197}
3198
3199bool
3200Channel::DtmfPlayoutStatus() const
3201{
3202 return _audioCodingModule.DtmfPlayoutStatus();
3203}
3204
3205int
3206Channel::SetSendTelephoneEventPayloadType(unsigned char type)
3207{
3208 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3209 "Channel::SetSendTelephoneEventPayloadType()");
andrew@webrtc.orgf81f9f82011-08-19 22:56:22 +00003210 if (type > 127)
niklase@google.com470e71d2011-07-07 08:21:25 +00003211 {
3212 _engineStatisticsPtr->SetLastError(
3213 VE_INVALID_ARGUMENT, kTraceError,
3214 "SetSendTelephoneEventPayloadType() invalid type");
3215 return -1;
3216 }
pbos@webrtc.org5b10d8f2013-07-11 15:50:07 +00003217 CodecInst codec = {};
pwestin@webrtc.org1da1ce02011-10-13 15:19:55 +00003218 codec.plfreq = 8000;
3219 codec.pltype = type;
3220 memcpy(codec.plname, "telephone-event", 16);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003221 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003222 {
henrika@webrtc.org4392d5f2013-04-17 07:34:25 +00003223 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
3224 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
3225 _engineStatisticsPtr->SetLastError(
3226 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3227 "SetSendTelephoneEventPayloadType() failed to register send"
3228 " payload type");
3229 return -1;
3230 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003231 }
3232 _sendTelephoneEventPayloadType = type;
3233 return 0;
3234}
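// Usage sketch (illustrative only): the payload type registered here is what
// outgoing telephone-event packets will carry, so it should match the
// dynamic type negotiated for DTMF (typically in the 96-127 range). The
// value 106 is an example; `channel` is an assumed Channel*:
//
//   if (channel->SetSendTelephoneEventPayloadType(106) != 0) {
//       // Rejected: only values in the 0-127 range are accepted.
//   }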
3235
3236int
3237Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
3238{
3239 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3240 "Channel::GetSendTelephoneEventPayloadType()");
3241 type = _sendTelephoneEventPayloadType;
3242 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3243 VoEId(_instanceId,_channelId),
3244 "GetSendTelephoneEventPayloadType() => type=%u", type);
3245 return 0;
3246}
3247
niklase@google.com470e71d2011-07-07 08:21:25 +00003248int
3249Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
3250{
3251 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3252 "Channel::UpdateRxVadDetection()");
3253
3254 int vadDecision = 1;
3255
andrew@webrtc.org63a50982012-05-02 23:56:37 +00003256 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive)? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003257
3258 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
3259 {
3260 OnRxVadDetected(vadDecision);
3261 _oldVadDecision = vadDecision;
3262 }
3263
3264 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3265 "Channel::UpdateRxVadDetection() => vadDecision=%d",
3266 vadDecision);
3267 return 0;
3268}
3269
3270int
3271Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
3272{
3273 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3274 "Channel::RegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003275 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003276
3277 if (_rxVadObserverPtr)
3278 {
3279 _engineStatisticsPtr->SetLastError(
3280 VE_INVALID_OPERATION, kTraceError,
3281 "RegisterRxVadObserver() observer already enabled");
3282 return -1;
3283 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003284 _rxVadObserverPtr = &observer;
3285 _RxVadDetection = true;
3286 return 0;
3287}
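// Usage sketch (illustrative only): UpdateRxVadDetection() only notifies the
// observer when the per-frame VAD decision changes, so a registered callback
// sees transitions rather than every frame. The OnRxVad() signature below is
// assumed from the VoERxVadCallback interface and is not defined in this
// file:
//
//   class VadLogger : public webrtc::VoERxVadCallback {
//    public:
//     virtual void OnRxVad(int channel, int vadDecision) {
//       // vadDecision: 1 = active speech, 0 = inactive.
//     }
//   };
//   VadLogger logger;
//   channel->RegisterRxVadObserver(logger);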
3288
3289int
3290Channel::DeRegisterRxVadObserver()
3291{
3292 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3293 "Channel::DeRegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003294 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003295
3296 if (!_rxVadObserverPtr)
3297 {
3298 _engineStatisticsPtr->SetLastError(
3299 VE_INVALID_OPERATION, kTraceWarning,
3300 "DeRegisterRxVadObserver() observer already disabled");
3301 return 0;
3302 }
3303 _rxVadObserverPtr = NULL;
3304 _RxVadDetection = false;
3305 return 0;
3306}
3307
3308int
3309Channel::VoiceActivityIndicator(int &activity)
3310{
3311 activity = _sendFrameType;
3312
3313 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3314 "Channel::VoiceActivityIndicator(indicator=%d)", activity);
3315 return 0;
3316}
3317
3318#ifdef WEBRTC_VOICE_ENGINE_AGC
3319
3320int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003321Channel::SetRxAgcStatus(bool enable, AgcModes mode)
niklase@google.com470e71d2011-07-07 08:21:25 +00003322{
3323 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3324 "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
3325 (int)enable, (int)mode);
3326
3327 GainControl::Mode agcMode(GainControl::kFixedDigital);
3328 switch (mode)
3329 {
3330 case kAgcDefault:
3331 agcMode = GainControl::kAdaptiveDigital;
3332 break;
3333 case kAgcUnchanged:
3334 agcMode = _rxAudioProcessingModulePtr->gain_control()->mode();
3335 break;
3336 case kAgcFixedDigital:
3337 agcMode = GainControl::kFixedDigital;
3338 break;
3339 case kAgcAdaptiveDigital:
3340 agcMode = GainControl::kAdaptiveDigital;
3341 break;
3342 default:
3343 _engineStatisticsPtr->SetLastError(
3344 VE_INVALID_ARGUMENT, kTraceError,
3345 "SetRxAgcStatus() invalid Agc mode");
3346 return -1;
3347 }
3348
3349 if (_rxAudioProcessingModulePtr->gain_control()->set_mode(agcMode) != 0)
3350 {
3351 _engineStatisticsPtr->SetLastError(
3352 VE_APM_ERROR, kTraceError,
3353 "SetRxAgcStatus() failed to set Agc mode");
3354 return -1;
3355 }
3356 if (_rxAudioProcessingModulePtr->gain_control()->Enable(enable) != 0)
3357 {
3358 _engineStatisticsPtr->SetLastError(
3359 VE_APM_ERROR, kTraceError,
3360 "SetRxAgcStatus() failed to set Agc state");
3361 return -1;
3362 }
3363
3364 _rxAgcIsEnabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00003365 _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
3366
3367 return 0;
3368}
3369
3370int
3371Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
3372{
3373 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3374 "Channel::GetRxAgcStatus(enable=?, mode=?)");
3375
3376 bool enable = _rxAudioProcessingModulePtr->gain_control()->is_enabled();
3377 GainControl::Mode agcMode =
3378 _rxAudioProcessingModulePtr->gain_control()->mode();
3379
3380 enabled = enable;
3381
3382 switch (agcMode)
3383 {
3384 case GainControl::kFixedDigital:
3385 mode = kAgcFixedDigital;
3386 break;
3387 case GainControl::kAdaptiveDigital:
3388 mode = kAgcAdaptiveDigital;
3389 break;
3390 default:
3391 _engineStatisticsPtr->SetLastError(
3392 VE_APM_ERROR, kTraceError,
3393 "GetRxAgcStatus() invalid Agc mode");
3394 return -1;
3395 }
3396
3397 return 0;
3398}
3399
3400int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003401Channel::SetRxAgcConfig(AgcConfig config)
niklase@google.com470e71d2011-07-07 08:21:25 +00003402{
3403 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3404 "Channel::SetRxAgcConfig()");
3405
3406 if (_rxAudioProcessingModulePtr->gain_control()->set_target_level_dbfs(
3407 config.targetLeveldBOv) != 0)
3408 {
3409 _engineStatisticsPtr->SetLastError(
3410 VE_APM_ERROR, kTraceError,
3411 "SetRxAgcConfig() failed to set target peak |level|"
3412 " (or envelope) of the Agc");
3413 return -1;
3414 }
3415 if (_rxAudioProcessingModulePtr->gain_control()->set_compression_gain_db(
3416 config.digitalCompressionGaindB) != 0)
3417 {
3418 _engineStatisticsPtr->SetLastError(
3419 VE_APM_ERROR, kTraceError,
3420 "SetRxAgcConfig() failed to set the range in |gain| the"
3421 " digital compression stage may apply");
3422 return -1;
3423 }
3424 if (_rxAudioProcessingModulePtr->gain_control()->enable_limiter(
3425 config.limiterEnable) != 0)
3426 {
3427 _engineStatisticsPtr->SetLastError(
3428 VE_APM_ERROR, kTraceError,
3429 "SetRxAgcConfig() failed to set hard limiter to the signal");
3430 return -1;
3431 }
3432
3433 return 0;
3434}
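// Usage sketch (illustrative only): AgcConfig carries the three knobs that
// are forwarded to the APM gain control above. The numeric values are
// examples, not defaults mandated by this file; `channel` is an assumed
// Channel*:
//
//   webrtc::AgcConfig agc_config;
//   agc_config.targetLeveldBOv = 3;           // target peak level/envelope
//   agc_config.digitalCompressionGaindB = 9;  // max digital compression gain
//   agc_config.limiterEnable = true;
//   channel->SetRxAgcStatus(true, webrtc::kAgcAdaptiveDigital);
//   channel->SetRxAgcConfig(agc_config);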
3435
3436int
3437Channel::GetRxAgcConfig(AgcConfig& config)
3438{
3439 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3440 "Channel::GetRxAgcConfig(config=?)");
3441
3442 config.targetLeveldBOv =
3443 _rxAudioProcessingModulePtr->gain_control()->target_level_dbfs();
3444 config.digitalCompressionGaindB =
3445 _rxAudioProcessingModulePtr->gain_control()->compression_gain_db();
3446 config.limiterEnable =
3447 _rxAudioProcessingModulePtr->gain_control()->is_limiter_enabled();
3448
3449 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3450 VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
3451 "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
3452 " limiterEnable=%d",
3453 config.targetLeveldBOv,
3454 config.digitalCompressionGaindB,
3455 config.limiterEnable);
3456
3457 return 0;
3458}
3459
3460#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
3461
3462#ifdef WEBRTC_VOICE_ENGINE_NR
3463
3464int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003465Channel::SetRxNsStatus(bool enable, NsModes mode)
niklase@google.com470e71d2011-07-07 08:21:25 +00003466{
3467 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3468 "Channel::SetRxNsStatus(enable=%d, mode=%d)",
3469 (int)enable, (int)mode);
3470
3471 NoiseSuppression::Level nsLevel(
3472 (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE);
3473 switch (mode)
3474 {
3475
3476 case kNsDefault:
3477 nsLevel = (NoiseSuppression::Level)
3478 WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE;
3479 break;
3480 case kNsUnchanged:
3481 nsLevel = _rxAudioProcessingModulePtr->noise_suppression()->level();
3482 break;
3483 case kNsConference:
3484 nsLevel = NoiseSuppression::kHigh;
3485 break;
3486 case kNsLowSuppression:
3487 nsLevel = NoiseSuppression::kLow;
3488 break;
3489 case kNsModerateSuppression:
3490 nsLevel = NoiseSuppression::kModerate;
3491 break;
3492 case kNsHighSuppression:
3493 nsLevel = NoiseSuppression::kHigh;
3494 break;
3495 case kNsVeryHighSuppression:
3496 nsLevel = NoiseSuppression::kVeryHigh;
3497 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003498 }
3499
3500 if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(nsLevel)
3501 != 0)
3502 {
3503 _engineStatisticsPtr->SetLastError(
3504 VE_APM_ERROR, kTraceError,
3505 "SetRxNsStatus() failed to set NS level");
3506 return -1;
3507 }
3508 if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(enable) != 0)
3509 {
3510 _engineStatisticsPtr->SetLastError(
3511 VE_APM_ERROR, kTraceError,
3512 "SetRxNsStatus() failed to set NS state");
3513 return -1;
3514 }
3515
3516 _rxNsIsEnabled = enable;
3517 _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
3518
3519 return 0;
3520}
3521
3522int
3523Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
3524{
3525 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3526 "Channel::GetRxNsStatus(enable=?, mode=?)");
3527
3528 bool enable =
3529 _rxAudioProcessingModulePtr->noise_suppression()->is_enabled();
3530 NoiseSuppression::Level ncLevel =
3531 _rxAudioProcessingModulePtr->noise_suppression()->level();
3532
3533 enabled = enable;
3534
3535 switch (ncLevel)
3536 {
3537 case NoiseSuppression::kLow:
3538 mode = kNsLowSuppression;
3539 break;
3540 case NoiseSuppression::kModerate:
3541 mode = kNsModerateSuppression;
3542 break;
3543 case NoiseSuppression::kHigh:
3544 mode = kNsHighSuppression;
3545 break;
3546 case NoiseSuppression::kVeryHigh:
3547 mode = kNsVeryHighSuppression;
3548 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003549 }
3550
3551 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3552 VoEId(_instanceId,_channelId),
3553 "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
3554 return 0;
3555}
3556
3557#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
3558
3559int
3560Channel::RegisterRTPObserver(VoERTPObserver& observer)
3561{
3562 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3563 "Channel::RegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003564 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003565
3566 if (_rtpObserverPtr)
3567 {
3568 _engineStatisticsPtr->SetLastError(
3569 VE_INVALID_OPERATION, kTraceError,
3570 "RegisterRTPObserver() observer already enabled");
3571 return -1;
3572 }
3573
3574 _rtpObserverPtr = &observer;
3575 _rtpObserver = true;
3576
3577 return 0;
3578}
3579
3580int
3581Channel::DeRegisterRTPObserver()
3582{
3583 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3584 "Channel::DeRegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003585 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003586
3587 if (!_rtpObserverPtr)
3588 {
3589 _engineStatisticsPtr->SetLastError(
3590 VE_INVALID_OPERATION, kTraceWarning,
3591 "DeRegisterRTPObserver() observer already disabled");
3592 return 0;
3593 }
3594
3595 _rtpObserver = false;
3596 _rtpObserverPtr = NULL;
3597
3598 return 0;
3599}
3600
3601int
3602Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
3603{
3604 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3605 "Channel::RegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003606 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003607
3608 if (_rtcpObserverPtr)
3609 {
3610 _engineStatisticsPtr->SetLastError(
3611 VE_INVALID_OPERATION, kTraceError,
3612 "RegisterRTCPObserver() observer already enabled");
3613 return -1;
3614 }
3615
3616 _rtcpObserverPtr = &observer;
3617 _rtcpObserver = true;
3618
3619 return 0;
3620}
3621
3622int
3623Channel::DeRegisterRTCPObserver()
3624{
3625 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3626 "Channel::DeRegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003627 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003628
3629 if (!_rtcpObserverPtr)
3630 {
3631 _engineStatisticsPtr->SetLastError(
3632 VE_INVALID_OPERATION, kTraceWarning,
3633 "DeRegisterRTCPObserver() observer already disabled");
3634 return 0;
3635 }
3636
3637 _rtcpObserver = false;
3638 _rtcpObserverPtr = NULL;
3639
3640 return 0;
3641}
3642
3643int
3644Channel::SetLocalSSRC(unsigned int ssrc)
3645{
3646 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3647 "Channel::SetLocalSSRC()");
3648 if (_sending)
3649 {
3650 _engineStatisticsPtr->SetLastError(
3651 VE_ALREADY_SENDING, kTraceError,
3652 "SetLocalSSRC() already sending");
3653 return -1;
3654 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003655 if (_rtpRtcpModule->SetSSRC(ssrc) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003656 {
3657 _engineStatisticsPtr->SetLastError(
3658 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3659 "SetLocalSSRC() failed to set SSRC");
3660 return -1;
3661 }
3662 return 0;
3663}
3664
3665int
3666Channel::GetLocalSSRC(unsigned int& ssrc)
3667{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003668 ssrc = _rtpRtcpModule->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003669 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3670 VoEId(_instanceId,_channelId),
3671 "GetLocalSSRC() => ssrc=%lu", ssrc);
3672 return 0;
3673}
3674
3675int
3676Channel::GetRemoteSSRC(unsigned int& ssrc)
3677{
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00003678 ssrc = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003679 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3680 VoEId(_instanceId,_channelId),
3681 "GetRemoteSSRC() => ssrc=%lu", ssrc);
3682 return 0;
3683}
3684
3685int
3686Channel::GetRemoteCSRCs(unsigned int arrCSRC[15])
3687{
3688 if (arrCSRC == NULL)
3689 {
3690 _engineStatisticsPtr->SetLastError(
3691 VE_INVALID_ARGUMENT, kTraceError,
3692 "GetRemoteCSRCs() invalid array argument");
3693 return -1;
3694 }
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003695 uint32_t arrOfCSRC[kRtpCsrcSize];
3696 int32_t CSRCs(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003697 CSRCs = _rtpRtcpModule->CSRCs(arrOfCSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00003698 if (CSRCs > 0)
3699 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003700 memcpy(arrCSRC, arrOfCSRC, CSRCs * sizeof(uint32_t));
niklase@google.com470e71d2011-07-07 08:21:25 +00003701 for (int i = 0; i < (int) CSRCs; i++)
3702 {
3703 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3704 VoEId(_instanceId, _channelId),
3705 "GetRemoteCSRCs() => arrCSRC[%d]=%lu", i, arrCSRC[i]);
3706 }
3707 } else
3708 {
3709 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3710 VoEId(_instanceId, _channelId),
3711 "GetRemoteCSRCs() => list is empty!");
3712 }
3713 return CSRCs;
3714}
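// Usage sketch (illustrative only): on success the return value is the
// number of contributing sources reported by the RTP/RTCP module, and at
// most kRtpCsrcSize (15) entries are written to the caller's array.
// `channel` is an assumed Channel*:
//
//   unsigned int csrcs[15] = {0};
//   int num_csrcs = channel->GetRemoteCSRCs(csrcs);
//   for (int i = 0; i < num_csrcs; ++i) {
//       // csrcs[i] is one contributing SSRC from the received RTP stream.
//   }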
3715
3716int
3717Channel::SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID)
3718{
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00003719 if (_rtpAudioProc.get() == NULL)
3720 {
3721 _rtpAudioProc.reset(AudioProcessing::Create(VoEModuleId(_instanceId,
3722 _channelId)));
3723 if (_rtpAudioProc.get() == NULL)
3724 {
3725 _engineStatisticsPtr->SetLastError(VE_NO_MEMORY, kTraceCritical,
3726 "Failed to create AudioProcessing");
3727 return -1;
3728 }
3729 }
3730
3731 if (_rtpAudioProc->level_estimator()->Enable(enable) !=
3732 AudioProcessing::kNoError)
3733 {
3734 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceWarning,
3735 "Failed to enable AudioProcessing::level_estimator()");
3736 }
3737
niklase@google.com470e71d2011-07-07 08:21:25 +00003738 _includeAudioLevelIndication = enable;
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00003739 if (enable) {
3740 rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
3741 ID);
3742 } else {
3743 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
3744 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003745 return _rtpRtcpModule->SetRTPAudioLevelIndicationStatus(enable, ID);
niklase@google.com470e71d2011-07-07 08:21:25 +00003746}
3747int
3748Channel::GetRTPAudioLevelIndicationStatus(bool& enabled, unsigned char& ID)
3749{
3750 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3751 VoEId(_instanceId,_channelId),
3752 "GetRTPAudioLevelIndicationStatus() => enabled=%d, ID=%u",
3753 enabled, ID);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003754 return _rtpRtcpModule->GetRTPAudioLevelIndicationStatus(enabled, ID);
niklase@google.com470e71d2011-07-07 08:21:25 +00003755}
3756
3757int
3758Channel::SetRTCPStatus(bool enable)
3759{
3760 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3761 "Channel::SetRTCPStatus()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003762 if (_rtpRtcpModule->SetRTCPStatus(enable ?
niklase@google.com470e71d2011-07-07 08:21:25 +00003763 kRtcpCompound : kRtcpOff) != 0)
3764 {
3765 _engineStatisticsPtr->SetLastError(
3766 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3767 "SetRTCPStatus() failed to set RTCP status");
3768 return -1;
3769 }
3770 return 0;
3771}
3772
3773int
3774Channel::GetRTCPStatus(bool& enabled)
3775{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003776 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003777 enabled = (method != kRtcpOff);
3778 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3779 VoEId(_instanceId,_channelId),
3780 "GetRTCPStatus() => enabled=%d", enabled);
3781 return 0;
3782}
3783
3784int
3785Channel::SetRTCP_CNAME(const char cName[256])
3786{
3787 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3788 "Channel::SetRTCP_CNAME()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003789 if (_rtpRtcpModule->SetCNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003790 {
3791 _engineStatisticsPtr->SetLastError(
3792 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3793 "SetRTCP_CNAME() failed to set RTCP CNAME");
3794 return -1;
3795 }
3796 return 0;
3797}
3798
3799int
3800Channel::GetRTCP_CNAME(char cName[256])
3801{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003802 if (_rtpRtcpModule->CNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003803 {
3804 _engineStatisticsPtr->SetLastError(
3805 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3806 "GetRTCP_CNAME() failed to retrieve RTCP CNAME");
3807 return -1;
3808 }
3809 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3810 VoEId(_instanceId, _channelId),
3811 "GetRTCP_CNAME() => cName=%s", cName);
3812 return 0;
3813}
3814
3815int
3816Channel::GetRemoteRTCP_CNAME(char cName[256])
3817{
3818 if (cName == NULL)
3819 {
3820 _engineStatisticsPtr->SetLastError(
3821 VE_INVALID_ARGUMENT, kTraceError,
3822 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
3823 return -1;
3824 }
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00003825 char cname[RTCP_CNAME_SIZE];
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00003826 const uint32_t remoteSSRC = _rtpRtcpModule->RemoteSSRC();
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003827 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003828 {
3829 _engineStatisticsPtr->SetLastError(
3830 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
3831 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
3832 return -1;
3833 }
3834 strcpy(cName, cname);
3835 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3836 VoEId(_instanceId, _channelId),
3837 "GetRemoteRTCP_CNAME() => cName=%s", cName);
3838 return 0;
3839}
3840
3841int
3842Channel::GetRemoteRTCPData(
3843 unsigned int& NTPHigh,
3844 unsigned int& NTPLow,
3845 unsigned int& timestamp,
3846 unsigned int& playoutTimestamp,
3847 unsigned int* jitter,
3848 unsigned short* fractionLost)
3849{
3850 // --- Information from sender info in received Sender Reports
3851
3852 RTCPSenderInfo senderInfo;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003853 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003854 {
3855 _engineStatisticsPtr->SetLastError(
3856 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003857 "GetRemoteRTCPData() failed to retrieve sender info for remote "
niklase@google.com470e71d2011-07-07 08:21:25 +00003858 "side");
3859 return -1;
3860 }
3861
3862 // We only use 12 of the 20 bytes in the sender info (ignoring the packet
3863 // and octet counts).
3864 NTPHigh = senderInfo.NTPseconds;
3865 NTPLow = senderInfo.NTPfraction;
3866 timestamp = senderInfo.RTPtimeStamp;
3867
3868 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3869 VoEId(_instanceId, _channelId),
3870 "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
3871 "timestamp=%lu",
3872 NTPHigh, NTPLow, timestamp);
3873
3874 // --- Locally derived information
3875
3876 // This value is updated on each incoming RTCP packet (0 when no packet
3877 // has been received)
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003878 playoutTimestamp = playout_timestamp_rtcp_;
niklase@google.com470e71d2011-07-07 08:21:25 +00003879
3880 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3881 VoEId(_instanceId, _channelId),
3882 "GetRemoteRTCPData() => playoutTimestamp=%lu",
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003883 playout_timestamp_rtcp_);
niklase@google.com470e71d2011-07-07 08:21:25 +00003884
3885 if (NULL != jitter || NULL != fractionLost)
3886 {
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003887 // Get all RTCP receiver report blocks that have been received on this
3888 // channel. If we receive RTP packets from a remote source we know the
3889 // remote SSRC and use the report block from that source.
3890 // Otherwise use the first report block.
3891 std::vector<RTCPReportBlock> remote_stats;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003892 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003893 remote_stats.empty()) {
3894 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3895 VoEId(_instanceId, _channelId),
3896 "GetRemoteRTCPData() failed to measure statistics due"
3897 " to lack of received RTP and/or RTCP packets");
3898 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003899 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003900
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00003901 uint32_t remoteSSRC = _rtpRtcpModule->RemoteSSRC();
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003902 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
3903 for (; it != remote_stats.end(); ++it) {
3904 if (it->remoteSSRC == remoteSSRC)
3905 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003906 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003907
3908 if (it == remote_stats.end()) {
3909 // If we have not received any RTCP packets from this SSRC it probably
3910 // means that we have not received any RTP packets.
3911 // Use the first received report block instead.
3912 it = remote_stats.begin();
3913 remoteSSRC = it->remoteSSRC;
niklase@google.com470e71d2011-07-07 08:21:25 +00003914 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003915
xians@webrtc.org79af7342012-01-31 12:22:14 +00003916 if (jitter) {
3917 *jitter = it->jitter;
3918 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3919 VoEId(_instanceId, _channelId),
3920 "GetRemoteRTCPData() => jitter = %lu", *jitter);
3921 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003922
xians@webrtc.org79af7342012-01-31 12:22:14 +00003923 if (fractionLost) {
3924 *fractionLost = it->fractionLost;
3925 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3926 VoEId(_instanceId, _channelId),
3927 "GetRemoteRTCPData() => fractionLost = %lu",
3928 *fractionLost);
3929 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003930 }
3931 return 0;
3932}
3933
3934int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003935Channel::SendApplicationDefinedRTCPPacket(unsigned char subType,
niklase@google.com470e71d2011-07-07 08:21:25 +00003936 unsigned int name,
3937 const char* data,
3938 unsigned short dataLengthInBytes)
3939{
3940 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3941 "Channel::SendApplicationDefinedRTCPPacket()");
3942 if (!_sending)
3943 {
3944 _engineStatisticsPtr->SetLastError(
3945 VE_NOT_SENDING, kTraceError,
3946 "SendApplicationDefinedRTCPPacket() not sending");
3947 return -1;
3948 }
3949 if (NULL == data)
3950 {
3951 _engineStatisticsPtr->SetLastError(
3952 VE_INVALID_ARGUMENT, kTraceError,
3953 "SendApplicationDefinedRTCPPacket() invalid data value");
3954 return -1;
3955 }
3956 if (dataLengthInBytes % 4 != 0)
3957 {
3958 _engineStatisticsPtr->SetLastError(
3959 VE_INVALID_ARGUMENT, kTraceError,
3960 "SendApplicationDefinedRTCPPacket() invalid length value");
3961 return -1;
3962 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003963 RTCPMethod status = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003964 if (status == kRtcpOff)
3965 {
3966 _engineStatisticsPtr->SetLastError(
3967 VE_RTCP_ERROR, kTraceError,
3968 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
3969 return -1;
3970 }
3971
3972 // Create and schedule the RTCP APP packet for transmission
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003973 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
niklase@google.com470e71d2011-07-07 08:21:25 +00003974 subType,
3975 name,
3976 (const unsigned char*) data,
3977 dataLengthInBytes) != 0)
3978 {
3979 _engineStatisticsPtr->SetLastError(
3980 VE_SEND_ERROR, kTraceError,
3981 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
3982 return -1;
3983 }
3984 return 0;
3985}
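// Usage sketch (illustrative only): APP packets require an active send
// stream, RTCP enabled, and a data length that is a multiple of four bytes.
// The subtype, name and payload below are made-up example values:
//
//   const char app_data[8] = {'e', 'x', 'a', 'm', 'p', 'l', 'e', '!'};
//   channel->SendApplicationDefinedRTCPPacket(
//       1,            // subType
//       0x564F4530,   // name, four ASCII characters ("VOE0")
//       app_data,
//       sizeof(app_data));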
3986
3987int
3988Channel::GetRTPStatistics(
3989 unsigned int& averageJitterMs,
3990 unsigned int& maxJitterMs,
3991 unsigned int& discardedPackets)
3992{
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00003993 uint8_t fraction_lost(0);
3994 uint32_t cum_lost(0);
3995 uint32_t ext_max(0);
3996 uint32_t jitter(0);
3997 uint32_t max_jitter(0);
3998
niklase@google.com470e71d2011-07-07 08:21:25 +00003999 // The jitter statistics are updated for each received RTP packet and are
4000 // based on received packets.
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004001 if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
4002 &cum_lost,
4003 &ext_max,
4004 &jitter,
4005 &max_jitter) != 0)
4006 {
4007 _engineStatisticsPtr->SetLastError(
4008 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
4009 "GetRTPStatistics() failed to read RTP statistics from the "
4010 "RTP/RTCP module");
niklase@google.com470e71d2011-07-07 08:21:25 +00004011 }
4012
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004013 const int32_t playoutFrequency =
niklase@google.com470e71d2011-07-07 08:21:25 +00004014 _audioCodingModule.PlayoutFrequency();
4015 if (playoutFrequency > 0)
4016 {
4017 // Scale RTP statistics given the current playout frequency
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004018 maxJitterMs = max_jitter / (playoutFrequency / 1000);
4019 averageJitterMs = jitter / (playoutFrequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00004020 }
4021
4022 discardedPackets = _numberOfDiscardedPackets;
4023
4024 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4025 VoEId(_instanceId, _channelId),
4026 "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004027 " discardedPackets = %lu)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004028 averageJitterMs, maxJitterMs, discardedPackets);
4029 return 0;
4030}
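// Worked example for the jitter scaling above: the RTP/RTCP module reports
// interarrival jitter in RTP timestamp units, so dividing by
// (playoutFrequency / 1000) converts it to milliseconds. With a playout
// frequency of 16000 Hz, a jitter of 800 timestamp units maps to
// 800 / (16000 / 1000) = 50 ms.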
4031
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00004032int Channel::GetRemoteRTCPSenderInfo(SenderInfo* sender_info) {
4033 if (sender_info == NULL) {
4034 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
4035 "GetRemoteRTCPSenderInfo() invalid sender_info.");
4036 return -1;
4037 }
4038
4039 // Get the sender info from the latest received RTCP Sender Report.
4040 RTCPSenderInfo rtcp_sender_info;
4041 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_sender_info) != 0) {
4042 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4043 "GetRemoteRTCPSenderInfo() failed to read RTCP SR sender info.");
4044 return -1;
4045 }
4046
4047 sender_info->NTP_timestamp_high = rtcp_sender_info.NTPseconds;
4048 sender_info->NTP_timestamp_low = rtcp_sender_info.NTPfraction;
4049 sender_info->RTP_timestamp = rtcp_sender_info.RTPtimeStamp;
4050 sender_info->sender_packet_count = rtcp_sender_info.sendPacketCount;
4051 sender_info->sender_octet_count = rtcp_sender_info.sendOctetCount;
4052 return 0;
4053}
4054
4055int Channel::GetRemoteRTCPReportBlocks(
4056 std::vector<ReportBlock>* report_blocks) {
4057 if (report_blocks == NULL) {
4058 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
4059 "GetRemoteRTCPReportBlocks() invalid report_blocks.");
4060 return -1;
4061 }
4062
4063 // Get the report blocks from the latest received RTCP Sender or Receiver
4064 // Report. Each element in the vector contains the sender's SSRC and a
4065 // report block according to RFC 3550.
4066 std::vector<RTCPReportBlock> rtcp_report_blocks;
4067 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
4068 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4069 "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
4070 return -1;
4071 }
4072
4073 if (rtcp_report_blocks.empty())
4074 return 0;
4075
4076 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
4077 for (; it != rtcp_report_blocks.end(); ++it) {
4078 ReportBlock report_block;
4079 report_block.sender_SSRC = it->remoteSSRC;
4080 report_block.source_SSRC = it->sourceSSRC;
4081 report_block.fraction_lost = it->fractionLost;
4082 report_block.cumulative_num_packets_lost = it->cumulativeLost;
4083 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
4084 report_block.interarrival_jitter = it->jitter;
4085 report_block.last_SR_timestamp = it->lastSR;
4086 report_block.delay_since_last_SR = it->delaySinceLastSR;
4087 report_blocks->push_back(report_block);
4088 }
4089 return 0;
4090}
4091
niklase@google.com470e71d2011-07-07 08:21:25 +00004092int
4093Channel::GetRTPStatistics(CallStatistics& stats)
4094{
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004095 uint8_t fraction_lost(0);
4096 uint32_t cum_lost(0);
4097 uint32_t ext_max(0);
4098 uint32_t jitter(0);
4099 uint32_t max_jitter(0);
4100
niklase@google.com470e71d2011-07-07 08:21:25 +00004101 // --- Part one of the final structure (four values)
4102
4103 // The jitter statistics are updated for each received RTP packet and are
4104 // based on received packets.
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004105 if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
4106 &cum_lost,
4107 &ext_max,
4108 &jitter,
4109 &max_jitter) != 0)
4110 {
4111 _engineStatisticsPtr->SetLastError(
4112 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
4113 "GetRTPStatistics() failed to read RTP statistics from the "
4114 "RTP/RTCP module");
niklase@google.com470e71d2011-07-07 08:21:25 +00004115 }
4116
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004117 stats.fractionLost = fraction_lost;
4118 stats.cumulativeLost = cum_lost;
4119 stats.extendedMax = ext_max;
4120 stats.jitterSamples = jitter;
niklase@google.com470e71d2011-07-07 08:21:25 +00004121
4122 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4123 VoEId(_instanceId, _channelId),
4124 "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004125 " extendedMax=%lu, jitterSamples=%li)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004126 stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
4127 stats.jitterSamples);
4128
4129 // --- Part two of the final structure (one value)
4130
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004131 uint16_t RTT(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004132 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00004133 if (method == kRtcpOff)
4134 {
4135 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4136 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004137 "GetRTPStatistics() RTCP is disabled => valid RTT "
niklase@google.com470e71d2011-07-07 08:21:25 +00004138 "measurements cannot be retrieved");
4139 } else
4140 {
4141 // The remote SSRC will be zero if no RTP packet has been received.
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004142 uint32_t remoteSSRC = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004143 if (remoteSSRC > 0)
4144 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004145 uint16_t avgRTT(0);
4146 uint16_t maxRTT(0);
4147 uint16_t minRTT(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004148
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004149 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT)
niklase@google.com470e71d2011-07-07 08:21:25 +00004150 != 0)
4151 {
4152 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4153 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004154 "GetRTPStatistics() failed to retrieve RTT from "
niklase@google.com470e71d2011-07-07 08:21:25 +00004155 "the RTP/RTCP module");
4156 }
4157 } else
4158 {
4159 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4160 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004161 "GetRTPStatistics() failed to measure RTT since no "
niklase@google.com470e71d2011-07-07 08:21:25 +00004162 "RTP packets have been received yet");
4163 }
4164 }
4165
4166 stats.rttMs = static_cast<int> (RTT);
4167
4168 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4169 VoEId(_instanceId, _channelId),
4170 "GetRTPStatistics() => rttMs=%d", stats.rttMs);
4171
4172 // --- Part three of the final structure (four values)
4173
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004174 uint32_t bytesSent(0);
4175 uint32_t packetsSent(0);
4176 uint32_t bytesReceived(0);
4177 uint32_t packetsReceived(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004178
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004179 if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004180 &packetsSent,
4181 &bytesReceived,
4182 &packetsReceived) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004183 {
4184 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4185 VoEId(_instanceId, _channelId),
4186 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004187 " output will not be complete");
niklase@google.com470e71d2011-07-07 08:21:25 +00004188 }
4189
4190 stats.bytesSent = bytesSent;
4191 stats.packetsSent = packetsSent;
4192 stats.bytesReceived = bytesReceived;
4193 stats.packetsReceived = packetsReceived;
4194
4195 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4196 VoEId(_instanceId, _channelId),
4197 "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004198 " bytesReceived=%d, packetsReceived=%d)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004199 stats.bytesSent, stats.packetsSent, stats.bytesReceived,
4200 stats.packetsReceived);
4201
4202 return 0;
4203}
4204
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004205int Channel::SetFECStatus(bool enable, int redPayloadtype) {
4206 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4207 "Channel::SetFECStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00004208
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00004209 if (enable) {
4210 if (redPayloadtype < 0 || redPayloadtype > 127) {
4211 _engineStatisticsPtr->SetLastError(
4212 VE_PLTYPE_ERROR, kTraceError,
4213 "SetFECStatus() invalid RED payload type");
4214 return -1;
4215 }
4216
4217 if (SetRedPayloadType(redPayloadtype) < 0) {
4218 _engineStatisticsPtr->SetLastError(
4219 VE_CODEC_ERROR, kTraceError,
4220 "SetFECStatus() failed to register the RED payload type in the ACM");
4221 return -1;
4222 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004223 }
niklase@google.com470e71d2011-07-07 08:21:25 +00004224
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004225 if (_audioCodingModule.SetFECStatus(enable) != 0) {
4226 _engineStatisticsPtr->SetLastError(
4227 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4228 "SetFECStatus() failed to set FEC state in the ACM");
4229 return -1;
4230 }
4231 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004232}
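// Usage sketch (illustrative only): enabling FEC is a two-step operation in
// this method - the RED payload type is registered first, then FEC is turned
// on in the ACM. The payload type below is an example value; `channel` is an
// assumed Channel*:
//
//   if (channel->SetFECStatus(true, 127) != 0) {
//       // Either the RED payload type was out of range or the ACM refused
//       // FEC for the current send codec.
//   }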
4233
4234int
4235Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
4236{
4237 enabled = _audioCodingModule.FECStatus();
4238 if (enabled)
4239 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004240 int8_t payloadType(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004241 if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004242 {
4243 _engineStatisticsPtr->SetLastError(
4244 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4245 "GetFECStatus() failed to retrieve RED PT from RTP/RTCP "
4246 "module");
4247 return -1;
4248 }
4249 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4250 VoEId(_instanceId, _channelId),
4251 "GetFECStatus() => enabled=%d, redPayloadtype=%d",
4252 enabled, redPayloadtype);
4253 return 0;
4254 }
4255 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4256 VoEId(_instanceId, _channelId),
4257 "GetFECStatus() => enabled=%d", enabled);
4258 return 0;
4259}
4260
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004261void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
4262 // None of these functions can fail.
4263 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004264 _rtpRtcpModule->SetNACKStatus(enable ? kNackRtcp : kNackOff,
4265 maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00004266 if (enable)
4267 _audioCodingModule.EnableNack(maxNumberOfPackets);
4268 else
4269 _audioCodingModule.DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004270}
4271
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00004272// Called when we are missing one or more packets.
4273int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004274 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
4275}
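// Usage sketch (illustrative only): NACK has to be armed on both the RTP
// send-history side and in the ACM, which is what SetNACKStatus() does in a
// single call. The packet count below is an example value bounding how much
// history is kept for retransmission:
//
//   channel->SetNACKStatus(true, 250);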
4276
niklase@google.com470e71d2011-07-07 08:21:25 +00004277int
niklase@google.com470e71d2011-07-07 08:21:25 +00004278Channel::StartRTPDump(const char fileNameUTF8[1024],
4279 RTPDirections direction)
4280{
4281 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4282 "Channel::StartRTPDump()");
4283 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4284 {
4285 _engineStatisticsPtr->SetLastError(
4286 VE_INVALID_ARGUMENT, kTraceError,
4287 "StartRTPDump() invalid RTP direction");
4288 return -1;
4289 }
4290 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4291 &_rtpDumpIn : &_rtpDumpOut;
4292 if (rtpDumpPtr == NULL)
4293 {
4294 assert(false);
4295 return -1;
4296 }
4297 if (rtpDumpPtr->IsActive())
4298 {
4299 rtpDumpPtr->Stop();
4300 }
4301 if (rtpDumpPtr->Start(fileNameUTF8) != 0)
4302 {
4303 _engineStatisticsPtr->SetLastError(
4304 VE_BAD_FILE, kTraceError,
4305 "StartRTPDump() failed to create file");
4306 return -1;
4307 }
4308 return 0;
4309}
4310
4311int
4312Channel::StopRTPDump(RTPDirections direction)
4313{
4314 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4315 "Channel::StopRTPDump()");
4316 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4317 {
4318 _engineStatisticsPtr->SetLastError(
4319 VE_INVALID_ARGUMENT, kTraceError,
4320 "StopRTPDump() invalid RTP direction");
4321 return -1;
4322 }
4323 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4324 &_rtpDumpIn : &_rtpDumpOut;
4325 if (rtpDumpPtr == NULL)
4326 {
4327 assert(false);
4328 return -1;
4329 }
4330 if (!rtpDumpPtr->IsActive())
4331 {
4332 return 0;
4333 }
4334 return rtpDumpPtr->Stop();
4335}
4336
4337bool
4338Channel::RTPDumpIsActive(RTPDirections direction)
4339{
4340 if ((direction != kRtpIncoming) &&
4341 (direction != kRtpOutgoing))
4342 {
4343 _engineStatisticsPtr->SetLastError(
4344 VE_INVALID_ARGUMENT, kTraceError,
4345 "RTPDumpIsActive() invalid RTP direction");
4346 return false;
4347 }
4348 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4349 &_rtpDumpIn : &_rtpDumpOut;
4350 return rtpDumpPtr->IsActive();
4351}
4352
4353int
4354Channel::InsertExtraRTPPacket(unsigned char payloadType,
4355 bool markerBit,
4356 const char* payloadData,
4357 unsigned short payloadSize)
4358{
4359 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4360 "Channel::InsertExtraRTPPacket()");
4361 if (payloadType > 127)
4362 {
4363 _engineStatisticsPtr->SetLastError(
4364 VE_INVALID_PLTYPE, kTraceError,
4365 "InsertExtraRTPPacket() invalid payload type");
4366 return -1;
4367 }
4368 if (payloadData == NULL)
4369 {
4370 _engineStatisticsPtr->SetLastError(
4371 VE_INVALID_ARGUMENT, kTraceError,
4372 "InsertExtraRTPPacket() invalid payload data");
4373 return -1;
4374 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004375 if (payloadSize > _rtpRtcpModule->MaxDataPayloadLength())
niklase@google.com470e71d2011-07-07 08:21:25 +00004376 {
4377 _engineStatisticsPtr->SetLastError(
4378 VE_INVALID_ARGUMENT, kTraceError,
4379 "InsertExtraRTPPacket() invalid payload size");
4380 return -1;
4381 }
4382 if (!_sending)
4383 {
4384 _engineStatisticsPtr->SetLastError(
4385 VE_NOT_SENDING, kTraceError,
4386 "InsertExtraRTPPacket() not sending");
4387 return -1;
4388 }
4389
4390 // Create extra RTP packet by calling RtpRtcp::SendOutgoingData().
4391 // Transport::SendPacket() will be called by the module when the RTP packet
4392 // is created.
4393 // The call to SendOutgoingData() does *not* modify the timestamp and
4394 // payloadtype to ensure that the RTP module generates a valid RTP packet
4395 // (user might utilize a non-registered payload type).
4396 // The marker bit and payload type will be replaced just before the actual
4397 // transmission, i.e., the actual modification is done *after* the RTP
4398 // module has delivered its RTP packet back to the VoE.
4399 // We will use the stored values above when the packet is modified
4400 // (see Channel::SendPacket()).
4401
4402 _extraPayloadType = payloadType;
4403 _extraMarkerBit = markerBit;
4404 _insertExtraRTPPacket = true;
4405
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004406 if (_rtpRtcpModule->SendOutgoingData(kAudioFrameSpeech,
niklase@google.com470e71d2011-07-07 08:21:25 +00004407 _lastPayloadType,
4408 _lastLocalTimeStamp,
stefan@webrtc.orgddfdfed2012-07-03 13:21:22 +00004409 // Leaving the time when this frame was
4410 // received from the capture device as
4411 // undefined for voice for now.
4412 -1,
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004413 (const uint8_t*) payloadData,
niklase@google.com470e71d2011-07-07 08:21:25 +00004414 payloadSize) != 0)
4415 {
4416 _engineStatisticsPtr->SetLastError(
4417 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4418 "InsertExtraRTPPacket() failed to send extra RTP packet");
4419 return -1;
4420 }
4421
4422 return 0;
4423}
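// Usage sketch (illustrative only): as described above, the extra packet is
// first built with the last outgoing timestamp and payload type, and the
// requested payload type and marker bit are patched in later in
// Channel::SendPacket(). Example values, usable only while sending is
// active:
//
//   const char probe[4] = {0, 1, 2, 3};
//   channel->InsertExtraRTPPacket(111,   // payload type, must be 0-127
//                                 true,  // marker bit
//                                 probe,
//                                 sizeof(probe));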
4424
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004425uint32_t
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004426Channel::Demultiplex(const AudioFrame& audioFrame)
niklase@google.com470e71d2011-07-07 08:21:25 +00004427{
4428 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004429 "Channel::Demultiplex()");
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004430 _audioFrame.CopyFrom(audioFrame);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004431 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004432 return 0;
4433}
4434
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00004435// TODO(xians): This method borrows quite some code from
4436// TransmitMixer::GenerateAudioFrame(), refactor these two methods and reduce
4437// code duplication.
4438void Channel::Demultiplex(const int16_t* audio_data,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00004439 int sample_rate,
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00004440 int number_of_frames,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00004441 int number_of_channels) {
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00004442 // The highest sample rate that WebRTC supports for mono audio is 96kHz.
4443 static const int kMaxNumberOfFrames = 960;
4444 assert(number_of_frames <= kMaxNumberOfFrames);
4445
4446 // Get the send codec information for doing resampling or downmixing later on.
4447 CodecInst codec;
4448 GetSendCodec(codec);
4449 assert(codec.channels == 1 || codec.channels == 2);
4450 int support_sample_rate = std::min(32000,
4451 std::min(sample_rate, codec.plfreq));
4452
4453 // Downmix the data to mono if needed.
4454 const int16_t* audio_ptr = audio_data;
4455 if (number_of_channels == 2 && codec.channels == 1) {
4456 if (!mono_recording_audio_.get())
4457 mono_recording_audio_.reset(new int16_t[kMaxNumberOfFrames]);
4458
4459 AudioFrameOperations::StereoToMono(audio_data, number_of_frames,
4460 mono_recording_audio_.get());
4461 audio_ptr = mono_recording_audio_.get();
4462 }
4463
4464 // Resample the data to the sample rate that the codec is using.
4465 if (input_resampler_.InitializeIfNeeded(sample_rate,
4466 support_sample_rate,
4467 codec.channels)) {
4468 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
4469 "Channel::Demultiplex() unable to resample");
4470 return;
4471 }
4472
4473 int out_length = input_resampler_.Resample(audio_ptr,
4474 number_of_frames * codec.channels,
4475 _audioFrame.data_,
4476 AudioFrame::kMaxDataSizeSamples);
4477 if (out_length == -1) {
4478 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
4479 "Channel::Demultiplex() resampling failed");
4480 return;
4481 }
4482
4483 _audioFrame.samples_per_channel_ = out_length / codec.channels;
4484 _audioFrame.timestamp_ = -1;
4485 _audioFrame.sample_rate_hz_ = support_sample_rate;
4486 _audioFrame.speech_type_ = AudioFrame::kNormalSpeech;
4487 _audioFrame.vad_activity_ = AudioFrame::kVadUnknown;
4488 _audioFrame.num_channels_ = codec.channels;
4489 _audioFrame.id_ = _channelId;
4490}
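
// Illustration only (not used by the code above): the downmix step relies on
// AudioFrameOperations::StereoToMono(). Assuming that helper simply averages
// the left and right samples of an interleaved stereo buffer (an assumption
// about its behavior, not a quote of its implementation), an equivalent
// hand-written loop would look like this:
static void InterleavedStereoToMono(const int16_t* interleaved_stereo,
                                    int number_of_frames,
                                    int16_t* mono_out) {
  for (int i = 0; i < number_of_frames; ++i) {
    // Average the two channels of frame i; the sum of two int16_t values
    // always fits in an int32_t, so no saturation handling is needed here.
    int32_t left = interleaved_stereo[2 * i];
    int32_t right = interleaved_stereo[2 * i + 1];
    mono_out[i] = static_cast<int16_t>((left + right) / 2);
  }
}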
4491
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004492uint32_t
xians@google.com0b0665a2011-08-08 08:18:44 +00004493Channel::PrepareEncodeAndSend(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004494{
4495 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4496 "Channel::PrepareEncodeAndSend()");
4497
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004498 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004499 {
4500 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4501 "Channel::PrepareEncodeAndSend() invalid audio frame");
4502 return -1;
4503 }
4504
4505 if (_inputFilePlaying)
4506 {
4507 MixOrReplaceAudioWithFile(mixingFrequency);
4508 }
4509
4510 if (_mute)
4511 {
4512 AudioFrameOperations::Mute(_audioFrame);
4513 }
4514
4515 if (_inputExternalMedia)
4516 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004517 CriticalSectionScoped cs(&_callbackCritSect);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004518 const bool isStereo = (_audioFrame.num_channels_ == 2);
niklase@google.com470e71d2011-07-07 08:21:25 +00004519 if (_inputExternalMediaCallbackPtr)
4520 {
4521 _inputExternalMediaCallbackPtr->Process(
4522 _channelId,
4523 kRecordingPerChannel,
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004524 (int16_t*)_audioFrame.data_,
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004525 _audioFrame.samples_per_channel_,
4526 _audioFrame.sample_rate_hz_,
niklase@google.com470e71d2011-07-07 08:21:25 +00004527 isStereo);
4528 }
4529 }
4530
4531 InsertInbandDtmfTone();
4532
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004533 if (_includeAudioLevelIndication)
4534 {
4535 assert(_rtpAudioProc.get() != NULL);
4536
4537 // Check if settings need to be updated.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004538 if (_rtpAudioProc->sample_rate_hz() != _audioFrame.sample_rate_hz_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004539 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004540 if (_rtpAudioProc->set_sample_rate_hz(_audioFrame.sample_rate_hz_) !=
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004541 AudioProcessing::kNoError)
4542 {
4543 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4544 VoEId(_instanceId, _channelId),
4545 "Error setting AudioProcessing sample rate");
4546 return -1;
4547 }
4548 }
4549
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004550 if (_rtpAudioProc->num_input_channels() != _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004551 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004552 if (_rtpAudioProc->set_num_channels(_audioFrame.num_channels_,
4553 _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004554 != AudioProcessing::kNoError)
4555 {
4556 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4557 VoEId(_instanceId, _channelId),
4558 "Error setting AudioProcessing channels");
4559 return -1;
4560 }
4561 }
4562
4563 // Performs level analysis only; does not affect the signal.
4564 _rtpAudioProc->ProcessStream(&_audioFrame);
4565 }
4566
niklase@google.com470e71d2011-07-07 08:21:25 +00004567 return 0;
4568}
4569
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004570uint32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00004571Channel::EncodeAndSend()
4572{
4573 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4574 "Channel::EncodeAndSend()");
4575
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004576 assert(_audioFrame.num_channels_ <= 2);
4577 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004578 {
4579 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4580 "Channel::EncodeAndSend() invalid audio frame");
4581 return -1;
4582 }
4583
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004584 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004585
4586 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
4587
4588 // The ACM resamples internally.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004589 _audioFrame.timestamp_ = _timeStamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00004590    if (_audioCodingModule.Add10MsData(_audioFrame) != 0)
4591 {
4592 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
4593 "Channel::EncodeAndSend() ACM encoding failed");
4594 return -1;
4595 }
4596
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004597 _timeStamp += _audioFrame.samples_per_channel_;
niklase@google.com470e71d2011-07-07 08:21:25 +00004598
4599 // --- Encode if complete frame is ready
4600
4601 // This call will trigger AudioPacketizationCallback::SendData if encoding
4602 // is done and payload is ready for packetization and transmission.
4603 return _audioCodingModule.Process();
4604}
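
// Worked example of the timestamp bookkeeping above (illustrative, assuming
// the 32 kHz case mentioned in the comment): each 10 ms frame then carries
// samples_per_channel_ = 32000 * 0.010 = 320 samples, so _timeStamp advances
// by 320 per call; after 50 calls (0.5 s of audio) it has advanced by
// 50 * 320 = 16000 ticks.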
4605
4606int Channel::RegisterExternalMediaProcessing(
4607 ProcessingTypes type,
4608 VoEMediaProcess& processObject)
4609{
4610 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4611 "Channel::RegisterExternalMediaProcessing()");
4612
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004613 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004614
4615 if (kPlaybackPerChannel == type)
4616 {
4617 if (_outputExternalMediaCallbackPtr)
4618 {
4619 _engineStatisticsPtr->SetLastError(
4620 VE_INVALID_OPERATION, kTraceError,
4621 "Channel::RegisterExternalMediaProcessing() "
4622 "output external media already enabled");
4623 return -1;
4624 }
4625 _outputExternalMediaCallbackPtr = &processObject;
4626 _outputExternalMedia = true;
4627 }
4628 else if (kRecordingPerChannel == type)
4629 {
4630 if (_inputExternalMediaCallbackPtr)
4631 {
4632 _engineStatisticsPtr->SetLastError(
4633 VE_INVALID_OPERATION, kTraceError,
4634                "input external media already enabled");
4635 "output external media already enabled");
4636 return -1;
4637 }
4638 _inputExternalMediaCallbackPtr = &processObject;
4639 _inputExternalMedia = true;
4640 }
4641 return 0;
4642}
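
// Illustration only: a callback registered here receives each 10 ms frame
// through VoEMediaProcess::Process() with the arguments used in
// Channel::PrepareEncodeAndSend() above (channel id, processing type, int16_t
// samples, samples per channel, sample rate and a stereo flag). The callback
// typically edits the samples in place; a hypothetical helper it might call
// to apply a fixed gain with saturation could look like this sketch:
static void ApplyGainInPlace(int16_t* audio10ms,
                             int number_of_samples,
                             float gain)
{
    for (int i = 0; i < number_of_samples; ++i)
    {
        int32_t scaled = static_cast<int32_t>(audio10ms[i] * gain);
        if (scaled > 32767)
        {
            scaled = 32767;
        }
        else if (scaled < -32768)
        {
            scaled = -32768;
        }
        audio10ms[i] = static_cast<int16_t>(scaled);
    }
}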
4643
4644int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
4645{
4646 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4647 "Channel::DeRegisterExternalMediaProcessing()");
4648
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004649 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004650
4651 if (kPlaybackPerChannel == type)
4652 {
4653 if (!_outputExternalMediaCallbackPtr)
4654 {
4655 _engineStatisticsPtr->SetLastError(
4656 VE_INVALID_OPERATION, kTraceWarning,
4657 "Channel::DeRegisterExternalMediaProcessing() "
4658 "output external media already disabled");
4659 return 0;
4660 }
4661 _outputExternalMedia = false;
4662 _outputExternalMediaCallbackPtr = NULL;
4663 }
4664 else if (kRecordingPerChannel == type)
4665 {
4666 if (!_inputExternalMediaCallbackPtr)
4667 {
4668 _engineStatisticsPtr->SetLastError(
4669 VE_INVALID_OPERATION, kTraceWarning,
4670 "Channel::DeRegisterExternalMediaProcessing() "
4671 "input external media already disabled");
4672 return 0;
4673 }
4674 _inputExternalMedia = false;
4675 _inputExternalMediaCallbackPtr = NULL;
4676 }
4677
4678 return 0;
4679}
4680
roosa@google.com1b60ceb2012-12-12 23:00:29 +00004681int Channel::SetExternalMixing(bool enabled) {
4682 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4683 "Channel::SetExternalMixing(enabled=%d)", enabled);
4684
4685 if (_playing)
4686 {
4687 _engineStatisticsPtr->SetLastError(
4688 VE_INVALID_OPERATION, kTraceError,
4689 "Channel::SetExternalMixing() "
4690 "external mixing cannot be changed while playing.");
4691 return -1;
4692 }
4693
4694 _externalMixing = enabled;
4695
4696 return 0;
4697}
4698
niklase@google.com470e71d2011-07-07 08:21:25 +00004699int
4700Channel::ResetRTCPStatistics()
4701{
4702 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4703 "Channel::ResetRTCPStatistics()");
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004704 uint32_t remoteSSRC(0);
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004705 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004706 return _rtpRtcpModule->ResetRTT(remoteSSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00004707}
4708
4709int
4710Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
4711{
4712 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4713 "Channel::GetRoundTripTimeSummary()");
4714    // Override the default module outputs when RTCP is disabled. This keeps
4715    // the behavior backward compatible with the old VoiceEngine, which did not
4716    // use the RTP/RTCP module.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004717 if (!_rtpRtcpModule->RTCP())
niklase@google.com470e71d2011-07-07 08:21:25 +00004718 {
4719 delaysMs.min = -1;
4720 delaysMs.max = -1;
4721 delaysMs.average = -1;
4722 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4723 "Channel::GetRoundTripTimeSummary() RTCP is disabled =>"
4724 " valid RTT measurements cannot be retrieved");
4725 return 0;
4726 }
4727
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004728 uint32_t remoteSSRC;
4729 uint16_t RTT;
4730 uint16_t avgRTT;
4731 uint16_t maxRTT;
4732 uint16_t minRTT;
niklase@google.com470e71d2011-07-07 08:21:25 +00004733 // The remote SSRC will be zero if no RTP packet has been received.
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004734 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004735 if (remoteSSRC == 0)
4736 {
4737 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4738 "Channel::GetRoundTripTimeSummary() unable to measure RTT"
4739 " since no RTP packet has been received yet");
4740 }
4741
4742 // Retrieve RTT statistics from the RTP/RTCP module for the specified
4743 // channel and SSRC. The SSRC is required to parse out the correct source
4744 // in conference scenarios.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004745    if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004746 {
4747 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4748 "GetRoundTripTimeSummary unable to retrieve RTT values"
4749 " from the RTCP layer");
4750 delaysMs.min = -1; delaysMs.max = -1; delaysMs.average = -1;
4751 }
4752 else
4753 {
4754 delaysMs.min = minRTT;
4755 delaysMs.max = maxRTT;
4756 delaysMs.average = avgRTT;
4757 }
4758 return 0;
4759}
4760
4761int
4762Channel::GetNetworkStatistics(NetworkStatistics& stats)
4763{
4764 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4765 "Channel::GetNetworkStatistics()");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00004766 ACMNetworkStatistics acm_stats;
4767 int return_value = _audioCodingModule.NetworkStatistics(&acm_stats);
4768 if (return_value >= 0) {
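    // Note: this byte-wise copy is only valid because NetworkStatistics is
    // expected to mirror ACMNetworkStatistics field-for-field; if either
    // struct changes, the two must be kept in sync.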
4769 memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
4770 }
4771 return return_value;
niklase@google.com470e71d2011-07-07 08:21:25 +00004772}
4773
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004774bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
4775 int* playout_buffer_delay_ms) const {
4776 if (_average_jitter_buffer_delay_us == 0) {
niklase@google.com470e71d2011-07-07 08:21:25 +00004777 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004778 "Channel::GetDelayEstimate() no valid estimate.");
4779 return false;
4780 }
4781 *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 +
4782 _recPacketDelayMs;
4783 *playout_buffer_delay_ms = playout_delay_ms_;
4784 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4785 "Channel::GetDelayEstimate()");
4786 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00004787}
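
// Worked example of the conversion above (illustrative numbers): with
// _average_jitter_buffer_delay_us = 57300 and _recPacketDelayMs = 10, the
// reported jitter buffer delay is (57300 + 500) / 1000 + 10 = 57 + 10 = 67 ms;
// the +500 term rounds the microsecond average to the nearest millisecond.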
4788
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +00004789int Channel::SetInitialPlayoutDelay(int delay_ms)
4790{
4791 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4792 "Channel::SetInitialPlayoutDelay()");
4793 if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
4794 (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
4795 {
4796 _engineStatisticsPtr->SetLastError(
4797 VE_INVALID_ARGUMENT, kTraceError,
4798 "SetInitialPlayoutDelay() invalid min delay");
4799 return -1;
4800 }
4801 if (_audioCodingModule.SetInitialPlayoutDelay(delay_ms) != 0)
4802 {
4803 _engineStatisticsPtr->SetLastError(
4804 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4805 "SetInitialPlayoutDelay() failed to set min playout delay");
4806 return -1;
4807 }
4808 return 0;
4809}
4810
4811
niklase@google.com470e71d2011-07-07 08:21:25 +00004812int
4813Channel::SetMinimumPlayoutDelay(int delayMs)
4814{
4815 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4816 "Channel::SetMinimumPlayoutDelay()");
4817 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
4818 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
4819 {
4820 _engineStatisticsPtr->SetLastError(
4821 VE_INVALID_ARGUMENT, kTraceError,
4822 "SetMinimumPlayoutDelay() invalid min delay");
4823 return -1;
4824 }
4825 if (_audioCodingModule.SetMinimumPlayoutDelay(delayMs) != 0)
4826 {
4827 _engineStatisticsPtr->SetLastError(
4828 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4829 "SetMinimumPlayoutDelay() failed to set min playout delay");
4830 return -1;
4831 }
4832 return 0;
4833}
4834
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004835void Channel::UpdatePlayoutTimestamp(bool rtcp) {
4836 uint32_t playout_timestamp = 0;
4837
4838 if (_audioCodingModule.PlayoutTimestamp(&playout_timestamp) == -1) {
4839 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4840 "Channel::UpdatePlayoutTimestamp() failed to read playout"
4841 " timestamp from the ACM");
4842 _engineStatisticsPtr->SetLastError(
4843 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4844 "UpdatePlayoutTimestamp() failed to retrieve timestamp");
4845 return;
4846 }
4847
4848 uint16_t delay_ms = 0;
4849 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
4850 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4851 "Channel::UpdatePlayoutTimestamp() failed to read playout"
4852 " delay from the ADM");
4853 _engineStatisticsPtr->SetLastError(
4854 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4855 "UpdatePlayoutTimestamp() failed to retrieve playout delay");
4856 return;
4857 }
4858
4859 int32_t playout_frequency = _audioCodingModule.PlayoutFrequency();
4860  CodecInst current_receive_codec;
4861  if (_audioCodingModule.ReceiveCodec(&current_receive_codec) == 0) {
4862    if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
4863      playout_frequency = 8000;
4864    } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
4865      playout_frequency = 48000;
niklase@google.com470e71d2011-07-07 08:21:25 +00004866 }
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004867 }
4868
4869 // Remove the playout delay.
4870 playout_timestamp -= (delay_ms * (playout_frequency / 1000));
4871
4872 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4873               "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %u",
4874 playout_timestamp);
4875
4876 if (rtcp) {
4877 playout_timestamp_rtcp_ = playout_timestamp;
4878 } else {
4879 playout_timestamp_rtp_ = playout_timestamp;
4880 }
4881 playout_delay_ms_ = delay_ms;
4882}
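
// Worked example of the playout-delay compensation above (illustrative
// numbers): with a 16000 Hz playout frequency and delay_ms = 60, the playout
// timestamp is moved back by 60 * (16000 / 1000) = 960 RTP ticks, which
// approximates the timestamp of the audio currently being played out by the
// device rather than the audio most recently pulled from the jitter buffer.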
4883
4884int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
4885 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4886 "Channel::GetPlayoutTimestamp()");
4887 if (playout_timestamp_rtp_ == 0) {
4888 _engineStatisticsPtr->SetLastError(
4889 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4890 "GetPlayoutTimestamp() failed to retrieve timestamp");
4891 return -1;
4892 }
4893 timestamp = playout_timestamp_rtp_;
4894 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4895 VoEId(_instanceId,_channelId),
4896 "GetPlayoutTimestamp() => timestamp=%u", timestamp);
4897 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004898}
4899
4900int
4901Channel::SetInitTimestamp(unsigned int timestamp)
4902{
4903 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4904 "Channel::SetInitTimestamp()");
4905 if (_sending)
4906 {
4907 _engineStatisticsPtr->SetLastError(
4908 VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
4909 return -1;
4910 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004911 if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004912 {
4913 _engineStatisticsPtr->SetLastError(
4914 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4915 "SetInitTimestamp() failed to set timestamp");
4916 return -1;
4917 }
4918 return 0;
4919}
4920
4921int
4922Channel::SetInitSequenceNumber(short sequenceNumber)
4923{
4924 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4925 "Channel::SetInitSequenceNumber()");
4926 if (_sending)
4927 {
4928 _engineStatisticsPtr->SetLastError(
4929 VE_SENDING, kTraceError,
4930 "SetInitSequenceNumber() already sending");
4931 return -1;
4932 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004933 if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004934 {
4935 _engineStatisticsPtr->SetLastError(
4936 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4937 "SetInitSequenceNumber() failed to set sequence number");
4938 return -1;
4939 }
4940 return 0;
4941}
4942
4943int
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004944Channel::GetRtpRtcp(RtpRtcp* &rtpRtcpModule) const
niklase@google.com470e71d2011-07-07 08:21:25 +00004945{
4946 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4947 "Channel::GetRtpRtcp()");
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004948 rtpRtcpModule = _rtpRtcpModule.get();
niklase@google.com470e71d2011-07-07 08:21:25 +00004949 return 0;
4950}
4951
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004952// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
4953// a shared helper.
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004954int32_t
pbos@webrtc.org92135212013-05-14 08:31:39 +00004955Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004956{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004957 scoped_array<int16_t> fileBuffer(new int16_t[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004958 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004959
4960 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004961 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004962
4963 if (_inputFilePlayerPtr == NULL)
4964 {
4965 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4966 VoEId(_instanceId, _channelId),
4967 "Channel::MixOrReplaceAudioWithFile() fileplayer"
4968                         " doesn't exist");
4969 return -1;
4970 }
4971
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004972 if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00004973 fileSamples,
4974 mixingFrequency) == -1)
4975 {
4976 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4977 VoEId(_instanceId, _channelId),
4978 "Channel::MixOrReplaceAudioWithFile() file mixing "
4979 "failed");
4980 return -1;
4981 }
4982 if (fileSamples == 0)
4983 {
4984 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4985 VoEId(_instanceId, _channelId),
4986 "Channel::MixOrReplaceAudioWithFile() file is ended");
4987 return 0;
4988 }
4989 }
4990
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004991 assert(_audioFrame.samples_per_channel_ == fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004992
4993 if (_mixFileWithMicrophone)
4994 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004995 // Currently file stream is always mono.
4996 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004997 Utility::MixWithSat(_audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004998 _audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004999 fileBuffer.get(),
5000 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005001 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005002 }
5003 else
5004 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005005 // Replace ACM audio with file.
5006 // Currently file stream is always mono.
5007 // TODO(xians): Change the code when FilePlayer supports real stereo.
niklase@google.com470e71d2011-07-07 08:21:25 +00005008 _audioFrame.UpdateFrame(_channelId,
5009 -1,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005010 fileBuffer.get(),
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005011 fileSamples,
niklase@google.com470e71d2011-07-07 08:21:25 +00005012 mixingFrequency,
5013 AudioFrame::kNormalSpeech,
5014 AudioFrame::kVadUnknown,
5015 1);
5016
5017 }
5018 return 0;
5019}
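
// Illustration only: Utility::MixWithSat() used above mixes the mono file
// audio into the captured signal. Assuming it performs a saturating add of
// the file samples into the target buffer (an assumption about its behavior,
// not a quote of its implementation), the core operation for a mono target
// looks like this hypothetical sketch:
static void MixWithSaturation(int16_t* target,
                              const int16_t* source,
                              int number_of_samples)
{
    for (int i = 0; i < number_of_samples; ++i)
    {
        int32_t sum = static_cast<int32_t>(target[i]) + source[i];
        if (sum > 32767)
        {
            sum = 32767;
        }
        else if (sum < -32768)
        {
            sum = -32768;
        }
        target[i] = static_cast<int16_t>(sum);
    }
}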
5020
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005021int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00005022Channel::MixAudioWithFile(AudioFrame& audioFrame,
pbos@webrtc.org92135212013-05-14 08:31:39 +00005023 int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00005024{
5025 assert(mixingFrequency <= 32000);
5026
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005027 scoped_array<int16_t> fileBuffer(new int16_t[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005028 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005029
5030 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00005031 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00005032
5033 if (_outputFilePlayerPtr == NULL)
5034 {
5035 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5036 VoEId(_instanceId, _channelId),
5037 "Channel::MixAudioWithFile() file mixing failed");
5038 return -1;
5039 }
5040
5041 // We should get the frequency we ask for.
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005042 if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00005043 fileSamples,
5044 mixingFrequency) == -1)
5045 {
5046 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5047 VoEId(_instanceId, _channelId),
5048 "Channel::MixAudioWithFile() file mixing failed");
5049 return -1;
5050 }
5051 }
5052
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005053 if (audioFrame.samples_per_channel_ == fileSamples)
niklase@google.com470e71d2011-07-07 08:21:25 +00005054 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005055 // Currently file stream is always mono.
5056 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005057 Utility::MixWithSat(audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005058 audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005059 fileBuffer.get(),
5060 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005061 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005062 }
5063 else
5064 {
5065 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005066 "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
niklase@google.com470e71d2011-07-07 08:21:25 +00005067 "fileSamples(%d)",
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005068 audioFrame.samples_per_channel_, fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005069 return -1;
5070 }
5071
5072 return 0;
5073}
5074
5075int
5076Channel::InsertInbandDtmfTone()
5077{
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005078 // Check if we should start a new tone.
niklase@google.com470e71d2011-07-07 08:21:25 +00005079 if (_inbandDtmfQueue.PendingDtmf() &&
5080 !_inbandDtmfGenerator.IsAddingTone() &&
5081 _inbandDtmfGenerator.DelaySinceLastTone() >
5082 kMinTelephoneEventSeparationMs)
5083 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005084 int8_t eventCode(0);
5085 uint16_t lengthMs(0);
5086 uint8_t attenuationDb(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005087
5088 eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
5089 _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
5090 if (_playInbandDtmfEvent)
5091 {
5092 // Add tone to output mixer using a reduced length to minimize
5093 // risk of echo.
5094 _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
5095 attenuationDb);
5096 }
5097 }
5098
5099 if (_inbandDtmfGenerator.IsAddingTone())
5100 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005101 uint16_t frequency(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005102 _inbandDtmfGenerator.GetSampleRate(frequency);
5103
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005104 if (frequency != _audioFrame.sample_rate_hz_)
niklase@google.com470e71d2011-07-07 08:21:25 +00005105 {
5106 // Update sample rate of Dtmf tone since the mixing frequency
5107 // has changed.
5108 _inbandDtmfGenerator.SetSampleRate(
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005109 (uint16_t) (_audioFrame.sample_rate_hz_));
niklase@google.com470e71d2011-07-07 08:21:25 +00005110 // Reset the tone to be added taking the new sample rate into
5111 // account.
5112 _inbandDtmfGenerator.ResetTone();
5113 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005114
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005115 int16_t toneBuffer[320];
5116 uint16_t toneSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005117 // Get 10ms tone segment and set time since last tone to zero
5118 if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
5119 {
5120 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5121 VoEId(_instanceId, _channelId),
5122 "Channel::EncodeAndSend() inserting Dtmf failed");
5123 return -1;
5124 }
5125
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005126 // Replace mixed audio with DTMF tone.
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005127 for (int sample = 0;
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005128 sample < _audioFrame.samples_per_channel_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005129 sample++)
5130 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005131 for (int channel = 0;
5132 channel < _audioFrame.num_channels_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005133 channel++)
5134 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005135 const int index = sample * _audioFrame.num_channels_ + channel;
5136 _audioFrame.data_[index] = toneBuffer[sample];
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005137 }
5138 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005139
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005140 assert(_audioFrame.samples_per_channel_ == toneSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005141 } else
5142 {
5143 // Add 10ms to "delay-since-last-tone" counter
5144 _inbandDtmfGenerator.UpdateDelaySinceLastTone();
5145 }
5146 return 0;
5147}
5148
niklase@google.com470e71d2011-07-07 08:21:25 +00005149void
5150Channel::ResetDeadOrAliveCounters()
5151{
5152 _countDeadDetections = 0;
5153 _countAliveDetections = 0;
5154}
5155
5156void
5157Channel::UpdateDeadOrAliveCounters(bool alive)
5158{
5159 if (alive)
5160 _countAliveDetections++;
5161 else
5162 _countDeadDetections++;
5163}
5164
5165int
5166Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
5167{
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00005168 bool enabled;
5169 uint8_t timeSec;
5170
5171 _rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, timeSec);
5172 if (!enabled)
5173 return (-1);
5174
5175 countDead = static_cast<int> (_countDeadDetections);
5176 countAlive = static_cast<int> (_countAliveDetections);
niklase@google.com470e71d2011-07-07 08:21:25 +00005177 return 0;
5178}
5179
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005180int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00005181Channel::SendPacketRaw(const void *data, int len, bool RTCP)
5182{
5183 if (_transportPtr == NULL)
5184 {
5185 return -1;
5186 }
5187 if (!RTCP)
5188 {
5189 return _transportPtr->SendPacket(_channelId, data, len);
5190 }
5191 else
5192 {
5193 return _transportPtr->SendRTCPPacket(_channelId, data, len);
5194 }
5195}
5196
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005197// Called for incoming RTP packets after successful RTP header parsing.
5198void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
5199 uint16_t sequence_number) {
5200 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
5201               "Channel::UpdatePacketDelay(timestamp=%u, sequenceNumber=%u)",
5202 rtp_timestamp, sequence_number);
niklase@google.com470e71d2011-07-07 08:21:25 +00005203
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005204 // Get frequency of last received payload
5205 int rtp_receive_frequency = _audioCodingModule.ReceiveFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +00005206
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005207 CodecInst current_receive_codec;
5208 if (_audioCodingModule.ReceiveCodec(&current_receive_codec) != 0) {
5209 return;
5210 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005211
turaj@webrtc.orge46c8d32013-05-22 20:39:43 +00005212 // Update the least required delay.
5213 least_required_delay_ms_ = _audioCodingModule.LeastRequiredDelayMs();
5214
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005215 if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
5216 // Even though the actual sampling rate for G.722 audio is
5217 // 16,000 Hz, the RTP clock rate for the G722 payload format is
5218 // 8,000 Hz because that value was erroneously assigned in
5219 // RFC 1890 and must remain unchanged for backward compatibility.
5220 rtp_receive_frequency = 8000;
5221 } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
5222 // We are resampling Opus internally to 32,000 Hz until all our
5223 // DSP routines can operate at 48,000 Hz, but the RTP clock
5224 // rate for the Opus payload format is standardized to 48,000 Hz,
5225 // because that is the maximum supported decoding sampling rate.
5226 rtp_receive_frequency = 48000;
5227 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005228
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005229  // playout_timestamp_rtp_ is updated in UpdatePlayoutTimestamp() for every
5230  // incoming packet.
5231 uint32_t timestamp_diff_ms = (rtp_timestamp - playout_timestamp_rtp_) /
5232 (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00005233
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005234 uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) /
5235 (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00005236
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005237 _previousTimestamp = rtp_timestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00005238
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005239 if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
5240 timestamp_diff_ms = 0;
5241 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005242
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005243 if (timestamp_diff_ms == 0) return;
niklase@google.com470e71d2011-07-07 08:21:25 +00005244
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005245 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
5246 _recPacketDelayMs = packet_delay_ms;
5247 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005248
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005249 if (_average_jitter_buffer_delay_us == 0) {
5250 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
5251 return;
5252 }
5253
5254 // Filter average delay value using exponential filter (alpha is
5255 // 7/8). We derive 1000 *_average_jitter_buffer_delay_us here (reduces
5256 // risk of rounding error) and compensate for it in GetDelayEstimate()
5257 // later.
5258 _average_jitter_buffer_delay_us = (_average_jitter_buffer_delay_us * 7 +
5259 1000 * timestamp_diff_ms + 500) / 8;
niklase@google.com470e71d2011-07-07 08:21:25 +00005260}
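
// Worked example of the exponential filter above (illustrative numbers): with
// a previous _average_jitter_buffer_delay_us of 50000 (50 ms) and a new
// timestamp_diff_ms of 80, the update gives
// (50000 * 7 + 1000 * 80 + 500) / 8 = (350000 + 80000 + 500) / 8 = 53812 us,
// so the average moves only 1/8 of the way toward the new sample, which
// smooths out per-packet jitter.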
5261
5262void
5263Channel::RegisterReceiveCodecsToRTPModule()
5264{
5265 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
5266 "Channel::RegisterReceiveCodecsToRTPModule()");
5267
5268
5269 CodecInst codec;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005270 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00005271
5272 for (int idx = 0; idx < nSupportedCodecs; idx++)
5273 {
5274 // Open up the RTP/RTCP receiver for all supported codecs
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005275 if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00005276 (_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
niklase@google.com470e71d2011-07-07 08:21:25 +00005277 {
5278 WEBRTC_TRACE(
5279 kTraceWarning,
5280 kTraceVoice,
5281 VoEId(_instanceId, _channelId),
5282 "Channel::RegisterReceiveCodecsToRTPModule() unable"
5283 " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
5284 codec.plname, codec.pltype, codec.plfreq,
5285 codec.channels, codec.rate);
5286 }
5287 else
5288 {
5289 WEBRTC_TRACE(
5290 kTraceInfo,
5291 kTraceVoice,
5292 VoEId(_instanceId, _channelId),
5293 "Channel::RegisterReceiveCodecsToRTPModule() %s "
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00005294 "(%d/%d/%d/%d) has been added to the RTP/RTCP "
niklase@google.com470e71d2011-07-07 08:21:25 +00005295 "receiver",
5296 codec.plname, codec.pltype, codec.plfreq,
5297 codec.channels, codec.rate);
5298 }
5299 }
5300}
5301
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005302int Channel::ApmProcessRx(AudioFrame& frame) {
5303 AudioProcessing* audioproc = _rxAudioProcessingModulePtr;
5304 // Register the (possibly new) frame parameters.
5305 if (audioproc->set_sample_rate_hz(frame.sample_rate_hz_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005306 LOG_FERR1(LS_WARNING, set_sample_rate_hz, frame.sample_rate_hz_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005307 }
5308 if (audioproc->set_num_channels(frame.num_channels_,
5309 frame.num_channels_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005310 LOG_FERR1(LS_WARNING, set_num_channels, frame.num_channels_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005311 }
5312 if (audioproc->ProcessStream(&frame) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005313 LOG_FERR0(LS_WARNING, ProcessStream);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005314 }
5315 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00005316}
5317
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005318int Channel::SetSecondarySendCodec(const CodecInst& codec,
5319 int red_payload_type) {
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00005320 // Sanity check for payload type.
5321 if (red_payload_type < 0 || red_payload_type > 127) {
5322 _engineStatisticsPtr->SetLastError(
5323 VE_PLTYPE_ERROR, kTraceError,
5324        "SetSecondarySendCodec() invalid RED payload type");
5325 return -1;
5326 }
5327
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005328 if (SetRedPayloadType(red_payload_type) < 0) {
5329 _engineStatisticsPtr->SetLastError(
5330 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5331 "SetSecondarySendCodec() Failed to register RED ACM");
5332 return -1;
5333 }
5334 if (_audioCodingModule.RegisterSecondarySendCodec(codec) < 0) {
5335 _engineStatisticsPtr->SetLastError(
5336 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5337 "SetSecondarySendCodec() Failed to register secondary send codec in "
5338 "ACM");
5339 return -1;
5340 }
5341
5342 return 0;
5343}
5344
5345void Channel::RemoveSecondarySendCodec() {
5346 _audioCodingModule.UnregisterSecondarySendCodec();
5347}
5348
5349int Channel::GetSecondarySendCodec(CodecInst* codec) {
5350 if (_audioCodingModule.SecondarySendCodec(codec) < 0) {
5351 _engineStatisticsPtr->SetLastError(
5352 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5353        "GetSecondarySendCodec() Failed to get secondary send codec from ACM");
5354 return -1;
5355 }
5356 return 0;
5357}
5358
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00005359// Assuming this method is called with valid payload type.
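// RED here refers to the RFC 2198 redundant audio payload format, in which
// each packet can carry a redundant copy of earlier encoded frames so that a
// single packet loss can be concealed at the receiver.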
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005360int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005361 CodecInst codec;
5362 bool found_red = false;
5363
5364 // Get default RED settings from the ACM database
5365 const int num_codecs = AudioCodingModule::NumberOfCodecs();
5366 for (int idx = 0; idx < num_codecs; idx++) {
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005367 _audioCodingModule.Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005368 if (!STR_CASE_CMP(codec.plname, "RED")) {
5369 found_red = true;
5370 break;
5371 }
5372 }
5373
5374 if (!found_red) {
5375 _engineStatisticsPtr->SetLastError(
5376 VE_CODEC_ERROR, kTraceError,
5377 "SetRedPayloadType() RED is not supported");
5378 return -1;
5379 }
5380
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00005381 codec.pltype = red_payload_type;
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005382 if (_audioCodingModule.RegisterSendCodec(codec) < 0) {
5383 _engineStatisticsPtr->SetLastError(
5384 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5385 "SetRedPayloadType() RED registration in ACM module failed");
5386 return -1;
5387 }
5388
5389 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
5390 _engineStatisticsPtr->SetLastError(
5391 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
5392 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
5393 return -1;
5394 }
5395 return 0;
5396}
5397
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00005398} // namespace voe
5399} // namespace webrtc