/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/channel.h"

#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/utility/interface/audio_frame_operations.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/utility.h"

#if defined(_WIN32)
#include <Qos.h>
#endif

namespace webrtc {
namespace voe {

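// Called by the ACM (registered via RegisterTransportCallback()) when an
// encoded audio frame is ready. Hands the payload to the RTP/RTCP module for
// packetization, optionally tagging it with the current audio level.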
int32_t
Channel::SendData(FrameType frameType,
                  uint8_t payloadType,
                  uint32_t timeStamp,
                  const uint8_t* payloadData,
                  uint16_t payloadSize,
                  const RTPFragmentationHeader* fragmentation)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
                 " payloadSize=%u, fragmentation=0x%x)",
                 frameType, payloadType, timeStamp, payloadSize, fragmentation);

    if (_includeAudioLevelIndication)
    {
        assert(_rtpAudioProc.get() != NULL);
        // Store current audio level in the RTP/RTCP module.
        // The level will be used in combination with voice-activity state
        // (frameType) to add an RTP header extension
        _rtpRtcpModule->SetAudioLevel(_rtpAudioProc->level_estimator()->RMS());
    }

    // Push data from ACM to RTP/RTCP-module to deliver audio frame for
    // packetization.
    // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
    if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
                                         payloadType,
                                         timeStamp,
                                         // Leaving the time when this frame was
                                         // received from the capture device as
                                         // undefined for voice for now.
                                         -1,
                                         payloadData,
                                         payloadSize,
                                         fragmentation) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
            "Channel::SendData() failed to send data to RTP/RTCP module");
        return -1;
    }

    _lastLocalTimeStamp = timeStamp;
    _lastPayloadType = payloadType;

    return 0;
}

int32_t
Channel::InFrameType(int16_t frameType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::InFrameType(frameType=%d)", frameType);

    CriticalSectionScoped cs(&_callbackCritSect);
    // 1 indicates speech
    _sendFrameType = (frameType == 1) ? 1 : 0;
    return 0;
}

int32_t
Channel::OnRxVadDetected(int vadDecision)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);

    CriticalSectionScoped cs(&_callbackCritSect);
    if (_rxVadObserverPtr)
    {
        _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
    }

    return 0;
}

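// Transport callback for outgoing RTP packets. Optionally rewrites the
// marker/payload-type byte for a single extra packet, dumps the packet to
// file, encrypts it if encryption is enabled, and finally passes it to the
// internal or external transport.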
int
Channel::SendPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendPacket(channel=%d, len=%d)", channel, len);

    if (_transportPtr == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() failed to send RTP packet due to"
                     " invalid transport object");
        return -1;
    }

    // Insert an extra RTP packet if the user has called the
    // InsertExtraRTPPacket API.
    if (_insertExtraRTPPacket)
    {
        uint8_t* rtpHdr = (uint8_t*)data;
        uint8_t M_PT(0);
        if (_extraMarkerBit)
        {
            M_PT = 0x80;            // set the M-bit
        }
        M_PT += _extraPayloadType;  // set the payload type
        *(++rtpHdr) = M_PT;         // modify the M|PT-byte within the RTP header
        _insertExtraRTPPacket = false;  // insert one packet only
    }

    uint8_t* bufferToSendPtr = (uint8_t*)data;
    int32_t bufferLength = len;

    // Dump the RTP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() RTP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTPBufferPtr =
                    new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
                memset(_encryptionRTPBufferPtr, 0,
                       kVoiceEngineMaxIpPacketSizeBytes);
            }

            // Perform encryption (SRTP or external)
            int32_t encryptedBufferLength = 0;
            _encryptionPtr->encrypt(_channelId,
                                    bufferToSendPtr,
                                    _encryptionRTPBufferPtr,
                                    bufferLength,
                                    (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED,
                    kTraceError, "Channel::SendPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        int n = _transportPtr->SendPacket(channel,
                                          bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }
}

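// Transport callback for outgoing RTCP packets. Mirrors SendPacket(): the
// packet is optionally dumped to file and encrypted before being passed to
// the internal or external transport.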
int
Channel::SendRTCPPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() failed to send RTCP packet"
                         " due to invalid transport object");
            return -1;
        }
    }

    uint8_t* bufferToSendPtr = (uint8_t*)data;
    int32_t bufferLength = len;

    // Dump the RTCP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendRTCPPacket() RTCP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTCPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTCPBufferPtr =
                    new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
            }

            // Perform encryption (SRTP or external).
            int32_t encryptedBufferLength = 0;
            _encryptionPtr->encrypt_rtcp(_channelId,
                                         bufferToSendPtr,
                                         _encryptionRTCPBufferPtr,
                                         bufferLength,
                                         (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED, kTraceError,
                    "Channel::SendRTCPPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTCPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            return -1;
        }
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }

    return len;
}

void
Channel::OnPlayTelephoneEvent(int32_t id,
                              uint8_t event,
                              uint16_t lengthMs,
                              uint8_t volume)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
                 " volume=%u)", id, event, lengthMs, volume);

    if (!_playOutbandDtmfEvent || (event > 15))
    {
        // Ignore callback since feedback is disabled or event is not a
        // Dtmf tone event.
        return;
    }

    assert(_outputMixerPtr != NULL);

    // Start playing out the Dtmf tone (if playout is enabled).
    // Reduce the tone length by 80 ms to reduce the risk of echo.
    _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
}

void
Channel::OnIncomingSSRCChanged(int32_t id,
                               uint32_t SSRC)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
                 id, SSRC);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Reset RTP-module counters since a new incoming RTP stream is detected
    _rtpRtcpModule->ResetReceiveDataCountersRTP();
    _rtpRtcpModule->ResetStatisticsRTP();

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            // Send new SSRC to registered observer using callback
            _rtpObserverPtr->OnIncomingSSRCChanged(channel, SSRC);
        }
    }
}

void Channel::OnIncomingCSRCChanged(int32_t id,
                                    uint32_t CSRC,
                                    bool added)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
                 id, CSRC, added);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            _rtpObserverPtr->OnIncomingCSRCChanged(channel, CSRC, added);
        }
    }
}

void
Channel::OnApplicationDataReceived(int32_t id,
                                   uint8_t subType,
                                   uint32_t name,
                                   uint16_t length,
                                   const uint8_t* data)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
                 " name=%u, length=%u)",
                 id, subType, name, length);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtcpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtcpObserverPtr)
        {
            _rtcpObserverPtr->OnApplicationDataReceived(channel,
                                                        subType,
                                                        name,
                                                        data,
                                                        length);
        }
    }
}

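// Called by the RTP/RTCP module when a decoder needs to be set up for an
// incoming payload type. Builds a CodecInst from the RTP parameters and
// registers it as a receive codec in the ACM.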
int32_t
Channel::OnInitializeDecoder(
    int32_t id,
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    uint8_t channels,
    uint32_t rate)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
                 "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
                 id, payloadType, payloadName, frequency, channels, rate);

    assert(VoEChannelId(id) == _channelId);

    CodecInst receiveCodec = {0};
    CodecInst dummyCodec = {0};

    receiveCodec.pltype = payloadType;
    receiveCodec.plfreq = frequency;
    receiveCodec.channels = channels;
    receiveCodec.rate = rate;
    strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

    _audioCodingModule.Codec(payloadName, &dummyCodec, frequency, channels);
    receiveCodec.pacsize = dummyCodec.pacsize;

    // Register the new codec to the ACM
    if (_audioCodingModule.RegisterReceiveCodec(receiveCodec) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "Channel::OnInitializeDecoder() invalid codec ("
                     "pt=%d, name=%s) received - 1", payloadType, payloadName);
        _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
        return -1;
    }

    return 0;
}

void
Channel::OnPacketTimeout(int32_t id)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPacketTimeout(id=%d)", id);

    CriticalSectionScoped cs(_callbackCritSectPtr);
    if (_voiceEngineObserverPtr)
    {
        if (_receiving || _externalTransport)
        {
            int32_t channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Ensure that next OnReceivedPacket() callback will trigger
            // a VE_PACKET_RECEIPT_RESTARTED callback.
            _rtpPacketTimedOut = true;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnPacketTimeout() => "
                         "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
            _voiceEngineObserverPtr->CallbackOnError(channel,
                                                     VE_RECEIVE_PACKET_TIMEOUT);
        }
    }
}

void
Channel::OnReceivedPacket(int32_t id,
                          RtpRtcpPacketType packetType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPacket(id=%d, packetType=%d)",
                 id, packetType);

    assert(VoEChannelId(id) == _channelId);

    // Notify only for the case when we have restarted an RTP session.
    if (_rtpPacketTimedOut && (kPacketRtp == packetType))
    {
        CriticalSectionScoped cs(_callbackCritSectPtr);
        if (_voiceEngineObserverPtr)
        {
            int32_t channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Reset timeout mechanism
            _rtpPacketTimedOut = false;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnReceivedPacket() =>"
                         " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
            _voiceEngineObserverPtr->CallbackOnError(
                channel,
                VE_PACKET_RECEIPT_RESTARTED);
        }
    }
}

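// Periodic dead-or-alive report from the RTP/RTCP module. Maps the reported
// RTPAliveType to a boolean (treating long NetEQ PLC/CNG output as a possible
// dead indication) and forwards the result to the connection observer.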
void
Channel::OnPeriodicDeadOrAlive(int32_t id,
                               RTPAliveType alive)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (!_connectionObserver)
            return;
    }

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Use Alive as default to limit risk of false Dead detections
    bool isAlive(true);

    // Always mark the connection as Dead when the module reports kRtpDead
    if (kRtpDead == alive)
    {
        isAlive = false;
    }

    // It is possible that the connection is alive even if no RTP packet has
    // been received for a long time since the other side might use VAD/DTX
    // and a low SID-packet update rate.
    if ((kRtpNoRtp == alive) && _playing)
    {
        // Detect Alive for all NetEQ states except for the case when we are
        // in PLC_CNG state.
        // PLC_CNG <=> background noise only due to long expand or error.
        // Note that the case where the other side stops sending during CNG
        // state will be detected as Alive. Dead is not set until after
        // missing RTCP packets for at least twelve seconds (handled
        // internally by the RTP/RTCP module).
        isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
    }

    UpdateDeadOrAliveCounters(isAlive);

    // Send callback to the registered observer
    if (_connectionObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_connectionObserverPtr)
        {
            _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
        }
    }
}

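// Called by the RTP/RTCP module for each received RTP payload. The parsed
// payload is pushed into the ACM (NetEQ) for decoding; the packet delay
// estimate is updated and, if NACK is enabled, the sequence numbers the ACM
// reports as missing are handed to ResendPackets().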
int32_t
Channel::OnReceivedPayloadData(const uint8_t* payloadData,
                               uint16_t payloadSize,
                               const WebRtcRTPHeader* rtpHeader)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPayloadData(payloadSize=%d,"
                 " payloadType=%u, audioChannel=%u)",
                 payloadSize,
                 rtpHeader->header.payloadType,
                 rtpHeader->type.Audio.channel);

    _lastRemoteTimeStamp = rtpHeader->header.timestamp;

    if (!_playing)
    {
        // Avoid inserting into NetEQ when we are not playing. Count the
        // packet as discarded.
        WEBRTC_TRACE(kTraceStream, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "received packet is discarded since playing is not"
                     " activated");
        _numberOfDiscardedPackets++;
        return 0;
    }

    // Push the incoming payload (parsed and ready for decoding) into the ACM
    if (_audioCodingModule.IncomingPacket(payloadData,
                                          payloadSize,
                                          *rtpHeader) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
            "Channel::OnReceivedPayloadData() unable to push data to the ACM");
        return -1;
    }

    // Update the packet delay.
    UpdatePacketDelay(rtpHeader->header.timestamp,
                      rtpHeader->header.sequenceNumber);

    if (kNackOff != _rtpRtcpModule->NACK()) {  // Is NACK on?
        uint16_t round_trip_time = 0;
        _rtpRtcpModule->RTT(_rtpRtcpModule->RemoteSSRC(), &round_trip_time,
                            NULL, NULL, NULL);

        std::vector<uint16_t> nack_list = _audioCodingModule.GetNackList(
            round_trip_time);
        if (!nack_list.empty()) {
            // Can't use nack_list.data() since it's not supported by all
            // compilers.
            ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
        }
    }
    return 0;
}

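// Pulls one 10 ms PCM frame from the ACM for playout and runs the per-channel
// output pipeline: optional RX VAD, far-end APM, output gain, panning, file
// mixing, on-hold muting, external-media processing, playout recording and
// output level measurement.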
int32_t Channel::GetAudioFrame(int32_t id, AudioFrame& audioFrame)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetAudioFrame(id=%d)", id);

    // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
    if (_audioCodingModule.PlayoutData10Ms(audioFrame.sample_rate_hz_,
                                           &audioFrame) == -1)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
        // In all likelihood, the audio in this frame is garbage. We return an
        // error so that the audio mixer module doesn't add it to the mix. As
        // a result, it won't be played out and the actions skipped here are
        // irrelevant.
        return -1;
    }

    if (_RxVadDetection)
    {
        UpdateRxVadDetection(audioFrame);
    }

    // Convert module ID to internal VoE channel ID
    audioFrame.id_ = VoEChannelId(audioFrame.id_);
    // Store speech type for dead-or-alive detection
    _outputSpeechType = audioFrame.speech_type_;

    // Perform far-end AudioProcessing module processing on the received signal
    if (_rxApmIsEnabled)
    {
        ApmProcessRx(audioFrame);
    }

    // Output volume scaling
    if (_outputGain < 0.99f || _outputGain > 1.01f)
    {
        AudioFrameOperations::ScaleWithSat(_outputGain, audioFrame);
    }

    // Scale left and/or right channel(s) if stereo and master balance is
    // active

    if (_panLeft != 1.0f || _panRight != 1.0f)
    {
        if (audioFrame.num_channels_ == 1)
        {
            // Emulate stereo mode since panning is active.
            // The mono signal is copied to both left and right channels here.
            AudioFrameOperations::MonoToStereo(&audioFrame);
        }
        // For true stereo mode (when we are receiving a stereo signal), no
        // action is needed.

        // Do the panning operation (the audio frame contains stereo at this
        // stage)
        AudioFrameOperations::Scale(_panLeft, _panRight, audioFrame);
    }

    // Mix decoded PCM output with file if file mixing is enabled
    if (_outputFilePlaying)
    {
        MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
    }

    // Place channel in on-hold state (~muted) if on-hold is activated
    if (_outputIsOnHold)
    {
        AudioFrameOperations::Mute(audioFrame);
    }

    // External media
    if (_outputExternalMedia)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        const bool isStereo = (audioFrame.num_channels_ == 2);
        if (_outputExternalMediaCallbackPtr)
        {
            _outputExternalMediaCallbackPtr->Process(
                _channelId,
                kPlaybackPerChannel,
                (int16_t*)audioFrame.data_,
                audioFrame.samples_per_channel_,
                audioFrame.sample_rate_hz_,
                isStereo);
        }
    }

    // Record playout if enabled
    {
        CriticalSectionScoped cs(&_fileCritSect);

        if (_outputFileRecording && _outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
        }
    }

    // Measure audio level (0-9)
    _outputAudioLevel.ComputeLevel(audioFrame);

    return 0;
}

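// Reports the highest sample rate this channel needs for playout, considering
// the ACM receive/playout frequencies and any active output file player.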
int32_t
Channel::NeededFrequency(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::NeededFrequency(id=%d)", id);

    int highestNeeded = 0;

    // Determine highest needed receive frequency
    int32_t receiveFrequency = _audioCodingModule.ReceiveFrequency();

    // Return the bigger of playout and receive frequency in the ACM.
    if (_audioCodingModule.PlayoutFrequency() > receiveFrequency)
    {
        highestNeeded = _audioCodingModule.PlayoutFrequency();
    }
    else
    {
        highestNeeded = receiveFrequency;
    }

    // Special case: if we're playing a file on the playout side we take that
    // frequency into consideration as well. This is not needed on the sending
    // side, since the codec will limit the spectrum anyway.
    if (_outputFilePlaying)
    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_outputFilePlayerPtr && _outputFilePlaying)
        {
            if (_outputFilePlayerPtr->Frequency() > highestNeeded)
            {
                highestNeeded = _outputFilePlayerPtr->Frequency();
            }
        }
    }

    return(highestNeeded);
}

int32_t
Channel::CreateChannel(Channel*& channel,
                       int32_t channelId,
                       uint32_t instanceId)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
                 "Channel::CreateChannel(channelId=%d, instanceId=%d)",
                 channelId, instanceId);

    channel = new Channel(channelId, instanceId);
    if (channel == NULL)
    {
        WEBRTC_TRACE(kTraceMemory, kTraceVoice,
                     VoEId(instanceId,channelId),
                     "Channel::CreateChannel() unable to allocate memory for"
                     " channel");
        return -1;
    }
    return 0;
}

void
Channel::PlayNotification(int32_t id, uint32_t durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet.
}

void
Channel::RecordNotification(int32_t id, uint32_t durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet.
}

void
Channel::PlayFileEnded(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayFileEnded(id=%d)", id);

    if (id == _inputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _inputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => input file player module is"
                     " shutdown");
    }
    else if (id == _outputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _outputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => output file player module is"
                     " shutdown");
    }
}

void
Channel::RecordFileEnded(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded(id=%d)", id);

    assert(id == _outputFileRecorderId);

    CriticalSectionScoped cs(&_fileCritSect);

    _outputFileRecording = false;
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                 VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded() => output file recorder module is"
                 " shutdown");
}

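// The constructor only initializes members and creates the owned modules
// (ACM, RTP/RTCP, RTP dumps, far-end APM); the actual registration and
// configuration happens in Init().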
Channel::Channel(int32_t channelId,
                 uint32_t instanceId) :
    _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _instanceId(instanceId),
    _channelId(channelId),
    rtp_header_parser_(RtpHeaderParser::Create()),
    _audioCodingModule(*AudioCodingModule::Create(
        VoEModuleId(instanceId, channelId))),
    _rtpDumpIn(*RtpDump::CreateRtpDump()),
    _rtpDumpOut(*RtpDump::CreateRtpDump()),
    _outputAudioLevel(),
    _externalTransport(false),
    _inputFilePlayerPtr(NULL),
    _outputFilePlayerPtr(NULL),
    _outputFileRecorderPtr(NULL),
    // Avoid conflicts with other channels by adding 1024 - 1026;
    // nowhere near 1024 channels will be used.
    _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
    _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
    _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
    _inputFilePlaying(false),
    _outputFilePlaying(false),
    _outputFileRecording(false),
    _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
    _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
    _inputExternalMedia(false),
    _outputExternalMedia(false),
    _inputExternalMediaCallbackPtr(NULL),
    _outputExternalMediaCallbackPtr(NULL),
    _encryptionRTPBufferPtr(NULL),
    _decryptionRTPBufferPtr(NULL),
    _encryptionRTCPBufferPtr(NULL),
    _decryptionRTCPBufferPtr(NULL),
    // This is just an offset; the RTP module will add its own random offset.
    _timeStamp(0),
    _sendTelephoneEventPayloadType(106),
    playout_timestamp_rtp_(0),
    playout_timestamp_rtcp_(0),
    _numberOfDiscardedPackets(0),
    _engineStatisticsPtr(NULL),
    _outputMixerPtr(NULL),
    _transmitMixerPtr(NULL),
    _moduleProcessThreadPtr(NULL),
    _audioDeviceModulePtr(NULL),
    _voiceEngineObserverPtr(NULL),
    _callbackCritSectPtr(NULL),
    _transportPtr(NULL),
    _encryptionPtr(NULL),
    _rtpAudioProc(NULL),
    _rxAudioProcessingModulePtr(NULL),
    _rxVadObserverPtr(NULL),
    _oldVadDecision(-1),
    _sendFrameType(0),
    _rtpObserverPtr(NULL),
    _rtcpObserverPtr(NULL),
    _outputIsOnHold(false),
    _externalPlayout(false),
    _externalMixing(false),
    _inputIsOnHold(false),
    _playing(false),
    _sending(false),
    _receiving(false),
    _mixFileWithMicrophone(false),
    _rtpObserver(false),
    _rtcpObserver(false),
    _mute(false),
    _panLeft(1.0f),
    _panRight(1.0f),
    _outputGain(1.0f),
    _encrypting(false),
    _decrypting(false),
    _playOutbandDtmfEvent(false),
    _playInbandDtmfEvent(false),
    _extraPayloadType(0),
    _insertExtraRTPPacket(false),
    _extraMarkerBit(false),
    _lastLocalTimeStamp(0),
    _lastRemoteTimeStamp(0),
    _lastPayloadType(0),
    _includeAudioLevelIndication(false),
    _rtpPacketTimedOut(false),
    _rtpPacketTimeOutIsEnabled(false),
    _rtpTimeOutSeconds(0),
    _connectionObserver(false),
    _connectionObserverPtr(NULL),
    _countAliveDetections(0),
    _countDeadDetections(0),
    _outputSpeechType(AudioFrame::kNormalSpeech),
    _average_jitter_buffer_delay_us(0),
    least_required_delay_ms_(0),
    _previousTimestamp(0),
    _recPacketDelayMs(20),
    _RxVadDetection(false),
    _rxApmIsEnabled(false),
    _rxAgcIsEnabled(false),
    _rxNsIsEnabled(false)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Channel() - ctor");
    _inbandDtmfQueue.ResetDtmf();
    _inbandDtmfGenerator.Init();
    _outputAudioLevel.Clear();

    RtpRtcp::Configuration configuration;
    configuration.id = VoEModuleId(instanceId, channelId);
    configuration.audio = true;
    configuration.incoming_data = this;
    configuration.incoming_messages = this;
    configuration.outgoing_transport = this;
    configuration.rtcp_feedback = this;
    configuration.audio_messages = this;

    _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));

    // Create far end AudioProcessing Module
    _rxAudioProcessingModulePtr = AudioProcessing::Create(
        VoEModuleId(instanceId, channelId));
}

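// Shutdown follows the order documented in the body: de-register callbacks,
// de-register modules from the process thread, then destroy modules and
// owned buffers.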
Channel::~Channel()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::~Channel() - dtor");

    if (_outputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
    }
    if (_inputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kRecordingPerChannel);
    }
    StopSend();
    StopPlayout();

    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_inputFilePlayerPtr)
        {
            _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _inputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
            _inputFilePlayerPtr = NULL;
        }
        if (_outputFilePlayerPtr)
        {
            _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _outputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
            _outputFilePlayerPtr = NULL;
        }
        if (_outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
            _outputFileRecorderPtr->StopRecording();
            FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
            _outputFileRecorderPtr = NULL;
        }
    }

    // The order to safely shutdown modules in a channel is:
    // 1. De-register callbacks in modules
    // 2. De-register modules in process thread
    // 3. Destroy modules
    if (_audioCodingModule.RegisterTransportCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register transport callback"
                     " (Audio coding module)");
    }
    if (_audioCodingModule.RegisterVADCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register VAD callback"
                     " (Audio coding module)");
    }
    // De-register modules in process thread
    if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to deregister RTP/RTCP module");
    }

    // Destroy modules
    AudioCodingModule::Destroy(&_audioCodingModule);
    if (_rxAudioProcessingModulePtr != NULL)
    {
        AudioProcessing::Destroy(_rxAudioProcessingModulePtr); // far end APM
        _rxAudioProcessingModulePtr = NULL;
    }

    // End of modules shutdown

    // Delete other objects
    RtpDump::DestroyRtpDump(&_rtpDumpIn);
    RtpDump::DestroyRtpDump(&_rtpDumpOut);
    delete [] _encryptionRTPBufferPtr;
    delete [] _decryptionRTPBufferPtr;
    delete [] _encryptionRTCPBufferPtr;
    delete [] _decryptionRTCPBufferPtr;
    delete &_callbackCritSect;
    delete &_fileCritSect;
}

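// Second-stage construction: registers the RTP/RTCP module with the process
// thread, initializes the ACM and RTCP, registers all supported receive
// codecs, and configures the far-end AudioProcessing module defaults.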
int32_t
Channel::Init()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Init()");

    // --- Initial sanity

    if ((_engineStatisticsPtr == NULL) ||
        (_moduleProcessThreadPtr == NULL))
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::Init() must call SetEngineInformation() first");
        return -1;
    }

    // --- Add modules to process thread (for periodic scheduling)

    const bool processThreadFail =
        ((_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0) ||
         false);
    if (processThreadFail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() modules not registered");
        return -1;
    }
    // --- ACM initialization

    if ((_audioCodingModule.InitializeReceiver() == -1) ||
#ifdef WEBRTC_CODEC_AVT
        // out-of-band Dtmf tones are played out by default
        (_audioCodingModule.SetDtmfPlayoutStatus(true) == -1) ||
#endif
        (_audioCodingModule.InitializeSender() == -1))
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "Channel::Init() unable to initialize the ACM - 1");
        return -1;
    }

    // --- RTP/RTCP module initialization

    // Ensure that RTCP is enabled by default for the created channel.
    // Note that the module will keep generating RTCP until it is explicitly
    // disabled by the user.
    // After StopListen (when no sockets exist), RTCP packets will no longer
    // be transmitted since the Transport object will then be invalid.

    const bool rtpRtcpFail =
        ((_rtpRtcpModule->SetTelephoneEventForwardToDecoder(true) == -1) ||
         // RTCP is enabled by default
         (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1));
    if (rtpRtcpFail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
            "Channel::Init() RTP/RTCP module not initialized");
        return -1;
    }

    // --- Register all permanent callbacks
    const bool fail =
        (_audioCodingModule.RegisterTransportCallback(this) == -1) ||
        (_audioCodingModule.RegisterVADCallback(this) == -1);

    if (fail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() callbacks not registered");
        return -1;
    }

    // --- Register all supported codecs to the receiving side of the
    //     RTP/RTCP module

    CodecInst codec;
    const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

    for (int idx = 0; idx < nSupportedCodecs; idx++)
    {
        // Open up the RTP/RTCP receiver for all supported codecs
        if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
            (_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() unable to register %s (%d/%d/%d/%d) "
                         "to RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }
        else
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() %s (%d/%d/%d/%d) has been added to "
                         "the RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }

        // Ensure that PCMU is used as default codec on the sending side
        if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
        {
            SetSendCodec(codec);
        }

        // Register default PT for outband 'telephone-event'
        if (!STR_CASE_CMP(codec.plname, "telephone-event"))
        {
            if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register outband "
                             "'telephone-event' (%d/%d) correctly",
                             codec.pltype, codec.plfreq);
            }
        }

        if (!STR_CASE_CMP(codec.plname, "CN"))
        {
            if ((_audioCodingModule.RegisterSendCodec(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1) ||
                (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register CN (%d/%d) "
                             "correctly - 1",
                             codec.pltype, codec.plfreq);
            }
        }
#ifdef WEBRTC_CODEC_RED
        // Register RED to the receiving side of the ACM.
        // We will not receive an OnInitializeDecoder() callback for RED.
        if (!STR_CASE_CMP(codec.plname, "RED"))
        {
            if (_audioCodingModule.RegisterReceiveCodec(codec) == -1)
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register RED (%d/%d) "
                             "correctly",
                             codec.pltype, codec.plfreq);
            }
        }
#endif
    }

    // Initialize the far end AP module
    // Using 8 kHz as initial Fs, the same as in transmission. Might be
    // changed when the first audio is received.
    if (_rxAudioProcessingModulePtr == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_NO_MEMORY, kTraceCritical,
            "Channel::Init() failed to create the far-end AudioProcessing"
            " module");
        return -1;
    }

    if (_rxAudioProcessingModulePtr->set_sample_rate_hz(8000))
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the sample rate to 8K for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->set_num_channels(1, 1) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_SOUNDCARD_ERROR, kTraceWarning,
            "Init() failed to set channels for the primary audio stream");
    }

    if (_rxAudioProcessingModulePtr->high_pass_filter()->Enable(
        WEBRTC_VOICE_ENGINE_RX_HP_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the high-pass filter for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(
        (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction level for far-end"
            " AP module");
    }
    if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(
        WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction state for far-end"
            " AP module");
    }

    if (_rxAudioProcessingModulePtr->gain_control()->set_mode(
        (GainControl::Mode)WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC mode for far-end AP module");
    }
    if (_rxAudioProcessingModulePtr->gain_control()->Enable(
        WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC state for far-end AP module");
    }

    return 0;
}

int32_t
Channel::SetEngineInformation(Statistics& engineStatistics,
                              OutputMixer& outputMixer,
                              voe::TransmitMixer& transmitMixer,
                              ProcessThread& moduleProcessThread,
                              AudioDeviceModule& audioDeviceModule,
                              VoiceEngineObserver* voiceEngineObserver,
                              CriticalSectionWrapper* callbackCritSect)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetEngineInformation()");
    _engineStatisticsPtr = &engineStatistics;
    _outputMixerPtr = &outputMixer;
    _transmitMixerPtr = &transmitMixer;
    _moduleProcessThreadPtr = &moduleProcessThread;
    _audioDeviceModulePtr = &audioDeviceModule;
    _voiceEngineObserverPtr = voiceEngineObserver;
    _callbackCritSectPtr = callbackCritSect;
    return 0;
}

int32_t
Channel::UpdateLocalTimeStamp()
{
    _timeStamp += _audioFrame.samples_per_channel_;
    return 0;
}

int32_t
Channel::StartPlayout()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartPlayout()");
    if (_playing)
    {
        return 0;
    }

    if (!_externalMixing) {
        // Add participant as a candidate for mixing.
        if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
                "StartPlayout() failed to add participant to mixer");
            return -1;
        }
    }

    _playing = true;

    if (RegisterFilePlayingToMixer() != 0)
        return -1;

    return 0;
}

pbos@webrtc.org6141e132013-04-09 10:09:10 +00001371int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001372Channel::StopPlayout()
1373{
1374 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1375 "Channel::StopPlayout()");
1376 if (!_playing)
1377 {
1378 return 0;
1379 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00001380
1381 if (!_externalMixing) {
1382 // Remove participant as candidates for mixing
1383 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
1384 {
1385 _engineStatisticsPtr->SetLastError(
1386 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1387 "StopPlayout() failed to remove participant from mixer");
1388 return -1;
1389 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001390 }
1391
1392 _playing = false;
1393 _outputAudioLevel.Clear();
1394
1395 return 0;
1396}
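// A minimal usage sketch for the playout toggles above (illustrative only;
// `channel` stands for a Channel instance obtained elsewhere, error handling
// omitted):
//
//   if (channel->StartPlayout() == 0) {
//     // Decoded audio is now mixed into the output (unless external mixing
//     // is enabled).
//     channel->StopPlayout();  // Removes the channel from the mixer again.
//   }
//
// Both calls are idempotent: starting while already playing and stopping
// while already stopped simply return 0.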
1397
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001398int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001399Channel::StartSend()
1400{
1401 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1402 "Channel::StartSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001403 {
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001404 // A lock is needed because |_sending| can be accessed or modified by
1405 // another thread at the same time.
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001406 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001407
1408 if (_sending)
1409 {
1410 return 0;
1411 }
1412 _sending = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00001413 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001414
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001415 if (_rtpRtcpModule->SetSendingStatus(true) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001416 {
1417 _engineStatisticsPtr->SetLastError(
1418 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1419 "StartSend() RTP/RTCP failed to start sending");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001420 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001421 _sending = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00001422 return -1;
1423 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001424
niklase@google.com470e71d2011-07-07 08:21:25 +00001425 return 0;
1426}
1427
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001428int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001429Channel::StopSend()
1430{
1431 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1432 "Channel::StopSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001433 {
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001434 // A lock is needed because |_sending| can be accessed or modified by
1435 // another thread at the same time.
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001436 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001437
1438 if (!_sending)
1439 {
1440 return 0;
1441 }
1442 _sending = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00001443 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001444
niklase@google.com470e71d2011-07-07 08:21:25 +00001445 // Reset the sending SSRC and sequence number and trigger direct transmission
1446 // of an RTCP BYE.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001447 if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
1448 _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001449 {
1450 _engineStatisticsPtr->SetLastError(
1451 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1452 "StartSend() RTP/RTCP failed to stop sending");
1453 }
1454
niklase@google.com470e71d2011-07-07 08:21:25 +00001455 return 0;
1456}
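// Illustrative send-state sequence (a sketch; `channel` is assumed to be a
// fully configured Channel with a registered transport):
//
//   channel->StartSend();  // Flips |_sending| under |_callbackCritSect| and
//                          // tells the RTP/RTCP module to start sending.
//   // Encoded frames are now delivered through Channel::SendData().
//   channel->StopSend();   // Resets the sending SSRC/sequence number and
//                          // triggers an RTCP BYE.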
1457
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001458int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001459Channel::StartReceiving()
1460{
1461 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1462 "Channel::StartReceiving()");
1463 if (_receiving)
1464 {
1465 return 0;
1466 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001467 _receiving = true;
1468 _numberOfDiscardedPackets = 0;
1469 return 0;
1470}
1471
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001472int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001473Channel::StopReceiving()
1474{
1475 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1476 "Channel::StopReceiving()");
1477 if (!_receiving)
1478 {
1479 return 0;
1480 }
pwestin@webrtc.org684f0572013-03-13 23:20:57 +00001481
henrika@webrtc.orgaf71f0e2011-12-05 07:02:22 +00001482 // Recover DTMF detection status.
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00001483 int32_t ret = _rtpRtcpModule->SetTelephoneEventForwardToDecoder(true);
1484 if (ret != 0) {
1485 _engineStatisticsPtr->SetLastError(
1486 VE_INVALID_OPERATION, kTraceWarning,
1487 "StopReceiving() failed to restore telephone-event status.");
1488 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001489 RegisterReceiveCodecsToRTPModule();
1490 _receiving = false;
1491 return 0;
1492}
1493
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001494int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001495Channel::SetNetEQPlayoutMode(NetEqModes mode)
1496{
1497 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1498 "Channel::SetNetEQPlayoutMode()");
1499 AudioPlayoutMode playoutMode(voice);
1500 switch (mode)
1501 {
1502 case kNetEqDefault:
1503 playoutMode = voice;
1504 break;
1505 case kNetEqStreaming:
1506 playoutMode = streaming;
1507 break;
1508 case kNetEqFax:
1509 playoutMode = fax;
1510 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001511 case kNetEqOff:
1512 playoutMode = off;
1513 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00001514 }
1515 if (_audioCodingModule.SetPlayoutMode(playoutMode) != 0)
1516 {
1517 _engineStatisticsPtr->SetLastError(
1518 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1519 "SetNetEQPlayoutMode() failed to set playout mode");
1520 return -1;
1521 }
1522 return 0;
1523}
1524
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001525int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001526Channel::GetNetEQPlayoutMode(NetEqModes& mode)
1527{
1528 const AudioPlayoutMode playoutMode = _audioCodingModule.PlayoutMode();
1529 switch (playoutMode)
1530 {
1531 case voice:
1532 mode = kNetEqDefault;
1533 break;
1534 case streaming:
1535 mode = kNetEqStreaming;
1536 break;
1537 case fax:
1538 mode = kNetEqFax;
1539 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001540 case off:
1541 mode = kNetEqOff;
niklase@google.com470e71d2011-07-07 08:21:25 +00001542 }
1543 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
1544 VoEId(_instanceId,_channelId),
1545 "Channel::GetNetEQPlayoutMode() => mode=%u", mode);
1546 return 0;
1547}
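// The mode mapping above is one-to-one in both directions. A round-trip
// sketch (illustrative):
//
//   channel->SetNetEQPlayoutMode(kNetEqStreaming);  // ACM mode: streaming
//   NetEqModes mode;
//   channel->GetNetEQPlayoutMode(mode);             // mode == kNetEqStreaming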
1548
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001549int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001550Channel::SetOnHoldStatus(bool enable, OnHoldModes mode)
1551{
1552 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1553 "Channel::SetOnHoldStatus()");
1554 if (mode == kHoldSendAndPlay)
1555 {
1556 _outputIsOnHold = enable;
1557 _inputIsOnHold = enable;
1558 }
1559 else if (mode == kHoldPlayOnly)
1560 {
1561 _outputIsOnHold = enable;
1562 }
1563 else if (mode == kHoldSendOnly)
1564 {
1565 _inputIsOnHold = enable;
1566 }
1567 return 0;
1568}
1569
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001570int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001571Channel::GetOnHoldStatus(bool& enabled, OnHoldModes& mode)
1572{
1573 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1574 "Channel::GetOnHoldStatus()");
1575 enabled = (_outputIsOnHold || _inputIsOnHold);
1576 if (_outputIsOnHold && _inputIsOnHold)
1577 {
1578 mode = kHoldSendAndPlay;
1579 }
1580 else if (_outputIsOnHold && !_inputIsOnHold)
1581 {
1582 mode = kHoldPlayOnly;
1583 }
1584 else if (!_outputIsOnHold && _inputIsOnHold)
1585 {
1586 mode = kHoldSendOnly;
1587 }
1588 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1589 "Channel::GetOnHoldStatus() => enabled=%d, mode=%d",
1590 enabled, mode);
1591 return 0;
1592}
1593
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001594int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001595Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
1596{
1597 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1598 "Channel::RegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001599 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001600
1601 if (_voiceEngineObserverPtr)
1602 {
1603 _engineStatisticsPtr->SetLastError(
1604 VE_INVALID_OPERATION, kTraceError,
1605 "RegisterVoiceEngineObserver() observer already enabled");
1606 return -1;
1607 }
1608 _voiceEngineObserverPtr = &observer;
1609 return 0;
1610}
1611
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001612int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001613Channel::DeRegisterVoiceEngineObserver()
1614{
1615 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1616 "Channel::DeRegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001617 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001618
1619 if (!_voiceEngineObserverPtr)
1620 {
1621 _engineStatisticsPtr->SetLastError(
1622 VE_INVALID_OPERATION, kTraceWarning,
1623 "DeRegisterVoiceEngineObserver() observer already disabled");
1624 return 0;
1625 }
1626 _voiceEngineObserverPtr = NULL;
1627 return 0;
1628}
1629
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001630int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001631Channel::GetSendCodec(CodecInst& codec)
1632{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001633 return (_audioCodingModule.SendCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001634}
1635
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001636int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001637Channel::GetRecCodec(CodecInst& codec)
1638{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001639 return (_audioCodingModule.ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001640}
1641
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001642int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001643Channel::SetSendCodec(const CodecInst& codec)
1644{
1645 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1646 "Channel::SetSendCodec()");
1647
1648 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1649 {
1650 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1651 "SetSendCodec() failed to register codec to ACM");
1652 return -1;
1653 }
1654
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001655 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001656 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001657 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1658 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001659 {
1660 WEBRTC_TRACE(
1661 kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1662 "SetSendCodec() failed to register codec to"
1663 " RTP/RTCP module");
1664 return -1;
1665 }
1666 }
1667
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001668 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001669 {
1670 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1671 "SetSendCodec() failed to set audio packet size");
1672 return -1;
1673 }
1674
1675 return 0;
1676}
1677
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001678int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001679Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
1680{
1681 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1682 "Channel::SetVADStatus(mode=%d)", mode);
1683 // To disable VAD, DTX must be disabled too
1684 disableDTX = ((enableVAD == false) ? true : disableDTX);
1685 if (_audioCodingModule.SetVAD(!disableDTX, enableVAD, mode) != 0)
1686 {
1687 _engineStatisticsPtr->SetLastError(
1688 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1689 "SetVADStatus() failed to set VAD");
1690 return -1;
1691 }
1692 return 0;
1693}
1694
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001695int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001696Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
1697{
1698 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1699 "Channel::GetVADStatus");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001700 if (_audioCodingModule.VAD(&disabledDTX, &enabledVAD, &mode) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001701 {
1702 _engineStatisticsPtr->SetLastError(
1703 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1704 "GetVADStatus() failed to get VAD status");
1705 return -1;
1706 }
1707 disabledDTX = !disabledDTX;
1708 return 0;
1709}
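// Sketch of the VAD/DTX coupling enforced above (illustrative; VADNormal is
// assumed to be one of the ACMVADMode values):
//
//   channel->SetVADStatus(false, VADNormal, false);  // VAD off forces DTX off
//   bool vad, dtxDisabled;
//   ACMVADMode mode;
//   channel->GetVADStatus(vad, mode, dtxDisabled);   // vad == false,
//                                                    // dtxDisabled == true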
1710
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001711int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001712Channel::SetRecPayloadType(const CodecInst& codec)
1713{
1714 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1715 "Channel::SetRecPayloadType()");
1716
1717 if (_playing)
1718 {
1719 _engineStatisticsPtr->SetLastError(
1720 VE_ALREADY_PLAYING, kTraceError,
1721 "SetRecPayloadType() unable to set PT while playing");
1722 return -1;
1723 }
1724 if (_receiving)
1725 {
1726 _engineStatisticsPtr->SetLastError(
1727 VE_ALREADY_LISTENING, kTraceError,
1728 "SetRecPayloadType() unable to set PT while listening");
1729 return -1;
1730 }
1731
1732 if (codec.pltype == -1)
1733 {
1734 // De-register the selected codec (RTP/RTCP module and ACM)
1735
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001736 int8_t pltype(-1);
niklase@google.com470e71d2011-07-07 08:21:25 +00001737 CodecInst rxCodec = codec;
1738
1739 // Get payload type for the given codec
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00001740 _rtpRtcpModule->ReceivePayloadType(rxCodec, &pltype);
niklase@google.com470e71d2011-07-07 08:21:25 +00001741 rxCodec.pltype = pltype;
1742
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00001743 if (_rtpRtcpModule->DeRegisterReceivePayload(pltype) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001744 {
1745 _engineStatisticsPtr->SetLastError(
1746 VE_RTP_RTCP_MODULE_ERROR,
1747 kTraceError,
1748 "SetRecPayloadType() RTP/RTCP-module deregistration "
1749 "failed");
1750 return -1;
1751 }
1752 if (_audioCodingModule.UnregisterReceiveCodec(rxCodec.pltype) != 0)
1753 {
1754 _engineStatisticsPtr->SetLastError(
1755 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1756 "SetRecPayloadType() ACM deregistration failed - 1");
1757 return -1;
1758 }
1759 return 0;
1760 }
1761
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00001762 if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001763 {
1764 // First attempt to register failed => de-register and try again
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00001765 _rtpRtcpModule->DeRegisterReceivePayload(codec.pltype);
1766 if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001767 {
1768 _engineStatisticsPtr->SetLastError(
1769 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1770 "SetRecPayloadType() RTP/RTCP-module registration failed");
1771 return -1;
1772 }
1773 }
1774 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1775 {
1776 _audioCodingModule.UnregisterReceiveCodec(codec.pltype);
1777 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1778 {
1779 _engineStatisticsPtr->SetLastError(
1780 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1781 "SetRecPayloadType() ACM registration failed - 1");
1782 return -1;
1783 }
1784 }
1785 return 0;
1786}
1787
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001788int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001789Channel::GetRecPayloadType(CodecInst& codec)
1790{
1791 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1792 "Channel::GetRecPayloadType()");
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001793 int8_t payloadType(-1);
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00001794 if (_rtpRtcpModule->ReceivePayloadType(codec, &payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001795 {
1796 _engineStatisticsPtr->SetLastError(
henrika@webrtc.org37198002012-06-18 11:00:12 +00001797 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
niklase@google.com470e71d2011-07-07 08:21:25 +00001798 "GetRecPayloadType() failed to retrieve RX payload type");
1799 return -1;
1800 }
1801 codec.pltype = payloadType;
1802 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1803 "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
1804 return 0;
1805}
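// Sketch of registering and later removing a receive payload type
// (illustrative; the CodecInst values are placeholders and must match a codec
// supported by the ACM):
//
//   CodecInst codec = {120, "L16", 16000, 320, 1, 320000};
//   channel->SetRecPayloadType(codec);  // Registers PT 120 in the RTP module
//                                       // and the ACM.
//   codec.pltype = -1;
//   channel->SetRecPayloadType(codec);  // pltype == -1 de-registers it again.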
1806
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001807int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001808Channel::SetAMREncFormat(AmrMode mode)
1809{
1810 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1811 "Channel::SetAMREncFormat()");
1812
1813 // ACM doesn't support AMR
1814 return -1;
1815}
1816
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001817int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001818Channel::SetAMRDecFormat(AmrMode mode)
1819{
1820 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1821 "Channel::SetAMRDecFormat()");
1822
1823 // ACM doesn't support AMR
1824 return -1;
1825}
1826
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001827int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001828Channel::SetAMRWbEncFormat(AmrMode mode)
1829{
1830 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1831 "Channel::SetAMRWbEncFormat()");
1832
1833 // ACM doesn't support AMR
1834 return -1;
1835
1836}
1837
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001838int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001839Channel::SetAMRWbDecFormat(AmrMode mode)
1840{
1841 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1842 "Channel::SetAMRWbDecFormat()");
1843
1844 // ACM doesn't support AMR
1845 return -1;
1846}
1847
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001848int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001849Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
1850{
1851 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1852 "Channel::SetSendCNPayloadType()");
1853
1854 CodecInst codec;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001855 int32_t samplingFreqHz(-1);
tina.legrand@webrtc.org45175852012-06-01 09:27:35 +00001856 const int kMono = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001857 if (frequency == kFreq32000Hz)
1858 samplingFreqHz = 32000;
1859 else if (frequency == kFreq16000Hz)
1860 samplingFreqHz = 16000;
1861
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001862 if (_audioCodingModule.Codec("CN", &codec, samplingFreqHz, kMono) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001863 {
1864 _engineStatisticsPtr->SetLastError(
1865 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1866 "SetSendCNPayloadType() failed to retrieve default CN codec "
1867 "settings");
1868 return -1;
1869 }
1870
1871 // Modify the payload type (must be set to dynamic range)
1872 codec.pltype = type;
1873
1874 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1875 {
1876 _engineStatisticsPtr->SetLastError(
1877 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1878 "SetSendCNPayloadType() failed to register CN to ACM");
1879 return -1;
1880 }
1881
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001882 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001883 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001884 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1885 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001886 {
1887 _engineStatisticsPtr->SetLastError(
1888 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1889 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1890 "module");
1891 return -1;
1892 }
1893 }
1894 return 0;
1895}
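// Illustrative use of the CN payload-type remapping above (a sketch; 105 is
// an arbitrary dynamic payload type):
//
//   channel->SetSendCNPayloadType(105, kFreq16000Hz);
//   // Wideband comfort noise is now sent with payload type 105.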
1896
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001897int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001898Channel::SetISACInitTargetRate(int rateBps, bool useFixedFrameSize)
1899{
1900 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1901 "Channel::SetISACInitTargetRate()");
1902
1903 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001904 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001905 {
1906 _engineStatisticsPtr->SetLastError(
1907 VE_CODEC_ERROR, kTraceError,
1908 "SetISACInitTargetRate() failed to retrieve send codec");
1909 return -1;
1910 }
1911 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1912 {
1913 // This API is only valid if iSAC is setup to run in channel-adaptive
1914 // mode.
1915 // We do not validate the adaptive mode here. It is done later in the
1916 // ConfigISACBandwidthEstimator() API.
1917 _engineStatisticsPtr->SetLastError(
1918 VE_CODEC_ERROR, kTraceError,
1919 "SetISACInitTargetRate() send codec is not iSAC");
1920 return -1;
1921 }
1922
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001923 uint8_t initFrameSizeMsec(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00001924 if (16000 == sendCodec.plfreq)
1925 {
1926 // Note that 0 is a valid and corresponds to "use default
1927 if ((rateBps != 0 &&
1928 rateBps < kVoiceEngineMinIsacInitTargetRateBpsWb) ||
1929 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsWb))
1930 {
1931 _engineStatisticsPtr->SetLastError(
1932 VE_INVALID_ARGUMENT, kTraceError,
1933 "SetISACInitTargetRate() invalid target rate - 1");
1934 return -1;
1935 }
1936 // 30 or 60ms
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001937 initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 16);
niklase@google.com470e71d2011-07-07 08:21:25 +00001938 }
1939 else if (32000 == sendCodec.plfreq)
1940 {
1941 if ((rateBps != 0 &&
1942 rateBps < kVoiceEngineMinIsacInitTargetRateBpsSwb) ||
1943 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsSwb))
1944 {
1945 _engineStatisticsPtr->SetLastError(
1946 VE_INVALID_ARGUMENT, kTraceError,
1947 "SetISACInitTargetRate() invalid target rate - 2");
1948 return -1;
1949 }
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001950 initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 32); // 30ms
niklase@google.com470e71d2011-07-07 08:21:25 +00001951 }
1952
1953 if (_audioCodingModule.ConfigISACBandwidthEstimator(
1954 initFrameSizeMsec, rateBps, useFixedFrameSize) == -1)
1955 {
1956 _engineStatisticsPtr->SetLastError(
1957 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1958 "SetISACInitTargetRate() iSAC BWE config failed");
1959 return -1;
1960 }
1961
1962 return 0;
1963}
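// Worked example of the frame-size computation above (a sketch): for
// wideband iSAC (plfreq == 16000) a pacsize of 480 samples gives
// 480 / 16 = 30 ms and 960 gives 60 ms; for super-wideband (plfreq == 32000)
// a pacsize of 960 gives 960 / 32 = 30 ms.
//
//   channel->SetISACInitTargetRate(32000, false);  // 32 kbps initial target,
//                                                  // adaptive frame size.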
1964
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001965int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001966Channel::SetISACMaxRate(int rateBps)
1967{
1968 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1969 "Channel::SetISACMaxRate()");
1970
1971 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001972 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001973 {
1974 _engineStatisticsPtr->SetLastError(
1975 VE_CODEC_ERROR, kTraceError,
1976 "SetISACMaxRate() failed to retrieve send codec");
1977 return -1;
1978 }
1979 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1980 {
1981 // This API is only valid if iSAC is selected as sending codec.
1982 _engineStatisticsPtr->SetLastError(
1983 VE_CODEC_ERROR, kTraceError,
1984 "SetISACMaxRate() send codec is not iSAC");
1985 return -1;
1986 }
1987 if (16000 == sendCodec.plfreq)
1988 {
1989 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsWb) ||
1990 (rateBps > kVoiceEngineMaxIsacMaxRateBpsWb))
1991 {
1992 _engineStatisticsPtr->SetLastError(
1993 VE_INVALID_ARGUMENT, kTraceError,
1994 "SetISACMaxRate() invalid max rate - 1");
1995 return -1;
1996 }
1997 }
1998 else if (32000 == sendCodec.plfreq)
1999 {
2000 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsSwb) ||
2001 (rateBps > kVoiceEngineMaxIsacMaxRateBpsSwb))
2002 {
2003 _engineStatisticsPtr->SetLastError(
2004 VE_INVALID_ARGUMENT, kTraceError,
2005 "SetISACMaxRate() invalid max rate - 2");
2006 return -1;
2007 }
2008 }
2009 if (_sending)
2010 {
2011 _engineStatisticsPtr->SetLastError(
2012 VE_SENDING, kTraceError,
2013 "SetISACMaxRate() unable to set max rate while sending");
2014 return -1;
2015 }
2016
2017 // Set the maximum instantaneous rate of iSAC (works for both adaptive
2018 // and non-adaptive mode)
2019 if (_audioCodingModule.SetISACMaxRate(rateBps) == -1)
2020 {
2021 _engineStatisticsPtr->SetLastError(
2022 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2023 "SetISACMaxRate() failed to set max rate");
2024 return -1;
2025 }
2026
2027 return 0;
2028}
2029
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002030int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002031Channel::SetISACMaxPayloadSize(int sizeBytes)
2032{
2033 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2034 "Channel::SetISACMaxPayloadSize()");
2035 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00002036 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00002037 {
2038 _engineStatisticsPtr->SetLastError(
2039 VE_CODEC_ERROR, kTraceError,
2040 "SetISACMaxPayloadSize() failed to retrieve send codec");
2041 return -1;
2042 }
2043 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
2044 {
2045 _engineStatisticsPtr->SetLastError(
2046 VE_CODEC_ERROR, kTraceError,
2047 "SetISACMaxPayloadSize() send codec is not iSAC");
2048 return -1;
2049 }
2050 if (16000 == sendCodec.plfreq)
2051 {
2052 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesWb) ||
2053 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesWb))
2054 {
2055 _engineStatisticsPtr->SetLastError(
2056 VE_INVALID_ARGUMENT, kTraceError,
2057 "SetISACMaxPayloadSize() invalid max payload - 1");
2058 return -1;
2059 }
2060 }
2061 else if (32000 == sendCodec.plfreq)
2062 {
2063 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesSwb) ||
2064 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb))
2065 {
2066 _engineStatisticsPtr->SetLastError(
2067 VE_INVALID_ARGUMENT, kTraceError,
2068 "SetISACMaxPayloadSize() invalid max payload - 2");
2069 return -1;
2070 }
2071 }
2072 if (_sending)
2073 {
2074 _engineStatisticsPtr->SetLastError(
2075 VE_SENDING, kTraceError,
2076 "SetISACMaxPayloadSize() unable to set max rate while sending");
2077 return -1;
2078 }
2079
2080 if (_audioCodingModule.SetISACMaxPayloadSize(sizeBytes) == -1)
2081 {
2082 _engineStatisticsPtr->SetLastError(
2083 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2084 "SetISACMaxPayloadSize() failed to set max payload size");
2085 return -1;
2086 }
2087 return 0;
2088}
2089
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002090int32_t Channel::RegisterExternalTransport(Transport& transport)
niklase@google.com470e71d2011-07-07 08:21:25 +00002091{
2092 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2093 "Channel::RegisterExternalTransport()");
2094
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002095 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002096
niklase@google.com470e71d2011-07-07 08:21:25 +00002097 if (_externalTransport)
2098 {
2099 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
2100 kTraceError,
2101 "RegisterExternalTransport() external transport already enabled");
2102 return -1;
2103 }
2104 _externalTransport = true;
2105 _transportPtr = &transport;
2106 return 0;
2107}
2108
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002109int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002110Channel::DeRegisterExternalTransport()
2111{
2112 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2113 "Channel::DeRegisterExternalTransport()");
2114
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002115 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.org83661f52011-11-25 10:58:15 +00002116
niklase@google.com470e71d2011-07-07 08:21:25 +00002117 if (!_transportPtr)
2118 {
2119 _engineStatisticsPtr->SetLastError(
2120 VE_INVALID_OPERATION, kTraceWarning,
2121 "DeRegisterExternalTransport() external transport already "
2122 "disabled");
2123 return 0;
2124 }
2125 _externalTransport = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002126 _transportPtr = NULL;
2127 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2128 "DeRegisterExternalTransport() all transport is disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002129 return 0;
2130}
2131
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002132int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002133 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2134 "Channel::ReceivedRTPPacket()");
2135
2136 // Store playout timestamp for the received RTP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002137 UpdatePlayoutTimestamp(false);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002138
2139 // Dump the RTP packet to a file (if RTP dump is enabled).
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002140 if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2141 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002142 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2143 VoEId(_instanceId,_channelId),
2144 "Channel::SendPacket() RTP dump to input file failed");
2145 }
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002146 RTPHeader header;
2147 if (!rtp_header_parser_->Parse(reinterpret_cast<const uint8_t*>(data),
2148 static_cast<uint16_t>(length), &header)) {
2149 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice,
2150 VoEId(_instanceId,_channelId),
2151 "IncomingPacket invalid RTP header");
2152 return -1;
2153 }
pbos@webrtc.org08933a52013-07-10 10:06:29 +00002154 header.payload_type_frequency =
2155 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00002156 bool retransmitted = IsPacketRetransmitted(header);
2157 bool in_order = rtp_receiver_->InOrderPacket(header.sequenceNumber);
2158 rtp_receive_statistics_->IncomingPacket(header, static_cast<uint16_t>(length),
2159 retransmitted, in_order);
2160 PayloadUnion payload_specific;
2161 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
2162 &payload_specific)) {
2163 return -1;
2164 }
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002165 // Deliver RTP packet to RTP/RTCP module for parsing
2166 // The packet will be pushed back to the channel thru the
2167 // OnReceivedPayloadData callback so we don't push it to the ACM here
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00002168 if (_rtpRtcpModule->IncomingRtpPacket(reinterpret_cast<const uint8_t*>(data),
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002169 static_cast<uint16_t>(length),
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00002170 header) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002171 _engineStatisticsPtr->SetLastError(
2172 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2173 "Channel::IncomingRTPPacket() RTP packet is invalid");
2174 }
2175 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002176}
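// Summary of the receive path above (a sketch): update the playout timestamp,
// optionally dump the packet, parse the RTP header, update receive statistics,
// look up the payload, and hand the packet to the RTP/RTCP module, which calls
// back into OnReceivedPayloadData() to feed the ACM. A caller only forwards
// raw packets (illustrative):
//
//   channel->ReceivedRTPPacket(reinterpret_cast<const int8_t*>(packet),
//                              static_cast<int32_t>(packet_length));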
2177
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002178int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002179 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2180 "Channel::ReceivedRTCPPacket()");
2181 // Store playout timestamp for the received RTCP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002182 UpdatePlayoutTimestamp(true);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002183
2184 // Dump the RTCP packet to a file (if RTP dump is enabled).
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002185 if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2186 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002187 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2188 VoEId(_instanceId,_channelId),
2189 "Channel::SendPacket() RTCP dump to input file failed");
2190 }
2191
2192 // Deliver RTCP packet to RTP/RTCP module for parsing
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002193 if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data,
2194 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002195 _engineStatisticsPtr->SetLastError(
2196 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2197 "Channel::IncomingRTPPacket() RTCP packet is invalid");
2198 }
2199 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002200}
2201
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00002202int32_t
2203Channel::SetPacketTimeoutNotification(bool enable, int timeoutSeconds)
2204{
2205 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2206 "Channel::SetPacketTimeoutNotification()");
2207 if (enable)
2208 {
2209 const uint32_t RTPtimeoutMS = 1000*timeoutSeconds;
2210 const uint32_t RTCPtimeoutMS = 0;
2211 _rtpRtcpModule->SetPacketTimeout(RTPtimeoutMS, RTCPtimeoutMS);
2212 _rtpPacketTimeOutIsEnabled = true;
2213 _rtpTimeOutSeconds = timeoutSeconds;
2214 }
2215 else
2216 {
2217 _rtpRtcpModule->SetPacketTimeout(0, 0);
2218 _rtpPacketTimeOutIsEnabled = false;
2219 _rtpTimeOutSeconds = 0;
2220 }
2221 return 0;
2222}
2223
2224int32_t
2225Channel::GetPacketTimeoutNotification(bool& enabled, int& timeoutSeconds)
2226{
2227 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2228 "Channel::GetPacketTimeoutNotification()");
2229 enabled = _rtpPacketTimeOutIsEnabled;
2230 if (enabled)
2231 {
2232 timeoutSeconds = _rtpTimeOutSeconds;
2233 }
2234 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
2235 "GetPacketTimeoutNotification() => enabled=%d,"
2236 " timeoutSeconds=%d",
2237 enabled, timeoutSeconds);
2238 return 0;
2239}
2240
2241int32_t
2242Channel::RegisterDeadOrAliveObserver(VoEConnectionObserver& observer)
2243{
2244 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2245 "Channel::RegisterDeadOrAliveObserver()");
2246 CriticalSectionScoped cs(&_callbackCritSect);
2247
2248 if (_connectionObserverPtr)
2249 {
2250 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION, kTraceError,
2251 "RegisterDeadOrAliveObserver() observer already enabled");
2252 return -1;
2253 }
2254
2255 _connectionObserverPtr = &observer;
2256 _connectionObserver = true;
2257
2258 return 0;
2259}
2260
2261int32_t
2262Channel::DeRegisterDeadOrAliveObserver()
2263{
2264 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2265 "Channel::DeRegisterDeadOrAliveObserver()");
2266 CriticalSectionScoped cs(&_callbackCritSect);
2267
2268 if (!_connectionObserverPtr)
2269 {
2270 _engineStatisticsPtr->SetLastError(
2271 VE_INVALID_OPERATION, kTraceWarning,
2272 "DeRegisterDeadOrAliveObserver() observer already disabled");
2273 return 0;
2274 }
2275
2276 _connectionObserver = false;
2277 _connectionObserverPtr = NULL;
2278
2279 return 0;
2280}
2281
2282int32_t
2283Channel::SetPeriodicDeadOrAliveStatus(bool enable, int sampleTimeSeconds)
2284{
2285 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2286 "Channel::SetPeriodicDeadOrAliveStatus()");
2287 if (!_connectionObserverPtr)
2288 {
2289 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
2290 "SetPeriodicDeadOrAliveStatus() connection observer has"
2291 " not been registered");
2292 }
2293 if (enable)
2294 {
2295 ResetDeadOrAliveCounters();
2296 }
2297 bool enabled(false);
2298 uint8_t currentSampleTimeSec(0);
2299 // Store last state (will be used later if dead-or-alive is disabled).
2300 _rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, currentSampleTimeSec);
2301 // Update the dead-or-alive state.
2302 if (_rtpRtcpModule->SetPeriodicDeadOrAliveStatus(
2303 enable, (uint8_t)sampleTimeSeconds) != 0)
2304 {
2305 _engineStatisticsPtr->SetLastError(
2306 VE_RTP_RTCP_MODULE_ERROR,
2307 kTraceError,
2308 "SetPeriodicDeadOrAliveStatus() failed to set dead-or-alive "
2309 "status");
2310 return -1;
2311 }
2312 if (!enable)
2313 {
2314 // Restore last utilized sample time.
2315 // Without this, the sample time would always be reset to default
2316 // (2 sec) each time dead-or-alive was disabled without a sample-time
2317 // parameter.
2318 _rtpRtcpModule->SetPeriodicDeadOrAliveStatus(enable,
2319 currentSampleTimeSec);
2320 }
2321 return 0;
2322}
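// Illustrative enable/disable cycle (a sketch): the current sample time is
// saved before every change, so disabling does not silently reset it to the
// 2-second default.
//
//   channel->SetPeriodicDeadOrAliveStatus(true, 4);   // Sample every 4 s.
//   // ...
//   channel->SetPeriodicDeadOrAliveStatus(false, 0);  // Keeps the 4 s setting.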
2323
2324int32_t
2325Channel::GetPeriodicDeadOrAliveStatus(bool& enabled, int& sampleTimeSeconds)
2326{
2327 _rtpRtcpModule->PeriodicDeadOrAliveStatus(
2328 enabled,
2329 (uint8_t&)sampleTimeSeconds);
2330 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
2331 "GetPeriodicDeadOrAliveStatus() => enabled=%d,"
2332 " sampleTimeSeconds=%d",
2333 enabled, sampleTimeSeconds);
2334 return 0;
2335}
2336
niklase@google.com470e71d2011-07-07 08:21:25 +00002337int Channel::StartPlayingFileLocally(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002338 bool loop,
2339 FileFormats format,
2340 int startPosition,
2341 float volumeScaling,
2342 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002343 const CodecInst* codecInst)
2344{
2345 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2346 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
2347 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
2348 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2349 startPosition, stopPosition);
2350
2351 if (_outputFilePlaying)
2352 {
2353 _engineStatisticsPtr->SetLastError(
2354 VE_ALREADY_PLAYING, kTraceError,
2355 "StartPlayingFileLocally() is already playing");
2356 return -1;
2357 }
2358
niklase@google.com470e71d2011-07-07 08:21:25 +00002359 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002360 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002361
2362 if (_outputFilePlayerPtr)
2363 {
2364 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2365 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2366 _outputFilePlayerPtr = NULL;
2367 }
2368
2369 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2370 _outputFilePlayerId, (const FileFormats)format);
2371
2372 if (_outputFilePlayerPtr == NULL)
2373 {
2374 _engineStatisticsPtr->SetLastError(
2375 VE_INVALID_ARGUMENT, kTraceError,
henrike@webrtc.org31d30702011-11-18 19:59:32 +00002376 "StartPlayingFileLocally() filePlayer format is not correct");
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002377 return -1;
2378 }
2379
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002380 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002381
2382 if (_outputFilePlayerPtr->StartPlayingFile(
2383 fileName,
2384 loop,
2385 startPosition,
2386 volumeScaling,
2387 notificationTime,
2388 stopPosition,
2389 (const CodecInst*)codecInst) != 0)
2390 {
2391 _engineStatisticsPtr->SetLastError(
2392 VE_BAD_FILE, kTraceError,
2393 "StartPlayingFile() failed to start file playout");
2394 _outputFilePlayerPtr->StopPlayingFile();
2395 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2396 _outputFilePlayerPtr = NULL;
2397 return -1;
2398 }
2399 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2400 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002401 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002402
2403 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002404 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002405
2406 return 0;
2407}
2408
2409int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002410 FileFormats format,
2411 int startPosition,
2412 float volumeScaling,
2413 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002414 const CodecInst* codecInst)
2415{
2416 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2417 "Channel::StartPlayingFileLocally(format=%d,"
2418 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2419 format, volumeScaling, startPosition, stopPosition);
2420
2421 if(stream == NULL)
2422 {
2423 _engineStatisticsPtr->SetLastError(
2424 VE_BAD_FILE, kTraceError,
2425 "StartPlayingFileLocally() NULL as input stream");
2426 return -1;
2427 }
2428
2429
2430 if (_outputFilePlaying)
2431 {
2432 _engineStatisticsPtr->SetLastError(
2433 VE_ALREADY_PLAYING, kTraceError,
2434 "StartPlayingFileLocally() is already playing");
2435 return -1;
2436 }
2437
niklase@google.com470e71d2011-07-07 08:21:25 +00002438 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002439 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002440
2441 // Destroy the old instance
2442 if (_outputFilePlayerPtr)
2443 {
2444 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2445 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2446 _outputFilePlayerPtr = NULL;
2447 }
2448
2449 // Create the instance
2450 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2451 _outputFilePlayerId,
2452 (const FileFormats)format);
2453
2454 if (_outputFilePlayerPtr == NULL)
2455 {
2456 _engineStatisticsPtr->SetLastError(
2457 VE_INVALID_ARGUMENT, kTraceError,
2458 "StartPlayingFileLocally() filePlayer format isnot correct");
2459 return -1;
2460 }
2461
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002462 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002463
2464 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2465 volumeScaling,
2466 notificationTime,
2467 stopPosition, codecInst) != 0)
2468 {
2469 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2470 "StartPlayingFile() failed to "
2471 "start file playout");
2472 _outputFilePlayerPtr->StopPlayingFile();
2473 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2474 _outputFilePlayerPtr = NULL;
2475 return -1;
2476 }
2477 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2478 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002479 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002480
2481 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002482 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002483
niklase@google.com470e71d2011-07-07 08:21:25 +00002484 return 0;
2485}
2486
2487int Channel::StopPlayingFileLocally()
2488{
2489 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2490 "Channel::StopPlayingFileLocally()");
2491
2492 if (!_outputFilePlaying)
2493 {
2494 _engineStatisticsPtr->SetLastError(
2495 VE_INVALID_OPERATION, kTraceWarning,
2496 "StopPlayingFileLocally() isnot playing");
2497 return 0;
2498 }
2499
niklase@google.com470e71d2011-07-07 08:21:25 +00002500 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002501 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002502
2503 if (_outputFilePlayerPtr->StopPlayingFile() != 0)
2504 {
2505 _engineStatisticsPtr->SetLastError(
2506 VE_STOP_RECORDING_FAILED, kTraceError,
2507 "StopPlayingFile() could not stop playing");
2508 return -1;
2509 }
2510 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2511 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2512 _outputFilePlayerPtr = NULL;
2513 _outputFilePlaying = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002514 }
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002515 // _fileCritSect cannot be taken while calling
2516 // SetAnonymousMixabilityStatus. Refer to comments in
2517 // StartPlayingFileLocally(const char* ...) for more details.
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002518 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
2519 {
2520 _engineStatisticsPtr->SetLastError(
2521 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002522 "StopPlayingFile() failed to stop participant from playing as"
2523 "file in the mixer");
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002524 return -1;
2525 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002526
2527 return 0;
2528}
2529
2530int Channel::IsPlayingFileLocally() const
2531{
2532 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2533 "Channel::IsPlayingFileLocally()");
2534
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002535 return (int32_t)_outputFilePlaying;
niklase@google.com470e71d2011-07-07 08:21:25 +00002536}
2537
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002538int Channel::RegisterFilePlayingToMixer()
2539{
2540 // Return success without registering file playing with the mixer if:
2541 // 1. a file is played before playout has been started on this channel, or
2542 // 2. playout is started without a file playing on this channel.
2543 if (!_playing || !_outputFilePlaying)
2544 {
2545 return 0;
2546 }
2547
2548 // |_fileCritSect| cannot be taken while calling
2549 // SetAnonymousMixabilityStatus() since as soon as the participant is added
2550 // frames can be pulled by the mixer. Since the frames are generated from
2551 // the file, _fileCritSect will be taken. This would result in a deadlock.
2552 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
2553 {
2554 CriticalSectionScoped cs(&_fileCritSect);
2555 _outputFilePlaying = false;
2556 _engineStatisticsPtr->SetLastError(
2557 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2558 "StartPlayingFile() failed to add participant as file to mixer");
2559 _outputFilePlayerPtr->StopPlayingFile();
2560 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2561 _outputFilePlayerPtr = NULL;
2562 return -1;
2563 }
2564
2565 return 0;
2566}
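// Both call orders are supported (a sketch; the file name is a placeholder).
// The helper above is invoked from each path and is a no-op until the channel
// is both playing and playing a file:
//
//   channel->StartPlayingFileLocally("prompt.wav", true, kFileFormatWavFile,
//                                    0, 1.0f, 0, NULL);
//   channel->StartPlayout();  // The file is registered with the mixer here.
//
//   // Or the other way around: StartPlayout() first, then
//   // StartPlayingFileLocally(), which registers through the same helper.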
2567
pbos@webrtc.org92135212013-05-14 08:31:39 +00002568int Channel::ScaleLocalFilePlayout(float scale)
niklase@google.com470e71d2011-07-07 08:21:25 +00002569{
2570 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2571 "Channel::ScaleLocalFilePlayout(scale=%5.3f)", scale);
2572
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002573 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002574
2575 if (!_outputFilePlaying)
2576 {
2577 _engineStatisticsPtr->SetLastError(
2578 VE_INVALID_OPERATION, kTraceError,
2579 "ScaleLocalFilePlayout() isnot playing");
2580 return -1;
2581 }
2582 if ((_outputFilePlayerPtr == NULL) ||
2583 (_outputFilePlayerPtr->SetAudioScaling(scale) != 0))
2584 {
2585 _engineStatisticsPtr->SetLastError(
2586 VE_BAD_ARGUMENT, kTraceError,
2587 "SetAudioScaling() failed to scale the playout");
2588 return -1;
2589 }
2590
2591 return 0;
2592}
2593
2594int Channel::GetLocalPlayoutPosition(int& positionMs)
2595{
2596 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2597 "Channel::GetLocalPlayoutPosition(position=?)");
2598
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002599 uint32_t position;
niklase@google.com470e71d2011-07-07 08:21:25 +00002600
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002601 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002602
2603 if (_outputFilePlayerPtr == NULL)
2604 {
2605 _engineStatisticsPtr->SetLastError(
2606 VE_INVALID_OPERATION, kTraceError,
2607 "GetLocalPlayoutPosition() filePlayer instance doesnot exist");
2608 return -1;
2609 }
2610
2611 if (_outputFilePlayerPtr->GetPlayoutPosition(position) != 0)
2612 {
2613 _engineStatisticsPtr->SetLastError(
2614 VE_BAD_FILE, kTraceError,
2615 "GetLocalPlayoutPosition() failed");
2616 return -1;
2617 }
2618 positionMs = position;
2619
2620 return 0;
2621}
2622
2623int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002624 bool loop,
2625 FileFormats format,
2626 int startPosition,
2627 float volumeScaling,
2628 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002629 const CodecInst* codecInst)
2630{
2631 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2632 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
2633 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
2634 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2635 startPosition, stopPosition);
2636
2637 if (_inputFilePlaying)
2638 {
2639 _engineStatisticsPtr->SetLastError(
2640 VE_ALREADY_PLAYING, kTraceWarning,
2641 "StartPlayingFileAsMicrophone() filePlayer is playing");
2642 return 0;
2643 }
2644
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002645 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002646
2647 // Destroy the old instance
2648 if (_inputFilePlayerPtr)
2649 {
2650 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2651 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2652 _inputFilePlayerPtr = NULL;
2653 }
2654
2655 // Create the instance
2656 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2657 _inputFilePlayerId, (const FileFormats)format);
2658
2659 if (_inputFilePlayerPtr == NULL)
2660 {
2661 _engineStatisticsPtr->SetLastError(
2662 VE_INVALID_ARGUMENT, kTraceError,
2663 "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
2664 return -1;
2665 }
2666
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002667 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002668
2669 if (_inputFilePlayerPtr->StartPlayingFile(
2670 fileName,
2671 loop,
2672 startPosition,
2673 volumeScaling,
2674 notificationTime,
2675 stopPosition,
2676 (const CodecInst*)codecInst) != 0)
2677 {
2678 _engineStatisticsPtr->SetLastError(
2679 VE_BAD_FILE, kTraceError,
2680 "StartPlayingFile() failed to start file playout");
2681 _inputFilePlayerPtr->StopPlayingFile();
2682 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2683 _inputFilePlayerPtr = NULL;
2684 return -1;
2685 }
2686 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2687 _inputFilePlaying = true;
2688
2689 return 0;
2690}
2691
2692int Channel::StartPlayingFileAsMicrophone(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002693 FileFormats format,
2694 int startPosition,
2695 float volumeScaling,
2696 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002697 const CodecInst* codecInst)
2698{
2699 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2700 "Channel::StartPlayingFileAsMicrophone(format=%d, "
2701 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2702 format, volumeScaling, startPosition, stopPosition);
2703
2704 if(stream == NULL)
2705 {
2706 _engineStatisticsPtr->SetLastError(
2707 VE_BAD_FILE, kTraceError,
2708 "StartPlayingFileAsMicrophone NULL as input stream");
2709 return -1;
2710 }
2711
2712 if (_inputFilePlaying)
2713 {
2714 _engineStatisticsPtr->SetLastError(
2715 VE_ALREADY_PLAYING, kTraceWarning,
2716 "StartPlayingFileAsMicrophone() is playing");
2717 return 0;
2718 }
2719
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002720 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002721
2722 // Destroy the old instance
2723 if (_inputFilePlayerPtr)
2724 {
2725 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2726 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2727 _inputFilePlayerPtr = NULL;
2728 }
2729
2730 // Create the instance
2731 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2732 _inputFilePlayerId, (const FileFormats)format);
2733
2734 if (_inputFilePlayerPtr == NULL)
2735 {
2736 _engineStatisticsPtr->SetLastError(
2737 VE_INVALID_ARGUMENT, kTraceError,
2738 "StartPlayingInputFile() filePlayer format isnot correct");
2739 return -1;
2740 }
2741
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002742 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002743
2744 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2745 volumeScaling, notificationTime,
2746 stopPosition, codecInst) != 0)
2747 {
2748 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2749 "StartPlayingFile() failed to start "
2750 "file playout");
2751 _inputFilePlayerPtr->StopPlayingFile();
2752 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2753 _inputFilePlayerPtr = NULL;
2754 return -1;
2755 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002756
niklase@google.com470e71d2011-07-07 08:21:25 +00002757 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2758 _inputFilePlaying = true;
2759
2760 return 0;
2761}
2762
2763int Channel::StopPlayingFileAsMicrophone()
2764{
2765 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2766 "Channel::StopPlayingFileAsMicrophone()");
2767
2768 if (!_inputFilePlaying)
2769 {
2770 _engineStatisticsPtr->SetLastError(
2771 VE_INVALID_OPERATION, kTraceWarning,
2772 "StopPlayingFileAsMicrophone() isnot playing");
2773 return 0;
2774 }
2775
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002776 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002777 if (_inputFilePlayerPtr->StopPlayingFile() != 0)
2778 {
2779 _engineStatisticsPtr->SetLastError(
2780 VE_STOP_RECORDING_FAILED, kTraceError,
2781 "StopPlayingFile() could not stop playing");
2782 return -1;
2783 }
2784 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2785 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2786 _inputFilePlayerPtr = NULL;
2787 _inputFilePlaying = false;
2788
2789 return 0;
2790}
2791
2792int Channel::IsPlayingFileAsMicrophone() const
2793{
2794 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2795 "Channel::IsPlayingFileAsMicrophone()");
2796
2797 return _inputFilePlaying;
2798}
2799
pbos@webrtc.org92135212013-05-14 08:31:39 +00002800int Channel::ScaleFileAsMicrophonePlayout(float scale)
niklase@google.com470e71d2011-07-07 08:21:25 +00002801{
2802 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2803 "Channel::ScaleFileAsMicrophonePlayout(scale=%5.3f)", scale);
2804
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002805 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002806
2807 if (!_inputFilePlaying)
2808 {
2809 _engineStatisticsPtr->SetLastError(
2810 VE_INVALID_OPERATION, kTraceError,
2811 "ScaleFileAsMicrophonePlayout() isnot playing");
2812 return -1;
2813 }
2814
2815 if ((_inputFilePlayerPtr == NULL) ||
2816 (_inputFilePlayerPtr->SetAudioScaling(scale) != 0))
2817 {
2818 _engineStatisticsPtr->SetLastError(
2819 VE_BAD_ARGUMENT, kTraceError,
2820 "SetAudioScaling() failed to scale playout");
2821 return -1;
2822 }
2823
2824 return 0;
2825}
2826
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00002827int Channel::StartRecordingPlayout(const char* fileName,
niklase@google.com470e71d2011-07-07 08:21:25 +00002828 const CodecInst* codecInst)
2829{
2830 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2831 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
2832
2833 if (_outputFileRecording)
2834 {
2835 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2836 "StartRecordingPlayout() is already recording");
2837 return 0;
2838 }
2839
2840 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002841 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +00002842 CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2843
niklas.enbom@webrtc.org40197d72012-03-26 08:45:47 +00002844 if ((codecInst != NULL) &&
2845 ((codecInst->channels < 1) || (codecInst->channels > 2)))
niklase@google.com470e71d2011-07-07 08:21:25 +00002846 {
2847 _engineStatisticsPtr->SetLastError(
2848 VE_BAD_ARGUMENT, kTraceError,
2849 "StartRecordingPlayout() invalid compression");
2850 return(-1);
2851 }
2852 if(codecInst == NULL)
2853 {
2854 format = kFileFormatPcm16kHzFile;
2855 codecInst=&dummyCodec;
2856 }
2857 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2858 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2859 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2860 {
2861 format = kFileFormatWavFile;
2862 }
2863 else
2864 {
2865 format = kFileFormatCompressedFile;
2866 }
2867
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002868 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002869
2870 // Destroy the old instance
2871 if (_outputFileRecorderPtr)
2872 {
2873 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2874 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2875 _outputFileRecorderPtr = NULL;
2876 }
2877
2878 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2879 _outputFileRecorderId, (const FileFormats)format);
2880 if (_outputFileRecorderPtr == NULL)
2881 {
2882 _engineStatisticsPtr->SetLastError(
2883 VE_INVALID_ARGUMENT, kTraceError,
2884 "StartRecordingPlayout() fileRecorder format isnot correct");
2885 return -1;
2886 }
2887
2888 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2889 fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
2890 {
2891 _engineStatisticsPtr->SetLastError(
2892 VE_BAD_FILE, kTraceError,
2893 "StartRecordingAudioFile() failed to start file recording");
2894 _outputFileRecorderPtr->StopRecording();
2895 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2896 _outputFileRecorderPtr = NULL;
2897 return -1;
2898 }
2899 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2900 _outputFileRecording = true;
2901
2902 return 0;
2903}
2904
2905int Channel::StartRecordingPlayout(OutStream* stream,
2906 const CodecInst* codecInst)
2907{
2908 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2909 "Channel::StartRecordingPlayout()");
2910
2911 if (_outputFileRecording)
2912 {
2913 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2914 "StartRecordingPlayout() is already recording");
2915 return 0;
2916 }
2917
2918 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002919 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +00002920    CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2921
2922 if (codecInst != NULL && codecInst->channels != 1)
2923 {
2924 _engineStatisticsPtr->SetLastError(
2925 VE_BAD_ARGUMENT, kTraceError,
2926 "StartRecordingPlayout() invalid compression");
2927 return(-1);
2928 }
2929    if (codecInst == NULL)
2930    {
2931        format = kFileFormatPcm16kHzFile;
2932        codecInst = &dummyCodec;
2933    }
2934    else if ((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2935 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2936 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2937 {
2938 format = kFileFormatWavFile;
2939 }
2940 else
2941 {
2942 format = kFileFormatCompressedFile;
2943 }
2944
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002945 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002946
2947 // Destroy the old instance
2948 if (_outputFileRecorderPtr)
2949 {
2950 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2951 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2952 _outputFileRecorderPtr = NULL;
2953 }
2954
2955 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2956 _outputFileRecorderId, (const FileFormats)format);
2957 if (_outputFileRecorderPtr == NULL)
2958 {
2959 _engineStatisticsPtr->SetLastError(
2960 VE_INVALID_ARGUMENT, kTraceError,
2961            "StartRecordingPlayout() fileRecorder format is not correct");
2962 return -1;
2963 }
2964
2965 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2966 notificationTime) != 0)
2967 {
2968 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2969 "StartRecordingPlayout() failed to "
2970 "start file recording");
2971 _outputFileRecorderPtr->StopRecording();
2972 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2973 _outputFileRecorderPtr = NULL;
2974 return -1;
2975 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002976
niklase@google.com470e71d2011-07-07 08:21:25 +00002977 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2978 _outputFileRecording = true;
2979
2980 return 0;
2981}
2982
2983int Channel::StopRecordingPlayout()
2984{
2985 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
2986 "Channel::StopRecordingPlayout()");
2987
2988 if (!_outputFileRecording)
2989 {
2990 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
2991                     "StopRecordingPlayout() is not recording");
2992 return -1;
2993 }
2994
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002996 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002997
2998 if (_outputFileRecorderPtr->StopRecording() != 0)
2999 {
3000 _engineStatisticsPtr->SetLastError(
3001 VE_STOP_RECORDING_FAILED, kTraceError,
3002 "StopRecording() could not stop recording");
3003 return(-1);
3004 }
3005 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
3006 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
3007 _outputFileRecorderPtr = NULL;
3008 _outputFileRecording = false;
3009
3010 return 0;
3011}
3012
3013void
3014Channel::SetMixWithMicStatus(bool mix)
3015{
3016    _mixFileWithMicrophone = mix;
3017}
3018
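// Reports the most recent speech output level of this channel. Level() maps
// the level to the [0,9] range; the full-range variant below uses the linear
// [0,32767] scale.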
3019int
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003020Channel::GetSpeechOutputLevel(uint32_t& level) const
niklase@google.com470e71d2011-07-07 08:21:25 +00003021{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003022 int8_t currentLevel = _outputAudioLevel.Level();
3023    level = static_cast<uint32_t> (currentLevel);
niklase@google.com470e71d2011-07-07 08:21:25 +00003024 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3025 VoEId(_instanceId,_channelId),
3026 "GetSpeechOutputLevel() => level=%u", level);
3027 return 0;
3028}
3029
3030int
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003031Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const
niklase@google.com470e71d2011-07-07 08:21:25 +00003032{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003033 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
3034    level = static_cast<uint32_t> (currentLevel);
niklase@google.com470e71d2011-07-07 08:21:25 +00003035 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3036 VoEId(_instanceId,_channelId),
3037 "GetSpeechOutputLevelFullRange() => level=%u", level);
3038 return 0;
3039}
3040
3041int
3042Channel::SetMute(bool enable)
3043{
3044 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3045 "Channel::SetMute(enable=%d)", enable);
3046 _mute = enable;
3047 return 0;
3048}
3049
3050bool
3051Channel::Mute() const
3052{
3053 return _mute;
3054}
3055
3056int
3057Channel::SetOutputVolumePan(float left, float right)
3058{
3059 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3060 "Channel::SetOutputVolumePan()");
3061 _panLeft = left;
3062 _panRight = right;
3063 return 0;
3064}
3065
3066int
3067Channel::GetOutputVolumePan(float& left, float& right) const
3068{
3069 left = _panLeft;
3070 right = _panRight;
3071 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3072 VoEId(_instanceId,_channelId),
3073 "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
3074 return 0;
3075}
3076
3077int
3078Channel::SetChannelOutputVolumeScaling(float scaling)
3079{
3080 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3081 "Channel::SetChannelOutputVolumeScaling()");
3082 _outputGain = scaling;
3083 return 0;
3084}
3085
3086int
3087Channel::GetChannelOutputVolumeScaling(float& scaling) const
3088{
3089 scaling = _outputGain;
3090 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3091 VoEId(_instanceId,_channelId),
3092 "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
3093 return 0;
3094}
3095
niklase@google.com470e71d2011-07-07 08:21:25 +00003096int
3097Channel::RegisterExternalEncryption(Encryption& encryption)
3098{
3099 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3100 "Channel::RegisterExternalEncryption()");
3101
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003102 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003103
3104 if (_encryptionPtr)
3105 {
3106 _engineStatisticsPtr->SetLastError(
3107 VE_INVALID_OPERATION, kTraceError,
3108 "RegisterExternalEncryption() encryption already enabled");
3109 return -1;
3110 }
3111
3112 _encryptionPtr = &encryption;
3113
3114 _decrypting = true;
3115 _encrypting = true;
3116
3117 return 0;
3118}
3119
3120int
3121Channel::DeRegisterExternalEncryption()
3122{
3123 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3124 "Channel::DeRegisterExternalEncryption()");
3125
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003126 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003127
3128 if (!_encryptionPtr)
3129 {
3130 _engineStatisticsPtr->SetLastError(
3131 VE_INVALID_OPERATION, kTraceWarning,
3132 "DeRegisterExternalEncryption() encryption already disabled");
3133 return 0;
3134 }
3135
3136 _decrypting = false;
3137 _encrypting = false;
3138
3139 _encryptionPtr = NULL;
3140
3141 return 0;
3142}
3143
3144int Channel::SendTelephoneEventOutband(unsigned char eventCode,
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00003145 int lengthMs, int attenuationDb,
3146 bool playDtmfEvent)
niklase@google.com470e71d2011-07-07 08:21:25 +00003147{
3148 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3149 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
3150 playDtmfEvent);
3151
3152 _playOutbandDtmfEvent = playDtmfEvent;
3153
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003154 if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
niklase@google.com470e71d2011-07-07 08:21:25 +00003155 attenuationDb) != 0)
3156 {
3157 _engineStatisticsPtr->SetLastError(
3158 VE_SEND_DTMF_FAILED,
3159 kTraceWarning,
3160 "SendTelephoneEventOutband() failed to send event");
3161 return -1;
3162 }
3163 return 0;
3164}
3165
3166int Channel::SendTelephoneEventInband(unsigned char eventCode,
3167 int lengthMs,
3168 int attenuationDb,
3169 bool playDtmfEvent)
3170{
3171 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3172 "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
3173 playDtmfEvent);
3174
3175 _playInbandDtmfEvent = playDtmfEvent;
3176 _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
3177
3178 return 0;
3179}
3180
3181int
3182Channel::SetDtmfPlayoutStatus(bool enable)
3183{
3184 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3185 "Channel::SetDtmfPlayoutStatus()");
3186 if (_audioCodingModule.SetDtmfPlayoutStatus(enable) != 0)
3187 {
3188 _engineStatisticsPtr->SetLastError(
3189 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
3190 "SetDtmfPlayoutStatus() failed to set Dtmf playout");
3191 return -1;
3192 }
3193 return 0;
3194}
3195
3196bool
3197Channel::DtmfPlayoutStatus() const
3198{
3199 return _audioCodingModule.DtmfPlayoutStatus();
3200}
3201
3202int
3203Channel::SetSendTelephoneEventPayloadType(unsigned char type)
3204{
3205 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3206 "Channel::SetSendTelephoneEventPayloadType()");
andrew@webrtc.orgf81f9f82011-08-19 22:56:22 +00003207 if (type > 127)
niklase@google.com470e71d2011-07-07 08:21:25 +00003208 {
3209 _engineStatisticsPtr->SetLastError(
3210 VE_INVALID_ARGUMENT, kTraceError,
3211 "SetSendTelephoneEventPayloadType() invalid type");
3212 return -1;
3213 }
pbos@webrtc.org5b10d8f2013-07-11 15:50:07 +00003214 CodecInst codec = {};
pwestin@webrtc.org1da1ce02011-10-13 15:19:55 +00003215 codec.plfreq = 8000;
3216 codec.pltype = type;
3217 memcpy(codec.plname, "telephone-event", 16);
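    // Register the event payload type with the RTP/RTCP module. If a payload
    // with this number is already registered, deregister it and retry once.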
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003218 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003219 {
henrika@webrtc.org4392d5f2013-04-17 07:34:25 +00003220 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
3221 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
3222 _engineStatisticsPtr->SetLastError(
3223 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3224 "SetSendTelephoneEventPayloadType() failed to register send"
3225            " payload type");
3226 return -1;
3227 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003228 }
3229 _sendTelephoneEventPayloadType = type;
3230 return 0;
3231}
3232
3233int
3234Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
3235{
3236 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3237 "Channel::GetSendTelephoneEventPayloadType()");
3238 type = _sendTelephoneEventPayloadType;
3239 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3240 VoEId(_instanceId,_channelId),
3241 "GetSendTelephoneEventPayloadType() => type=%u", type);
3242 return 0;
3243}
3244
niklase@google.com470e71d2011-07-07 08:21:25 +00003245int
3246Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
3247{
3248 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3249 "Channel::UpdateRxVadDetection()");
3250
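    // Collapse the frame's VAD activity into a binary decision and notify the
    // registered observer only when that decision changes.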
3251    int vadDecision =
3252        (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003254
3255 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
3256 {
3257 OnRxVadDetected(vadDecision);
3258 _oldVadDecision = vadDecision;
3259 }
3260
3261 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3262 "Channel::UpdateRxVadDetection() => vadDecision=%d",
3263 vadDecision);
3264 return 0;
3265}
3266
3267int
3268Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
3269{
3270 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3271 "Channel::RegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003272 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003273
3274 if (_rxVadObserverPtr)
3275 {
3276 _engineStatisticsPtr->SetLastError(
3277 VE_INVALID_OPERATION, kTraceError,
3278 "RegisterRxVadObserver() observer already enabled");
3279 return -1;
3280 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003281 _rxVadObserverPtr = &observer;
3282 _RxVadDetection = true;
3283 return 0;
3284}
3285
3286int
3287Channel::DeRegisterRxVadObserver()
3288{
3289 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3290 "Channel::DeRegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003291 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003292
3293 if (!_rxVadObserverPtr)
3294 {
3295 _engineStatisticsPtr->SetLastError(
3296 VE_INVALID_OPERATION, kTraceWarning,
3297 "DeRegisterRxVadObserver() observer already disabled");
3298 return 0;
3299 }
3300 _rxVadObserverPtr = NULL;
3301 _RxVadDetection = false;
3302 return 0;
3303}
3304
3305int
3306Channel::VoiceActivityIndicator(int &activity)
3307{
3308 activity = _sendFrameType;
3309
3310 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3311 "Channel::VoiceActivityIndicator(indicator=%d)", activity);
3312 return 0;
3313}
3314
3315#ifdef WEBRTC_VOICE_ENGINE_AGC
3316
3317int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003318Channel::SetRxAgcStatus(bool enable, AgcModes mode)
niklase@google.com470e71d2011-07-07 08:21:25 +00003319{
3320 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3321 "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
3322 (int)enable, (int)mode);
3323
3324 GainControl::Mode agcMode(GainControl::kFixedDigital);
3325 switch (mode)
3326 {
3327 case kAgcDefault:
3328 agcMode = GainControl::kAdaptiveDigital;
3329 break;
3330 case kAgcUnchanged:
3331 agcMode = _rxAudioProcessingModulePtr->gain_control()->mode();
3332 break;
3333 case kAgcFixedDigital:
3334 agcMode = GainControl::kFixedDigital;
3335 break;
3336 case kAgcAdaptiveDigital:
3337            agcMode = GainControl::kAdaptiveDigital;
3338 break;
3339 default:
3340 _engineStatisticsPtr->SetLastError(
3341 VE_INVALID_ARGUMENT, kTraceError,
3342 "SetRxAgcStatus() invalid Agc mode");
3343 return -1;
3344 }
3345
3346 if (_rxAudioProcessingModulePtr->gain_control()->set_mode(agcMode) != 0)
3347 {
3348 _engineStatisticsPtr->SetLastError(
3349 VE_APM_ERROR, kTraceError,
3350 "SetRxAgcStatus() failed to set Agc mode");
3351 return -1;
3352 }
3353 if (_rxAudioProcessingModulePtr->gain_control()->Enable(enable) != 0)
3354 {
3355 _engineStatisticsPtr->SetLastError(
3356 VE_APM_ERROR, kTraceError,
3357 "SetRxAgcStatus() failed to set Agc state");
3358 return -1;
3359 }
3360
3361 _rxAgcIsEnabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00003362    _rxApmIsEnabled = (_rxAgcIsEnabled || _rxNsIsEnabled);
3363
3364 return 0;
3365}
3366
3367int
3368Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
3369{
3370 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3371 "Channel::GetRxAgcStatus(enable=?, mode=?)");
3372
3373 bool enable = _rxAudioProcessingModulePtr->gain_control()->is_enabled();
3374 GainControl::Mode agcMode =
3375 _rxAudioProcessingModulePtr->gain_control()->mode();
3376
3377 enabled = enable;
3378
3379 switch (agcMode)
3380 {
3381 case GainControl::kFixedDigital:
3382 mode = kAgcFixedDigital;
3383 break;
3384 case GainControl::kAdaptiveDigital:
3385 mode = kAgcAdaptiveDigital;
3386 break;
3387 default:
3388 _engineStatisticsPtr->SetLastError(
3389 VE_APM_ERROR, kTraceError,
3390 "GetRxAgcStatus() invalid Agc mode");
3391 return -1;
3392 }
3393
3394 return 0;
3395}
3396
3397int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003398Channel::SetRxAgcConfig(AgcConfig config)
niklase@google.com470e71d2011-07-07 08:21:25 +00003399{
3400 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3401 "Channel::SetRxAgcConfig()");
3402
3403 if (_rxAudioProcessingModulePtr->gain_control()->set_target_level_dbfs(
3404 config.targetLeveldBOv) != 0)
3405 {
3406 _engineStatisticsPtr->SetLastError(
3407 VE_APM_ERROR, kTraceError,
3408 "SetRxAgcConfig() failed to set target peak |level|"
3409            " (or envelope) of the Agc");
3410 return -1;
3411 }
3412 if (_rxAudioProcessingModulePtr->gain_control()->set_compression_gain_db(
3413 config.digitalCompressionGaindB) != 0)
3414 {
3415 _engineStatisticsPtr->SetLastError(
3416 VE_APM_ERROR, kTraceError,
3417            "SetRxAgcConfig() failed to set the range in |gain| that the"
3418 " digital compression stage may apply");
3419 return -1;
3420 }
3421 if (_rxAudioProcessingModulePtr->gain_control()->enable_limiter(
3422 config.limiterEnable) != 0)
3423 {
3424 _engineStatisticsPtr->SetLastError(
3425 VE_APM_ERROR, kTraceError,
3426 "SetRxAgcConfig() failed to set hard limiter to the signal");
3427 return -1;
3428 }
3429
3430 return 0;
3431}
3432
3433int
3434Channel::GetRxAgcConfig(AgcConfig& config)
3435{
3436 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3437                 "Channel::GetRxAgcConfig()");
3438
3439 config.targetLeveldBOv =
3440 _rxAudioProcessingModulePtr->gain_control()->target_level_dbfs();
3441 config.digitalCompressionGaindB =
3442 _rxAudioProcessingModulePtr->gain_control()->compression_gain_db();
3443 config.limiterEnable =
3444 _rxAudioProcessingModulePtr->gain_control()->is_limiter_enabled();
3445
3446 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3447 VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
3448 "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
3449 " limiterEnable=%d",
3450 config.targetLeveldBOv,
3451 config.digitalCompressionGaindB,
3452 config.limiterEnable);
3453
3454 return 0;
3455}
3456
3457#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
3458
3459#ifdef WEBRTC_VOICE_ENGINE_NR
3460
3461int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003462Channel::SetRxNsStatus(bool enable, NsModes mode)
niklase@google.com470e71d2011-07-07 08:21:25 +00003463{
3464 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3465 "Channel::SetRxNsStatus(enable=%d, mode=%d)",
3466 (int)enable, (int)mode);
3467
3468 NoiseSuppression::Level nsLevel(
3469 (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE);
3470 switch (mode)
3471 {
3472
3473 case kNsDefault:
3474 nsLevel = (NoiseSuppression::Level)
3475 WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE;
3476 break;
3477 case kNsUnchanged:
3478 nsLevel = _rxAudioProcessingModulePtr->noise_suppression()->level();
3479 break;
3480 case kNsConference:
3481 nsLevel = NoiseSuppression::kHigh;
3482 break;
3483 case kNsLowSuppression:
3484 nsLevel = NoiseSuppression::kLow;
3485 break;
3486 case kNsModerateSuppression:
3487 nsLevel = NoiseSuppression::kModerate;
3488 break;
3489 case kNsHighSuppression:
3490 nsLevel = NoiseSuppression::kHigh;
3491 break;
3492 case kNsVeryHighSuppression:
3493 nsLevel = NoiseSuppression::kVeryHigh;
3494 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003495 }
3496
3497 if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(nsLevel)
3498 != 0)
3499 {
3500 _engineStatisticsPtr->SetLastError(
3501 VE_APM_ERROR, kTraceError,
3502            "SetRxNsStatus() failed to set NS level");
3503 return -1;
3504 }
3505 if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(enable) != 0)
3506 {
3507 _engineStatisticsPtr->SetLastError(
3508 VE_APM_ERROR, kTraceError,
3509            "SetRxNsStatus() failed to set NS state");
3510 return -1;
3511 }
3512
3513 _rxNsIsEnabled = enable;
3514    _rxApmIsEnabled = (_rxAgcIsEnabled || _rxNsIsEnabled);
3515
3516 return 0;
3517}
3518
3519int
3520Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
3521{
3522 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3523 "Channel::GetRxNsStatus(enable=?, mode=?)");
3524
3525 bool enable =
3526 _rxAudioProcessingModulePtr->noise_suppression()->is_enabled();
3527 NoiseSuppression::Level ncLevel =
3528 _rxAudioProcessingModulePtr->noise_suppression()->level();
3529
3530 enabled = enable;
3531
3532 switch (ncLevel)
3533 {
3534 case NoiseSuppression::kLow:
3535 mode = kNsLowSuppression;
3536 break;
3537 case NoiseSuppression::kModerate:
3538 mode = kNsModerateSuppression;
3539 break;
3540 case NoiseSuppression::kHigh:
3541 mode = kNsHighSuppression;
3542 break;
3543 case NoiseSuppression::kVeryHigh:
3544 mode = kNsVeryHighSuppression;
3545 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003546 }
3547
3548 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3549 VoEId(_instanceId,_channelId),
3550 "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
3551 return 0;
3552}
3553
3554#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
3555
3556int
3557Channel::RegisterRTPObserver(VoERTPObserver& observer)
3558{
3559 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3560 "Channel::RegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003561 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003562
3563 if (_rtpObserverPtr)
3564 {
3565 _engineStatisticsPtr->SetLastError(
3566 VE_INVALID_OPERATION, kTraceError,
3567 "RegisterRTPObserver() observer already enabled");
3568 return -1;
3569 }
3570
3571 _rtpObserverPtr = &observer;
3572 _rtpObserver = true;
3573
3574 return 0;
3575}
3576
3577int
3578Channel::DeRegisterRTPObserver()
3579{
3580 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3581 "Channel::DeRegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003582 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003583
3584 if (!_rtpObserverPtr)
3585 {
3586 _engineStatisticsPtr->SetLastError(
3587 VE_INVALID_OPERATION, kTraceWarning,
3588 "DeRegisterRTPObserver() observer already disabled");
3589 return 0;
3590 }
3591
3592 _rtpObserver = false;
3593 _rtpObserverPtr = NULL;
3594
3595 return 0;
3596}
3597
3598int
3599Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
3600{
3601 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3602 "Channel::RegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003603 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003604
3605 if (_rtcpObserverPtr)
3606 {
3607 _engineStatisticsPtr->SetLastError(
3608 VE_INVALID_OPERATION, kTraceError,
3609 "RegisterRTCPObserver() observer already enabled");
3610 return -1;
3611 }
3612
3613 _rtcpObserverPtr = &observer;
3614 _rtcpObserver = true;
3615
3616 return 0;
3617}
3618
3619int
3620Channel::DeRegisterRTCPObserver()
3621{
3622 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3623 "Channel::DeRegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003624 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003625
3626 if (!_rtcpObserverPtr)
3627 {
3628 _engineStatisticsPtr->SetLastError(
3629 VE_INVALID_OPERATION, kTraceWarning,
3630 "DeRegisterRTCPObserver() observer already disabled");
3631 return 0;
3632 }
3633
3634 _rtcpObserver = false;
3635 _rtcpObserverPtr = NULL;
3636
3637 return 0;
3638}
3639
3640int
3641Channel::SetLocalSSRC(unsigned int ssrc)
3642{
3643 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3644 "Channel::SetLocalSSRC()");
3645 if (_sending)
3646 {
3647 _engineStatisticsPtr->SetLastError(
3648 VE_ALREADY_SENDING, kTraceError,
3649 "SetLocalSSRC() already sending");
3650 return -1;
3651 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003652 if (_rtpRtcpModule->SetSSRC(ssrc) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003653 {
3654 _engineStatisticsPtr->SetLastError(
3655 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3656 "SetLocalSSRC() failed to set SSRC");
3657 return -1;
3658 }
3659 return 0;
3660}
3661
3662int
3663Channel::GetLocalSSRC(unsigned int& ssrc)
3664{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003665 ssrc = _rtpRtcpModule->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003666 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3667 VoEId(_instanceId,_channelId),
3668 "GetLocalSSRC() => ssrc=%lu", ssrc);
3669 return 0;
3670}
3671
3672int
3673Channel::GetRemoteSSRC(unsigned int& ssrc)
3674{
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00003675 ssrc = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003676 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3677 VoEId(_instanceId,_channelId),
3678 "GetRemoteSSRC() => ssrc=%lu", ssrc);
3679 return 0;
3680}
3681
3682int
3683Channel::GetRemoteCSRCs(unsigned int arrCSRC[15])
3684{
3685 if (arrCSRC == NULL)
3686 {
3687 _engineStatisticsPtr->SetLastError(
3688 VE_INVALID_ARGUMENT, kTraceError,
3689 "GetRemoteCSRCs() invalid array argument");
3690 return -1;
3691 }
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003692 uint32_t arrOfCSRC[kRtpCsrcSize];
3693 int32_t CSRCs(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003694 CSRCs = _rtpRtcpModule->CSRCs(arrOfCSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00003695 if (CSRCs > 0)
3696 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003697 memcpy(arrCSRC, arrOfCSRC, CSRCs * sizeof(uint32_t));
niklase@google.com470e71d2011-07-07 08:21:25 +00003698 for (int i = 0; i < (int) CSRCs; i++)
3699 {
3700 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3701 VoEId(_instanceId, _channelId),
3702 "GetRemoteCSRCs() => arrCSRC[%d]=%lu", i, arrCSRC[i]);
3703 }
3704 } else
3705 {
3706 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3707 VoEId(_instanceId, _channelId),
3708 "GetRemoteCSRCs() => list is empty!");
3709 }
3710 return CSRCs;
3711}
3712
3713int
3714Channel::SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID)
3715{
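    // A per-channel AudioProcessing instance is created on demand; its level
    // estimator produces the value carried in the RTP audio-level header
    // extension.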
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00003716 if (_rtpAudioProc.get() == NULL)
3717 {
3718 _rtpAudioProc.reset(AudioProcessing::Create(VoEModuleId(_instanceId,
3719 _channelId)));
3720 if (_rtpAudioProc.get() == NULL)
3721 {
3722 _engineStatisticsPtr->SetLastError(VE_NO_MEMORY, kTraceCritical,
3723 "Failed to create AudioProcessing");
3724 return -1;
3725 }
3726 }
3727
3728 if (_rtpAudioProc->level_estimator()->Enable(enable) !=
3729 AudioProcessing::kNoError)
3730 {
3731 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceWarning,
3732 "Failed to enable AudioProcessing::level_estimator()");
3733 }
3734
niklase@google.com470e71d2011-07-07 08:21:25 +00003735 _includeAudioLevelIndication = enable;
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00003736 if (enable) {
3737 rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
3738 ID);
3739 } else {
3740 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
3741 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003742 return _rtpRtcpModule->SetRTPAudioLevelIndicationStatus(enable, ID);
niklase@google.com470e71d2011-07-07 08:21:25 +00003743}

3744int
3745Channel::GetRTPAudioLevelIndicationStatus(bool& enabled, unsigned char& ID)
3746{
3747    if (_rtpRtcpModule->GetRTPAudioLevelIndicationStatus(enabled, ID) != 0)
3748    {
3749        return -1;
3750    }
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                 VoEId(_instanceId,_channelId),
                 "GetRTPAudioLevelIndicationStatus() => enabled=%d, ID=%u",
                 enabled, ID);
    return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003752}
3753
3754int
3755Channel::SetRTCPStatus(bool enable)
3756{
3757 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3758 "Channel::SetRTCPStatus()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003759 if (_rtpRtcpModule->SetRTCPStatus(enable ?
niklase@google.com470e71d2011-07-07 08:21:25 +00003760 kRtcpCompound : kRtcpOff) != 0)
3761 {
3762 _engineStatisticsPtr->SetLastError(
3763 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3764 "SetRTCPStatus() failed to set RTCP status");
3765 return -1;
3766 }
3767 return 0;
3768}
3769
3770int
3771Channel::GetRTCPStatus(bool& enabled)
3772{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003773 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003774 enabled = (method != kRtcpOff);
3775 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3776 VoEId(_instanceId,_channelId),
3777 "GetRTCPStatus() => enabled=%d", enabled);
3778 return 0;
3779}
3780
3781int
3782Channel::SetRTCP_CNAME(const char cName[256])
3783{
3784 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3785 "Channel::SetRTCP_CNAME()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003786 if (_rtpRtcpModule->SetCNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003787 {
3788 _engineStatisticsPtr->SetLastError(
3789 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3790 "SetRTCP_CNAME() failed to set RTCP CNAME");
3791 return -1;
3792 }
3793 return 0;
3794}
3795
3796int
3797Channel::GetRTCP_CNAME(char cName[256])
3798{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003799 if (_rtpRtcpModule->CNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003800 {
3801 _engineStatisticsPtr->SetLastError(
3802 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3803 "GetRTCP_CNAME() failed to retrieve RTCP CNAME");
3804 return -1;
3805 }
3806 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3807 VoEId(_instanceId, _channelId),
3808 "GetRTCP_CNAME() => cName=%s", cName);
3809 return 0;
3810}
3811
3812int
3813Channel::GetRemoteRTCP_CNAME(char cName[256])
3814{
3815 if (cName == NULL)
3816 {
3817 _engineStatisticsPtr->SetLastError(
3818 VE_INVALID_ARGUMENT, kTraceError,
3819 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
3820 return -1;
3821 }
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00003822 char cname[RTCP_CNAME_SIZE];
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00003823 const uint32_t remoteSSRC = _rtpRtcpModule->RemoteSSRC();
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003824 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003825 {
3826 _engineStatisticsPtr->SetLastError(
3827 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
3828 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
3829 return -1;
3830 }
3831 strcpy(cName, cname);
3832 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3833 VoEId(_instanceId, _channelId),
3834 "GetRemoteRTCP_CNAME() => cName=%s", cName);
3835 return 0;
3836}
3837
3838int
3839Channel::GetRemoteRTCPData(
3840 unsigned int& NTPHigh,
3841 unsigned int& NTPLow,
3842 unsigned int& timestamp,
3843 unsigned int& playoutTimestamp,
3844 unsigned int* jitter,
3845 unsigned short* fractionLost)
3846{
3847 // --- Information from sender info in received Sender Reports
3848
3849 RTCPSenderInfo senderInfo;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003850 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003851 {
3852 _engineStatisticsPtr->SetLastError(
3853 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003854 "GetRemoteRTCPData() failed to retrieve sender info for remote "
niklase@google.com470e71d2011-07-07 08:21:25 +00003855 "side");
3856 return -1;
3857 }
3858
3859 // We only utilize 12 out of 20 bytes in the sender info (ignores packet
3860 // and octet count)
3861 NTPHigh = senderInfo.NTPseconds;
3862 NTPLow = senderInfo.NTPfraction;
3863 timestamp = senderInfo.RTPtimeStamp;
3864
3865 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3866 VoEId(_instanceId, _channelId),
3867 "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
3868 "timestamp=%lu",
3869 NTPHigh, NTPLow, timestamp);
3870
3871 // --- Locally derived information
3872
3873 // This value is updated on each incoming RTCP packet (0 when no packet
3874 // has been received)
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003875 playoutTimestamp = playout_timestamp_rtcp_;
niklase@google.com470e71d2011-07-07 08:21:25 +00003876
3877 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3878 VoEId(_instanceId, _channelId),
3879 "GetRemoteRTCPData() => playoutTimestamp=%lu",
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003880 playout_timestamp_rtcp_);
niklase@google.com470e71d2011-07-07 08:21:25 +00003881
3882 if (NULL != jitter || NULL != fractionLost)
3883 {
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003884 // Get all RTCP receiver report blocks that have been received on this
3885        // channel. If we have received RTP packets from a remote source, we
3886        // know its SSRC and use that source's report block. Otherwise use the
3887        // first report block.
3888 std::vector<RTCPReportBlock> remote_stats;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003889 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003890 remote_stats.empty()) {
3891 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3892 VoEId(_instanceId, _channelId),
3893 "GetRemoteRTCPData() failed to measure statistics due"
3894 " to lack of received RTP and/or RTCP packets");
3895 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003896 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003897
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00003898 uint32_t remoteSSRC = _rtpRtcpModule->RemoteSSRC();
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003899 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
3900 for (; it != remote_stats.end(); ++it) {
3901 if (it->remoteSSRC == remoteSSRC)
3902 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003903 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003904
3905 if (it == remote_stats.end()) {
3906 // If we have not received any RTCP packets from this SSRC it probably
3907 // means that we have not received any RTP packets.
3908 // Use the first received report block instead.
3909 it = remote_stats.begin();
3910 remoteSSRC = it->remoteSSRC;
niklase@google.com470e71d2011-07-07 08:21:25 +00003911 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003912
xians@webrtc.org79af7342012-01-31 12:22:14 +00003913 if (jitter) {
3914 *jitter = it->jitter;
3915 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3916 VoEId(_instanceId, _channelId),
3917 "GetRemoteRTCPData() => jitter = %lu", *jitter);
3918 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003919
xians@webrtc.org79af7342012-01-31 12:22:14 +00003920 if (fractionLost) {
3921 *fractionLost = it->fractionLost;
3922 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3923 VoEId(_instanceId, _channelId),
3924 "GetRemoteRTCPData() => fractionLost = %lu",
3925 *fractionLost);
3926 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003927 }
3928 return 0;
3929}
3930
3931int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003932Channel::SendApplicationDefinedRTCPPacket(unsigned char subType,
niklase@google.com470e71d2011-07-07 08:21:25 +00003933 unsigned int name,
3934 const char* data,
3935 unsigned short dataLengthInBytes)
3936{
3937 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3938 "Channel::SendApplicationDefinedRTCPPacket()");
3939 if (!_sending)
3940 {
3941 _engineStatisticsPtr->SetLastError(
3942 VE_NOT_SENDING, kTraceError,
3943 "SendApplicationDefinedRTCPPacket() not sending");
3944 return -1;
3945 }
3946 if (NULL == data)
3947 {
3948 _engineStatisticsPtr->SetLastError(
3949 VE_INVALID_ARGUMENT, kTraceError,
3950 "SendApplicationDefinedRTCPPacket() invalid data value");
3951 return -1;
3952 }
3953 if (dataLengthInBytes % 4 != 0)
3954 {
3955 _engineStatisticsPtr->SetLastError(
3956 VE_INVALID_ARGUMENT, kTraceError,
3957 "SendApplicationDefinedRTCPPacket() invalid length value");
3958 return -1;
3959 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003960 RTCPMethod status = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003961 if (status == kRtcpOff)
3962 {
3963 _engineStatisticsPtr->SetLastError(
3964 VE_RTCP_ERROR, kTraceError,
3965 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
3966 return -1;
3967 }
3968
3969 // Create and schedule the RTCP APP packet for transmission
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003970 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
niklase@google.com470e71d2011-07-07 08:21:25 +00003971 subType,
3972 name,
3973 (const unsigned char*) data,
3974 dataLengthInBytes) != 0)
3975 {
3976 _engineStatisticsPtr->SetLastError(
3977 VE_SEND_ERROR, kTraceError,
3978 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
3979 return -1;
3980 }
3981 return 0;
3982}
3983
3984int
3985Channel::GetRTPStatistics(
3986 unsigned int& averageJitterMs,
3987 unsigned int& maxJitterMs,
3988 unsigned int& discardedPackets)
3989{
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00003990 uint8_t fraction_lost(0);
3991 uint32_t cum_lost(0);
3992 uint32_t ext_max(0);
3993 uint32_t jitter(0);
3994 uint32_t max_jitter(0);
3995
niklase@google.com470e71d2011-07-07 08:21:25 +00003996    // The jitter statistics are updated for each received RTP packet and are
3997    // based on received packets only.
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00003998 if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
3999 &cum_lost,
4000 &ext_max,
4001 &jitter,
4002 &max_jitter) != 0)
4003 {
4004 _engineStatisticsPtr->SetLastError(
4005 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
4006 "GetRTPStatistics() failed to read RTP statistics from the "
4007 "RTP/RTCP module");
niklase@google.com470e71d2011-07-07 08:21:25 +00004008 }
4009
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004010 const int32_t playoutFrequency =
niklase@google.com470e71d2011-07-07 08:21:25 +00004011 _audioCodingModule.PlayoutFrequency();
4012 if (playoutFrequency > 0)
4013 {
4014 // Scale RTP statistics given the current playout frequency
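        // The jitter values are expressed in RTP timestamp units; dividing by
        // samples per millisecond converts them to milliseconds.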
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004015 maxJitterMs = max_jitter / (playoutFrequency / 1000);
4016 averageJitterMs = jitter / (playoutFrequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00004017 }
4018
4019 discardedPackets = _numberOfDiscardedPackets;
4020
4021 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4022 VoEId(_instanceId, _channelId),
4023 "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004024 " discardedPackets = %lu)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004025 averageJitterMs, maxJitterMs, discardedPackets);
4026 return 0;
4027}
4028
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00004029int Channel::GetRemoteRTCPSenderInfo(SenderInfo* sender_info) {
4030 if (sender_info == NULL) {
4031 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
4032 "GetRemoteRTCPSenderInfo() invalid sender_info.");
4033 return -1;
4034 }
4035
4036 // Get the sender info from the latest received RTCP Sender Report.
4037 RTCPSenderInfo rtcp_sender_info;
4038 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_sender_info) != 0) {
4039 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4040 "GetRemoteRTCPSenderInfo() failed to read RTCP SR sender info.");
4041 return -1;
4042 }
4043
4044 sender_info->NTP_timestamp_high = rtcp_sender_info.NTPseconds;
4045 sender_info->NTP_timestamp_low = rtcp_sender_info.NTPfraction;
4046 sender_info->RTP_timestamp = rtcp_sender_info.RTPtimeStamp;
4047 sender_info->sender_packet_count = rtcp_sender_info.sendPacketCount;
4048 sender_info->sender_octet_count = rtcp_sender_info.sendOctetCount;
4049 return 0;
4050}
4051
4052int Channel::GetRemoteRTCPReportBlocks(
4053 std::vector<ReportBlock>* report_blocks) {
4054 if (report_blocks == NULL) {
4055 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
4056 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
4057 return -1;
4058 }
4059
4060 // Get the report blocks from the latest received RTCP Sender or Receiver
4061 // Report. Each element in the vector contains the sender's SSRC and a
4062 // report block according to RFC 3550.
4063 std::vector<RTCPReportBlock> rtcp_report_blocks;
4064 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
4065 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4066 "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
4067 return -1;
4068 }
4069
4070 if (rtcp_report_blocks.empty())
4071 return 0;
4072
4073 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
4074 for (; it != rtcp_report_blocks.end(); ++it) {
4075 ReportBlock report_block;
4076 report_block.sender_SSRC = it->remoteSSRC;
4077 report_block.source_SSRC = it->sourceSSRC;
4078 report_block.fraction_lost = it->fractionLost;
4079 report_block.cumulative_num_packets_lost = it->cumulativeLost;
4080 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
4081 report_block.interarrival_jitter = it->jitter;
4082 report_block.last_SR_timestamp = it->lastSR;
4083 report_block.delay_since_last_SR = it->delaySinceLastSR;
4084 report_blocks->push_back(report_block);
4085 }
4086 return 0;
4087}
4088
niklase@google.com470e71d2011-07-07 08:21:25 +00004089int
4090Channel::GetRTPStatistics(CallStatistics& stats)
4091{
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004092 uint8_t fraction_lost(0);
4093 uint32_t cum_lost(0);
4094 uint32_t ext_max(0);
4095 uint32_t jitter(0);
4096 uint32_t max_jitter(0);
4097
niklase@google.com470e71d2011-07-07 08:21:25 +00004098 // --- Part one of the final structure (four values)
4099
4100    // The jitter statistics are updated for each received RTP packet and are
4101    // based on received packets only.
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004102 if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
4103 &cum_lost,
4104 &ext_max,
4105 &jitter,
4106 &max_jitter) != 0)
4107 {
4108 _engineStatisticsPtr->SetLastError(
4109 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
4110 "GetRTPStatistics() failed to read RTP statistics from the "
4111 "RTP/RTCP module");
niklase@google.com470e71d2011-07-07 08:21:25 +00004112 }
4113
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004114 stats.fractionLost = fraction_lost;
4115 stats.cumulativeLost = cum_lost;
4116 stats.extendedMax = ext_max;
4117 stats.jitterSamples = jitter;
niklase@google.com470e71d2011-07-07 08:21:25 +00004118
4119 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4120 VoEId(_instanceId, _channelId),
4121 "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004122 " extendedMax=%lu, jitterSamples=%li)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004123 stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
4124 stats.jitterSamples);
4125
4126 // --- Part two of the final structure (one value)
4127
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004128 uint16_t RTT(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004129 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00004130 if (method == kRtcpOff)
4131 {
4132 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4133 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004134 "GetRTPStatistics() RTCP is disabled => valid RTT "
niklase@google.com470e71d2011-07-07 08:21:25 +00004135 "measurements cannot be retrieved");
4136 } else
4137 {
4138 // The remote SSRC will be zero if no RTP packet has been received.
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004139 uint32_t remoteSSRC = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004140 if (remoteSSRC > 0)
4141 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004142 uint16_t avgRTT(0);
4143 uint16_t maxRTT(0);
4144 uint16_t minRTT(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004145
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004146 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT)
niklase@google.com470e71d2011-07-07 08:21:25 +00004147 != 0)
4148 {
4149 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4150 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004151 "GetRTPStatistics() failed to retrieve RTT from "
niklase@google.com470e71d2011-07-07 08:21:25 +00004152 "the RTP/RTCP module");
4153 }
4154 } else
4155 {
4156 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4157 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004158 "GetRTPStatistics() failed to measure RTT since no "
niklase@google.com470e71d2011-07-07 08:21:25 +00004159 "RTP packets have been received yet");
4160 }
4161 }
4162
4163 stats.rttMs = static_cast<int> (RTT);
4164
4165 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4166 VoEId(_instanceId, _channelId),
4167 "GetRTPStatistics() => rttMs=%d", stats.rttMs);
4168
4169 // --- Part three of the final structure (four values)
4170
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004171 uint32_t bytesSent(0);
4172 uint32_t packetsSent(0);
4173 uint32_t bytesReceived(0);
4174 uint32_t packetsReceived(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004175
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004176 if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004177 &packetsSent,
4178 &bytesReceived,
4179 &packetsReceived) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004180 {
4181 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4182 VoEId(_instanceId, _channelId),
4183 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004184 " output will not be complete");
niklase@google.com470e71d2011-07-07 08:21:25 +00004185 }
4186
4187 stats.bytesSent = bytesSent;
4188 stats.packetsSent = packetsSent;
4189 stats.bytesReceived = bytesReceived;
4190 stats.packetsReceived = packetsReceived;
4191
4192 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4193 VoEId(_instanceId, _channelId),
4194 "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004195 " bytesReceived=%d, packetsReceived=%d)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004196 stats.bytesSent, stats.packetsSent, stats.bytesReceived,
4197 stats.packetsReceived);
4198
4199 return 0;
4200}
4201
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004202int Channel::SetFECStatus(bool enable, int redPayloadtype) {
4203 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4204 "Channel::SetFECStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00004205
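  // Enabling FEC requires a valid RED payload type, which is registered via
  // SetRedPayloadType() before FEC is switched on in the ACM.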
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00004206 if (enable) {
4207 if (redPayloadtype < 0 || redPayloadtype > 127) {
4208 _engineStatisticsPtr->SetLastError(
4209 VE_PLTYPE_ERROR, kTraceError,
4210 "SetFECStatus() invalid RED payload type");
4211 return -1;
4212 }
4213
4214 if (SetRedPayloadType(redPayloadtype) < 0) {
4215 _engineStatisticsPtr->SetLastError(
4216 VE_CODEC_ERROR, kTraceError,
4217          "SetFECStatus() failed to register RED payload type in ACM");
4218 return -1;
4219 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004220 }
niklase@google.com470e71d2011-07-07 08:21:25 +00004221
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004222 if (_audioCodingModule.SetFECStatus(enable) != 0) {
4223 _engineStatisticsPtr->SetLastError(
4224 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4225 "SetFECStatus() failed to set FEC state in the ACM");
4226 return -1;
4227 }
4228 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004229}
4230
4231int
4232Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
4233{
4234 enabled = _audioCodingModule.FECStatus();
4235 if (enabled)
4236 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004237 int8_t payloadType(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004238 if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004239 {
4240 _engineStatisticsPtr->SetLastError(
4241 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4242 "GetFECStatus() failed to retrieve RED PT from RTP/RTCP "
4243 "module");
4244 return -1;
4245 }
        redPayloadtype = payloadType;
4246        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4247 VoEId(_instanceId, _channelId),
4248 "GetFECStatus() => enabled=%d, redPayloadtype=%d",
4249 enabled, redPayloadtype);
4250 return 0;
4251 }
4252 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4253 VoEId(_instanceId, _channelId),
4254 "GetFECStatus() => enabled=%d", enabled);
4255 return 0;
4256}
4257
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004258void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
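  // Store sent packets in the RTP module so incoming NACKs can be answered
  // with retransmissions, enable RTCP-based NACK, and let the ACM track up to
  // maxNumberOfPackets missing packets.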
4259 // None of these functions can fail.
4260 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004261 _rtpRtcpModule->SetNACKStatus(enable ? kNackRtcp : kNackOff,
4262 maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00004263 if (enable)
4264 _audioCodingModule.EnableNack(maxNumberOfPackets);
4265 else
4266 _audioCodingModule.DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004267}
4268
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00004269// Called when we are missing one or more packets.
4270int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004271 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
4272}
4273
niklase@google.com470e71d2011-07-07 08:21:25 +00004274int
niklase@google.com470e71d2011-07-07 08:21:25 +00004275Channel::StartRTPDump(const char fileNameUTF8[1024],
4276 RTPDirections direction)
4277{
4278 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4279 "Channel::StartRTPDump()");
4280 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4281 {
4282 _engineStatisticsPtr->SetLastError(
4283 VE_INVALID_ARGUMENT, kTraceError,
4284 "StartRTPDump() invalid RTP direction");
4285 return -1;
4286 }
4287 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4288 &_rtpDumpIn : &_rtpDumpOut;
4289 if (rtpDumpPtr == NULL)
4290 {
4291 assert(false);
4292 return -1;
4293 }
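    // If a dump is already running in this direction, stop it so the new file
    // starts with a fresh capture.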
4294 if (rtpDumpPtr->IsActive())
4295 {
4296 rtpDumpPtr->Stop();
4297 }
4298 if (rtpDumpPtr->Start(fileNameUTF8) != 0)
4299 {
4300 _engineStatisticsPtr->SetLastError(
4301 VE_BAD_FILE, kTraceError,
4302 "StartRTPDump() failed to create file");
4303 return -1;
4304 }
4305 return 0;
4306}
4307
4308int
4309Channel::StopRTPDump(RTPDirections direction)
4310{
4311 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4312 "Channel::StopRTPDump()");
4313 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4314 {
4315 _engineStatisticsPtr->SetLastError(
4316 VE_INVALID_ARGUMENT, kTraceError,
4317 "StopRTPDump() invalid RTP direction");
4318 return -1;
4319 }
4320 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4321 &_rtpDumpIn : &_rtpDumpOut;
4322 if (rtpDumpPtr == NULL)
4323 {
4324 assert(false);
4325 return -1;
4326 }
4327 if (!rtpDumpPtr->IsActive())
4328 {
4329 return 0;
4330 }
4331 return rtpDumpPtr->Stop();
4332}
4333
4334bool
4335Channel::RTPDumpIsActive(RTPDirections direction)
4336{
4337 if ((direction != kRtpIncoming) &&
4338 (direction != kRtpOutgoing))
4339 {
4340 _engineStatisticsPtr->SetLastError(
4341 VE_INVALID_ARGUMENT, kTraceError,
4342 "RTPDumpIsActive() invalid RTP direction");
4343 return false;
4344 }
4345 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4346 &_rtpDumpIn : &_rtpDumpOut;
4347 return rtpDumpPtr->IsActive();
4348}
4349
4350int
4351Channel::InsertExtraRTPPacket(unsigned char payloadType,
4352 bool markerBit,
4353 const char* payloadData,
4354 unsigned short payloadSize)
4355{
4356 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4357 "Channel::InsertExtraRTPPacket()");
4358 if (payloadType > 127)
4359 {
4360 _engineStatisticsPtr->SetLastError(
4361 VE_INVALID_PLTYPE, kTraceError,
4362 "InsertExtraRTPPacket() invalid payload type");
4363 return -1;
4364 }
4365 if (payloadData == NULL)
4366 {
4367 _engineStatisticsPtr->SetLastError(
4368 VE_INVALID_ARGUMENT, kTraceError,
4369 "InsertExtraRTPPacket() invalid payload data");
4370 return -1;
4371 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004372 if (payloadSize > _rtpRtcpModule->MaxDataPayloadLength())
niklase@google.com470e71d2011-07-07 08:21:25 +00004373 {
4374 _engineStatisticsPtr->SetLastError(
4375 VE_INVALID_ARGUMENT, kTraceError,
4376 "InsertExtraRTPPacket() invalid payload size");
4377 return -1;
4378 }
4379 if (!_sending)
4380 {
4381 _engineStatisticsPtr->SetLastError(
4382 VE_NOT_SENDING, kTraceError,
4383 "InsertExtraRTPPacket() not sending");
4384 return -1;
4385 }
4386
4387 // Create extra RTP packet by calling RtpRtcp::SendOutgoingData().
4388 // Transport::SendPacket() will be called by the module when the RTP packet
4389 // is created.
4390 // The call to SendOutgoingData() does *not* modify the timestamp and
4391 // payloadtype to ensure that the RTP module generates a valid RTP packet
4392 // (user might utilize a non-registered payload type).
4393 // The marker bit and payload type will be replaced just before the actual
4394 // transmission, i.e., the actual modification is done *after* the RTP
4395 // module has delivered its RTP packet back to the VoE.
4396 // We will use the stored values above when the packet is modified
4397 // (see Channel::SendPacket()).
4398
4399 _extraPayloadType = payloadType;
4400 _extraMarkerBit = markerBit;
4401 _insertExtraRTPPacket = true;
4402
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004403 if (_rtpRtcpModule->SendOutgoingData(kAudioFrameSpeech,
niklase@google.com470e71d2011-07-07 08:21:25 +00004404 _lastPayloadType,
4405 _lastLocalTimeStamp,
stefan@webrtc.orgddfdfed2012-07-03 13:21:22 +00004406 // Leaving the time when this frame was
4407 // received from the capture device as
4408 // undefined for voice for now.
4409 -1,
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004410 (const uint8_t*) payloadData,
niklase@google.com470e71d2011-07-07 08:21:25 +00004411 payloadSize) != 0)
4412 {
4413 _engineStatisticsPtr->SetLastError(
4414 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4415 "InsertExtraRTPPacket() failed to send extra RTP packet");
4416 return -1;
4417 }
4418
4419 return 0;
4420}
4421
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004422uint32_t
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004423Channel::Demultiplex(const AudioFrame& audioFrame)
niklase@google.com470e71d2011-07-07 08:21:25 +00004424{
4425 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004426 "Channel::Demultiplex()");
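    // Keep a channel-local copy of the shared capture frame and tag it with
    // this channel's id before further per-channel processing.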
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004427 _audioFrame.CopyFrom(audioFrame);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004428 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004429 return 0;
4430}
4431
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004432uint32_t
xians@google.com0b0665a2011-08-08 08:18:44 +00004433Channel::PrepareEncodeAndSend(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004434{
4435 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4436 "Channel::PrepareEncodeAndSend()");
4437
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004438 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004439 {
4440 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4441 "Channel::PrepareEncodeAndSend() invalid audio frame");
4442 return -1;
4443 }
4444
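    // Capture-side processing for this channel: optionally mix in or replace
    // the signal with file audio, apply mute, hand the frame to a registered
    // external media callback, insert any queued in-band DTMF, and run level
    // analysis when the audio-level RTP extension is enabled.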
4445 if (_inputFilePlaying)
4446 {
4447 MixOrReplaceAudioWithFile(mixingFrequency);
4448 }
4449
4450 if (_mute)
4451 {
4452 AudioFrameOperations::Mute(_audioFrame);
4453 }
4454
4455 if (_inputExternalMedia)
4456 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004457 CriticalSectionScoped cs(&_callbackCritSect);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004458 const bool isStereo = (_audioFrame.num_channels_ == 2);
niklase@google.com470e71d2011-07-07 08:21:25 +00004459 if (_inputExternalMediaCallbackPtr)
4460 {
4461 _inputExternalMediaCallbackPtr->Process(
4462 _channelId,
4463 kRecordingPerChannel,
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004464 (int16_t*)_audioFrame.data_,
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004465 _audioFrame.samples_per_channel_,
4466 _audioFrame.sample_rate_hz_,
niklase@google.com470e71d2011-07-07 08:21:25 +00004467 isStereo);
4468 }
4469 }
4470
4471 InsertInbandDtmfTone();
4472
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004473 if (_includeAudioLevelIndication)
4474 {
4475 assert(_rtpAudioProc.get() != NULL);
4476
4477 // Check if settings need to be updated.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004478 if (_rtpAudioProc->sample_rate_hz() != _audioFrame.sample_rate_hz_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004479 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004480 if (_rtpAudioProc->set_sample_rate_hz(_audioFrame.sample_rate_hz_) !=
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004481 AudioProcessing::kNoError)
4482 {
4483 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4484 VoEId(_instanceId, _channelId),
4485 "Error setting AudioProcessing sample rate");
4486 return -1;
4487 }
4488 }
4489
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004490 if (_rtpAudioProc->num_input_channels() != _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004491 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004492 if (_rtpAudioProc->set_num_channels(_audioFrame.num_channels_,
4493 _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004494 != AudioProcessing::kNoError)
4495 {
4496 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4497 VoEId(_instanceId, _channelId),
4498 "Error setting AudioProcessing channels");
4499 return -1;
4500 }
4501 }
4502
4503 // Performs level analysis only; does not affect the signal.
4504 _rtpAudioProc->ProcessStream(&_audioFrame);
4505 }
4506
niklase@google.com470e71d2011-07-07 08:21:25 +00004507 return 0;
4508}
4509
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004510uint32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00004511Channel::EncodeAndSend()
4512{
4513 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4514 "Channel::EncodeAndSend()");
4515
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004516 assert(_audioFrame.num_channels_ <= 2);
4517 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004518 {
4519 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4520 "Channel::EncodeAndSend() invalid audio frame");
4521 return -1;
4522 }
4523
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004524 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004525
4526 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
4527
4528 // The ACM resamples internally.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004529 _audioFrame.timestamp_ = _timeStamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00004530 if (_audioCodingModule.Add10MsData((AudioFrame&)_audioFrame) != 0)
4531 {
4532 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
4533 "Channel::EncodeAndSend() ACM encoding failed");
4534 return -1;
4535 }
4536
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004537 _timeStamp += _audioFrame.samples_per_channel_;
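// Illustration (assumed rates, not taken from this file): a 10 ms frame at
// 16 kHz holds 160 samples per channel, so _timeStamp advances by 160 per
// frame; at 32 kHz it advances by 320.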
niklase@google.com470e71d2011-07-07 08:21:25 +00004538
4539 // --- Encode if complete frame is ready
4540
4541 // This call will trigger AudioPacketizationCallback::SendData if encoding
4542 // is done and payload is ready for packetization and transmission.
4543 return _audioCodingModule.Process();
4544}
4545
4546int Channel::RegisterExternalMediaProcessing(
4547 ProcessingTypes type,
4548 VoEMediaProcess& processObject)
4549{
4550 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4551 "Channel::RegisterExternalMediaProcessing()");
4552
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004553 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004554
4555 if (kPlaybackPerChannel == type)
4556 {
4557 if (_outputExternalMediaCallbackPtr)
4558 {
4559 _engineStatisticsPtr->SetLastError(
4560 VE_INVALID_OPERATION, kTraceError,
4561 "Channel::RegisterExternalMediaProcessing() "
4562 "output external media already enabled");
4563 return -1;
4564 }
4565 _outputExternalMediaCallbackPtr = &processObject;
4566 _outputExternalMedia = true;
4567 }
4568 else if (kRecordingPerChannel == type)
4569 {
4570 if (_inputExternalMediaCallbackPtr)
4571 {
4572 _engineStatisticsPtr->SetLastError(
4573 VE_INVALID_OPERATION, kTraceError,
4574 "Channel::RegisterExternalMediaProcessing() "
4575 "input external media already enabled");
4576 return -1;
4577 }
4578 _inputExternalMediaCallbackPtr = &processObject;
4579 _inputExternalMedia = true;
4580 }
4581 return 0;
4582}
4583
4584int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
4585{
4586 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4587 "Channel::DeRegisterExternalMediaProcessing()");
4588
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004589 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004590
4591 if (kPlaybackPerChannel == type)
4592 {
4593 if (!_outputExternalMediaCallbackPtr)
4594 {
4595 _engineStatisticsPtr->SetLastError(
4596 VE_INVALID_OPERATION, kTraceWarning,
4597 "Channel::DeRegisterExternalMediaProcessing() "
4598 "output external media already disabled");
4599 return 0;
4600 }
4601 _outputExternalMedia = false;
4602 _outputExternalMediaCallbackPtr = NULL;
4603 }
4604 else if (kRecordingPerChannel == type)
4605 {
4606 if (!_inputExternalMediaCallbackPtr)
4607 {
4608 _engineStatisticsPtr->SetLastError(
4609 VE_INVALID_OPERATION, kTraceWarning,
4610 "Channel::DeRegisterExternalMediaProcessing() "
4611 "input external media already disabled");
4612 return 0;
4613 }
4614 _inputExternalMedia = false;
4615 _inputExternalMediaCallbackPtr = NULL;
4616 }
4617
4618 return 0;
4619}
4620
roosa@google.com1b60ceb2012-12-12 23:00:29 +00004621int Channel::SetExternalMixing(bool enabled) {
4622 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4623 "Channel::SetExternalMixing(enabled=%d)", enabled);
4624
4625 if (_playing)
4626 {
4627 _engineStatisticsPtr->SetLastError(
4628 VE_INVALID_OPERATION, kTraceError,
4629 "Channel::SetExternalMixing() "
4630 "external mixing cannot be changed while playing.");
4631 return -1;
4632 }
4633
4634 _externalMixing = enabled;
4635
4636 return 0;
4637}
4638
niklase@google.com470e71d2011-07-07 08:21:25 +00004639int
4640Channel::ResetRTCPStatistics()
4641{
4642 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4643 "Channel::ResetRTCPStatistics()");
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004644 uint32_t remoteSSRC(0);
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004645 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004646 return _rtpRtcpModule->ResetRTT(remoteSSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00004647}
4648
4649int
4650Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
4651{
4652 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4653 "Channel::GetRoundTripTimeSummary()");
4654 // Override the default module outputs when RTCP is disabled. This is
4655 // done to stay backward compatible with older versions of VoiceEngine
4656 // that did not use the RTP/RTCP module.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004657 if (!_rtpRtcpModule->RTCP())
niklase@google.com470e71d2011-07-07 08:21:25 +00004658 {
4659 delaysMs.min = -1;
4660 delaysMs.max = -1;
4661 delaysMs.average = -1;
4662 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4663 "Channel::GetRoundTripTimeSummary() RTCP is disabled =>"
4664 " valid RTT measurements cannot be retrieved");
4665 return 0;
4666 }
4667
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004668 uint32_t remoteSSRC;
4669 uint16_t RTT;
4670 uint16_t avgRTT;
4671 uint16_t maxRTT;
4672 uint16_t minRTT;
niklase@google.com470e71d2011-07-07 08:21:25 +00004673 // The remote SSRC will be zero if no RTP packet has been received.
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004674 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004675 if (remoteSSRC == 0)
4676 {
4677 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4678 "Channel::GetRoundTripTimeSummary() unable to measure RTT"
4679 " since no RTP packet has been received yet");
4680 }
4681
4682 // Retrieve RTT statistics from the RTP/RTCP module for the specified
4683 // channel and SSRC. The SSRC is required to parse out the correct source
4684 // in conference scenarios.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004685 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT,&maxRTT) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004686 {
4687 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4688 "GetRoundTripTimeSummary unable to retrieve RTT values"
4689 " from the RTCP layer");
4690 delaysMs.min = -1; delaysMs.max = -1; delaysMs.average = -1;
4691 }
4692 else
4693 {
4694 delaysMs.min = minRTT;
4695 delaysMs.max = maxRTT;
4696 delaysMs.average = avgRTT;
4697 }
4698 return 0;
4699}
4700
4701int
4702Channel::GetNetworkStatistics(NetworkStatistics& stats)
4703{
4704 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4705 "Channel::GetNetworkStatistics()");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00004706 ACMNetworkStatistics acm_stats;
4707 int return_value = _audioCodingModule.NetworkStatistics(&acm_stats);
4708 if (return_value >= 0) {
4709 memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
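// Note: the memcpy above relies on NetworkStatistics and ACMNetworkStatistics
// sharing the same layout for the copied fields.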
4710 }
4711 return return_value;
niklase@google.com470e71d2011-07-07 08:21:25 +00004712}
4713
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004714bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
4715 int* playout_buffer_delay_ms) const {
4716 if (_average_jitter_buffer_delay_us == 0) {
niklase@google.com470e71d2011-07-07 08:21:25 +00004717 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004718 "Channel::GetDelayEstimate() no valid estimate.");
4719 return false;
4720 }
4721 *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 +
4722 _recPacketDelayMs;
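// Worked example (illustrative values only): with
// _average_jitter_buffer_delay_us = 41062 and _recPacketDelayMs = 20, the
// estimate is (41062 + 500) / 1000 + 20 = 41 + 20 = 61 ms.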
4723 *playout_buffer_delay_ms = playout_delay_ms_;
4724 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4725 "Channel::GetDelayEstimate()");
4726 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00004727}
4728
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +00004729int Channel::SetInitialPlayoutDelay(int delay_ms)
4730{
4731 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4732 "Channel::SetInitialPlayoutDelay()");
4733 if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
4734 (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
4735 {
4736 _engineStatisticsPtr->SetLastError(
4737 VE_INVALID_ARGUMENT, kTraceError,
4738 "SetInitialPlayoutDelay() invalid delay");
4739 return -1;
4740 }
4741 if (_audioCodingModule.SetInitialPlayoutDelay(delay_ms) != 0)
4742 {
4743 _engineStatisticsPtr->SetLastError(
4744 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4745 "SetInitialPlayoutDelay() failed to set initial playout delay");
4746 return -1;
4747 }
4748 return 0;
4749}
4750
4751
niklase@google.com470e71d2011-07-07 08:21:25 +00004752int
4753Channel::SetMinimumPlayoutDelay(int delayMs)
4754{
4755 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4756 "Channel::SetMinimumPlayoutDelay()");
4757 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
4758 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
4759 {
4760 _engineStatisticsPtr->SetLastError(
4761 VE_INVALID_ARGUMENT, kTraceError,
4762 "SetMinimumPlayoutDelay() invalid min delay");
4763 return -1;
4764 }
4765 if (_audioCodingModule.SetMinimumPlayoutDelay(delayMs) != 0)
4766 {
4767 _engineStatisticsPtr->SetLastError(
4768 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4769 "SetMinimumPlayoutDelay() failed to set min playout delay");
4770 return -1;
4771 }
4772 return 0;
4773}
4774
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004775void Channel::UpdatePlayoutTimestamp(bool rtcp) {
4776 uint32_t playout_timestamp = 0;
4777
4778 if (_audioCodingModule.PlayoutTimestamp(&playout_timestamp) == -1) {
4779 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4780 "Channel::UpdatePlayoutTimestamp() failed to read playout"
4781 " timestamp from the ACM");
4782 _engineStatisticsPtr->SetLastError(
4783 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4784 "UpdatePlayoutTimestamp() failed to retrieve timestamp");
4785 return;
4786 }
4787
4788 uint16_t delay_ms = 0;
4789 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
4790 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4791 "Channel::UpdatePlayoutTimestamp() failed to read playout"
4792 " delay from the ADM");
4793 _engineStatisticsPtr->SetLastError(
4794 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4795 "UpdatePlayoutTimestamp() failed to retrieve playout delay");
4796 return;
4797 }
4798
4799 int32_t playout_frequency = _audioCodingModule.PlayoutFrequency();
4800 CodecInst current_receive_codec;
4801 if (_audioCodingModule.ReceiveCodec(&current_receive_codec) == 0) {
4802 if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
4803 playout_frequency = 8000;
4804 } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
4805 playout_frequency = 48000;
niklase@google.com470e71d2011-07-07 08:21:25 +00004806 }
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004807 }
4808
4809 // Remove the playout delay.
4810 playout_timestamp -= (delay_ms * (playout_frequency / 1000));
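// For example (hypothetical values): delay_ms = 60 at playout_frequency =
// 16000 Hz removes 60 * 16 = 960 timestamp units.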
4811
4812 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4813 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
4814 playout_timestamp);
4815
4816 if (rtcp) {
4817 playout_timestamp_rtcp_ = playout_timestamp;
4818 } else {
4819 playout_timestamp_rtp_ = playout_timestamp;
4820 }
4821 playout_delay_ms_ = delay_ms;
4822}
4823
4824int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
4825 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4826 "Channel::GetPlayoutTimestamp()");
4827 if (playout_timestamp_rtp_ == 0) {
4828 _engineStatisticsPtr->SetLastError(
4829 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4830 "GetPlayoutTimestamp() failed to retrieve timestamp");
4831 return -1;
4832 }
4833 timestamp = playout_timestamp_rtp_;
4834 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4835 VoEId(_instanceId,_channelId),
4836 "GetPlayoutTimestamp() => timestamp=%u", timestamp);
4837 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004838}
4839
4840int
4841Channel::SetInitTimestamp(unsigned int timestamp)
4842{
4843 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4844 "Channel::SetInitTimestamp()");
4845 if (_sending)
4846 {
4847 _engineStatisticsPtr->SetLastError(
4848 VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
4849 return -1;
4850 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004851 if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004852 {
4853 _engineStatisticsPtr->SetLastError(
4854 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4855 "SetInitTimestamp() failed to set timestamp");
4856 return -1;
4857 }
4858 return 0;
4859}
4860
4861int
4862Channel::SetInitSequenceNumber(short sequenceNumber)
4863{
4864 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4865 "Channel::SetInitSequenceNumber()");
4866 if (_sending)
4867 {
4868 _engineStatisticsPtr->SetLastError(
4869 VE_SENDING, kTraceError,
4870 "SetInitSequenceNumber() already sending");
4871 return -1;
4872 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004873 if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004874 {
4875 _engineStatisticsPtr->SetLastError(
4876 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4877 "SetInitSequenceNumber() failed to set sequence number");
4878 return -1;
4879 }
4880 return 0;
4881}
4882
4883int
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004884Channel::GetRtpRtcp(RtpRtcp* &rtpRtcpModule) const
niklase@google.com470e71d2011-07-07 08:21:25 +00004885{
4886 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4887 "Channel::GetRtpRtcp()");
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00004888 rtpRtcpModule = _rtpRtcpModule.get();
niklase@google.com470e71d2011-07-07 08:21:25 +00004889 return 0;
4890}
4891
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004892// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
4893// a shared helper.
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004894int32_t
pbos@webrtc.org92135212013-05-14 08:31:39 +00004895Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004896{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004897 scoped_array<int16_t> fileBuffer(new int16_t[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004898 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004899
4900 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004901 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004902
4903 if (_inputFilePlayerPtr == NULL)
4904 {
4905 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4906 VoEId(_instanceId, _channelId),
4907 "Channel::MixOrReplaceAudioWithFile() file player"
4908 " doesn't exist");
4909 return -1;
4910 }
4911
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004912 if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00004913 fileSamples,
4914 mixingFrequency) == -1)
4915 {
4916 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4917 VoEId(_instanceId, _channelId),
4918 "Channel::MixOrReplaceAudioWithFile() file mixing "
4919 "failed");
4920 return -1;
4921 }
4922 if (fileSamples == 0)
4923 {
4924 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4925 VoEId(_instanceId, _channelId),
4926 "Channel::MixOrReplaceAudioWithFile() file has ended");
4927 return 0;
4928 }
4929 }
4930
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004931 assert(_audioFrame.samples_per_channel_ == fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004932
4933 if (_mixFileWithMicrophone)
4934 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004935 // Currently file stream is always mono.
4936 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004937 Utility::MixWithSat(_audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004938 _audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004939 fileBuffer.get(),
4940 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004941 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004942 }
4943 else
4944 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004945 // Replace ACM audio with file.
4946 // Currently file stream is always mono.
4947 // TODO(xians): Change the code when FilePlayer supports real stereo.
niklase@google.com470e71d2011-07-07 08:21:25 +00004948 _audioFrame.UpdateFrame(_channelId,
4949 -1,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004950 fileBuffer.get(),
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004951 fileSamples,
niklase@google.com470e71d2011-07-07 08:21:25 +00004952 mixingFrequency,
4953 AudioFrame::kNormalSpeech,
4954 AudioFrame::kVadUnknown,
4955 1);
4956
4957 }
4958 return 0;
4959}
4960
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004961int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00004962Channel::MixAudioWithFile(AudioFrame& audioFrame,
pbos@webrtc.org92135212013-05-14 08:31:39 +00004963 int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004964{
4965 assert(mixingFrequency <= 32000);
4966
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004967 scoped_array<int16_t> fileBuffer(new int16_t[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004968 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004969
4970 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004971 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004972
4973 if (_outputFilePlayerPtr == NULL)
4974 {
4975 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4976 VoEId(_instanceId, _channelId),
4977 "Channel::MixAudioWithFile() file player doesn't exist");
4978 return -1;
4979 }
4980
4981 // We should get the frequency we ask for.
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004982 if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00004983 fileSamples,
4984 mixingFrequency) == -1)
4985 {
4986 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4987 VoEId(_instanceId, _channelId),
4988 "Channel::MixAudioWithFile() file mixing failed");
4989 return -1;
4990 }
4991 }
4992
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004993 if (audioFrame.samples_per_channel_ == fileSamples)
niklase@google.com470e71d2011-07-07 08:21:25 +00004994 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004995 // Currently file stream is always mono.
4996 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004997 Utility::MixWithSat(audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004998 audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004999 fileBuffer.get(),
5000 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005001 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005002 }
5003 else
5004 {
5005 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005006 "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
niklase@google.com470e71d2011-07-07 08:21:25 +00005007 "fileSamples(%d)",
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005008 audioFrame.samples_per_channel_, fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005009 return -1;
5010 }
5011
5012 return 0;
5013}
5014
5015int
5016Channel::InsertInbandDtmfTone()
5017{
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005018 // Check if we should start a new tone.
niklase@google.com470e71d2011-07-07 08:21:25 +00005019 if (_inbandDtmfQueue.PendingDtmf() &&
5020 !_inbandDtmfGenerator.IsAddingTone() &&
5021 _inbandDtmfGenerator.DelaySinceLastTone() >
5022 kMinTelephoneEventSeparationMs)
5023 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005024 int8_t eventCode(0);
5025 uint16_t lengthMs(0);
5026 uint8_t attenuationDb(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005027
5028 eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
5029 _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
5030 if (_playInbandDtmfEvent)
5031 {
5032 // Add tone to output mixer using a reduced length to minimize
5033 // risk of echo.
5034 _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
5035 attenuationDb);
5036 }
5037 }
5038
5039 if (_inbandDtmfGenerator.IsAddingTone())
5040 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005041 uint16_t frequency(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005042 _inbandDtmfGenerator.GetSampleRate(frequency);
5043
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005044 if (frequency != _audioFrame.sample_rate_hz_)
niklase@google.com470e71d2011-07-07 08:21:25 +00005045 {
5046 // Update sample rate of Dtmf tone since the mixing frequency
5047 // has changed.
5048 _inbandDtmfGenerator.SetSampleRate(
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005049 (uint16_t) (_audioFrame.sample_rate_hz_));
niklase@google.com470e71d2011-07-07 08:21:25 +00005050 // Reset the tone to be added taking the new sample rate into
5051 // account.
5052 _inbandDtmfGenerator.ResetTone();
5053 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005054
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005055 int16_t toneBuffer[320];
5056 uint16_t toneSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005057 // Get 10ms tone segment and set time since last tone to zero
5058 if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
5059 {
5060 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5061 VoEId(_instanceId, _channelId),
5062 "Channel::InsertInbandDtmfTone() inserting DTMF tone failed");
5063 return -1;
5064 }
5065
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005066 // Replace mixed audio with DTMF tone.
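// The frame data is interleaved (for stereo: L0 R0 L1 R1 ...), so the index
// sample * num_channels_ + channel writes the same tone sample to every
// channel at that sample position.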
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005067 for (int sample = 0;
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005068 sample < _audioFrame.samples_per_channel_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005069 sample++)
5070 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005071 for (int channel = 0;
5072 channel < _audioFrame.num_channels_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005073 channel++)
5074 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005075 const int index = sample * _audioFrame.num_channels_ + channel;
5076 _audioFrame.data_[index] = toneBuffer[sample];
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005077 }
5078 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005079
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005080 assert(_audioFrame.samples_per_channel_ == toneSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005081 } else
5082 {
5083 // Add 10ms to "delay-since-last-tone" counter
5084 _inbandDtmfGenerator.UpdateDelaySinceLastTone();
5085 }
5086 return 0;
5087}
5088
niklase@google.com470e71d2011-07-07 08:21:25 +00005089void
5090Channel::ResetDeadOrAliveCounters()
5091{
5092 _countDeadDetections = 0;
5093 _countAliveDetections = 0;
5094}
5095
5096void
5097Channel::UpdateDeadOrAliveCounters(bool alive)
5098{
5099 if (alive)
5100 _countAliveDetections++;
5101 else
5102 _countDeadDetections++;
5103}
5104
5105int
5106Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
5107{
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00005108 bool enabled;
5109 uint8_t timeSec;
5110
5111 _rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, timeSec);
5112 if (!enabled)
5113 return (-1);
5114
5115 countDead = static_cast<int> (_countDeadDetections);
5116 countAlive = static_cast<int> (_countAliveDetections);
niklase@google.com470e71d2011-07-07 08:21:25 +00005117 return 0;
5118}
5119
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005120int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00005121Channel::SendPacketRaw(const void *data, int len, bool RTCP)
5122{
5123 if (_transportPtr == NULL)
5124 {
5125 return -1;
5126 }
5127 if (!RTCP)
5128 {
5129 return _transportPtr->SendPacket(_channelId, data, len);
5130 }
5131 else
5132 {
5133 return _transportPtr->SendRTCPPacket(_channelId, data, len);
5134 }
5135}
5136
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005137// Called for incoming RTP packets after successful RTP header parsing.
5138void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
5139 uint16_t sequence_number) {
5140 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
5141 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
5142 rtp_timestamp, sequence_number);
niklase@google.com470e71d2011-07-07 08:21:25 +00005143
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005144 // Get frequency of last received payload
5145 int rtp_receive_frequency = _audioCodingModule.ReceiveFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +00005146
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005147 CodecInst current_receive_codec;
5148 if (_audioCodingModule.ReceiveCodec(&current_receive_codec) != 0) {
5149 return;
5150 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005151
turaj@webrtc.orge46c8d32013-05-22 20:39:43 +00005152 // Update the least required delay.
5153 least_required_delay_ms_ = _audioCodingModule.LeastRequiredDelayMs();
5154
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005155 if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
5156 // Even though the actual sampling rate for G.722 audio is
5157 // 16,000 Hz, the RTP clock rate for the G722 payload format is
5158 // 8,000 Hz because that value was erroneously assigned in
5159 // RFC 1890 and must remain unchanged for backward compatibility.
5160 rtp_receive_frequency = 8000;
5161 } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
5162 // We are resampling Opus internally to 32,000 Hz until all our
5163 // DSP routines can operate at 48,000 Hz, but the RTP clock
5164 // rate for the Opus payload format is standardized to 48,000 Hz,
5165 // because that is the maximum supported decoding sampling rate.
5166 rtp_receive_frequency = 48000;
5167 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005168
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005169 // playout_timestamp_rtp_ is updated in UpdatePlayoutTimestamp() for every
5170 // incoming packet.
5171 uint32_t timestamp_diff_ms = (rtp_timestamp - playout_timestamp_rtp_) /
5172 (rtp_receive_frequency / 1000);
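// Example (illustrative numbers): for Opus, rtp_receive_frequency is 48000,
// so a timestamp difference of 4800 units maps to 4800 / 48 = 100 ms.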
niklase@google.com470e71d2011-07-07 08:21:25 +00005173
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005174 uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) /
5175 (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00005176
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005177 _previousTimestamp = rtp_timestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00005178
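// A difference larger than twice the maximum minimum playout delay is treated
// as invalid (presumably a timestamp jump or stream restart) and zeroed so it
// does not enter the running average below.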
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005179 if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
5180 timestamp_diff_ms = 0;
5181 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005182
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005183 if (timestamp_diff_ms == 0) return;
niklase@google.com470e71d2011-07-07 08:21:25 +00005184
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005185 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
5186 _recPacketDelayMs = packet_delay_ms;
5187 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005188
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005189 if (_average_jitter_buffer_delay_us == 0) {
5190 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
5191 return;
5192 }
5193
5194 // Filter the average delay with an exponential filter (alpha = 7/8). The
5195 // value is kept as 1000 * delay-in-ms (i.e. in microseconds) to reduce
5196 // rounding error; GetDelayEstimate() divides by 1000 to compensate for
5197 // this scaling.
5198 _average_jitter_buffer_delay_us = (_average_jitter_buffer_delay_us * 7 +
5199 1000 * timestamp_diff_ms + 500) / 8;
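// Worked example (illustrative values): a previous average of 40000 us and a
// new timestamp_diff_ms of 48 gives (40000 * 7 + 1000 * 48 + 500) / 8 =
// 328500 / 8 = 41062 us, i.e. roughly 41 ms.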
niklase@google.com470e71d2011-07-07 08:21:25 +00005200}
5201
5202void
5203Channel::RegisterReceiveCodecsToRTPModule()
5204{
5205 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
5206 "Channel::RegisterReceiveCodecsToRTPModule()");
5207
5208
5209 CodecInst codec;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005210 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00005211
5212 for (int idx = 0; idx < nSupportedCodecs; idx++)
5213 {
5214 // Open up the RTP/RTCP receiver for all supported codecs
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005215 if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
tnakamura@webrtc.orgaa4d96a2013-07-16 19:25:04 +00005216 (_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
niklase@google.com470e71d2011-07-07 08:21:25 +00005217 {
5218 WEBRTC_TRACE(
5219 kTraceWarning,
5220 kTraceVoice,
5221 VoEId(_instanceId, _channelId),
5222 "Channel::RegisterReceiveCodecsToRTPModule() unable"
5223 " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
5224 codec.plname, codec.pltype, codec.plfreq,
5225 codec.channels, codec.rate);
5226 }
5227 else
5228 {
5229 WEBRTC_TRACE(
5230 kTraceInfo,
5231 kTraceVoice,
5232 VoEId(_instanceId, _channelId),
5233 "Channel::RegisterReceiveCodecsToRTPModule() %s "
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00005234 "(%d/%d/%d/%d) has been added to the RTP/RTCP "
niklase@google.com470e71d2011-07-07 08:21:25 +00005235 "receiver",
5236 codec.plname, codec.pltype, codec.plfreq,
5237 codec.channels, codec.rate);
5238 }
5239 }
5240}
5241
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005242int Channel::ApmProcessRx(AudioFrame& frame) {
5243 AudioProcessing* audioproc = _rxAudioProcessingModulePtr;
5244 // Register the (possibly new) frame parameters.
5245 if (audioproc->set_sample_rate_hz(frame.sample_rate_hz_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005246 LOG_FERR1(LS_WARNING, set_sample_rate_hz, frame.sample_rate_hz_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005247 }
5248 if (audioproc->set_num_channels(frame.num_channels_,
5249 frame.num_channels_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005250 LOG_FERR1(LS_WARNING, set_num_channels, frame.num_channels_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005251 }
5252 if (audioproc->ProcessStream(&frame) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005253 LOG_FERR0(LS_WARNING, ProcessStream);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005254 }
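// Failures above are only logged as warnings; the function still returns 0,
// so receive-side processing continues even if APM configuration failed.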
5255 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00005256}
5257
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005258int Channel::SetSecondarySendCodec(const CodecInst& codec,
5259 int red_payload_type) {
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00005260 // Sanity check for payload type.
5261 if (red_payload_type < 0 || red_payload_type > 127) {
5262 _engineStatisticsPtr->SetLastError(
5263 VE_PLTYPE_ERROR, kTraceError,
5264 "SetSecondarySendCodec() invalid RED payload type");
5265 return -1;
5266 }
5267
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005268 if (SetRedPayloadType(red_payload_type) < 0) {
5269 _engineStatisticsPtr->SetLastError(
5270 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5271 "SetSecondarySendCodec() Failed to register RED ACM");
5272 return -1;
5273 }
5274 if (_audioCodingModule.RegisterSecondarySendCodec(codec) < 0) {
5275 _engineStatisticsPtr->SetLastError(
5276 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5277 "SetSecondarySendCodec() Failed to register secondary send codec in "
5278 "ACM");
5279 return -1;
5280 }
5281
5282 return 0;
5283}
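// Usage sketch (hypothetical caller and values, for illustration only):
//   CodecInst secondary = {};   // filled in by the caller, e.g. a low-rate codec
//   secondary.pltype = 103;     // hypothetical payload type
//   channel->SetSecondarySendCodec(secondary, 127);  // 127: assumed RED payload type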
5284
5285void Channel::RemoveSecondarySendCodec() {
5286 _audioCodingModule.UnregisterSecondarySendCodec();
5287}
5288
5289int Channel::GetSecondarySendCodec(CodecInst* codec) {
5290 if (_audioCodingModule.SecondarySendCodec(codec) < 0) {
5291 _engineStatisticsPtr->SetLastError(
5292 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5293 "GetSecondarySendCodec() Failed to get secondary send codec from ACM");
5294 return -1;
5295 }
5296 return 0;
5297}
5298
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00005299// Assuming this method is called with valid payload type.
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005300int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005301 CodecInst codec;
5302 bool found_red = false;
5303
5304 // Get default RED settings from the ACM database
5305 const int num_codecs = AudioCodingModule::NumberOfCodecs();
5306 for (int idx = 0; idx < num_codecs; idx++) {
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005307 _audioCodingModule.Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005308 if (!STR_CASE_CMP(codec.plname, "RED")) {
5309 found_red = true;
5310 break;
5311 }
5312 }
5313
5314 if (!found_red) {
5315 _engineStatisticsPtr->SetLastError(
5316 VE_CODEC_ERROR, kTraceError,
5317 "SetRedPayloadType() RED is not supported");
5318 return -1;
5319 }
5320
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00005321 codec.pltype = red_payload_type;
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005322 if (_audioCodingModule.RegisterSendCodec(codec) < 0) {
5323 _engineStatisticsPtr->SetLastError(
5324 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5325 "SetRedPayloadType() RED registration in ACM module failed");
5326 return -1;
5327 }
5328
5329 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
5330 _engineStatisticsPtr->SetLastError(
5331 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
5332 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
5333 return -1;
5334 }
5335 return 0;
5336}
5337
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00005338} // namespace voe
5339} // namespace webrtc