/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/channel.h"

#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/utility/interface/audio_frame_operations.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/utility.h"

#if defined(_WIN32)
#include <Qos.h>
#endif

namespace webrtc {
namespace voe {

WebRtc_Word32
Channel::SendData(FrameType frameType,
                  WebRtc_UWord8 payloadType,
                  WebRtc_UWord32 timeStamp,
                  const WebRtc_UWord8* payloadData,
                  WebRtc_UWord16 payloadSize,
                  const RTPFragmentationHeader* fragmentation)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
                 " payloadSize=%u, fragmentation=0x%x)",
                 frameType, payloadType, timeStamp, payloadSize, fragmentation);

    if (_includeAudioLevelIndication)
    {
        assert(_rtpAudioProc.get() != NULL);
        // Store current audio level in the RTP/RTCP module.
        // The level will be used in combination with voice-activity state
        // (frameType) to add an RTP header extension
        _rtpRtcpModule->SetAudioLevel(_rtpAudioProc->level_estimator()->RMS());
    }

    // Push data from ACM to RTP/RTCP-module to deliver audio frame for
    // packetization.
    // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
    if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
                                         payloadType,
                                         timeStamp,
                                         // Leaving the time when this frame was
                                         // received from the capture device as
                                         // undefined for voice for now.
                                         -1,
                                         payloadData,
                                         payloadSize,
                                         fragmentation) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
            "Channel::SendData() failed to send data to RTP/RTCP module");
        return -1;
    }

    _lastLocalTimeStamp = timeStamp;
    _lastPayloadType = payloadType;

    return 0;
}

WebRtc_Word32
Channel::InFrameType(WebRtc_Word16 frameType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::InFrameType(frameType=%d)", frameType);

    CriticalSectionScoped cs(&_callbackCritSect);
    // 1 indicates speech
    _sendFrameType = (frameType == 1) ? 1 : 0;
    return 0;
}

WebRtc_Word32
Channel::OnRxVadDetected(const int vadDecision)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);

    CriticalSectionScoped cs(&_callbackCritSect);
    if (_rxVadObserverPtr)
    {
        _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
    }

    return 0;
}

int
Channel::SendPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendPacket(channel=%d, len=%d)", channel, len);

    if (_transportPtr == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() failed to send RTP packet due to"
                     " invalid transport object");
        return -1;
    }

    // Insert an extra RTP packet if the user has called the
    // InsertExtraRTPPacket API.
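    // (The second byte of an RTP header carries the marker bit in its most
    // significant bit and the 7-bit payload type in the remaining bits, per
    // RFC 3550; that is the byte rewritten below.)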
    if (_insertExtraRTPPacket)
    {
        WebRtc_UWord8* rtpHdr = (WebRtc_UWord8*)data;
        WebRtc_UWord8 M_PT(0);
        if (_extraMarkerBit)
        {
            M_PT = 0x80;            // set the M-bit
        }
        M_PT += _extraPayloadType;  // set the payload type
        *(++rtpHdr) = M_PT;     // modify the M|PT-byte within the RTP header
        _insertExtraRTPPacket = false;  // insert one packet only
    }

    WebRtc_UWord8* bufferToSendPtr = (WebRtc_UWord8*)data;
    WebRtc_Word32 bufferLength = len;

    // Dump the RTP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const WebRtc_UWord8*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() RTP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTPBufferPtr =
                    new WebRtc_UWord8[kVoiceEngineMaxIpPacketSizeBytes];
                memset(_encryptionRTPBufferPtr, 0,
                       kVoiceEngineMaxIpPacketSizeBytes);
            }

            // Perform encryption (SRTP or external)
            WebRtc_Word32 encryptedBufferLength = 0;
            _encryptionPtr->encrypt(_channelId,
                                    bufferToSendPtr,
                                    _encryptionRTPBufferPtr,
                                    bufferLength,
                                    (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED,
                    kTraceError, "Channel::SendPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        int n = _transportPtr->SendPacket(channel,
                                          bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }
}

int
Channel::SendRTCPPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() failed to send RTCP packet"
                         " due to invalid transport object");
            return -1;
        }
    }

    WebRtc_UWord8* bufferToSendPtr = (WebRtc_UWord8*)data;
    WebRtc_Word32 bufferLength = len;

    // Dump the RTCP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const WebRtc_UWord8*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendRTCPPacket() RTCP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTCPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTCPBufferPtr =
                    new WebRtc_UWord8[kVoiceEngineMaxIpPacketSizeBytes];
            }

            // Perform encryption (SRTP or external).
            WebRtc_Word32 encryptedBufferLength = 0;
            _encryptionPtr->encrypt_rtcp(_channelId,
                                         bufferToSendPtr,
                                         _encryptionRTCPBufferPtr,
                                         bufferLength,
                                         (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED, kTraceError,
                    "Channel::SendRTCPPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTCPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            return -1;
        }
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }

    return len;
}

void
Channel::OnPlayTelephoneEvent(const WebRtc_Word32 id,
                              const WebRtc_UWord8 event,
                              const WebRtc_UWord16 lengthMs,
                              const WebRtc_UWord8 volume)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
                 " volume=%u)", id, event, lengthMs, volume);

    if (!_playOutbandDtmfEvent || (event > 15))
    {
        // Ignore callback since feedback is disabled or event is not a
        // Dtmf tone event.
        return;
    }

    assert(_outputMixerPtr != NULL);

    // Start playing out the Dtmf tone (if playout is enabled).
    // Shorten the tone by 80 ms to reduce the risk of echo.
    _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
}

void
Channel::OnIncomingSSRCChanged(const WebRtc_Word32 id,
                               const WebRtc_UWord32 SSRC)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
                 id, SSRC);

    WebRtc_Word32 channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Reset RTP-module counters since a new incoming RTP stream is detected
    _rtpRtcpModule->ResetReceiveDataCountersRTP();
    _rtpRtcpModule->ResetStatisticsRTP();

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            // Send new SSRC to registered observer using callback
            _rtpObserverPtr->OnIncomingSSRCChanged(channel, SSRC);
        }
    }
}

void Channel::OnIncomingCSRCChanged(const WebRtc_Word32 id,
                                    const WebRtc_UWord32 CSRC,
                                    const bool added)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
                 id, CSRC, added);

    WebRtc_Word32 channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            _rtpObserverPtr->OnIncomingCSRCChanged(channel, CSRC, added);
        }
    }
}

void
Channel::OnApplicationDataReceived(const WebRtc_Word32 id,
                                   const WebRtc_UWord8 subType,
                                   const WebRtc_UWord32 name,
                                   const WebRtc_UWord16 length,
                                   const WebRtc_UWord8* data)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
                 " name=%u, length=%u)",
                 id, subType, name, length);

    WebRtc_Word32 channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtcpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtcpObserverPtr)
        {
            _rtcpObserverPtr->OnApplicationDataReceived(channel,
                                                        subType,
                                                        name,
                                                        data,
                                                        length);
        }
    }
}

WebRtc_Word32
Channel::OnInitializeDecoder(
    const WebRtc_Word32 id,
    const WebRtc_Word8 payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    const int frequency,
    const WebRtc_UWord8 channels,
    const WebRtc_UWord32 rate)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
                 "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
                 id, payloadType, payloadName, frequency, channels, rate);

    assert(VoEChannelId(id) == _channelId);

    CodecInst receiveCodec = {0};
    CodecInst dummyCodec = {0};

    receiveCodec.pltype = payloadType;
    receiveCodec.plfreq = frequency;
    receiveCodec.channels = channels;
    receiveCodec.rate = rate;
    strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

    _audioCodingModule.Codec(payloadName, &dummyCodec, frequency, channels);
    receiveCodec.pacsize = dummyCodec.pacsize;

    // Register the new codec to the ACM
    if (_audioCodingModule.RegisterReceiveCodec(receiveCodec) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "Channel::OnInitializeDecoder() invalid codec ("
                     "pt=%d, name=%s) received - 1", payloadType, payloadName);
        _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
        return -1;
    }

    return 0;
}

void
Channel::OnPacketTimeout(const WebRtc_Word32 id)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPacketTimeout(id=%d)", id);

    CriticalSectionScoped cs(_callbackCritSectPtr);
    if (_voiceEngineObserverPtr)
    {
        if (_receiving || _externalTransport)
        {
            WebRtc_Word32 channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Ensure that next OnReceivedPacket() callback will trigger
            // a VE_PACKET_RECEIPT_RESTARTED callback.
            _rtpPacketTimedOut = true;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnPacketTimeout() => "
                         "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
            _voiceEngineObserverPtr->CallbackOnError(channel,
                                                     VE_RECEIVE_PACKET_TIMEOUT);
        }
    }
}

void
Channel::OnReceivedPacket(const WebRtc_Word32 id,
                          const RtpRtcpPacketType packetType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPacket(id=%d, packetType=%d)",
                 id, packetType);

    assert(VoEChannelId(id) == _channelId);

    // Notify only for the case when we have restarted an RTP session.
    if (_rtpPacketTimedOut && (kPacketRtp == packetType))
    {
        CriticalSectionScoped cs(_callbackCritSectPtr);
        if (_voiceEngineObserverPtr)
        {
            WebRtc_Word32 channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Reset timeout mechanism
            _rtpPacketTimedOut = false;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnPacketTimeout() =>"
                         " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
            _voiceEngineObserverPtr->CallbackOnError(
                channel,
                VE_PACKET_RECEIPT_RESTARTED);
        }
    }
}

void
Channel::OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
                               const RTPAliveType alive)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);

    if (!_connectionObserver)
        return;

    WebRtc_Word32 channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Use Alive as default to limit risk of false Dead detections
    bool isAlive(true);

    // Always mark the connection as Dead when the module reports kRtpDead
    if (kRtpDead == alive)
    {
        isAlive = false;
    }

    // It is possible that the connection is alive even if no RTP packet has
    // been received for a long time since the other side might use VAD/DTX
    // and a low SID-packet update rate.
    if ((kRtpNoRtp == alive) && _playing)
    {
        // Detect Alive for all NetEQ states except for the case when we are
        // in PLC_CNG state.
        // PLC_CNG <=> background noise only due to long expand or error.
        // Note that the case where the other side stops sending during CNG
        // state will be detected as Alive. Dead is not set until RTCP packets
        // have been missing for at least twelve seconds (handled internally
        // by the RTP/RTCP module).
        isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
    }

    UpdateDeadOrAliveCounters(isAlive);

    // Send callback to the registered observer
    if (_connectionObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_connectionObserverPtr)
        {
            _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
        }
    }
}

WebRtc_Word32
Channel::OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
                               const WebRtc_UWord16 payloadSize,
                               const WebRtcRTPHeader* rtpHeader)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPayloadData(payloadSize=%d,"
                 " payloadType=%u, audioChannel=%u)",
                 payloadSize,
                 rtpHeader->header.payloadType,
                 rtpHeader->type.Audio.channel);

    _lastRemoteTimeStamp = rtpHeader->header.timestamp;

    if (!_playing)
    {
        // Avoid inserting into NetEQ when we are not playing. Count the
        // packet as discarded.
        WEBRTC_TRACE(kTraceStream, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "received packet is discarded since playing is not"
                     " activated");
        _numberOfDiscardedPackets++;
        return 0;
    }

    // Push the incoming payload (parsed and ready for decoding) into the ACM
    if (_audioCodingModule.IncomingPacket(payloadData,
                                          payloadSize,
                                          *rtpHeader) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
            "Channel::OnReceivedPayloadData() unable to push data to the ACM");
        return -1;
    }

    // Update the packet delay
    UpdatePacketDelay(rtpHeader->header.timestamp,
                      rtpHeader->header.sequenceNumber);

    return 0;
}

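// GetAudioFrame() pulls one 10 ms frame from the ACM and then runs the
// receive-side processing chain on it: Rx VAD, far-end audio processing,
// output gain scaling, panning, file mixing, on-hold muting, external media
// processing, optional file recording and output level metering.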
WebRtc_Word32 Channel::GetAudioFrame(const WebRtc_Word32 id,
                                     AudioFrame& audioFrame)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetAudioFrame(id=%d)", id);

    // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
    if (_audioCodingModule.PlayoutData10Ms(audioFrame.sample_rate_hz_,
                                           &audioFrame) == -1)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
        // In all likelihood, the audio in this frame is garbage. We return an
        // error so that the audio mixer module doesn't add it to the mix. As
        // a result, it won't be played out and the actions skipped here are
        // irrelevant.
        return -1;
    }

    if (_RxVadDetection)
    {
        UpdateRxVadDetection(audioFrame);
    }

    // Convert module ID to internal VoE channel ID
    audioFrame.id_ = VoEChannelId(audioFrame.id_);
    // Store speech type for dead-or-alive detection
    _outputSpeechType = audioFrame.speech_type_;

    // Perform far-end AudioProcessing module processing on the received signal
    if (_rxApmIsEnabled)
    {
        ApmProcessRx(audioFrame);
    }

    // Output volume scaling
    if (_outputGain < 0.99f || _outputGain > 1.01f)
    {
        AudioFrameOperations::ScaleWithSat(_outputGain, audioFrame);
    }

    // Scale left and/or right channel(s) if stereo and master balance is
    // active

    if (_panLeft != 1.0f || _panRight != 1.0f)
    {
        if (audioFrame.num_channels_ == 1)
        {
            // Emulate stereo mode since panning is active.
            // The mono signal is copied to both left and right channels here.
            AudioFrameOperations::MonoToStereo(&audioFrame);
        }
        // For true stereo mode (when we are receiving a stereo signal), no
        // action is needed.

        // Do the panning operation (the audio frame contains stereo at this
        // stage)
        AudioFrameOperations::Scale(_panLeft, _panRight, audioFrame);
    }

    // Mix decoded PCM output with file if file mixing is enabled
    if (_outputFilePlaying)
    {
        MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
    }

    // Place channel in on-hold state (~muted) if on-hold is activated
    if (_outputIsOnHold)
    {
        AudioFrameOperations::Mute(audioFrame);
    }

    // External media
    if (_outputExternalMedia)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        const bool isStereo = (audioFrame.num_channels_ == 2);
        if (_outputExternalMediaCallbackPtr)
        {
            _outputExternalMediaCallbackPtr->Process(
                _channelId,
                kPlaybackPerChannel,
                (WebRtc_Word16*)audioFrame.data_,
                audioFrame.samples_per_channel_,
                audioFrame.sample_rate_hz_,
                isStereo);
        }
    }

    // Record playout if enabled
    {
        CriticalSectionScoped cs(&_fileCritSect);

        if (_outputFileRecording && _outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
        }
    }

    // Measure audio level (0-9)
    _outputAudioLevel.ComputeLevel(audioFrame);

    return 0;
}

WebRtc_Word32
Channel::NeededFrequency(const WebRtc_Word32 id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::NeededFrequency(id=%d)", id);

    int highestNeeded = 0;

    // Determine highest needed receive frequency
    WebRtc_Word32 receiveFrequency = _audioCodingModule.ReceiveFrequency();

    // Return the bigger of playout and receive frequency in the ACM.
    if (_audioCodingModule.PlayoutFrequency() > receiveFrequency)
    {
        highestNeeded = _audioCodingModule.PlayoutFrequency();
    }
    else
    {
        highestNeeded = receiveFrequency;
    }

    // Special case, if we're playing a file on the playout side
    // we take that frequency into consideration as well
    // This is not needed on sending side, since the codec will
    // limit the spectrum anyway.
    if (_outputFilePlaying)
    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_outputFilePlayerPtr && _outputFilePlaying)
        {
            if (_outputFilePlayerPtr->Frequency() > highestNeeded)
            {
                highestNeeded = _outputFilePlayerPtr->Frequency();
            }
        }
    }

    return(highestNeeded);
}

WebRtc_Word32
Channel::CreateChannel(Channel*& channel,
                       const WebRtc_Word32 channelId,
                       const WebRtc_UWord32 instanceId)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
                 "Channel::CreateChannel(channelId=%d, instanceId=%d)",
                 channelId, instanceId);

    channel = new Channel(channelId, instanceId);
    if (channel == NULL)
    {
        WEBRTC_TRACE(kTraceMemory, kTraceVoice,
                     VoEId(instanceId,channelId),
                     "Channel::CreateChannel() unable to allocate memory for"
                     " channel");
        return -1;
    }
    return 0;
}

void
Channel::PlayNotification(const WebRtc_Word32 id,
                          const WebRtc_UWord32 durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet
}

void
Channel::RecordNotification(const WebRtc_Word32 id,
                            const WebRtc_UWord32 durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet
}

void
Channel::PlayFileEnded(const WebRtc_Word32 id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayFileEnded(id=%d)", id);

    if (id == _inputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _inputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => input file player module is"
                     " shutdown");
    }
    else if (id == _outputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _outputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => output file player module is"
                     " shutdown");
    }
}

void
Channel::RecordFileEnded(const WebRtc_Word32 id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded(id=%d)", id);

    assert(id == _outputFileRecorderId);

    CriticalSectionScoped cs(&_fileCritSect);

    _outputFileRecording = false;
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                 VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded() => output file recorder module is"
                 " shutdown");
}

Channel::Channel(const WebRtc_Word32 channelId,
                 const WebRtc_UWord32 instanceId) :
    _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _instanceId(instanceId),
    _channelId(channelId),
    _audioCodingModule(*AudioCodingModule::Create(
        VoEModuleId(instanceId, channelId))),
    _rtpDumpIn(*RtpDump::CreateRtpDump()),
    _rtpDumpOut(*RtpDump::CreateRtpDump()),
    _outputAudioLevel(),
    _externalTransport(false),
    _inputFilePlayerPtr(NULL),
    _outputFilePlayerPtr(NULL),
    _outputFileRecorderPtr(NULL),
    // Avoid conflicts with other channels by adding 1024 - 1026;
    // we won't use as many as 1024 channels.
    _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
    _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
    _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
    _inputFilePlaying(false),
    _outputFilePlaying(false),
    _outputFileRecording(false),
    _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
    _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
    _inputExternalMedia(false),
    _outputExternalMedia(false),
    _inputExternalMediaCallbackPtr(NULL),
    _outputExternalMediaCallbackPtr(NULL),
    _encryptionRTPBufferPtr(NULL),
    _decryptionRTPBufferPtr(NULL),
    _encryptionRTCPBufferPtr(NULL),
    _decryptionRTCPBufferPtr(NULL),
    _timeStamp(0), // This is just an offset, the RTP module will add its own random offset
    _sendTelephoneEventPayloadType(106),
    _playoutTimeStampRTP(0),
    _playoutTimeStampRTCP(0),
    _numberOfDiscardedPackets(0),
    _engineStatisticsPtr(NULL),
    _outputMixerPtr(NULL),
    _transmitMixerPtr(NULL),
    _moduleProcessThreadPtr(NULL),
    _audioDeviceModulePtr(NULL),
    _voiceEngineObserverPtr(NULL),
    _callbackCritSectPtr(NULL),
    _transportPtr(NULL),
    _encryptionPtr(NULL),
    _rtpAudioProc(NULL),
    _rxAudioProcessingModulePtr(NULL),
    _rxVadObserverPtr(NULL),
    _oldVadDecision(-1),
    _sendFrameType(0),
    _rtpObserverPtr(NULL),
    _rtcpObserverPtr(NULL),
    _outputIsOnHold(false),
    _externalPlayout(false),
    _externalMixing(false),
    _inputIsOnHold(false),
    _playing(false),
    _sending(false),
    _receiving(false),
    _mixFileWithMicrophone(false),
    _rtpObserver(false),
    _rtcpObserver(false),
    _mute(false),
    _panLeft(1.0f),
    _panRight(1.0f),
    _outputGain(1.0f),
    _encrypting(false),
    _decrypting(false),
    _playOutbandDtmfEvent(false),
    _playInbandDtmfEvent(false),
    _extraPayloadType(0),
    _insertExtraRTPPacket(false),
    _extraMarkerBit(false),
    _lastLocalTimeStamp(0),
    _lastRemoteTimeStamp(0),
    _lastPayloadType(0),
    _includeAudioLevelIndication(false),
    _rtpPacketTimedOut(false),
    _rtpPacketTimeOutIsEnabled(false),
    _rtpTimeOutSeconds(0),
    _connectionObserver(false),
    _connectionObserverPtr(NULL),
    _countAliveDetections(0),
    _countDeadDetections(0),
    _outputSpeechType(AudioFrame::kNormalSpeech),
    _averageDelayMs(0),
    _previousSequenceNumber(0),
    _previousTimestamp(0),
    _recPacketDelayMs(20),
    _RxVadDetection(false),
    _rxApmIsEnabled(false),
    _rxAgcIsEnabled(false),
    _rxNsIsEnabled(false)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Channel() - ctor");
    _inbandDtmfQueue.ResetDtmf();
    _inbandDtmfGenerator.Init();
    _outputAudioLevel.Clear();

    RtpRtcp::Configuration configuration;
    configuration.id = VoEModuleId(instanceId, channelId);
    configuration.audio = true;
    configuration.incoming_data = this;
    configuration.incoming_messages = this;
    configuration.outgoing_transport = this;
    configuration.rtcp_feedback = this;
    configuration.audio_messages = this;

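    // The channel registers itself as incoming-data sink, incoming-message
    // handler, outgoing transport, RTCP feedback observer and audio-message
    // callback for the RTP/RTCP module created from the configuration above.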
    _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));

    // Create far end AudioProcessing Module
    _rxAudioProcessingModulePtr = AudioProcessing::Create(
        VoEModuleId(instanceId, channelId));
}

Channel::~Channel()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::~Channel() - dtor");

    if (_outputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
    }
    if (_inputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kRecordingPerChannel);
    }
    StopSend();
    StopPlayout();

    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_inputFilePlayerPtr)
        {
            _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _inputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
            _inputFilePlayerPtr = NULL;
        }
        if (_outputFilePlayerPtr)
        {
            _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _outputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
            _outputFilePlayerPtr = NULL;
        }
        if (_outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
            _outputFileRecorderPtr->StopRecording();
            FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
            _outputFileRecorderPtr = NULL;
        }
    }

    // The order to safely shut down modules in a channel is:
    // 1. De-register callbacks in modules
    // 2. De-register modules in process thread
    // 3. Destroy modules
    if (_audioCodingModule.RegisterTransportCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register transport callback"
                     " (Audio coding module)");
    }
    if (_audioCodingModule.RegisterVADCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register VAD callback"
                     " (Audio coding module)");
    }
    // De-register modules in process thread
    if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to deregister RTP/RTCP module");
    }

    // Destroy modules
    AudioCodingModule::Destroy(&_audioCodingModule);
    if (_rxAudioProcessingModulePtr != NULL)
    {
        AudioProcessing::Destroy(_rxAudioProcessingModulePtr); // far end APM
        _rxAudioProcessingModulePtr = NULL;
    }

    // End of modules shutdown

    // Delete other objects
    RtpDump::DestroyRtpDump(&_rtpDumpIn);
    RtpDump::DestroyRtpDump(&_rtpDumpOut);
    delete [] _encryptionRTPBufferPtr;
    delete [] _decryptionRTPBufferPtr;
    delete [] _encryptionRTCPBufferPtr;
    delete [] _decryptionRTCPBufferPtr;
    delete &_callbackCritSect;
    delete &_fileCritSect;
}

WebRtc_Word32
Channel::Init()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Init()");

    // --- Initial sanity

    if ((_engineStatisticsPtr == NULL) ||
        (_moduleProcessThreadPtr == NULL))
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::Init() must call SetEngineInformation() first");
        return -1;
    }

    // --- Add modules to process thread (for periodic scheduling)

    const bool processThreadFail =
        ((_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0) ||
        false);
    if (processThreadFail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() modules not registered");
        return -1;
    }
    // --- ACM initialization

    if ((_audioCodingModule.InitializeReceiver() == -1) ||
#ifdef WEBRTC_CODEC_AVT
        // out-of-band Dtmf tones are played out by default
        (_audioCodingModule.SetDtmfPlayoutStatus(true) == -1) ||
#endif
        (_audioCodingModule.InitializeSender() == -1))
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "Channel::Init() unable to initialize the ACM - 1");
        return -1;
    }

    // --- RTP/RTCP module initialization

    // Ensure that RTCP is enabled by default for the created channel.
    // Note that the module will keep generating RTCP until it is explicitly
    // disabled by the user.
    // After StopListen (when no sockets exist), RTCP packets will no longer
    // be transmitted since the Transport object will then be invalid.

    const bool rtpRtcpFail =
        ((_rtpRtcpModule->SetTelephoneEventForwardToDecoder(true) == -1) ||
        // RTCP is enabled by default
        (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1));
    if (rtpRtcpFail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
            "Channel::Init() RTP/RTCP module not initialized");
        return -1;
    }

    // --- Register all permanent callbacks
    const bool fail =
        (_audioCodingModule.RegisterTransportCallback(this) == -1) ||
        (_audioCodingModule.RegisterVADCallback(this) == -1);

    if (fail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() callbacks not registered");
        return -1;
    }

    // --- Register all supported codecs to the receiving side of the
    //     RTP/RTCP module

    CodecInst codec;
    const WebRtc_UWord8 nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

    for (int idx = 0; idx < nSupportedCodecs; idx++)
    {
        // Open up the RTP/RTCP receiver for all supported codecs
        if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
            (_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() unable to register %s (%d/%d/%d/%d) "
                         "to RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }
        else
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() %s (%d/%d/%d/%d) has been added to "
                         "the RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }

        // Ensure that PCMU is used as default codec on the sending side
        if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
        {
            SetSendCodec(codec);
        }

        // Register default PT for outband 'telephone-event'
        if (!STR_CASE_CMP(codec.plname, "telephone-event"))
        {
            if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register outband "
                             "'telephone-event' (%d/%d) correctly",
                             codec.pltype, codec.plfreq);
            }
        }

        if (!STR_CASE_CMP(codec.plname, "CN"))
        {
            if ((_audioCodingModule.RegisterSendCodec(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1) ||
                (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register CN (%d/%d) "
                             "correctly - 1",
                             codec.pltype, codec.plfreq);
            }
        }
#ifdef WEBRTC_CODEC_RED
        // Register RED to the receiving side of the ACM.
        // We will not receive an OnInitializeDecoder() callback for RED.
        if (!STR_CASE_CMP(codec.plname, "RED"))
        {
            if (_audioCodingModule.RegisterReceiveCodec(codec) == -1)
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register RED (%d/%d) "
                             "correctly",
                             codec.pltype, codec.plfreq);
            }
        }
#endif
    }

    // Initialize the far end AP module
    // Using 8 kHz as initial Fs, the same as in transmission. Might be
    // changed at the first received audio.
    if (_rxAudioProcessingModulePtr == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_NO_MEMORY, kTraceCritical,
            "Channel::Init() failed to create the far-end AudioProcessing"
            " module");
        return -1;
    }

    if (_rxAudioProcessingModulePtr->set_sample_rate_hz(8000))
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the sample rate to 8K for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->set_num_channels(1, 1) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_SOUNDCARD_ERROR, kTraceWarning,
            "Init() failed to set channels for the primary audio stream");
    }

    if (_rxAudioProcessingModulePtr->high_pass_filter()->Enable(
        WEBRTC_VOICE_ENGINE_RX_HP_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the high-pass filter for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(
        (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction level for far-end"
            " AP module");
    }
    if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(
        WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction state for far-end"
            " AP module");
    }

    if (_rxAudioProcessingModulePtr->gain_control()->set_mode(
        (GainControl::Mode)WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC mode for far-end AP module");
    }
    if (_rxAudioProcessingModulePtr->gain_control()->Enable(
        WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC state for far-end AP module");
    }

    return 0;
}

WebRtc_Word32
Channel::SetEngineInformation(Statistics& engineStatistics,
                              OutputMixer& outputMixer,
                              voe::TransmitMixer& transmitMixer,
                              ProcessThread& moduleProcessThread,
                              AudioDeviceModule& audioDeviceModule,
                              VoiceEngineObserver* voiceEngineObserver,
                              CriticalSectionWrapper* callbackCritSect)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetEngineInformation()");
    _engineStatisticsPtr = &engineStatistics;
    _outputMixerPtr = &outputMixer;
    _transmitMixerPtr = &transmitMixer;
    _moduleProcessThreadPtr = &moduleProcessThread;
    _audioDeviceModulePtr = &audioDeviceModule;
    _voiceEngineObserverPtr = voiceEngineObserver;
    _callbackCritSectPtr = callbackCritSect;
    return 0;
}

WebRtc_Word32
Channel::UpdateLocalTimeStamp()
{

    _timeStamp += _audioFrame.samples_per_channel_;
    return 0;
}

WebRtc_Word32
Channel::StartPlayout()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartPlayout()");
    if (_playing)
    {
        return 0;
    }

    if (!_externalMixing) {
        // Add participant as a candidate for mixing.
        if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
                "StartPlayout() failed to add participant to mixer");
            return -1;
        }
    }

    _playing = true;

    if (RegisterFilePlayingToMixer() != 0)
        return -1;

    return 0;
}

WebRtc_Word32
Channel::StopPlayout()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StopPlayout()");
    if (!_playing)
    {
        return 0;
    }

    if (!_externalMixing) {
        // Remove participant as a candidate for mixing.
        if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
                "StopPlayout() failed to remove participant from mixer");
            return -1;
        }
    }

    _playing = false;
    _outputAudioLevel.Clear();

    return 0;
}

WebRtc_Word32
Channel::StartSend()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartSend()");
    {
        // A lock is needed because |_sending| can be accessed or modified by
        // another thread at the same time.
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_sending)
        {
            return 0;
        }
        _sending = true;
    }

    if (_rtpRtcpModule->SetSendingStatus(true) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
            "StartSend() RTP/RTCP failed to start sending");
        CriticalSectionScoped cs(&_callbackCritSect);
        _sending = false;
        return -1;
    }

    return 0;
}

WebRtc_Word32
Channel::StopSend()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StopSend()");
    {
        // A lock is needed because |_sending| can be accessed or modified by
        // another thread at the same time.
        CriticalSectionScoped cs(&_callbackCritSect);

        if (!_sending)
        {
            return 0;
        }
        _sending = false;
    }

    // Reset sending SSRC and sequence number and trigger direct transmission
    // of RTCP BYE
    if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
        _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
            "StopSend() RTP/RTCP failed to stop sending");
    }

    return 0;
}

WebRtc_Word32
Channel::StartReceiving()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartReceiving()");
    if (_receiving)
    {
        return 0;
    }
    _receiving = true;
    _numberOfDiscardedPackets = 0;
    return 0;
}

WebRtc_Word32
Channel::StopReceiving()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StopReceiving()");
    if (!_receiving)
    {
        return 0;
    }

    // Recover DTMF detection status.
    WebRtc_Word32 ret = _rtpRtcpModule->SetTelephoneEventForwardToDecoder(true);
    if (ret != 0) {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_OPERATION, kTraceWarning,
            "StopReceiving() failed to restore telephone-event status.");
    }
    RegisterReceiveCodecsToRTPModule();
    _receiving = false;
    return 0;
}

niklase@google.com470e71d2011-07-07 08:21:25 +00001480WebRtc_Word32
1481Channel::SetNetEQPlayoutMode(NetEqModes mode)
1482{
1483 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1484 "Channel::SetNetEQPlayoutMode()");
1485 AudioPlayoutMode playoutMode(voice);
1486 switch (mode)
1487 {
1488 case kNetEqDefault:
1489 playoutMode = voice;
1490 break;
1491 case kNetEqStreaming:
1492 playoutMode = streaming;
1493 break;
1494 case kNetEqFax:
1495 playoutMode = fax;
1496 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001497 case kNetEqOff:
1498 playoutMode = off;
1499 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00001500 }
1501 if (_audioCodingModule.SetPlayoutMode(playoutMode) != 0)
1502 {
1503 _engineStatisticsPtr->SetLastError(
1504 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1505 "SetNetEQPlayoutMode() failed to set playout mode");
1506 return -1;
1507 }
1508 return 0;
1509}
1510
1511WebRtc_Word32
1512Channel::GetNetEQPlayoutMode(NetEqModes& mode)
1513{
1514 const AudioPlayoutMode playoutMode = _audioCodingModule.PlayoutMode();
1515 switch (playoutMode)
1516 {
1517 case voice:
1518 mode = kNetEqDefault;
1519 break;
1520 case streaming:
1521 mode = kNetEqStreaming;
1522 break;
1523 case fax:
1524 mode = kNetEqFax;
1525 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001526 case off:
1527 mode = kNetEqOff;
niklase@google.com470e71d2011-07-07 08:21:25 +00001528 }
1529 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
1530 VoEId(_instanceId,_channelId),
1531 "Channel::GetNetEQPlayoutMode() => mode=%u", mode);
1532 return 0;
1533}
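
// Illustrative sketch (not part of the original source): the NetEqModes <->
// AudioPlayoutMode mapping above is symmetric, so a set followed by a get
// returns the same value. |channel| is assumed to be a valid voe::Channel*.
//
//   NetEqModes mode(kNetEqDefault);
//   channel->SetNetEQPlayoutMode(kNetEqStreaming);  // ACM mode "streaming"
//   channel->GetNetEQPlayoutMode(mode);             // mode == kNetEqStreaming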
1534
1535WebRtc_Word32
niklase@google.com470e71d2011-07-07 08:21:25 +00001536Channel::SetOnHoldStatus(bool enable, OnHoldModes mode)
1537{
1538 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1539 "Channel::SetOnHoldStatus()");
1540 if (mode == kHoldSendAndPlay)
1541 {
1542 _outputIsOnHold = enable;
1543 _inputIsOnHold = enable;
1544 }
1545 else if (mode == kHoldPlayOnly)
1546 {
1547 _outputIsOnHold = enable;
1548 }
1549 if (mode == kHoldSendOnly)
1550 {
1551 _inputIsOnHold = enable;
1552 }
1553 return 0;
1554}
1555
1556WebRtc_Word32
1557Channel::GetOnHoldStatus(bool& enabled, OnHoldModes& mode)
1558{
1559 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1560 "Channel::GetOnHoldStatus()");
1561 enabled = (_outputIsOnHold || _inputIsOnHold);
1562 if (_outputIsOnHold && _inputIsOnHold)
1563 {
1564 mode = kHoldSendAndPlay;
1565 }
1566 else if (_outputIsOnHold && !_inputIsOnHold)
1567 {
1568 mode = kHoldPlayOnly;
1569 }
1570 else if (!_outputIsOnHold && _inputIsOnHold)
1571 {
1572 mode = kHoldSendOnly;
1573 }
1574 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1575 "Channel::GetOnHoldStatus() => enabled=%d, mode=%d",
1576 enabled, mode);
1577 return 0;
1578}
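
// Illustrative sketch (not part of the original source): how the two hold
// flags are encoded back into an OnHoldModes value by GetOnHoldStatus().
//
//   bool enabled(false);
//   OnHoldModes mode(kHoldSendAndPlay);
//   channel->SetOnHoldStatus(true, kHoldPlayOnly);  // sets |_outputIsOnHold|
//   channel->GetOnHoldStatus(enabled, mode);        // enabled == true,
//                                                   // mode == kHoldPlayOnly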
1579
1580WebRtc_Word32
1581Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
1582{
1583 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1584 "Channel::RegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001585 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001586
1587 if (_voiceEngineObserverPtr)
1588 {
1589 _engineStatisticsPtr->SetLastError(
1590 VE_INVALID_OPERATION, kTraceError,
1591 "RegisterVoiceEngineObserver() observer already enabled");
1592 return -1;
1593 }
1594 _voiceEngineObserverPtr = &observer;
1595 return 0;
1596}
1597
1598WebRtc_Word32
1599Channel::DeRegisterVoiceEngineObserver()
1600{
1601 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1602 "Channel::DeRegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001603 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001604
1605 if (!_voiceEngineObserverPtr)
1606 {
1607 _engineStatisticsPtr->SetLastError(
1608 VE_INVALID_OPERATION, kTraceWarning,
1609 "DeRegisterVoiceEngineObserver() observer already disabled");
1610 return 0;
1611 }
1612 _voiceEngineObserverPtr = NULL;
1613 return 0;
1614}
1615
1616WebRtc_Word32
niklase@google.com470e71d2011-07-07 08:21:25 +00001617Channel::GetSendCodec(CodecInst& codec)
1618{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001619 return (_audioCodingModule.SendCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001620}
1621
1622WebRtc_Word32
1623Channel::GetRecCodec(CodecInst& codec)
1624{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001625 return (_audioCodingModule.ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001626}
1627
1628WebRtc_Word32
1629Channel::SetSendCodec(const CodecInst& codec)
1630{
1631 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1632 "Channel::SetSendCodec()");
1633
1634 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1635 {
1636 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1637 "SetSendCodec() failed to register codec to ACM");
1638 return -1;
1639 }
1640
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001641 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001642 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001643 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1644 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001645 {
1646 WEBRTC_TRACE(
1647 kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1648 "SetSendCodec() failed to register codec to"
1649 " RTP/RTCP module");
1650 return -1;
1651 }
1652 }
1653
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001654 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001655 {
1656 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1657 "SetSendCodec() failed to set audio packet size");
1658 return -1;
1659 }
1660
1661 return 0;
1662}
1663
1664WebRtc_Word32
1665Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
1666{
1667 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1668 "Channel::SetVADStatus(mode=%d)", mode);
1669 // To disable VAD, DTX must be disabled too
1670 disableDTX = ((enableVAD == false) ? true : disableDTX);
1671 if (_audioCodingModule.SetVAD(!disableDTX, enableVAD, mode) != 0)
1672 {
1673 _engineStatisticsPtr->SetLastError(
1674 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1675 "SetVADStatus() failed to set VAD");
1676 return -1;
1677 }
1678 return 0;
1679}
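
// Illustrative sketch (not part of the original source): the VAD/DTX coupling
// enforced above. Disabling VAD forces DTX off as well, since DTX relies on
// the voice-activity decisions.
//
//   channel->SetVADStatus(true,  VADNormal, false);  // VAD on,  DTX on
//   channel->SetVADStatus(false, VADNormal, false);  // VAD off => DTX forced off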
1680
1681WebRtc_Word32
1682Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
1683{
1684 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1685 "Channel::GetVADStatus");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001686 if (_audioCodingModule.VAD(&disabledDTX, &enabledVAD, &mode) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001687 {
1688 _engineStatisticsPtr->SetLastError(
1689 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1690 "GetVADStatus() failed to get VAD status");
1691 return -1;
1692 }
1693 disabledDTX = !disabledDTX;
1694 return 0;
1695}
1696
1697WebRtc_Word32
1698Channel::SetRecPayloadType(const CodecInst& codec)
1699{
1700 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1701 "Channel::SetRecPayloadType()");
1702
1703 if (_playing)
1704 {
1705 _engineStatisticsPtr->SetLastError(
1706 VE_ALREADY_PLAYING, kTraceError,
1707 "SetRecPayloadType() unable to set PT while playing");
1708 return -1;
1709 }
1710 if (_receiving)
1711 {
1712 _engineStatisticsPtr->SetLastError(
1713 VE_ALREADY_LISTENING, kTraceError,
1714 "SetRecPayloadType() unable to set PT while listening");
1715 return -1;
1716 }
1717
1718 if (codec.pltype == -1)
1719 {
1720 // De-register the selected codec (RTP/RTCP module and ACM)
1721
1722 WebRtc_Word8 pltype(-1);
1723 CodecInst rxCodec = codec;
1724
1725 // Get payload type for the given codec
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001726 _rtpRtcpModule->ReceivePayloadType(rxCodec, &pltype);
niklase@google.com470e71d2011-07-07 08:21:25 +00001727 rxCodec.pltype = pltype;
1728
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001729 if (_rtpRtcpModule->DeRegisterReceivePayload(pltype) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001730 {
1731 _engineStatisticsPtr->SetLastError(
1732 VE_RTP_RTCP_MODULE_ERROR,
1733 kTraceError,
1734 "SetRecPayloadType() RTP/RTCP-module deregistration "
1735 "failed");
1736 return -1;
1737 }
1738 if (_audioCodingModule.UnregisterReceiveCodec(rxCodec.pltype) != 0)
1739 {
1740 _engineStatisticsPtr->SetLastError(
1741 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1742 "SetRecPayloadType() ACM deregistration failed - 1");
1743 return -1;
1744 }
1745 return 0;
1746 }
1747
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001748 if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001749 {
1750 // First attempt to register failed => de-register and try again
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001751 _rtpRtcpModule->DeRegisterReceivePayload(codec.pltype);
1752 if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001753 {
1754 _engineStatisticsPtr->SetLastError(
1755 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1756 "SetRecPayloadType() RTP/RTCP-module registration failed");
1757 return -1;
1758 }
1759 }
1760 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1761 {
1762 _audioCodingModule.UnregisterReceiveCodec(codec.pltype);
1763 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1764 {
1765 _engineStatisticsPtr->SetLastError(
1766 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1767 "SetRecPayloadType() ACM registration failed - 1");
1768 return -1;
1769 }
1770 }
1771 return 0;
1772}
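
// Illustrative sketch (not part of the original source): the pltype == -1
// convention handled above. A non-negative payload type registers the codec
// for reception; -1 de-registers it again. The CodecInst values are example
// numbers only.
//
//   CodecInst l16 = {105, "L16", 16000, 320, 1, 256000};
//   channel->SetRecPayloadType(l16);  // register PT 105 for L16/16000
//   l16.pltype = -1;
//   channel->SetRecPayloadType(l16);  // de-register the codec again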
1773
1774WebRtc_Word32
1775Channel::GetRecPayloadType(CodecInst& codec)
1776{
1777 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1778 "Channel::GetRecPayloadType()");
1779 WebRtc_Word8 payloadType(-1);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001780 if (_rtpRtcpModule->ReceivePayloadType(codec, &payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001781 {
1782 _engineStatisticsPtr->SetLastError(
henrika@webrtc.org37198002012-06-18 11:00:12 +00001783 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
niklase@google.com470e71d2011-07-07 08:21:25 +00001784 "GetRecPayloadType() failed to retrieve RX payload type");
1785 return -1;
1786 }
1787 codec.pltype = payloadType;
1788 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1789 "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
1790 return 0;
1791}
1792
1793WebRtc_Word32
1794Channel::SetAMREncFormat(AmrMode mode)
1795{
1796 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1797 "Channel::SetAMREncFormat()");
1798
1799 // ACM doesn't support AMR
1800 return -1;
1801}
1802
1803WebRtc_Word32
1804Channel::SetAMRDecFormat(AmrMode mode)
1805{
1806 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1807 "Channel::SetAMRDecFormat()");
1808
1809 // ACM doesn't support AMR
1810 return -1;
1811}
1812
1813WebRtc_Word32
1814Channel::SetAMRWbEncFormat(AmrMode mode)
1815{
1816 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1817 "Channel::SetAMRWbEncFormat()");
1818
1819 // ACM doesn't support AMR
1820 return -1;
1821
1822}
1823
1824WebRtc_Word32
1825Channel::SetAMRWbDecFormat(AmrMode mode)
1826{
1827 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1828 "Channel::SetAMRWbDecFormat()");
1829
1830 // ACM doesn't support AMR
1831 return -1;
1832}
1833
1834WebRtc_Word32
1835Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
1836{
1837 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1838 "Channel::SetSendCNPayloadType()");
1839
1840 CodecInst codec;
1841 WebRtc_Word32 samplingFreqHz(-1);
tina.legrand@webrtc.org45175852012-06-01 09:27:35 +00001842 const int kMono = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001843 if (frequency == kFreq32000Hz)
1844 samplingFreqHz = 32000;
1845 else if (frequency == kFreq16000Hz)
1846 samplingFreqHz = 16000;
1847
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001848 if (_audioCodingModule.Codec("CN", &codec, samplingFreqHz, kMono) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001849 {
1850 _engineStatisticsPtr->SetLastError(
1851 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1852 "SetSendCNPayloadType() failed to retrieve default CN codec "
1853 "settings");
1854 return -1;
1855 }
1856
1857 // Modify the payload type (must be set to dynamic range)
1858 codec.pltype = type;
1859
1860 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1861 {
1862 _engineStatisticsPtr->SetLastError(
1863 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1864 "SetSendCNPayloadType() failed to register CN to ACM");
1865 return -1;
1866 }
1867
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001868 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001869 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001870 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1871 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001872 {
1873 _engineStatisticsPtr->SetLastError(
1874 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1875 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1876 "module");
1877 return -1;
1878 }
1879 }
1880 return 0;
1881}
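
// Illustrative sketch (not part of the original source): SetSendCNPayloadType()
// only remaps the RTP payload number of the Comfort Noise codec for the given
// band; the CN settings themselves come from the ACM codec database. The value
// 98 below is an example from the dynamic payload-type range (96-127).
//
//   channel->SetSendCNPayloadType(98, kFreq16000Hz);  // CN/16000 -> PT 98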
1882
1883WebRtc_Word32
1884Channel::SetISACInitTargetRate(int rateBps, bool useFixedFrameSize)
1885{
1886 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1887 "Channel::SetISACInitTargetRate()");
1888
1889 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001890 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001891 {
1892 _engineStatisticsPtr->SetLastError(
1893 VE_CODEC_ERROR, kTraceError,
1894 "SetISACInitTargetRate() failed to retrieve send codec");
1895 return -1;
1896 }
1897 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1898 {
1899 // This API is only valid if iSAC is setup to run in channel-adaptive
1900 // mode.
1901 // We do not validate the adaptive mode here. It is done later in the
1902 // ConfigISACBandwidthEstimator() API.
1903 _engineStatisticsPtr->SetLastError(
1904 VE_CODEC_ERROR, kTraceError,
1905 "SetISACInitTargetRate() send codec is not iSAC");
1906 return -1;
1907 }
1908
1909 WebRtc_UWord8 initFrameSizeMsec(0);
1910 if (16000 == sendCodec.plfreq)
1911 {
1912        // Note that 0 is a valid rate and corresponds to "use default".
1913 if ((rateBps != 0 &&
1914 rateBps < kVoiceEngineMinIsacInitTargetRateBpsWb) ||
1915 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsWb))
1916 {
1917 _engineStatisticsPtr->SetLastError(
1918 VE_INVALID_ARGUMENT, kTraceError,
1919 "SetISACInitTargetRate() invalid target rate - 1");
1920 return -1;
1921 }
1922 // 30 or 60ms
1923 initFrameSizeMsec = (WebRtc_UWord8)(sendCodec.pacsize / 16);
1924 }
1925 else if (32000 == sendCodec.plfreq)
1926 {
1927 if ((rateBps != 0 &&
1928 rateBps < kVoiceEngineMinIsacInitTargetRateBpsSwb) ||
1929 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsSwb))
1930 {
1931 _engineStatisticsPtr->SetLastError(
1932 VE_INVALID_ARGUMENT, kTraceError,
1933 "SetISACInitTargetRate() invalid target rate - 2");
1934 return -1;
1935 }
1936 initFrameSizeMsec = (WebRtc_UWord8)(sendCodec.pacsize / 32); // 30ms
1937 }
1938
1939 if (_audioCodingModule.ConfigISACBandwidthEstimator(
1940 initFrameSizeMsec, rateBps, useFixedFrameSize) == -1)
1941 {
1942 _engineStatisticsPtr->SetLastError(
1943 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1944 "SetISACInitTargetRate() iSAC BWE config failed");
1945 return -1;
1946 }
1947
1948 return 0;
1949}
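
// Illustrative sketch (not part of the original source): why pacsize is
// divided by 16 (wideband) or 32 (super-wideband) above. pacsize is expressed
// in samples per packet, so dividing by the sample rate in kHz gives the
// initial frame size in milliseconds.
//
//   // iSAC/16000 with pacsize 480: 480 / 16 = 30 ms initial frame size
//   // iSAC/16000 with pacsize 960: 960 / 16 = 60 ms initial frame size
//   channel->SetISACInitTargetRate(20000, false);  // 20 kbps initial estimate
//                                                  // (iSAC must be send codec)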
1950
1951WebRtc_Word32
1952Channel::SetISACMaxRate(int rateBps)
1953{
1954 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1955 "Channel::SetISACMaxRate()");
1956
1957 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001958 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001959 {
1960 _engineStatisticsPtr->SetLastError(
1961 VE_CODEC_ERROR, kTraceError,
1962 "SetISACMaxRate() failed to retrieve send codec");
1963 return -1;
1964 }
1965 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1966 {
1967 // This API is only valid if iSAC is selected as sending codec.
1968 _engineStatisticsPtr->SetLastError(
1969 VE_CODEC_ERROR, kTraceError,
1970 "SetISACMaxRate() send codec is not iSAC");
1971 return -1;
1972 }
1973 if (16000 == sendCodec.plfreq)
1974 {
1975 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsWb) ||
1976 (rateBps > kVoiceEngineMaxIsacMaxRateBpsWb))
1977 {
1978 _engineStatisticsPtr->SetLastError(
1979 VE_INVALID_ARGUMENT, kTraceError,
1980 "SetISACMaxRate() invalid max rate - 1");
1981 return -1;
1982 }
1983 }
1984 else if (32000 == sendCodec.plfreq)
1985 {
1986 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsSwb) ||
1987 (rateBps > kVoiceEngineMaxIsacMaxRateBpsSwb))
1988 {
1989 _engineStatisticsPtr->SetLastError(
1990 VE_INVALID_ARGUMENT, kTraceError,
1991 "SetISACMaxRate() invalid max rate - 2");
1992 return -1;
1993 }
1994 }
1995 if (_sending)
1996 {
1997 _engineStatisticsPtr->SetLastError(
1998 VE_SENDING, kTraceError,
1999 "SetISACMaxRate() unable to set max rate while sending");
2000 return -1;
2001 }
2002
2003 // Set the maximum instantaneous rate of iSAC (works for both adaptive
2004 // and non-adaptive mode)
2005 if (_audioCodingModule.SetISACMaxRate(rateBps) == -1)
2006 {
2007 _engineStatisticsPtr->SetLastError(
2008 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2009 "SetISACMaxRate() failed to set max rate");
2010 return -1;
2011 }
2012
2013 return 0;
2014}
2015
2016WebRtc_Word32
2017Channel::SetISACMaxPayloadSize(int sizeBytes)
2018{
2019 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2020 "Channel::SetISACMaxPayloadSize()");
2021 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00002022 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00002023 {
2024 _engineStatisticsPtr->SetLastError(
2025 VE_CODEC_ERROR, kTraceError,
2026 "SetISACMaxPayloadSize() failed to retrieve send codec");
2027 return -1;
2028 }
2029 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
2030 {
2031 _engineStatisticsPtr->SetLastError(
2032 VE_CODEC_ERROR, kTraceError,
2033 "SetISACMaxPayloadSize() send codec is not iSAC");
2034 return -1;
2035 }
2036 if (16000 == sendCodec.plfreq)
2037 {
2038 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesWb) ||
2039 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesWb))
2040 {
2041 _engineStatisticsPtr->SetLastError(
2042 VE_INVALID_ARGUMENT, kTraceError,
2043 "SetISACMaxPayloadSize() invalid max payload - 1");
2044 return -1;
2045 }
2046 }
2047 else if (32000 == sendCodec.plfreq)
2048 {
2049 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesSwb) ||
2050 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb))
2051 {
2052 _engineStatisticsPtr->SetLastError(
2053 VE_INVALID_ARGUMENT, kTraceError,
2054 "SetISACMaxPayloadSize() invalid max payload - 2");
2055 return -1;
2056 }
2057 }
2058 if (_sending)
2059 {
2060 _engineStatisticsPtr->SetLastError(
2061 VE_SENDING, kTraceError,
2062 "SetISACMaxPayloadSize() unable to set max rate while sending");
2063 return -1;
2064 }
2065
2066 if (_audioCodingModule.SetISACMaxPayloadSize(sizeBytes) == -1)
2067 {
2068 _engineStatisticsPtr->SetLastError(
2069 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2070 "SetISACMaxPayloadSize() failed to set max payload size");
2071 return -1;
2072 }
2073 return 0;
2074}
2075
2076WebRtc_Word32 Channel::RegisterExternalTransport(Transport& transport)
2077{
2078 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2079 "Channel::RegisterExternalTransport()");
2080
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002081 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002082
niklase@google.com470e71d2011-07-07 08:21:25 +00002083 if (_externalTransport)
2084 {
2085 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
2086 kTraceError,
2087 "RegisterExternalTransport() external transport already enabled");
2088 return -1;
2089 }
2090 _externalTransport = true;
2091 _transportPtr = &transport;
2092 return 0;
2093}
2094
2095WebRtc_Word32
2096Channel::DeRegisterExternalTransport()
2097{
2098 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2099 "Channel::DeRegisterExternalTransport()");
2100
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002101 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.org83661f52011-11-25 10:58:15 +00002102
niklase@google.com470e71d2011-07-07 08:21:25 +00002103 if (!_transportPtr)
2104 {
2105 _engineStatisticsPtr->SetLastError(
2106 VE_INVALID_OPERATION, kTraceWarning,
2107 "DeRegisterExternalTransport() external transport already "
2108 "disabled");
2109 return 0;
2110 }
2111 _externalTransport = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002112 _transportPtr = NULL;
2113 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2114 "DeRegisterExternalTransport() all transport is disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002115 return 0;
2116}
2117
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002118WebRtc_Word32 Channel::ReceivedRTPPacket(const WebRtc_Word8* data,
2119 WebRtc_Word32 length) {
2120 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2121 "Channel::ReceivedRTPPacket()");
2122
2123 // Store playout timestamp for the received RTP packet
2124 WebRtc_UWord32 playoutTimestamp(0);
2125 if (GetPlayoutTimeStamp(playoutTimestamp) == 0) {
2126 _playoutTimeStampRTP = playoutTimestamp;
2127 }
2128
2129 // Dump the RTP packet to a file (if RTP dump is enabled).
2130 if (_rtpDumpIn.DumpPacket((const WebRtc_UWord8*)data,
2131 (WebRtc_UWord16)length) == -1) {
2132 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2133 VoEId(_instanceId,_channelId),
2134                 "Channel::ReceivedRTPPacket() RTP dump to input file failed");
2135 }
2136
2137 // Deliver RTP packet to RTP/RTCP module for parsing
2138 // The packet will be pushed back to the channel thru the
2139 // OnReceivedPayloadData callback so we don't push it to the ACM here
2140 if (_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data,
2141 (WebRtc_UWord16)length) == -1) {
2142 _engineStatisticsPtr->SetLastError(
2143 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2144        "Channel::ReceivedRTPPacket() RTP packet is invalid");
2145 }
2146 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002147}
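
// Illustrative sketch (not part of the original source): the receive path for
// an incoming RTP packet, as described by the comment above. The raw buffer is
// handed to the RTP/RTCP module, which parses it and calls back into the
// channel with the depacketized payload before the audio reaches the ACM.
//
//   channel->ReceivedRTPPacket(packetBuffer, packetLengthBytes);
//   //  -> _rtpRtcpModule->IncomingPacket(...)      parse the RTP header
//   //  -> Channel::OnReceivedPayloadData(...)      RTP/RTCP callback
//   //  -> audio coding module / NetEQ              decode and play out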
2148
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002149WebRtc_Word32 Channel::ReceivedRTCPPacket(const WebRtc_Word8* data,
2150 WebRtc_Word32 length) {
2151 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2152 "Channel::ReceivedRTCPPacket()");
2153 // Store playout timestamp for the received RTCP packet
2154 WebRtc_UWord32 playoutTimestamp(0);
2155 if (GetPlayoutTimeStamp(playoutTimestamp) == 0) {
2156 _playoutTimeStampRTCP = playoutTimestamp;
2157 }
2158
2159 // Dump the RTCP packet to a file (if RTP dump is enabled).
2160 if (_rtpDumpIn.DumpPacket((const WebRtc_UWord8*)data,
2161 (WebRtc_UWord16)length) == -1) {
2162 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2163 VoEId(_instanceId,_channelId),
2164                 "Channel::ReceivedRTCPPacket() RTCP dump to input file failed");
2165 }
2166
2167 // Deliver RTCP packet to RTP/RTCP module for parsing
2168 if (_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data,
2169 (WebRtc_UWord16)length) == -1) {
2170 _engineStatisticsPtr->SetLastError(
2171 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2172        "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
2173 }
2174 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002175}
2176
niklase@google.com470e71d2011-07-07 08:21:25 +00002177WebRtc_Word32
2178Channel::SetPacketTimeoutNotification(bool enable, int timeoutSeconds)
2179{
2180 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2181 "Channel::SetPacketTimeoutNotification()");
2182 if (enable)
2183 {
2184 const WebRtc_UWord32 RTPtimeoutMS = 1000*timeoutSeconds;
2185 const WebRtc_UWord32 RTCPtimeoutMS = 0;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002186 _rtpRtcpModule->SetPacketTimeout(RTPtimeoutMS, RTCPtimeoutMS);
niklase@google.com470e71d2011-07-07 08:21:25 +00002187 _rtpPacketTimeOutIsEnabled = true;
2188 _rtpTimeOutSeconds = timeoutSeconds;
2189 }
2190 else
2191 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002192 _rtpRtcpModule->SetPacketTimeout(0, 0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002193 _rtpPacketTimeOutIsEnabled = false;
2194 _rtpTimeOutSeconds = 0;
2195 }
2196 return 0;
2197}
2198
2199WebRtc_Word32
2200Channel::GetPacketTimeoutNotification(bool& enabled, int& timeoutSeconds)
2201{
2202 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2203 "Channel::GetPacketTimeoutNotification()");
2204 enabled = _rtpPacketTimeOutIsEnabled;
2205 if (enabled)
2206 {
2207 timeoutSeconds = _rtpTimeOutSeconds;
2208 }
2209 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
2210 "GetPacketTimeoutNotification() => enabled=%d,"
2211 " timeoutSeconds=%d",
2212 enabled, timeoutSeconds);
2213 return 0;
2214}
2215
2216WebRtc_Word32
2217Channel::RegisterDeadOrAliveObserver(VoEConnectionObserver& observer)
2218{
2219 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2220 "Channel::RegisterDeadOrAliveObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002221 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002222
2223 if (_connectionObserverPtr)
2224 {
2225 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION, kTraceError,
2226 "RegisterDeadOrAliveObserver() observer already enabled");
2227 return -1;
2228 }
2229
2230 _connectionObserverPtr = &observer;
2231 _connectionObserver = true;
2232
2233 return 0;
2234}
2235
2236WebRtc_Word32
2237Channel::DeRegisterDeadOrAliveObserver()
2238{
2239 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2240 "Channel::DeRegisterDeadOrAliveObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002241 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002242
2243 if (!_connectionObserverPtr)
2244 {
2245 _engineStatisticsPtr->SetLastError(
2246 VE_INVALID_OPERATION, kTraceWarning,
2247 "DeRegisterDeadOrAliveObserver() observer already disabled");
2248 return 0;
2249 }
2250
2251 _connectionObserver = false;
2252 _connectionObserverPtr = NULL;
2253
2254 return 0;
2255}
2256
2257WebRtc_Word32
2258Channel::SetPeriodicDeadOrAliveStatus(bool enable, int sampleTimeSeconds)
2259{
2260 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2261 "Channel::SetPeriodicDeadOrAliveStatus()");
2262 if (!_connectionObserverPtr)
2263 {
2264 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
2265 "SetPeriodicDeadOrAliveStatus() connection observer has"
2266 " not been registered");
2267 }
2268 if (enable)
2269 {
2270 ResetDeadOrAliveCounters();
2271 }
2272 bool enabled(false);
2273 WebRtc_UWord8 currentSampleTimeSec(0);
2274 // Store last state (will be used later if dead-or-alive is disabled).
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002275 _rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, currentSampleTimeSec);
niklase@google.com470e71d2011-07-07 08:21:25 +00002276 // Update the dead-or-alive state.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002277 if (_rtpRtcpModule->SetPeriodicDeadOrAliveStatus(
niklase@google.com470e71d2011-07-07 08:21:25 +00002278 enable, (WebRtc_UWord8)sampleTimeSeconds) != 0)
2279 {
2280 _engineStatisticsPtr->SetLastError(
2281 VE_RTP_RTCP_MODULE_ERROR,
2282 kTraceError,
2283 "SetPeriodicDeadOrAliveStatus() failed to set dead-or-alive "
2284 "status");
2285 return -1;
2286 }
2287 if (!enable)
2288 {
2289 // Restore last utilized sample time.
2290 // Without this, the sample time would always be reset to default
2291        // Without this, the sample time would always be reset to the default
2292        // (2 sec) each time dead-or-alive was disabled without a sample-time parameter.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002293 _rtpRtcpModule->SetPeriodicDeadOrAliveStatus(enable,
niklase@google.com470e71d2011-07-07 08:21:25 +00002294 currentSampleTimeSec);
2295 }
2296 return 0;
2297}
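
// Illustrative sketch (not part of the original source): the sample-time
// restore behaviour described above. Disabling dead-or-alive keeps the last
// sample time in the RTP/RTCP module so that a later re-enable without an
// explicit value does not fall back to the 2 s default.
//
//   channel->SetPeriodicDeadOrAliveStatus(true, 4);   // report every 4 s
//   channel->SetPeriodicDeadOrAliveStatus(false, 0);  // disable; 4 s is kept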
2298
2299WebRtc_Word32
2300Channel::GetPeriodicDeadOrAliveStatus(bool& enabled, int& sampleTimeSeconds)
2301{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002302 _rtpRtcpModule->PeriodicDeadOrAliveStatus(
niklase@google.com470e71d2011-07-07 08:21:25 +00002303 enabled,
2304 (WebRtc_UWord8&)sampleTimeSeconds);
2305 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
2306 "GetPeriodicDeadOrAliveStatus() => enabled=%d,"
2307 " sampleTimeSeconds=%d",
2308 enabled, sampleTimeSeconds);
2309 return 0;
2310}
2311
niklase@google.com470e71d2011-07-07 08:21:25 +00002312int Channel::StartPlayingFileLocally(const char* fileName,
2313 const bool loop,
2314 const FileFormats format,
2315 const int startPosition,
2316 const float volumeScaling,
2317 const int stopPosition,
2318 const CodecInst* codecInst)
2319{
2320 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2321 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
2322 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
2323 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2324 startPosition, stopPosition);
2325
2326 if (_outputFilePlaying)
2327 {
2328 _engineStatisticsPtr->SetLastError(
2329 VE_ALREADY_PLAYING, kTraceError,
2330 "StartPlayingFileLocally() is already playing");
2331 return -1;
2332 }
2333
niklase@google.com470e71d2011-07-07 08:21:25 +00002334 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002335 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002336
2337 if (_outputFilePlayerPtr)
2338 {
2339 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2340 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2341 _outputFilePlayerPtr = NULL;
2342 }
2343
2344 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2345 _outputFilePlayerId, (const FileFormats)format);
2346
2347 if (_outputFilePlayerPtr == NULL)
2348 {
2349 _engineStatisticsPtr->SetLastError(
2350 VE_INVALID_ARGUMENT, kTraceError,
henrike@webrtc.org31d30702011-11-18 19:59:32 +00002351 "StartPlayingFileLocally() filePlayer format is not correct");
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002352 return -1;
2353 }
2354
2355 const WebRtc_UWord32 notificationTime(0);
2356
2357 if (_outputFilePlayerPtr->StartPlayingFile(
2358 fileName,
2359 loop,
2360 startPosition,
2361 volumeScaling,
2362 notificationTime,
2363 stopPosition,
2364 (const CodecInst*)codecInst) != 0)
2365 {
2366 _engineStatisticsPtr->SetLastError(
2367 VE_BAD_FILE, kTraceError,
2368 "StartPlayingFile() failed to start file playout");
2369 _outputFilePlayerPtr->StopPlayingFile();
2370 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2371 _outputFilePlayerPtr = NULL;
2372 return -1;
2373 }
2374 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2375 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002376 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002377
2378 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002379 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002380
2381 return 0;
2382}
2383
2384int Channel::StartPlayingFileLocally(InStream* stream,
2385 const FileFormats format,
2386 const int startPosition,
2387 const float volumeScaling,
2388 const int stopPosition,
2389 const CodecInst* codecInst)
2390{
2391 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2392 "Channel::StartPlayingFileLocally(format=%d,"
2393 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2394 format, volumeScaling, startPosition, stopPosition);
2395
2396 if(stream == NULL)
2397 {
2398 _engineStatisticsPtr->SetLastError(
2399 VE_BAD_FILE, kTraceError,
2400 "StartPlayingFileLocally() NULL as input stream");
2401 return -1;
2402 }
2403
2404
2405 if (_outputFilePlaying)
2406 {
2407 _engineStatisticsPtr->SetLastError(
2408 VE_ALREADY_PLAYING, kTraceError,
2409 "StartPlayingFileLocally() is already playing");
2410 return -1;
2411 }
2412
niklase@google.com470e71d2011-07-07 08:21:25 +00002413 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002414 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002415
2416 // Destroy the old instance
2417 if (_outputFilePlayerPtr)
2418 {
2419 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2420 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2421 _outputFilePlayerPtr = NULL;
2422 }
2423
2424 // Create the instance
2425 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2426 _outputFilePlayerId,
2427 (const FileFormats)format);
2428
2429 if (_outputFilePlayerPtr == NULL)
2430 {
2431 _engineStatisticsPtr->SetLastError(
2432 VE_INVALID_ARGUMENT, kTraceError,
2433            "StartPlayingFileLocally() filePlayer format is not correct");
2434 return -1;
2435 }
2436
2437 const WebRtc_UWord32 notificationTime(0);
2438
2439 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2440 volumeScaling,
2441 notificationTime,
2442 stopPosition, codecInst) != 0)
2443 {
2444 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2445 "StartPlayingFile() failed to "
2446 "start file playout");
2447 _outputFilePlayerPtr->StopPlayingFile();
2448 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2449 _outputFilePlayerPtr = NULL;
2450 return -1;
2451 }
2452 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2453 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002454 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002455
2456 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002457 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002458
niklase@google.com470e71d2011-07-07 08:21:25 +00002459 return 0;
2460}
2461
2462int Channel::StopPlayingFileLocally()
2463{
2464 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2465 "Channel::StopPlayingFileLocally()");
2466
2467 if (!_outputFilePlaying)
2468 {
2469 _engineStatisticsPtr->SetLastError(
2470 VE_INVALID_OPERATION, kTraceWarning,
2471            "StopPlayingFileLocally() is not playing");
2472 return 0;
2473 }
2474
niklase@google.com470e71d2011-07-07 08:21:25 +00002475 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002476 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002477
2478 if (_outputFilePlayerPtr->StopPlayingFile() != 0)
2479 {
2480 _engineStatisticsPtr->SetLastError(
2481 VE_STOP_RECORDING_FAILED, kTraceError,
2482 "StopPlayingFile() could not stop playing");
2483 return -1;
2484 }
2485 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2486 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2487 _outputFilePlayerPtr = NULL;
2488 _outputFilePlaying = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002489 }
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002490 // _fileCritSect cannot be taken while calling
2491    // SetAnonymousMixabilityStatus(). Refer to the comments in
2492    // RegisterFilePlayingToMixer() for more details.
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002493 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
2494 {
2495 _engineStatisticsPtr->SetLastError(
2496 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002497 "StopPlayingFile() failed to stop participant from playing as"
2498 "file in the mixer");
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002499 return -1;
2500 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002501
2502 return 0;
2503}
2504
2505int Channel::IsPlayingFileLocally() const
2506{
2507 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2508 "Channel::IsPlayingFileLocally()");
2509
2510 return (WebRtc_Word32)_outputFilePlaying;
2511}
2512
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002513int Channel::RegisterFilePlayingToMixer()
2514{
2515 // Return success for not registering for file playing to mixer if:
2516 // 1. playing file before playout is started on that channel.
2517 // 2. starting playout without file playing on that channel.
2518 if (!_playing || !_outputFilePlaying)
2519 {
2520 return 0;
2521 }
2522
2523 // |_fileCritSect| cannot be taken while calling
2524 // SetAnonymousMixabilityStatus() since as soon as the participant is added
2525 // frames can be pulled by the mixer. Since the frames are generated from
2526 // the file, _fileCritSect will be taken. This would result in a deadlock.
2527 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
2528 {
2529 CriticalSectionScoped cs(&_fileCritSect);
2530 _outputFilePlaying = false;
2531 _engineStatisticsPtr->SetLastError(
2532 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2533 "StartPlayingFile() failed to add participant as file to mixer");
2534 _outputFilePlayerPtr->StopPlayingFile();
2535 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2536 _outputFilePlayerPtr = NULL;
2537 return -1;
2538 }
2539
2540 return 0;
2541}
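
// Illustrative sketch (not part of the original source) of the lock-ordering
// rule described above: never hold |_fileCritSect| across
// SetAnonymousMixabilityStatus(), since the mixer may immediately start
// pulling frames that are produced under that same lock.
//
//   {
//     CriticalSectionScoped cs(&_fileCritSect);
//     // ... create and start the file player only ...
//   }                                        // release |_fileCritSect| first
//   _outputMixerPtr->SetAnonymousMixabilityStatus(*this, true);  // then mix in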
2542
niklase@google.com470e71d2011-07-07 08:21:25 +00002543int Channel::ScaleLocalFilePlayout(const float scale)
2544{
2545 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2546 "Channel::ScaleLocalFilePlayout(scale=%5.3f)", scale);
2547
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002548 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002549
2550 if (!_outputFilePlaying)
2551 {
2552 _engineStatisticsPtr->SetLastError(
2553 VE_INVALID_OPERATION, kTraceError,
2554            "ScaleLocalFilePlayout() is not playing");
2555 return -1;
2556 }
2557 if ((_outputFilePlayerPtr == NULL) ||
2558 (_outputFilePlayerPtr->SetAudioScaling(scale) != 0))
2559 {
2560 _engineStatisticsPtr->SetLastError(
2561 VE_BAD_ARGUMENT, kTraceError,
2562 "SetAudioScaling() failed to scale the playout");
2563 return -1;
2564 }
2565
2566 return 0;
2567}
2568
2569int Channel::GetLocalPlayoutPosition(int& positionMs)
2570{
2571 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2572 "Channel::GetLocalPlayoutPosition(position=?)");
2573
2574 WebRtc_UWord32 position;
2575
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002576 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002577
2578 if (_outputFilePlayerPtr == NULL)
2579 {
2580 _engineStatisticsPtr->SetLastError(
2581 VE_INVALID_OPERATION, kTraceError,
2582            "GetLocalPlayoutPosition() filePlayer instance does not exist");
2583 return -1;
2584 }
2585
2586 if (_outputFilePlayerPtr->GetPlayoutPosition(position) != 0)
2587 {
2588 _engineStatisticsPtr->SetLastError(
2589 VE_BAD_FILE, kTraceError,
2590 "GetLocalPlayoutPosition() failed");
2591 return -1;
2592 }
2593 positionMs = position;
2594
2595 return 0;
2596}
2597
2598int Channel::StartPlayingFileAsMicrophone(const char* fileName,
2599 const bool loop,
2600 const FileFormats format,
2601 const int startPosition,
2602 const float volumeScaling,
2603 const int stopPosition,
2604 const CodecInst* codecInst)
2605{
2606 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2607 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
2608 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
2609 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2610 startPosition, stopPosition);
2611
2612 if (_inputFilePlaying)
2613 {
2614 _engineStatisticsPtr->SetLastError(
2615 VE_ALREADY_PLAYING, kTraceWarning,
2616 "StartPlayingFileAsMicrophone() filePlayer is playing");
2617 return 0;
2618 }
2619
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002620 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002621
2622 // Destroy the old instance
2623 if (_inputFilePlayerPtr)
2624 {
2625 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2626 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2627 _inputFilePlayerPtr = NULL;
2628 }
2629
2630 // Create the instance
2631 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2632 _inputFilePlayerId, (const FileFormats)format);
2633
2634 if (_inputFilePlayerPtr == NULL)
2635 {
2636 _engineStatisticsPtr->SetLastError(
2637 VE_INVALID_ARGUMENT, kTraceError,
2638            "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2639 return -1;
2640 }
2641
2642 const WebRtc_UWord32 notificationTime(0);
2643
2644 if (_inputFilePlayerPtr->StartPlayingFile(
2645 fileName,
2646 loop,
2647 startPosition,
2648 volumeScaling,
2649 notificationTime,
2650 stopPosition,
2651 (const CodecInst*)codecInst) != 0)
2652 {
2653 _engineStatisticsPtr->SetLastError(
2654 VE_BAD_FILE, kTraceError,
2655 "StartPlayingFile() failed to start file playout");
2656 _inputFilePlayerPtr->StopPlayingFile();
2657 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2658 _inputFilePlayerPtr = NULL;
2659 return -1;
2660 }
2661 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2662 _inputFilePlaying = true;
2663
2664 return 0;
2665}
2666
2667int Channel::StartPlayingFileAsMicrophone(InStream* stream,
2668 const FileFormats format,
2669 const int startPosition,
2670 const float volumeScaling,
2671 const int stopPosition,
2672 const CodecInst* codecInst)
2673{
2674 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2675 "Channel::StartPlayingFileAsMicrophone(format=%d, "
2676 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2677 format, volumeScaling, startPosition, stopPosition);
2678
2679 if(stream == NULL)
2680 {
2681 _engineStatisticsPtr->SetLastError(
2682 VE_BAD_FILE, kTraceError,
2683 "StartPlayingFileAsMicrophone NULL as input stream");
2684 return -1;
2685 }
2686
2687 if (_inputFilePlaying)
2688 {
2689 _engineStatisticsPtr->SetLastError(
2690 VE_ALREADY_PLAYING, kTraceWarning,
2691 "StartPlayingFileAsMicrophone() is playing");
2692 return 0;
2693 }
2694
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002695 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002696
2697 // Destroy the old instance
2698 if (_inputFilePlayerPtr)
2699 {
2700 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2701 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2702 _inputFilePlayerPtr = NULL;
2703 }
2704
2705 // Create the instance
2706 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2707 _inputFilePlayerId, (const FileFormats)format);
2708
2709 if (_inputFilePlayerPtr == NULL)
2710 {
2711 _engineStatisticsPtr->SetLastError(
2712 VE_INVALID_ARGUMENT, kTraceError,
2713            "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2714 return -1;
2715 }
2716
2717 const WebRtc_UWord32 notificationTime(0);
2718
2719 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2720 volumeScaling, notificationTime,
2721 stopPosition, codecInst) != 0)
2722 {
2723 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2724 "StartPlayingFile() failed to start "
2725 "file playout");
2726 _inputFilePlayerPtr->StopPlayingFile();
2727 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2728 _inputFilePlayerPtr = NULL;
2729 return -1;
2730 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002731
niklase@google.com470e71d2011-07-07 08:21:25 +00002732 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2733 _inputFilePlaying = true;
2734
2735 return 0;
2736}
2737
2738int Channel::StopPlayingFileAsMicrophone()
2739{
2740 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2741 "Channel::StopPlayingFileAsMicrophone()");
2742
2743 if (!_inputFilePlaying)
2744 {
2745 _engineStatisticsPtr->SetLastError(
2746 VE_INVALID_OPERATION, kTraceWarning,
2747            "StopPlayingFileAsMicrophone() is not playing");
2748 return 0;
2749 }
2750
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002751 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002752 if (_inputFilePlayerPtr->StopPlayingFile() != 0)
2753 {
2754 _engineStatisticsPtr->SetLastError(
2755 VE_STOP_RECORDING_FAILED, kTraceError,
2756 "StopPlayingFile() could not stop playing");
2757 return -1;
2758 }
2759 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2760 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2761 _inputFilePlayerPtr = NULL;
2762 _inputFilePlaying = false;
2763
2764 return 0;
2765}
2766
2767int Channel::IsPlayingFileAsMicrophone() const
2768{
2769 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2770 "Channel::IsPlayingFileAsMicrophone()");
2771
2772 return _inputFilePlaying;
2773}
2774
2775int Channel::ScaleFileAsMicrophonePlayout(const float scale)
2776{
2777 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2778 "Channel::ScaleFileAsMicrophonePlayout(scale=%5.3f)", scale);
2779
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002780 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002781
2782 if (!_inputFilePlaying)
2783 {
2784 _engineStatisticsPtr->SetLastError(
2785 VE_INVALID_OPERATION, kTraceError,
2786            "ScaleFileAsMicrophonePlayout() is not playing");
2787 return -1;
2788 }
2789
2790 if ((_inputFilePlayerPtr == NULL) ||
2791 (_inputFilePlayerPtr->SetAudioScaling(scale) != 0))
2792 {
2793 _engineStatisticsPtr->SetLastError(
2794 VE_BAD_ARGUMENT, kTraceError,
2795 "SetAudioScaling() failed to scale playout");
2796 return -1;
2797 }
2798
2799 return 0;
2800}
2801
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00002802int Channel::StartRecordingPlayout(const char* fileName,
niklase@google.com470e71d2011-07-07 08:21:25 +00002803 const CodecInst* codecInst)
2804{
2805 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2806 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
2807
2808 if (_outputFileRecording)
2809 {
2810 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2811 "StartRecordingPlayout() is already recording");
2812 return 0;
2813 }
2814
2815 FileFormats format;
2816 const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
2817 CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2818
niklas.enbom@webrtc.org40197d72012-03-26 08:45:47 +00002819 if ((codecInst != NULL) &&
2820 ((codecInst->channels < 1) || (codecInst->channels > 2)))
niklase@google.com470e71d2011-07-07 08:21:25 +00002821 {
2822 _engineStatisticsPtr->SetLastError(
2823 VE_BAD_ARGUMENT, kTraceError,
2824 "StartRecordingPlayout() invalid compression");
2825 return(-1);
2826 }
2827 if(codecInst == NULL)
2828 {
2829 format = kFileFormatPcm16kHzFile;
2830 codecInst=&dummyCodec;
2831 }
2832 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2833 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2834 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2835 {
2836 format = kFileFormatWavFile;
2837 }
2838 else
2839 {
2840 format = kFileFormatCompressedFile;
2841 }
2842
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002843 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002844
2845 // Destroy the old instance
2846 if (_outputFileRecorderPtr)
2847 {
2848 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2849 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2850 _outputFileRecorderPtr = NULL;
2851 }
2852
2853 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2854 _outputFileRecorderId, (const FileFormats)format);
2855 if (_outputFileRecorderPtr == NULL)
2856 {
2857 _engineStatisticsPtr->SetLastError(
2858 VE_INVALID_ARGUMENT, kTraceError,
2859            "StartRecordingPlayout() fileRecorder format is not correct");
2860 return -1;
2861 }
2862
2863 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2864 fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
2865 {
2866 _engineStatisticsPtr->SetLastError(
2867 VE_BAD_FILE, kTraceError,
2868 "StartRecordingAudioFile() failed to start file recording");
2869 _outputFileRecorderPtr->StopRecording();
2870 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2871 _outputFileRecorderPtr = NULL;
2872 return -1;
2873 }
2874 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2875 _outputFileRecording = true;
2876
2877 return 0;
2878}
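
// Illustrative sketch (not part of the original source): how the recording
// file format is derived from the supplied codec above. The CodecInst values
// are examples only; just the plname field drives the format choice.
//
//   CodecInst pcmu = {0, "PCMU", 8000, 160, 1, 64000};
//   channel->StartRecordingPlayout("playout.wav", &pcmu);  // WAV container
//   ...
//   channel->StopRecordingPlayout();
//   // codecInst == NULL  -> 16 kHz L16 PCM file
//   // L16 / PCMU / PCMA  -> WAV file
//   // any other codec    -> compressed file format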
2879
2880int Channel::StartRecordingPlayout(OutStream* stream,
2881 const CodecInst* codecInst)
2882{
2883 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2884 "Channel::StartRecordingPlayout()");
2885
2886 if (_outputFileRecording)
2887 {
2888 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2889 "StartRecordingPlayout() is already recording");
2890 return 0;
2891 }
2892
2893 FileFormats format;
2894 const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
2895 CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2896
2897 if (codecInst != NULL && codecInst->channels != 1)
2898 {
2899 _engineStatisticsPtr->SetLastError(
2900 VE_BAD_ARGUMENT, kTraceError,
2901 "StartRecordingPlayout() invalid compression");
2902 return(-1);
2903 }
2904 if(codecInst == NULL)
2905 {
2906 format = kFileFormatPcm16kHzFile;
2907 codecInst=&dummyCodec;
2908 }
2909 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2910 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2911 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2912 {
2913 format = kFileFormatWavFile;
2914 }
2915 else
2916 {
2917 format = kFileFormatCompressedFile;
2918 }
2919
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002920 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002921
2922 // Destroy the old instance
2923 if (_outputFileRecorderPtr)
2924 {
2925 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2926 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2927 _outputFileRecorderPtr = NULL;
2928 }
2929
2930 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2931 _outputFileRecorderId, (const FileFormats)format);
2932 if (_outputFileRecorderPtr == NULL)
2933 {
2934 _engineStatisticsPtr->SetLastError(
2935 VE_INVALID_ARGUMENT, kTraceError,
2936            "StartRecordingPlayout() fileRecorder format is not correct");
2937 return -1;
2938 }
2939
2940 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2941 notificationTime) != 0)
2942 {
2943 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2944 "StartRecordingPlayout() failed to "
2945 "start file recording");
2946 _outputFileRecorderPtr->StopRecording();
2947 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2948 _outputFileRecorderPtr = NULL;
2949 return -1;
2950 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002951
niklase@google.com470e71d2011-07-07 08:21:25 +00002952 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2953 _outputFileRecording = true;
2954
2955 return 0;
2956}
2957
2958int Channel::StopRecordingPlayout()
2959{
2960 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
2961 "Channel::StopRecordingPlayout()");
2962
2963 if (!_outputFileRecording)
2964 {
2965 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
2966                   "StopRecordingPlayout() is not recording");
2967 return -1;
2968 }
2969
2970
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002971 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002972
2973 if (_outputFileRecorderPtr->StopRecording() != 0)
2974 {
2975 _engineStatisticsPtr->SetLastError(
2976 VE_STOP_RECORDING_FAILED, kTraceError,
2977 "StopRecording() could not stop recording");
2978 return(-1);
2979 }
2980 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2981 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2982 _outputFileRecorderPtr = NULL;
2983 _outputFileRecording = false;
2984
2985 return 0;
2986}
2987
2988void
2989Channel::SetMixWithMicStatus(bool mix)
2990{
2991 _mixFileWithMicrophone=mix;
2992}
2993
2994int
2995Channel::GetSpeechOutputLevel(WebRtc_UWord32& level) const
2996{
2997 WebRtc_Word8 currentLevel = _outputAudioLevel.Level();
2998 level = static_cast<WebRtc_Word32> (currentLevel);
2999 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3000 VoEId(_instanceId,_channelId),
3001 "GetSpeechOutputLevel() => level=%u", level);
3002 return 0;
3003}
3004
3005int
3006Channel::GetSpeechOutputLevelFullRange(WebRtc_UWord32& level) const
3007{
3008 WebRtc_Word16 currentLevel = _outputAudioLevel.LevelFullRange();
3009 level = static_cast<WebRtc_Word32> (currentLevel);
3010 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3011 VoEId(_instanceId,_channelId),
3012 "GetSpeechOutputLevelFullRange() => level=%u", level);
3013 return 0;
3014}
3015
3016int
3017Channel::SetMute(bool enable)
3018{
3019 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3020 "Channel::SetMute(enable=%d)", enable);
3021 _mute = enable;
3022 return 0;
3023}
3024
3025bool
3026Channel::Mute() const
3027{
3028 return _mute;
3029}
3030
3031int
3032Channel::SetOutputVolumePan(float left, float right)
3033{
3034 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3035 "Channel::SetOutputVolumePan()");
3036 _panLeft = left;
3037 _panRight = right;
3038 return 0;
3039}
3040
3041int
3042Channel::GetOutputVolumePan(float& left, float& right) const
3043{
3044 left = _panLeft;
3045 right = _panRight;
3046 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3047 VoEId(_instanceId,_channelId),
3048 "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
3049 return 0;
3050}
3051
3052int
3053Channel::SetChannelOutputVolumeScaling(float scaling)
3054{
3055 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3056 "Channel::SetChannelOutputVolumeScaling()");
3057 _outputGain = scaling;
3058 return 0;
3059}
3060
3061int
3062Channel::GetChannelOutputVolumeScaling(float& scaling) const
3063{
3064 scaling = _outputGain;
3065 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3066 VoEId(_instanceId,_channelId),
3067 "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
3068 return 0;
3069}
3070
niklase@google.com470e71d2011-07-07 08:21:25 +00003071int
3072Channel::RegisterExternalEncryption(Encryption& encryption)
3073{
3074 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3075 "Channel::RegisterExternalEncryption()");
3076
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003077 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003078
3079 if (_encryptionPtr)
3080 {
3081 _engineStatisticsPtr->SetLastError(
3082 VE_INVALID_OPERATION, kTraceError,
3083 "RegisterExternalEncryption() encryption already enabled");
3084 return -1;
3085 }
3086
3087 _encryptionPtr = &encryption;
3088
3089 _decrypting = true;
3090 _encrypting = true;
3091
3092 return 0;
3093}
3094
3095int
3096Channel::DeRegisterExternalEncryption()
3097{
3098 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3099 "Channel::DeRegisterExternalEncryption()");
3100
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003101 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003102
3103 if (!_encryptionPtr)
3104 {
3105 _engineStatisticsPtr->SetLastError(
3106 VE_INVALID_OPERATION, kTraceWarning,
3107 "DeRegisterExternalEncryption() encryption already disabled");
3108 return 0;
3109 }
3110
3111 _decrypting = false;
3112 _encrypting = false;
3113
3114 _encryptionPtr = NULL;
3115
3116 return 0;
3117}
3118
3119int Channel::SendTelephoneEventOutband(unsigned char eventCode,
3120 int lengthMs, int attenuationDb,
3121 bool playDtmfEvent)
3122{
3123 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3124 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
3125 playDtmfEvent);
3126
3127 _playOutbandDtmfEvent = playDtmfEvent;
3128
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003129 if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
niklase@google.com470e71d2011-07-07 08:21:25 +00003130 attenuationDb) != 0)
3131 {
3132 _engineStatisticsPtr->SetLastError(
3133 VE_SEND_DTMF_FAILED,
3134 kTraceWarning,
3135 "SendTelephoneEventOutband() failed to send event");
3136 return -1;
3137 }
3138 return 0;
3139}
3140
3141int Channel::SendTelephoneEventInband(unsigned char eventCode,
3142 int lengthMs,
3143 int attenuationDb,
3144 bool playDtmfEvent)
3145{
3146 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3147 "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
3148 playDtmfEvent);
3149
3150 _playInbandDtmfEvent = playDtmfEvent;
3151 _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
3152
3153 return 0;
3154}
3155
3156int
3157Channel::SetDtmfPlayoutStatus(bool enable)
3158{
3159 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3160 "Channel::SetDtmfPlayoutStatus()");
3161 if (_audioCodingModule.SetDtmfPlayoutStatus(enable) != 0)
3162 {
3163 _engineStatisticsPtr->SetLastError(
3164 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
3165 "SetDtmfPlayoutStatus() failed to set Dtmf playout");
3166 return -1;
3167 }
3168 return 0;
3169}
3170
3171bool
3172Channel::DtmfPlayoutStatus() const
3173{
3174 return _audioCodingModule.DtmfPlayoutStatus();
3175}
3176
3177int
3178Channel::SetSendTelephoneEventPayloadType(unsigned char type)
3179{
3180 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3181 "Channel::SetSendTelephoneEventPayloadType()");
andrew@webrtc.orgf81f9f82011-08-19 22:56:22 +00003182 if (type > 127)
niklase@google.com470e71d2011-07-07 08:21:25 +00003183 {
3184 _engineStatisticsPtr->SetLastError(
3185 VE_INVALID_ARGUMENT, kTraceError,
3186 "SetSendTelephoneEventPayloadType() invalid type");
3187 return -1;
3188 }
pwestin@webrtc.org1da1ce02011-10-13 15:19:55 +00003189 CodecInst codec;
3190 codec.plfreq = 8000;
3191 codec.pltype = type;
3192 memcpy(codec.plname, "telephone-event", 16);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003193 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003194 {
3195 _engineStatisticsPtr->SetLastError(
3196 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3197 "SetSendTelephoneEventPayloadType() failed to register send"
3198            " payload type");
3199 return -1;
3200 }
3201 _sendTelephoneEventPayloadType = type;
3202 return 0;
3203}
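// A minimal usage sketch (illustrative only; 106 is just an example dynamic
// payload type, and the event code, duration and attenuation are arbitrary):
//
//   channel->SetSendTelephoneEventPayloadType(106);
//   channel->SendTelephoneEventOutband(1, 160, 10, true);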
3204
3205int
3206Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
3207{
3208 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3209 "Channel::GetSendTelephoneEventPayloadType()");
3210 type = _sendTelephoneEventPayloadType;
3211 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3212 VoEId(_instanceId,_channelId),
3213 "GetSendTelephoneEventPayloadType() => type=%u", type);
3214 return 0;
3215}
3216
niklase@google.com470e71d2011-07-07 08:21:25 +00003217int
3218Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
3219{
3220 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3221 "Channel::UpdateRxVadDetection()");
3222
3223 int vadDecision = 1;
3224
andrew@webrtc.org63a50982012-05-02 23:56:37 +00003225 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive)? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003226
3227 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
3228 {
3229 OnRxVadDetected(vadDecision);
3230 _oldVadDecision = vadDecision;
3231 }
3232
3233 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3234 "Channel::UpdateRxVadDetection() => vadDecision=%d",
3235 vadDecision);
3236 return 0;
3237}
3238
3239int
3240Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
3241{
3242 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3243 "Channel::RegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003244 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003245
3246 if (_rxVadObserverPtr)
3247 {
3248 _engineStatisticsPtr->SetLastError(
3249 VE_INVALID_OPERATION, kTraceError,
3250 "RegisterRxVadObserver() observer already enabled");
3251 return -1;
3252 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003253 _rxVadObserverPtr = &observer;
3254 _RxVadDetection = true;
3255 return 0;
3256}
3257
3258int
3259Channel::DeRegisterRxVadObserver()
3260{
3261 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3262 "Channel::DeRegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003263 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003264
3265 if (!_rxVadObserverPtr)
3266 {
3267 _engineStatisticsPtr->SetLastError(
3268 VE_INVALID_OPERATION, kTraceWarning,
3269 "DeRegisterRxVadObserver() observer already disabled");
3270 return 0;
3271 }
3272 _rxVadObserverPtr = NULL;
3273 _RxVadDetection = false;
3274 return 0;
3275}
3276
3277int
3278Channel::VoiceActivityIndicator(int &activity)
3279{
3280 activity = _sendFrameType;
3281
3282 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3283 "Channel::VoiceActivityIndicator(indicator=%d)", activity);
3284 return 0;
3285}
3286
3287#ifdef WEBRTC_VOICE_ENGINE_AGC
3288
3289int
3290Channel::SetRxAgcStatus(const bool enable, const AgcModes mode)
3291{
3292 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3293 "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
3294 (int)enable, (int)mode);
3295
3296 GainControl::Mode agcMode(GainControl::kFixedDigital);
3297 switch (mode)
3298 {
3299 case kAgcDefault:
3300 agcMode = GainControl::kAdaptiveDigital;
3301 break;
3302 case kAgcUnchanged:
3303 agcMode = _rxAudioProcessingModulePtr->gain_control()->mode();
3304 break;
3305 case kAgcFixedDigital:
3306 agcMode = GainControl::kFixedDigital;
3307 break;
3308 case kAgcAdaptiveDigital:
3309            agcMode = GainControl::kAdaptiveDigital;
3310 break;
3311 default:
3312 _engineStatisticsPtr->SetLastError(
3313 VE_INVALID_ARGUMENT, kTraceError,
3314 "SetRxAgcStatus() invalid Agc mode");
3315 return -1;
3316 }
3317
3318 if (_rxAudioProcessingModulePtr->gain_control()->set_mode(agcMode) != 0)
3319 {
3320 _engineStatisticsPtr->SetLastError(
3321 VE_APM_ERROR, kTraceError,
3322 "SetRxAgcStatus() failed to set Agc mode");
3323 return -1;
3324 }
3325 if (_rxAudioProcessingModulePtr->gain_control()->Enable(enable) != 0)
3326 {
3327 _engineStatisticsPtr->SetLastError(
3328 VE_APM_ERROR, kTraceError,
3329 "SetRxAgcStatus() failed to set Agc state");
3330 return -1;
3331 }
3332
3333 _rxAgcIsEnabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00003334 _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
3335
3336 return 0;
3337}
3338
3339int
3340Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
3341{
3342 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3343 "Channel::GetRxAgcStatus(enable=?, mode=?)");
3344
3345 bool enable = _rxAudioProcessingModulePtr->gain_control()->is_enabled();
3346 GainControl::Mode agcMode =
3347 _rxAudioProcessingModulePtr->gain_control()->mode();
3348
3349 enabled = enable;
3350
3351 switch (agcMode)
3352 {
3353 case GainControl::kFixedDigital:
3354 mode = kAgcFixedDigital;
3355 break;
3356 case GainControl::kAdaptiveDigital:
3357 mode = kAgcAdaptiveDigital;
3358 break;
3359 default:
3360 _engineStatisticsPtr->SetLastError(
3361 VE_APM_ERROR, kTraceError,
3362 "GetRxAgcStatus() invalid Agc mode");
3363 return -1;
3364 }
3365
3366 return 0;
3367}
3368
3369int
3370Channel::SetRxAgcConfig(const AgcConfig config)
3371{
3372 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3373 "Channel::SetRxAgcConfig()");
3374
3375 if (_rxAudioProcessingModulePtr->gain_control()->set_target_level_dbfs(
3376 config.targetLeveldBOv) != 0)
3377 {
3378 _engineStatisticsPtr->SetLastError(
3379 VE_APM_ERROR, kTraceError,
3380 "SetRxAgcConfig() failed to set target peak |level|"
3381            " (or envelope) of the Agc");
3382 return -1;
3383 }
3384 if (_rxAudioProcessingModulePtr->gain_control()->set_compression_gain_db(
3385 config.digitalCompressionGaindB) != 0)
3386 {
3387 _engineStatisticsPtr->SetLastError(
3388 VE_APM_ERROR, kTraceError,
3389 "SetRxAgcConfig() failed to set the range in |gain| the"
3390 " digital compression stage may apply");
3391 return -1;
3392 }
3393 if (_rxAudioProcessingModulePtr->gain_control()->enable_limiter(
3394 config.limiterEnable) != 0)
3395 {
3396 _engineStatisticsPtr->SetLastError(
3397 VE_APM_ERROR, kTraceError,
3398 "SetRxAgcConfig() failed to set hard limiter to the signal");
3399 return -1;
3400 }
3401
3402 return 0;
3403}
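// Configuration sketch (example values only, not tuning recommendations):
//
//   webrtc::AgcConfig config;
//   config.targetLeveldBOv = 3;           // target peak 3 dB below overload
//   config.digitalCompressionGaindB = 9;  // maximum digital gain
//   config.limiterEnable = true;
//   channel->SetRxAgcConfig(config);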
3404
3405int
3406Channel::GetRxAgcConfig(AgcConfig& config)
3407{
3408 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3409                 "Channel::GetRxAgcConfig(config=?)");
3410
3411 config.targetLeveldBOv =
3412 _rxAudioProcessingModulePtr->gain_control()->target_level_dbfs();
3413 config.digitalCompressionGaindB =
3414 _rxAudioProcessingModulePtr->gain_control()->compression_gain_db();
3415 config.limiterEnable =
3416 _rxAudioProcessingModulePtr->gain_control()->is_limiter_enabled();
3417
3418 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3419 VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
3420 "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
3421 " limiterEnable=%d",
3422 config.targetLeveldBOv,
3423 config.digitalCompressionGaindB,
3424 config.limiterEnable);
3425
3426 return 0;
3427}
3428
3429#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
3430
3431#ifdef WEBRTC_VOICE_ENGINE_NR
3432
3433int
3434Channel::SetRxNsStatus(const bool enable, const NsModes mode)
3435{
3436 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3437 "Channel::SetRxNsStatus(enable=%d, mode=%d)",
3438 (int)enable, (int)mode);
3439
3440 NoiseSuppression::Level nsLevel(
3441 (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE);
3442 switch (mode)
3443 {
3444
3445 case kNsDefault:
3446 nsLevel = (NoiseSuppression::Level)
3447 WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE;
3448 break;
3449 case kNsUnchanged:
3450 nsLevel = _rxAudioProcessingModulePtr->noise_suppression()->level();
3451 break;
3452 case kNsConference:
3453 nsLevel = NoiseSuppression::kHigh;
3454 break;
3455 case kNsLowSuppression:
3456 nsLevel = NoiseSuppression::kLow;
3457 break;
3458 case kNsModerateSuppression:
3459 nsLevel = NoiseSuppression::kModerate;
3460 break;
3461 case kNsHighSuppression:
3462 nsLevel = NoiseSuppression::kHigh;
3463 break;
3464 case kNsVeryHighSuppression:
3465 nsLevel = NoiseSuppression::kVeryHigh;
3466 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003467 }
3468
3469 if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(nsLevel)
3470 != 0)
3471 {
3472 _engineStatisticsPtr->SetLastError(
3473 VE_APM_ERROR, kTraceError,
3474            "SetRxNsStatus() failed to set Ns level");
3475 return -1;
3476 }
3477 if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(enable) != 0)
3478 {
3479 _engineStatisticsPtr->SetLastError(
3480 VE_APM_ERROR, kTraceError,
3481            "SetRxNsStatus() failed to set Ns state");
3482 return -1;
3483 }
3484
3485 _rxNsIsEnabled = enable;
3486 _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
3487
3488 return 0;
3489}
3490
3491int
3492Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
3493{
3494 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3495 "Channel::GetRxNsStatus(enable=?, mode=?)");
3496
3497 bool enable =
3498 _rxAudioProcessingModulePtr->noise_suppression()->is_enabled();
3499 NoiseSuppression::Level ncLevel =
3500 _rxAudioProcessingModulePtr->noise_suppression()->level();
3501
3502 enabled = enable;
3503
3504 switch (ncLevel)
3505 {
3506 case NoiseSuppression::kLow:
3507 mode = kNsLowSuppression;
3508 break;
3509 case NoiseSuppression::kModerate:
3510 mode = kNsModerateSuppression;
3511 break;
3512 case NoiseSuppression::kHigh:
3513 mode = kNsHighSuppression;
3514 break;
3515 case NoiseSuppression::kVeryHigh:
3516 mode = kNsVeryHighSuppression;
3517 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003518 }
3519
3520 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3521 VoEId(_instanceId,_channelId),
3522 "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
3523 return 0;
3524}
3525
3526#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
3527
3528int
3529Channel::RegisterRTPObserver(VoERTPObserver& observer)
3530{
3531 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3532 "Channel::RegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003533 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003534
3535 if (_rtpObserverPtr)
3536 {
3537 _engineStatisticsPtr->SetLastError(
3538 VE_INVALID_OPERATION, kTraceError,
3539 "RegisterRTPObserver() observer already enabled");
3540 return -1;
3541 }
3542
3543 _rtpObserverPtr = &observer;
3544 _rtpObserver = true;
3545
3546 return 0;
3547}
3548
3549int
3550Channel::DeRegisterRTPObserver()
3551{
3552 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3553 "Channel::DeRegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003554 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003555
3556 if (!_rtpObserverPtr)
3557 {
3558 _engineStatisticsPtr->SetLastError(
3559 VE_INVALID_OPERATION, kTraceWarning,
3560 "DeRegisterRTPObserver() observer already disabled");
3561 return 0;
3562 }
3563
3564 _rtpObserver = false;
3565 _rtpObserverPtr = NULL;
3566
3567 return 0;
3568}
3569
3570int
3571Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
3572{
3573 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3574 "Channel::RegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003575 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003576
3577 if (_rtcpObserverPtr)
3578 {
3579 _engineStatisticsPtr->SetLastError(
3580 VE_INVALID_OPERATION, kTraceError,
3581 "RegisterRTCPObserver() observer already enabled");
3582 return -1;
3583 }
3584
3585 _rtcpObserverPtr = &observer;
3586 _rtcpObserver = true;
3587
3588 return 0;
3589}
3590
3591int
3592Channel::DeRegisterRTCPObserver()
3593{
3594 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3595 "Channel::DeRegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003596 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003597
3598 if (!_rtcpObserverPtr)
3599 {
3600 _engineStatisticsPtr->SetLastError(
3601 VE_INVALID_OPERATION, kTraceWarning,
3602 "DeRegisterRTCPObserver() observer already disabled");
3603 return 0;
3604 }
3605
3606 _rtcpObserver = false;
3607 _rtcpObserverPtr = NULL;
3608
3609 return 0;
3610}
3611
3612int
3613Channel::SetLocalSSRC(unsigned int ssrc)
3614{
3615 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3616 "Channel::SetLocalSSRC()");
3617 if (_sending)
3618 {
3619 _engineStatisticsPtr->SetLastError(
3620 VE_ALREADY_SENDING, kTraceError,
3621 "SetLocalSSRC() already sending");
3622 return -1;
3623 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003624 if (_rtpRtcpModule->SetSSRC(ssrc) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003625 {
3626 _engineStatisticsPtr->SetLastError(
3627 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3628 "SetLocalSSRC() failed to set SSRC");
3629 return -1;
3630 }
3631 return 0;
3632}
3633
3634int
3635Channel::GetLocalSSRC(unsigned int& ssrc)
3636{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003637 ssrc = _rtpRtcpModule->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003638 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3639 VoEId(_instanceId,_channelId),
3640 "GetLocalSSRC() => ssrc=%lu", ssrc);
3641 return 0;
3642}
3643
3644int
3645Channel::GetRemoteSSRC(unsigned int& ssrc)
3646{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003647 ssrc = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003648 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3649 VoEId(_instanceId,_channelId),
3650 "GetRemoteSSRC() => ssrc=%lu", ssrc);
3651 return 0;
3652}
3653
3654int
3655Channel::GetRemoteCSRCs(unsigned int arrCSRC[15])
3656{
3657 if (arrCSRC == NULL)
3658 {
3659 _engineStatisticsPtr->SetLastError(
3660 VE_INVALID_ARGUMENT, kTraceError,
3661 "GetRemoteCSRCs() invalid array argument");
3662 return -1;
3663 }
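    // An RTP header carries at most 15 CSRCs (the CC field is four bits wide,
    // RFC 3550), which is why the caller's array is declared as arrCSRC[15].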
3664 WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize];
3665 WebRtc_Word32 CSRCs(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003666 CSRCs = _rtpRtcpModule->CSRCs(arrOfCSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00003667 if (CSRCs > 0)
3668 {
3669 memcpy(arrCSRC, arrOfCSRC, CSRCs * sizeof(WebRtc_UWord32));
3670 for (int i = 0; i < (int) CSRCs; i++)
3671 {
3672 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3673 VoEId(_instanceId, _channelId),
3674 "GetRemoteCSRCs() => arrCSRC[%d]=%lu", i, arrCSRC[i]);
3675 }
3676 } else
3677 {
3678 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3679 VoEId(_instanceId, _channelId),
3680 "GetRemoteCSRCs() => list is empty!");
3681 }
3682 return CSRCs;
3683}
3684
3685int
3686Channel::SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID)
3687{
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00003688 if (_rtpAudioProc.get() == NULL)
3689 {
3690 _rtpAudioProc.reset(AudioProcessing::Create(VoEModuleId(_instanceId,
3691 _channelId)));
3692 if (_rtpAudioProc.get() == NULL)
3693 {
3694 _engineStatisticsPtr->SetLastError(VE_NO_MEMORY, kTraceCritical,
3695 "Failed to create AudioProcessing");
3696 return -1;
3697 }
3698 }
3699
3700 if (_rtpAudioProc->level_estimator()->Enable(enable) !=
3701 AudioProcessing::kNoError)
3702 {
3703 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceWarning,
3704 "Failed to enable AudioProcessing::level_estimator()");
3705 }
3706
niklase@google.com470e71d2011-07-07 08:21:25 +00003707 _includeAudioLevelIndication = enable;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003708 return _rtpRtcpModule->SetRTPAudioLevelIndicationStatus(enable, ID);
niklase@google.com470e71d2011-07-07 08:21:25 +00003709}
3710int
3711Channel::GetRTPAudioLevelIndicationStatus(bool& enabled, unsigned char& ID)
3712{
3713 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3714 VoEId(_instanceId,_channelId),
3715 "GetRTPAudioLevelIndicationStatus() => enabled=%d, ID=%u",
3716 enabled, ID);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003717 return _rtpRtcpModule->GetRTPAudioLevelIndicationStatus(enabled, ID);
niklase@google.com470e71d2011-07-07 08:21:25 +00003718}
3719
3720int
3721Channel::SetRTCPStatus(bool enable)
3722{
3723 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3724 "Channel::SetRTCPStatus()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003725 if (_rtpRtcpModule->SetRTCPStatus(enable ?
niklase@google.com470e71d2011-07-07 08:21:25 +00003726 kRtcpCompound : kRtcpOff) != 0)
3727 {
3728 _engineStatisticsPtr->SetLastError(
3729 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3730 "SetRTCPStatus() failed to set RTCP status");
3731 return -1;
3732 }
3733 return 0;
3734}
3735
3736int
3737Channel::GetRTCPStatus(bool& enabled)
3738{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003739 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003740 enabled = (method != kRtcpOff);
3741 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3742 VoEId(_instanceId,_channelId),
3743 "GetRTCPStatus() => enabled=%d", enabled);
3744 return 0;
3745}
3746
3747int
3748Channel::SetRTCP_CNAME(const char cName[256])
3749{
3750 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3751 "Channel::SetRTCP_CNAME()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003752 if (_rtpRtcpModule->SetCNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003753 {
3754 _engineStatisticsPtr->SetLastError(
3755 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3756 "SetRTCP_CNAME() failed to set RTCP CNAME");
3757 return -1;
3758 }
3759 return 0;
3760}
3761
3762int
3763Channel::GetRTCP_CNAME(char cName[256])
3764{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003765 if (_rtpRtcpModule->CNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003766 {
3767 _engineStatisticsPtr->SetLastError(
3768 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3769 "GetRTCP_CNAME() failed to retrieve RTCP CNAME");
3770 return -1;
3771 }
3772 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3773 VoEId(_instanceId, _channelId),
3774 "GetRTCP_CNAME() => cName=%s", cName);
3775 return 0;
3776}
3777
3778int
3779Channel::GetRemoteRTCP_CNAME(char cName[256])
3780{
3781 if (cName == NULL)
3782 {
3783 _engineStatisticsPtr->SetLastError(
3784 VE_INVALID_ARGUMENT, kTraceError,
3785 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
3786 return -1;
3787 }
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00003788 char cname[RTCP_CNAME_SIZE];
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003789 const WebRtc_UWord32 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
3790 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003791 {
3792 _engineStatisticsPtr->SetLastError(
3793 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
3794 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
3795 return -1;
3796 }
3797 strcpy(cName, cname);
3798 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3799 VoEId(_instanceId, _channelId),
3800 "GetRemoteRTCP_CNAME() => cName=%s", cName);
3801 return 0;
3802}
3803
3804int
3805Channel::GetRemoteRTCPData(
3806 unsigned int& NTPHigh,
3807 unsigned int& NTPLow,
3808 unsigned int& timestamp,
3809 unsigned int& playoutTimestamp,
3810 unsigned int* jitter,
3811 unsigned short* fractionLost)
3812{
3813 // --- Information from sender info in received Sender Reports
3814
3815 RTCPSenderInfo senderInfo;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003816 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003817 {
3818 _engineStatisticsPtr->SetLastError(
3819 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003820 "GetRemoteRTCPData() failed to retrieve sender info for remote "
niklase@google.com470e71d2011-07-07 08:21:25 +00003821 "side");
3822 return -1;
3823 }
3824
3825 // We only utilize 12 out of 20 bytes in the sender info (ignores packet
3826 // and octet count)
3827 NTPHigh = senderInfo.NTPseconds;
3828 NTPLow = senderInfo.NTPfraction;
3829 timestamp = senderInfo.RTPtimeStamp;
3830
3831 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3832 VoEId(_instanceId, _channelId),
3833 "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
3834 "timestamp=%lu",
3835 NTPHigh, NTPLow, timestamp);
3836
3837 // --- Locally derived information
3838
3839 // This value is updated on each incoming RTCP packet (0 when no packet
3840 // has been received)
3841 playoutTimestamp = _playoutTimeStampRTCP;
3842
3843 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3844 VoEId(_instanceId, _channelId),
3845 "GetRemoteRTCPData() => playoutTimestamp=%lu",
3846 _playoutTimeStampRTCP);
3847
3848 if (NULL != jitter || NULL != fractionLost)
3849 {
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003850 // Get all RTCP receiver report blocks that have been received on this
3851 // channel. If we receive RTP packets from a remote source we know the
3852        // remote SSRC and use the report block from that source.
3853 // Otherwise use the first report block.
3854 std::vector<RTCPReportBlock> remote_stats;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003855 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003856 remote_stats.empty()) {
3857 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3858 VoEId(_instanceId, _channelId),
3859 "GetRemoteRTCPData() failed to measure statistics due"
3860 " to lack of received RTP and/or RTCP packets");
3861 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003862 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003863
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003864 WebRtc_UWord32 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003865 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
3866 for (; it != remote_stats.end(); ++it) {
3867 if (it->remoteSSRC == remoteSSRC)
3868 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003869 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003870
3871 if (it == remote_stats.end()) {
3872 // If we have not received any RTCP packets from this SSRC it probably
3873 // means that we have not received any RTP packets.
3874 // Use the first received report block instead.
3875 it = remote_stats.begin();
3876 remoteSSRC = it->remoteSSRC;
niklase@google.com470e71d2011-07-07 08:21:25 +00003877 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003878
xians@webrtc.org79af7342012-01-31 12:22:14 +00003879 if (jitter) {
3880 *jitter = it->jitter;
3881 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3882 VoEId(_instanceId, _channelId),
3883 "GetRemoteRTCPData() => jitter = %lu", *jitter);
3884 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003885
xians@webrtc.org79af7342012-01-31 12:22:14 +00003886 if (fractionLost) {
3887 *fractionLost = it->fractionLost;
3888 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3889 VoEId(_instanceId, _channelId),
3890 "GetRemoteRTCPData() => fractionLost = %lu",
3891 *fractionLost);
3892 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003893 }
3894 return 0;
3895}
3896
3897int
3898Channel::SendApplicationDefinedRTCPPacket(const unsigned char subType,
3899 unsigned int name,
3900 const char* data,
3901 unsigned short dataLengthInBytes)
3902{
3903 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3904 "Channel::SendApplicationDefinedRTCPPacket()");
3905 if (!_sending)
3906 {
3907 _engineStatisticsPtr->SetLastError(
3908 VE_NOT_SENDING, kTraceError,
3909 "SendApplicationDefinedRTCPPacket() not sending");
3910 return -1;
3911 }
3912 if (NULL == data)
3913 {
3914 _engineStatisticsPtr->SetLastError(
3915 VE_INVALID_ARGUMENT, kTraceError,
3916 "SendApplicationDefinedRTCPPacket() invalid data value");
3917 return -1;
3918 }
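    // RTCP APP packets carry their application-dependent data as a sequence
    // of 32-bit words (RFC 3550, section 6.7), hence the multiple-of-four
    // length requirement below.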
3919 if (dataLengthInBytes % 4 != 0)
3920 {
3921 _engineStatisticsPtr->SetLastError(
3922 VE_INVALID_ARGUMENT, kTraceError,
3923 "SendApplicationDefinedRTCPPacket() invalid length value");
3924 return -1;
3925 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003926 RTCPMethod status = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003927 if (status == kRtcpOff)
3928 {
3929 _engineStatisticsPtr->SetLastError(
3930 VE_RTCP_ERROR, kTraceError,
3931 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
3932 return -1;
3933 }
3934
3935 // Create and schedule the RTCP APP packet for transmission
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003936 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
niklase@google.com470e71d2011-07-07 08:21:25 +00003937 subType,
3938 name,
3939 (const unsigned char*) data,
3940 dataLengthInBytes) != 0)
3941 {
3942 _engineStatisticsPtr->SetLastError(
3943 VE_SEND_ERROR, kTraceError,
3944 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
3945 return -1;
3946 }
3947 return 0;
3948}
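// Usage sketch (illustrative only): the channel must be sending and RTCP must
// be enabled; the sub-type, name and data below are arbitrary placeholders.
//
//   const char appData[8] = {0};  // two 32-bit words
//   channel->SendApplicationDefinedRTCPPacket(0, 0x564f4531 /* "VOE1" */,
//                                             appData, sizeof(appData));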
3949
3950int
3951Channel::GetRTPStatistics(
3952 unsigned int& averageJitterMs,
3953 unsigned int& maxJitterMs,
3954 unsigned int& discardedPackets)
3955{
3956 WebRtc_UWord8 fraction_lost(0);
3957 WebRtc_UWord32 cum_lost(0);
3958 WebRtc_UWord32 ext_max(0);
3959 WebRtc_UWord32 jitter(0);
3960 WebRtc_UWord32 max_jitter(0);
3961
3962 // The jitter statistics is updated for each received RTP packet and is
3963 // based on received packets.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003964 if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
niklase@google.com470e71d2011-07-07 08:21:25 +00003965 &cum_lost,
3966 &ext_max,
3967 &jitter,
3968 &max_jitter) != 0)
3969 {
3970 _engineStatisticsPtr->SetLastError(
3971 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003972 "GetRTPStatistics() failed to read RTP statistics from the "
niklase@google.com470e71d2011-07-07 08:21:25 +00003973 "RTP/RTCP module");
3974 }
3975
3976 const WebRtc_Word32 playoutFrequency =
3977 _audioCodingModule.PlayoutFrequency();
3978 if (playoutFrequency > 0)
3979 {
3980 // Scale RTP statistics given the current playout frequency
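        // (The jitter values are reported in RTP timestamp units, so dividing
        // by samples-per-millisecond converts them to milliseconds; e.g. at
        // 16000 Hz, a jitter of 160 units corresponds to 10 ms.)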
3981 maxJitterMs = max_jitter / (playoutFrequency / 1000);
3982 averageJitterMs = jitter / (playoutFrequency / 1000);
3983 }
3984
3985 discardedPackets = _numberOfDiscardedPackets;
3986
3987 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3988 VoEId(_instanceId, _channelId),
3989 "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003990 " discardedPackets = %lu)",
niklase@google.com470e71d2011-07-07 08:21:25 +00003991 averageJitterMs, maxJitterMs, discardedPackets);
3992 return 0;
3993}
3994
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00003995int Channel::GetRemoteRTCPSenderInfo(SenderInfo* sender_info) {
3996 if (sender_info == NULL) {
3997 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
3998 "GetRemoteRTCPSenderInfo() invalid sender_info.");
3999 return -1;
4000 }
4001
4002 // Get the sender info from the latest received RTCP Sender Report.
4003 RTCPSenderInfo rtcp_sender_info;
4004 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_sender_info) != 0) {
4005 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4006 "GetRemoteRTCPSenderInfo() failed to read RTCP SR sender info.");
4007 return -1;
4008 }
4009
4010 sender_info->NTP_timestamp_high = rtcp_sender_info.NTPseconds;
4011 sender_info->NTP_timestamp_low = rtcp_sender_info.NTPfraction;
4012 sender_info->RTP_timestamp = rtcp_sender_info.RTPtimeStamp;
4013 sender_info->sender_packet_count = rtcp_sender_info.sendPacketCount;
4014 sender_info->sender_octet_count = rtcp_sender_info.sendOctetCount;
4015 return 0;
4016}
4017
4018int Channel::GetRemoteRTCPReportBlocks(
4019 std::vector<ReportBlock>* report_blocks) {
4020 if (report_blocks == NULL) {
4021 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
4022 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
4023 return -1;
4024 }
4025
4026 // Get the report blocks from the latest received RTCP Sender or Receiver
4027 // Report. Each element in the vector contains the sender's SSRC and a
4028 // report block according to RFC 3550.
4029 std::vector<RTCPReportBlock> rtcp_report_blocks;
4030 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
4031 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4032 "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
4033 return -1;
4034 }
4035
4036 if (rtcp_report_blocks.empty())
4037 return 0;
4038
4039 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
4040 for (; it != rtcp_report_blocks.end(); ++it) {
4041 ReportBlock report_block;
4042 report_block.sender_SSRC = it->remoteSSRC;
4043 report_block.source_SSRC = it->sourceSSRC;
4044 report_block.fraction_lost = it->fractionLost;
4045 report_block.cumulative_num_packets_lost = it->cumulativeLost;
4046 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
4047 report_block.interarrival_jitter = it->jitter;
4048 report_block.last_SR_timestamp = it->lastSR;
4049 report_block.delay_since_last_SR = it->delaySinceLastSR;
4050 report_blocks->push_back(report_block);
4051 }
4052 return 0;
4053}
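// Usage sketch (illustrative only). Per RFC 3550 the fraction-lost field is
// expressed in units of 1/256, so a caller could convert it as follows:
//
//   std::vector<webrtc::ReportBlock> blocks;
//   if (channel->GetRemoteRTCPReportBlocks(&blocks) == 0 && !blocks.empty()) {
//     float lossPercent = (blocks[0].fraction_lost * 100.0f) / 256.0f;
//   }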
4054
niklase@google.com470e71d2011-07-07 08:21:25 +00004055int
4056Channel::GetRTPStatistics(CallStatistics& stats)
4057{
4058 WebRtc_UWord8 fraction_lost(0);
4059 WebRtc_UWord32 cum_lost(0);
4060 WebRtc_UWord32 ext_max(0);
4061 WebRtc_UWord32 jitter(0);
4062 WebRtc_UWord32 max_jitter(0);
4063
4064 // --- Part one of the final structure (four values)
4065
4066 // The jitter statistics is updated for each received RTP packet and is
4067 // based on received packets.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004068 if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
niklase@google.com470e71d2011-07-07 08:21:25 +00004069 &cum_lost,
4070 &ext_max,
4071 &jitter,
4072 &max_jitter) != 0)
4073 {
4074 _engineStatisticsPtr->SetLastError(
4075 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
4076 "GetRTPStatistics() failed to read RTP statistics from the "
4077 "RTP/RTCP module");
4078 }
4079
4080 stats.fractionLost = fraction_lost;
4081 stats.cumulativeLost = cum_lost;
4082 stats.extendedMax = ext_max;
4083 stats.jitterSamples = jitter;
4084
4085 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4086 VoEId(_instanceId, _channelId),
4087 "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004088 " extendedMax=%lu, jitterSamples=%li)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004089 stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
4090 stats.jitterSamples);
4091
4092 // --- Part two of the final structure (one value)
4093
4094 WebRtc_UWord16 RTT(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004095 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00004096 if (method == kRtcpOff)
4097 {
4098 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4099 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004100 "GetRTPStatistics() RTCP is disabled => valid RTT "
niklase@google.com470e71d2011-07-07 08:21:25 +00004101 "measurements cannot be retrieved");
4102 } else
4103 {
4104 // The remote SSRC will be zero if no RTP packet has been received.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004105 WebRtc_UWord32 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004106 if (remoteSSRC > 0)
4107 {
4108 WebRtc_UWord16 avgRTT(0);
4109 WebRtc_UWord16 maxRTT(0);
4110 WebRtc_UWord16 minRTT(0);
4111
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004112 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT)
niklase@google.com470e71d2011-07-07 08:21:25 +00004113 != 0)
4114 {
4115 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4116 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004117 "GetRTPStatistics() failed to retrieve RTT from "
niklase@google.com470e71d2011-07-07 08:21:25 +00004118 "the RTP/RTCP module");
4119 }
4120 } else
4121 {
4122 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4123 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004124 "GetRTPStatistics() failed to measure RTT since no "
niklase@google.com470e71d2011-07-07 08:21:25 +00004125 "RTP packets have been received yet");
4126 }
4127 }
4128
4129 stats.rttMs = static_cast<int> (RTT);
4130
4131 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4132 VoEId(_instanceId, _channelId),
4133 "GetRTPStatistics() => rttMs=%d", stats.rttMs);
4134
4135 // --- Part three of the final structure (four values)
4136
4137 WebRtc_UWord32 bytesSent(0);
4138 WebRtc_UWord32 packetsSent(0);
4139 WebRtc_UWord32 bytesReceived(0);
4140 WebRtc_UWord32 packetsReceived(0);
4141
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004142 if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
niklase@google.com470e71d2011-07-07 08:21:25 +00004143 &packetsSent,
4144 &bytesReceived,
4145 &packetsReceived) != 0)
4146 {
4147 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4148 VoEId(_instanceId, _channelId),
4149 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004150 " output will not be complete");
niklase@google.com470e71d2011-07-07 08:21:25 +00004151 }
4152
4153 stats.bytesSent = bytesSent;
4154 stats.packetsSent = packetsSent;
4155 stats.bytesReceived = bytesReceived;
4156 stats.packetsReceived = packetsReceived;
4157
4158 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4159 VoEId(_instanceId, _channelId),
4160 "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004161 " bytesReceived=%d, packetsReceived=%d)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004162 stats.bytesSent, stats.packetsSent, stats.bytesReceived,
4163 stats.packetsReceived);
4164
4165 return 0;
4166}
4167
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004168int Channel::SetFECStatus(bool enable, int redPayloadtype) {
4169 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4170 "Channel::SetFECStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00004171
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00004172 if (enable) {
4173 if (redPayloadtype < 0 || redPayloadtype > 127) {
4174 _engineStatisticsPtr->SetLastError(
4175 VE_PLTYPE_ERROR, kTraceError,
4176 "SetFECStatus() invalid RED payload type");
4177 return -1;
4178 }
4179
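    // "FEC" at this layer is the ACM's RED-based redundancy (RFC 2198), so a
    // RED payload type has to be registered before it can be enabled.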
4180 if (SetRedPayloadType(redPayloadtype) < 0) {
4181 _engineStatisticsPtr->SetLastError(
4182 VE_CODEC_ERROR, kTraceError,
4183            "SetFECStatus() failed to register the RED payload type");
4184 return -1;
4185 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004186 }
niklase@google.com470e71d2011-07-07 08:21:25 +00004187
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004188 if (_audioCodingModule.SetFECStatus(enable) != 0) {
4189 _engineStatisticsPtr->SetLastError(
4190 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4191 "SetFECStatus() failed to set FEC state in the ACM");
4192 return -1;
4193 }
4194 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004195}
4196
4197int
4198Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
4199{
4200 enabled = _audioCodingModule.FECStatus();
4201 if (enabled)
4202 {
4203 WebRtc_Word8 payloadType(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004204 if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004205 {
4206 _engineStatisticsPtr->SetLastError(
4207 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4208 "GetFECStatus() failed to retrieve RED PT from RTP/RTCP "
4209 "module");
4210 return -1;
4211        }
        redPayloadtype = payloadType;
4212 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4213 VoEId(_instanceId, _channelId),
4214 "GetFECStatus() => enabled=%d, redPayloadtype=%d",
4215 enabled, redPayloadtype);
4216 return 0;
4217 }
4218 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4219 VoEId(_instanceId, _channelId),
4220 "GetFECStatus() => enabled=%d", enabled);
4221 return 0;
4222}
4223
4224int
niklase@google.com470e71d2011-07-07 08:21:25 +00004225Channel::StartRTPDump(const char fileNameUTF8[1024],
4226 RTPDirections direction)
4227{
4228 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4229 "Channel::StartRTPDump()");
4230 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4231 {
4232 _engineStatisticsPtr->SetLastError(
4233 VE_INVALID_ARGUMENT, kTraceError,
4234 "StartRTPDump() invalid RTP direction");
4235 return -1;
4236 }
4237 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4238 &_rtpDumpIn : &_rtpDumpOut;
4239 if (rtpDumpPtr == NULL)
4240 {
4241 assert(false);
4242 return -1;
4243 }
4244 if (rtpDumpPtr->IsActive())
4245 {
4246 rtpDumpPtr->Stop();
4247 }
4248 if (rtpDumpPtr->Start(fileNameUTF8) != 0)
4249 {
4250 _engineStatisticsPtr->SetLastError(
4251 VE_BAD_FILE, kTraceError,
4252 "StartRTPDump() failed to create file");
4253 return -1;
4254 }
4255 return 0;
4256}
4257
4258int
4259Channel::StopRTPDump(RTPDirections direction)
4260{
4261 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4262 "Channel::StopRTPDump()");
4263 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4264 {
4265 _engineStatisticsPtr->SetLastError(
4266 VE_INVALID_ARGUMENT, kTraceError,
4267 "StopRTPDump() invalid RTP direction");
4268 return -1;
4269 }
4270 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4271 &_rtpDumpIn : &_rtpDumpOut;
4272 if (rtpDumpPtr == NULL)
4273 {
4274 assert(false);
4275 return -1;
4276 }
4277 if (!rtpDumpPtr->IsActive())
4278 {
4279 return 0;
4280 }
4281 return rtpDumpPtr->Stop();
4282}
4283
4284bool
4285Channel::RTPDumpIsActive(RTPDirections direction)
4286{
4287 if ((direction != kRtpIncoming) &&
4288 (direction != kRtpOutgoing))
4289 {
4290 _engineStatisticsPtr->SetLastError(
4291 VE_INVALID_ARGUMENT, kTraceError,
4292 "RTPDumpIsActive() invalid RTP direction");
4293 return false;
4294 }
4295 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4296 &_rtpDumpIn : &_rtpDumpOut;
4297 return rtpDumpPtr->IsActive();
4298}
4299
4300int
4301Channel::InsertExtraRTPPacket(unsigned char payloadType,
4302 bool markerBit,
4303 const char* payloadData,
4304 unsigned short payloadSize)
4305{
4306 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4307 "Channel::InsertExtraRTPPacket()");
4308 if (payloadType > 127)
4309 {
4310 _engineStatisticsPtr->SetLastError(
4311 VE_INVALID_PLTYPE, kTraceError,
4312 "InsertExtraRTPPacket() invalid payload type");
4313 return -1;
4314 }
4315 if (payloadData == NULL)
4316 {
4317 _engineStatisticsPtr->SetLastError(
4318 VE_INVALID_ARGUMENT, kTraceError,
4319 "InsertExtraRTPPacket() invalid payload data");
4320 return -1;
4321 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004322 if (payloadSize > _rtpRtcpModule->MaxDataPayloadLength())
niklase@google.com470e71d2011-07-07 08:21:25 +00004323 {
4324 _engineStatisticsPtr->SetLastError(
4325 VE_INVALID_ARGUMENT, kTraceError,
4326 "InsertExtraRTPPacket() invalid payload size");
4327 return -1;
4328 }
4329 if (!_sending)
4330 {
4331 _engineStatisticsPtr->SetLastError(
4332 VE_NOT_SENDING, kTraceError,
4333 "InsertExtraRTPPacket() not sending");
4334 return -1;
4335 }
4336
4337 // Create extra RTP packet by calling RtpRtcp::SendOutgoingData().
4338 // Transport::SendPacket() will be called by the module when the RTP packet
4339 // is created.
4340 // The call to SendOutgoingData() does *not* modify the timestamp and
4341 // payloadtype to ensure that the RTP module generates a valid RTP packet
4342 // (user might utilize a non-registered payload type).
4343 // The marker bit and payload type will be replaced just before the actual
4344 // transmission, i.e., the actual modification is done *after* the RTP
4345 // module has delivered its RTP packet back to the VoE.
4346 // We will use the stored values above when the packet is modified
4347 // (see Channel::SendPacket()).
4348
4349 _extraPayloadType = payloadType;
4350 _extraMarkerBit = markerBit;
4351 _insertExtraRTPPacket = true;
4352
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004353 if (_rtpRtcpModule->SendOutgoingData(kAudioFrameSpeech,
niklase@google.com470e71d2011-07-07 08:21:25 +00004354 _lastPayloadType,
4355 _lastLocalTimeStamp,
stefan@webrtc.orgddfdfed2012-07-03 13:21:22 +00004356 // Leaving the time when this frame was
4357 // received from the capture device as
4358 // undefined for voice for now.
4359 -1,
niklase@google.com470e71d2011-07-07 08:21:25 +00004360 (const WebRtc_UWord8*) payloadData,
4361 payloadSize) != 0)
4362 {
4363 _engineStatisticsPtr->SetLastError(
4364 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4365 "InsertExtraRTPPacket() failed to send extra RTP packet");
4366 return -1;
4367 }
4368
4369 return 0;
4370}
4371
4372WebRtc_UWord32
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004373Channel::Demultiplex(const AudioFrame& audioFrame)
niklase@google.com470e71d2011-07-07 08:21:25 +00004374{
4375 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004376 "Channel::Demultiplex()");
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004377 _audioFrame.CopyFrom(audioFrame);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004378 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004379 return 0;
4380}
4381
4382WebRtc_UWord32
xians@google.com0b0665a2011-08-08 08:18:44 +00004383Channel::PrepareEncodeAndSend(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004384{
4385 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4386 "Channel::PrepareEncodeAndSend()");
4387
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004388 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004389 {
4390 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4391 "Channel::PrepareEncodeAndSend() invalid audio frame");
4392 return -1;
4393 }
4394
4395 if (_inputFilePlaying)
4396 {
4397 MixOrReplaceAudioWithFile(mixingFrequency);
4398 }
4399
4400 if (_mute)
4401 {
4402 AudioFrameOperations::Mute(_audioFrame);
4403 }
4404
4405 if (_inputExternalMedia)
4406 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004407 CriticalSectionScoped cs(&_callbackCritSect);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004408 const bool isStereo = (_audioFrame.num_channels_ == 2);
niklase@google.com470e71d2011-07-07 08:21:25 +00004409 if (_inputExternalMediaCallbackPtr)
4410 {
4411 _inputExternalMediaCallbackPtr->Process(
4412 _channelId,
4413 kRecordingPerChannel,
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004414 (WebRtc_Word16*)_audioFrame.data_,
4415 _audioFrame.samples_per_channel_,
4416 _audioFrame.sample_rate_hz_,
niklase@google.com470e71d2011-07-07 08:21:25 +00004417 isStereo);
4418 }
4419 }
4420
4421 InsertInbandDtmfTone();
4422
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004423 if (_includeAudioLevelIndication)
4424 {
4425 assert(_rtpAudioProc.get() != NULL);
4426
4427 // Check if settings need to be updated.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004428 if (_rtpAudioProc->sample_rate_hz() != _audioFrame.sample_rate_hz_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004429 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004430 if (_rtpAudioProc->set_sample_rate_hz(_audioFrame.sample_rate_hz_) !=
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004431 AudioProcessing::kNoError)
4432 {
4433 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4434 VoEId(_instanceId, _channelId),
4435 "Error setting AudioProcessing sample rate");
4436 return -1;
4437 }
4438 }
4439
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004440 if (_rtpAudioProc->num_input_channels() != _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004441 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004442 if (_rtpAudioProc->set_num_channels(_audioFrame.num_channels_,
4443 _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004444 != AudioProcessing::kNoError)
4445 {
4446 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4447 VoEId(_instanceId, _channelId),
4448 "Error setting AudioProcessing channels");
4449 return -1;
4450 }
4451 }
4452
4453 // Performs level analysis only; does not affect the signal.
4454 _rtpAudioProc->ProcessStream(&_audioFrame);
4455 }
4456
niklase@google.com470e71d2011-07-07 08:21:25 +00004457 return 0;
4458}
4459
4460WebRtc_UWord32
4461Channel::EncodeAndSend()
4462{
4463 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4464 "Channel::EncodeAndSend()");
4465
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004466 assert(_audioFrame.num_channels_ <= 2);
4467 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004468 {
4469 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4470 "Channel::EncodeAndSend() invalid audio frame");
4471 return -1;
4472 }
4473
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004474 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004475
4476 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
4477
4478 // The ACM resamples internally.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004479 _audioFrame.timestamp_ = _timeStamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00004480 if (_audioCodingModule.Add10MsData((AudioFrame&)_audioFrame) != 0)
4481 {
4482 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
4483 "Channel::EncodeAndSend() ACM encoding failed");
4484 return -1;
4485 }
4486
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004487 _timeStamp += _audioFrame.samples_per_channel_;
niklase@google.com470e71d2011-07-07 08:21:25 +00004488
4489 // --- Encode if complete frame is ready
4490
4491 // This call will trigger AudioPacketizationCallback::SendData if encoding
4492 // is done and payload is ready for packetization and transmission.
4493 return _audioCodingModule.Process();
4494}
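// The capture path is expected to drive the three steps above once per 10 ms
// frame, roughly as follows (sketch only; in practice the transmit mixer is
// the caller and handles the error paths):
//
//   channel->Demultiplex(capturedFrame);
//   channel->PrepareEncodeAndSend(mixingFrequencyHz);
//   channel->EncodeAndSend();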
4495
4496int Channel::RegisterExternalMediaProcessing(
4497 ProcessingTypes type,
4498 VoEMediaProcess& processObject)
4499{
4500 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4501 "Channel::RegisterExternalMediaProcessing()");
4502
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004503 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004504
4505 if (kPlaybackPerChannel == type)
4506 {
4507 if (_outputExternalMediaCallbackPtr)
4508 {
4509 _engineStatisticsPtr->SetLastError(
4510 VE_INVALID_OPERATION, kTraceError,
4511 "Channel::RegisterExternalMediaProcessing() "
4512 "output external media already enabled");
4513 return -1;
4514 }
4515 _outputExternalMediaCallbackPtr = &processObject;
4516 _outputExternalMedia = true;
4517 }
4518 else if (kRecordingPerChannel == type)
4519 {
4520 if (_inputExternalMediaCallbackPtr)
4521 {
4522 _engineStatisticsPtr->SetLastError(
4523 VE_INVALID_OPERATION, kTraceError,
4524 "Channel::RegisterExternalMediaProcessing() "
4525                "input external media already enabled");
4526 return -1;
4527 }
4528 _inputExternalMediaCallbackPtr = &processObject;
4529 _inputExternalMedia = true;
4530 }
4531 return 0;
4532}
4533
4534int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
4535{
4536 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4537 "Channel::DeRegisterExternalMediaProcessing()");
4538
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004539 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004540
4541 if (kPlaybackPerChannel == type)
4542 {
4543 if (!_outputExternalMediaCallbackPtr)
4544 {
4545 _engineStatisticsPtr->SetLastError(
4546 VE_INVALID_OPERATION, kTraceWarning,
4547 "Channel::DeRegisterExternalMediaProcessing() "
4548 "output external media already disabled");
4549 return 0;
4550 }
4551 _outputExternalMedia = false;
4552 _outputExternalMediaCallbackPtr = NULL;
4553 }
4554 else if (kRecordingPerChannel == type)
4555 {
4556 if (!_inputExternalMediaCallbackPtr)
4557 {
4558 _engineStatisticsPtr->SetLastError(
4559 VE_INVALID_OPERATION, kTraceWarning,
4560 "Channel::DeRegisterExternalMediaProcessing() "
4561 "input external media already disabled");
4562 return 0;
4563 }
4564 _inputExternalMedia = false;
4565 _inputExternalMediaCallbackPtr = NULL;
4566 }
4567
4568 return 0;
4569}
4570
roosa@google.com1b60ceb2012-12-12 23:00:29 +00004571int Channel::SetExternalMixing(bool enabled) {
4572 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4573 "Channel::SetExternalMixing(enabled=%d)", enabled);
4574
4575 if (_playing)
4576 {
4577 _engineStatisticsPtr->SetLastError(
4578 VE_INVALID_OPERATION, kTraceError,
4579 "Channel::SetExternalMixing() "
4580 "external mixing cannot be changed while playing.");
4581 return -1;
4582 }
4583
4584 _externalMixing = enabled;
4585
4586 return 0;
4587}
4588
niklase@google.com470e71d2011-07-07 08:21:25 +00004589int
4590Channel::ResetRTCPStatistics()
4591{
4592 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4593 "Channel::ResetRTCPStatistics()");
4594 WebRtc_UWord32 remoteSSRC(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004595 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
4596 return _rtpRtcpModule->ResetRTT(remoteSSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00004597}
4598
4599int
4600Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
4601{
4602 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4603 "Channel::GetRoundTripTimeSummary()");
4604 // Override default module outputs for the case when RTCP is disabled.
4605 // This is done to ensure that we are backward compatible with the
4606    // VoiceEngine versions that did not use the RTP/RTCP module.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004607 if (!_rtpRtcpModule->RTCP())
niklase@google.com470e71d2011-07-07 08:21:25 +00004608 {
4609 delaysMs.min = -1;
4610 delaysMs.max = -1;
4611 delaysMs.average = -1;
4612 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4613 "Channel::GetRoundTripTimeSummary() RTCP is disabled =>"
4614 " valid RTT measurements cannot be retrieved");
4615 return 0;
4616 }
4617
4618 WebRtc_UWord32 remoteSSRC;
4619 WebRtc_UWord16 RTT;
4620 WebRtc_UWord16 avgRTT;
4621 WebRtc_UWord16 maxRTT;
4622 WebRtc_UWord16 minRTT;
4623 // The remote SSRC will be zero if no RTP packet has been received.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004624 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004625 if (remoteSSRC == 0)
4626 {
4627 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4628 "Channel::GetRoundTripTimeSummary() unable to measure RTT"
4629 " since no RTP packet has been received yet");
4630 }
4631
4632 // Retrieve RTT statistics from the RTP/RTCP module for the specified
4633 // channel and SSRC. The SSRC is required to parse out the correct source
4634 // in conference scenarios.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004635 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT,&maxRTT) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004636 {
4637 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4638 "GetRoundTripTimeSummary unable to retrieve RTT values"
4639 " from the RTCP layer");
4640 delaysMs.min = -1; delaysMs.max = -1; delaysMs.average = -1;
4641 }
4642 else
4643 {
4644 delaysMs.min = minRTT;
4645 delaysMs.max = maxRTT;
4646 delaysMs.average = avgRTT;
4647 }
4648 return 0;
4649}
4650
4651int
4652Channel::GetNetworkStatistics(NetworkStatistics& stats)
4653{
4654 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4655 "Channel::GetNetworkStatistics()");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00004656 ACMNetworkStatistics acm_stats;
4657 int return_value = _audioCodingModule.NetworkStatistics(&acm_stats);
4658 if (return_value >= 0) {
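        // Note: this copy relies on NetworkStatistics and ACMNetworkStatistics
        // having identical layouts; a field-by-field copy would be safer if
        // the two structs ever diverge.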
4659 memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
4660 }
4661 return return_value;
niklase@google.com470e71d2011-07-07 08:21:25 +00004662}
4663
4664int
niklase@google.com470e71d2011-07-07 08:21:25 +00004665Channel::GetDelayEstimate(int& delayMs) const
4666{
4667 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4668 "Channel::GetDelayEstimate()");
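    // _averageDelayMs appears to be kept in tenths of a millisecond, so
    // (value + 5) / 10 rounds it to whole milliseconds before the fixed
    // per-packet delay is added.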
4669 delayMs = (_averageDelayMs + 5) / 10 + _recPacketDelayMs;
4670 return 0;
4671}
4672
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +00004673int Channel::SetInitialPlayoutDelay(int delay_ms)
4674{
4675 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4676 "Channel::SetInitialPlayoutDelay()");
4677 if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
4678 (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
4679 {
4680 _engineStatisticsPtr->SetLastError(
4681 VE_INVALID_ARGUMENT, kTraceError,
4682 "SetInitialPlayoutDelay() invalid min delay");
4683 return -1;
4684 }
4685 if (_audioCodingModule.SetInitialPlayoutDelay(delay_ms) != 0)
4686 {
4687 _engineStatisticsPtr->SetLastError(
4688 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4689 "SetInitialPlayoutDelay() failed to set min playout delay");
4690 return -1;
4691 }
4692 return 0;
4693}
4694
4695
niklase@google.com470e71d2011-07-07 08:21:25 +00004696int
4697Channel::SetMinimumPlayoutDelay(int delayMs)
4698{
4699 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4700 "Channel::SetMinimumPlayoutDelay()");
4701 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
4702 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
4703 {
4704 _engineStatisticsPtr->SetLastError(
4705 VE_INVALID_ARGUMENT, kTraceError,
4706 "SetMinimumPlayoutDelay() invalid min delay");
4707 return -1;
4708 }
4709 if (_audioCodingModule.SetMinimumPlayoutDelay(delayMs) != 0)
4710 {
4711 _engineStatisticsPtr->SetLastError(
4712 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4713 "SetMinimumPlayoutDelay() failed to set min playout delay");
4714 return -1;
4715 }
4716 return 0;
4717}
4718
4719int
4720Channel::GetPlayoutTimestamp(unsigned int& timestamp)
4721{
4722 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4723 "Channel::GetPlayoutTimestamp()");
4724 WebRtc_UWord32 playoutTimestamp(0);
4725 if (GetPlayoutTimeStamp(playoutTimestamp) != 0)
4726 {
4727 _engineStatisticsPtr->SetLastError(
4728 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4729 "GetPlayoutTimestamp() failed to retrieve timestamp");
4730 return -1;
4731 }
4732 timestamp = playoutTimestamp;
4733 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4734 VoEId(_instanceId,_channelId),
4735 "GetPlayoutTimestamp() => timestamp=%u", timestamp);
4736 return 0;
4737}
4738
4739int
4740Channel::SetInitTimestamp(unsigned int timestamp)
4741{
4742 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4743 "Channel::SetInitTimestamp()");
4744 if (_sending)
4745 {
4746 _engineStatisticsPtr->SetLastError(
4747 VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
4748 return -1;
4749 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004750 if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004751 {
4752 _engineStatisticsPtr->SetLastError(
4753 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4754 "SetInitTimestamp() failed to set timestamp");
4755 return -1;
4756 }
4757 return 0;
4758}
4759
4760int
4761Channel::SetInitSequenceNumber(short sequenceNumber)
4762{
4763 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4764 "Channel::SetInitSequenceNumber()");
4765 if (_sending)
4766 {
4767 _engineStatisticsPtr->SetLastError(
4768 VE_SENDING, kTraceError,
4769 "SetInitSequenceNumber() already sending");
4770 return -1;
4771 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004772 if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004773 {
4774 _engineStatisticsPtr->SetLastError(
4775 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4776 "SetInitSequenceNumber() failed to set sequence number");
4777 return -1;
4778 }
4779 return 0;
4780}
4781
4782int
4783Channel::GetRtpRtcp(RtpRtcp* &rtpRtcpModule) const
4784{
4785 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4786 "Channel::GetRtpRtcp()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004787 rtpRtcpModule = _rtpRtcpModule.get();
niklase@google.com470e71d2011-07-07 08:21:25 +00004788 return 0;
4789}
4790
// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
// a shared helper.
WebRtc_Word32
Channel::MixOrReplaceAudioWithFile(const int mixingFrequency)
{
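    // Scratch buffer for one 10 ms chunk of mono file audio at the mixing
    // frequency; 640 samples leaves ample headroom for the rates used here.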
    scoped_array<WebRtc_Word16> fileBuffer(new WebRtc_Word16[640]);
    int fileSamples(0);

    {
        CriticalSectionScoped cs(&_fileCritSect);

        if (_inputFilePlayerPtr == NULL)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::MixOrReplaceAudioWithFile() fileplayer"
                         " doesn't exist");
            return -1;
        }

        if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
                                                      fileSamples,
                                                      mixingFrequency) == -1)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::MixOrReplaceAudioWithFile() file mixing "
                         "failed");
            return -1;
        }
        if (fileSamples == 0)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::MixOrReplaceAudioWithFile() file is ended");
            return 0;
        }
    }

    assert(_audioFrame.samples_per_channel_ == fileSamples);

    if (_mixFileWithMicrophone)
    {
        // Currently file stream is always mono.
        // TODO(xians): Change the code when FilePlayer supports real stereo.
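        // MixWithSat() adds the mono file signal into every channel of the
        // interleaved microphone frame, saturating at the 16-bit range
        // instead of wrapping around.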
        Utility::MixWithSat(_audioFrame.data_,
                            _audioFrame.num_channels_,
                            fileBuffer.get(),
                            1,
                            fileSamples);
    }
    else
    {
        // Replace ACM audio with file.
        // Currently file stream is always mono.
        // TODO(xians): Change the code when FilePlayer supports real stereo.
        _audioFrame.UpdateFrame(_channelId,
                                -1,
                                fileBuffer.get(),
                                fileSamples,
                                mixingFrequency,
                                AudioFrame::kNormalSpeech,
                                AudioFrame::kVadUnknown,
                                1);
    }
    return 0;
}

WebRtc_Word32
Channel::MixAudioWithFile(AudioFrame& audioFrame,
                          const int mixingFrequency)
{
    assert(mixingFrequency <= 32000);

    scoped_array<WebRtc_Word16> fileBuffer(new WebRtc_Word16[640]);
    int fileSamples(0);

    {
        CriticalSectionScoped cs(&_fileCritSect);

        if (_outputFilePlayerPtr == NULL)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::MixAudioWithFile() file player doesn't"
                         " exist");
            return -1;
        }

        // We should get the frequency we ask for.
        if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
                                                       fileSamples,
                                                       mixingFrequency) == -1)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::MixAudioWithFile() file mixing failed");
            return -1;
        }
    }

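    // Both the decoded frame and the file chunk should hold 10 ms of audio
    // at mixingFrequency, so the per-channel sample counts must match.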
    if (audioFrame.samples_per_channel_ == fileSamples)
    {
        // Currently file stream is always mono.
        // TODO(xians): Change the code when FilePlayer supports real stereo.
        Utility::MixWithSat(audioFrame.data_,
                            audioFrame.num_channels_,
                            fileBuffer.get(),
                            1,
                            fileSamples);
    }
    else
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
                     "fileSamples(%d)",
                     audioFrame.samples_per_channel_, fileSamples);
        return -1;
    }

    return 0;
}

int
Channel::InsertInbandDtmfTone()
{
    // Check if we should start a new tone.
    if (_inbandDtmfQueue.PendingDtmf() &&
        !_inbandDtmfGenerator.IsAddingTone() &&
        _inbandDtmfGenerator.DelaySinceLastTone() >
        kMinTelephoneEventSeparationMs)
    {
        WebRtc_Word8 eventCode(0);
        WebRtc_UWord16 lengthMs(0);
        WebRtc_UWord8 attenuationDb(0);

        eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
        _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
        if (_playInbandDtmfEvent)
        {
            // Add tone to output mixer using a reduced length to minimize
            // risk of echo.
            _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
                                          attenuationDb);
        }
    }

    if (_inbandDtmfGenerator.IsAddingTone())
    {
        WebRtc_UWord16 frequency(0);
        _inbandDtmfGenerator.GetSampleRate(frequency);

        if (frequency != _audioFrame.sample_rate_hz_)
        {
            // Update sample rate of Dtmf tone since the mixing frequency
            // has changed.
            _inbandDtmfGenerator.SetSampleRate(
                (WebRtc_UWord16) (_audioFrame.sample_rate_hz_));
            // Reset the tone to be added taking the new sample rate into
            // account.
            _inbandDtmfGenerator.ResetTone();
        }

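        // One 10 ms mono tone segment; 320 samples covers sample rates up
        // to 32 kHz.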
        WebRtc_Word16 toneBuffer[320];
        WebRtc_UWord16 toneSamples(0);
        // Get 10ms tone segment and set time since last tone to zero
        if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::InsertInbandDtmfTone() inserting Dtmf "
                         "failed");
            return -1;
        }

        // Replace mixed audio with DTMF tone.
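        // The tone is mono: the same tone sample is written to every channel
        // of the interleaved frame (index = sample * num_channels_ + channel).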
        for (int sample = 0;
             sample < _audioFrame.samples_per_channel_;
             sample++)
        {
            for (int channel = 0;
                 channel < _audioFrame.num_channels_;
                 channel++)
            {
                const int index = sample * _audioFrame.num_channels_ + channel;
                _audioFrame.data_[index] = toneBuffer[sample];
            }
        }

        assert(_audioFrame.samples_per_channel_ == toneSamples);
    }
    else
    {
        // Add 10ms to "delay-since-last-tone" counter
        _inbandDtmfGenerator.UpdateDelaySinceLastTone();
    }
    return 0;
}

WebRtc_Word32
Channel::GetPlayoutTimeStamp(WebRtc_UWord32& playoutTimestamp)
{
    WebRtc_UWord32 timestamp(0);
    CodecInst currRecCodec;

    if (_audioCodingModule.PlayoutTimestamp(&timestamp) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::GetPlayoutTimeStamp() failed to read playout"
                     " timestamp from the ACM");
        return -1;
    }

    WebRtc_UWord16 delayMS(0);
    if (_audioDeviceModulePtr->PlayoutDelay(&delayMS) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::GetPlayoutTimeStamp() failed to read playout"
                     " delay from the ADM");
        return -1;
    }

    WebRtc_Word32 playoutFrequency = _audioCodingModule.PlayoutFrequency();
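    // Use the RTP clock rate, not the decoder sampling rate, for the tick
    // conversion below: G.722 is defined with an 8 kHz RTP clock (an RFC 1890
    // legacy) and Opus with a 48 kHz RTP clock.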
    if (_audioCodingModule.ReceiveCodec(&currRecCodec) == 0) {
        if (STR_CASE_CMP("G722", currRecCodec.plname) == 0) {
            playoutFrequency = 8000;
        } else if (STR_CASE_CMP("opus", currRecCodec.plname) == 0) {
            playoutFrequency = 48000;
        }
    }
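    // Back-date the ACM playout timestamp by the ADM playout delay, converted
    // from milliseconds to RTP ticks, so the result reflects what is being
    // played out right now.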
    timestamp -= (delayMS * (playoutFrequency/1000));

    playoutTimestamp = timestamp;

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetPlayoutTimeStamp() => playoutTimestamp = %lu",
                 playoutTimestamp);
    return 0;
}

void
Channel::ResetDeadOrAliveCounters()
{
    _countDeadDetections = 0;
    _countAliveDetections = 0;
}

void
Channel::UpdateDeadOrAliveCounters(bool alive)
{
    if (alive)
        _countAliveDetections++;
    else
        _countDeadDetections++;
}

int
Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
{
    bool enabled;
    WebRtc_UWord8 timeSec;

    _rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, timeSec);
    if (!enabled)
        return (-1);

    countDead = static_cast<int> (_countDeadDetections);
    countAlive = static_cast<int> (_countAliveDetections);
    return 0;
}

WebRtc_Word32
Channel::SendPacketRaw(const void *data, int len, bool RTCP)
{
    if (_transportPtr == NULL)
    {
        return -1;
    }
    if (!RTCP)
    {
        return _transportPtr->SendPacket(_channelId, data, len);
    }
    else
    {
        return _transportPtr->SendRTCPPacket(_channelId, data, len);
    }
}

WebRtc_Word32
Channel::UpdatePacketDelay(const WebRtc_UWord32 timestamp,
                           const WebRtc_UWord16 sequenceNumber)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
                 timestamp, sequenceNumber);

    WebRtc_Word32 rtpReceiveFrequency(0);

    // Get frequency of last received payload
    rtpReceiveFrequency = _audioCodingModule.ReceiveFrequency();

    CodecInst currRecCodec;
    if (_audioCodingModule.ReceiveCodec(&currRecCodec) == 0) {
        if (STR_CASE_CMP("G722", currRecCodec.plname) == 0) {
            // Even though the actual sampling rate for G.722 audio is
            // 16,000 Hz, the RTP clock rate for the G722 payload format is
            // 8,000 Hz because that value was erroneously assigned in
            // RFC 1890 and must remain unchanged for backward compatibility.
            rtpReceiveFrequency = 8000;
        } else if (STR_CASE_CMP("opus", currRecCodec.plname) == 0) {
            // We are resampling Opus internally to 32,000 Hz until all our
            // DSP routines can operate at 48,000 Hz, but the RTP clock
            // rate for the Opus payload format is standardized to 48,000 Hz,
            // because that is the maximum supported decoding sampling rate.
            rtpReceiveFrequency = 48000;
        }
    }

    const WebRtc_UWord32 timeStampDiff = timestamp - _playoutTimeStampRTP;
    WebRtc_UWord32 timeStampDiffMs(0);

    if (timeStampDiff > 0)
    {
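        // Convert the RTP tick difference to milliseconds using the RTP
        // clock rate (e.g. at 16 kHz, 320 ticks >> 4 = 20 ms; at 48 kHz,
        // 960 ticks / 48 = 20 ms).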
        switch (rtpReceiveFrequency) {
          case 8000:
            timeStampDiffMs = static_cast<WebRtc_UWord32>(timeStampDiff >> 3);
            break;
          case 16000:
            timeStampDiffMs = static_cast<WebRtc_UWord32>(timeStampDiff >> 4);
            break;
          case 32000:
            timeStampDiffMs = static_cast<WebRtc_UWord32>(timeStampDiff >> 5);
            break;
          case 48000:
            timeStampDiffMs = static_cast<WebRtc_UWord32>(timeStampDiff / 48);
            break;
          default:
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::UpdatePacketDelay() invalid sample rate");
            timeStampDiffMs = 0;
            return -1;
        }
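        // Ignore implausibly large differences (e.g. after a stream restart
        // or a timestamp jump) so they do not skew the running average.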
        if (timeStampDiffMs > (2 * kVoiceEngineMaxMinPlayoutDelayMs))
        {
            timeStampDiffMs = 0;
        }

        if (_averageDelayMs == 0)
        {
            _averageDelayMs = timeStampDiffMs * 10;
        }
        else
        {
            // Filter average delay value using exponential filter (alpha is
            // 7/8). We derive 10*_averageDelayMs here (reduces risk of
            // rounding error) and compensate for it in GetDelayEstimate()
            // later. Adding 4/8 results in correct rounding.
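            // Example: with a stored value of 400 (i.e. 40.0 ms) and a new
            // diff of 60 ms, the update is (400*7 + 600 + 4) >> 3 = 425,
            // i.e. 42.5 ms.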
            _averageDelayMs = ((_averageDelayMs*7 + 10*timeStampDiffMs + 4)>>3);
        }

        if (sequenceNumber - _previousSequenceNumber == 1)
        {
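            // Only consecutive packets yield a meaningful inter-arrival
            // timestamp difference; values in the 10-60 ms range are accepted
            // as a plausible packetization interval.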
            WebRtc_UWord16 packetDelayMs = 0;
            switch (rtpReceiveFrequency) {
              case 8000:
                packetDelayMs = static_cast<WebRtc_UWord16>(
                    (timestamp - _previousTimestamp) >> 3);
                break;
              case 16000:
                packetDelayMs = static_cast<WebRtc_UWord16>(
                    (timestamp - _previousTimestamp) >> 4);
                break;
              case 32000:
                packetDelayMs = static_cast<WebRtc_UWord16>(
                    (timestamp - _previousTimestamp) >> 5);
                break;
              case 48000:
                packetDelayMs = static_cast<WebRtc_UWord16>(
                    (timestamp - _previousTimestamp) / 48);
                break;
            }

            if (packetDelayMs >= 10 && packetDelayMs <= 60)
                _recPacketDelayMs = packetDelayMs;
        }
    }

    _previousSequenceNumber = sequenceNumber;
    _previousTimestamp = timestamp;

    return 0;
}

void
Channel::RegisterReceiveCodecsToRTPModule()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RegisterReceiveCodecsToRTPModule()");

    CodecInst codec;
    const WebRtc_UWord8 nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

    for (int idx = 0; idx < nSupportedCodecs; idx++)
    {
        // Open up the RTP/RTCP receiver for all supported codecs
        if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
            (_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
        {
            WEBRTC_TRACE(
                kTraceWarning,
                kTraceVoice,
                VoEId(_instanceId, _channelId),
                "Channel::RegisterReceiveCodecsToRTPModule() unable"
                " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
                codec.plname, codec.pltype, codec.plfreq,
                codec.channels, codec.rate);
        }
        else
        {
            WEBRTC_TRACE(
                kTraceInfo,
                kTraceVoice,
                VoEId(_instanceId, _channelId),
                "Channel::RegisterReceiveCodecsToRTPModule() %s "
                "(%d/%d/%d/%d) has been added to the RTP/RTCP "
                "receiver",
                codec.plname, codec.pltype, codec.plfreq,
                codec.channels, codec.rate);
        }
    }
}

int Channel::ApmProcessRx(AudioFrame& frame) {
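  // Receive-side (far-end) audio processing. Failures below are logged as
  // warnings but not propagated; the frame is used for playout either way.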
  AudioProcessing* audioproc = _rxAudioProcessingModulePtr;
  // Register the (possibly new) frame parameters.
  if (audioproc->set_sample_rate_hz(frame.sample_rate_hz_) != 0) {
    LOG_FERR1(LS_WARNING, set_sample_rate_hz, frame.sample_rate_hz_);
  }
  if (audioproc->set_num_channels(frame.num_channels_,
                                  frame.num_channels_) != 0) {
    LOG_FERR1(LS_WARNING, set_num_channels, frame.num_channels_);
  }
  if (audioproc->ProcessStream(&frame) != 0) {
    LOG_FERR0(LS_WARNING, ProcessStream);
  }
  return 0;
}

int Channel::SetSecondarySendCodec(const CodecInst& codec,
                                   int red_payload_type) {
  // Sanity check for payload type.
  if (red_payload_type < 0 || red_payload_type > 127) {
    _engineStatisticsPtr->SetLastError(
        VE_PLTYPE_ERROR, kTraceError,
        "SetSecondarySendCodec() invalid RED payload type");
    return -1;
  }

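  // RED is registered first (in both the ACM and the RTP/RTCP module) so that
  // the primary and secondary encodings can be carried in RED packets; the
  // secondary codec is registered with the ACM afterwards.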
  if (SetRedPayloadType(red_payload_type) < 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSecondarySendCodec() Failed to register RED in ACM");
    return -1;
  }
  if (_audioCodingModule.RegisterSecondarySendCodec(codec) < 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSecondarySendCodec() Failed to register secondary send codec in "
        "ACM");
    return -1;
  }

  return 0;
}

void Channel::RemoveSecondarySendCodec() {
  _audioCodingModule.UnregisterSecondarySendCodec();
}

int Channel::GetSecondarySendCodec(CodecInst* codec) {
  if (_audioCodingModule.SecondarySendCodec(codec) < 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "GetSecondarySendCodec() Failed to get secondary send codec from ACM");
    return -1;
  }
  return 0;
}

// Assumes this method is called with a valid payload type.
int Channel::SetRedPayloadType(int red_payload_type) {
  CodecInst codec;
  bool found_red = false;

  // Get default RED settings from the ACM database
  const int num_codecs = AudioCodingModule::NumberOfCodecs();
  for (int idx = 0; idx < num_codecs; idx++) {
    _audioCodingModule.Codec(idx, &codec);
    if (!STR_CASE_CMP(codec.plname, "RED")) {
      found_red = true;
      break;
    }
  }

  if (!found_red) {
    _engineStatisticsPtr->SetLastError(
        VE_CODEC_ERROR, kTraceError,
        "SetRedPayloadType() RED is not supported");
    return -1;
  }

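  // Override the database default with the caller-supplied payload type
  // before registering RED as the send codec.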
  codec.pltype = red_payload_type;
  if (_audioCodingModule.RegisterSendCodec(codec) < 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetRedPayloadType() RED registration in ACM module failed");
    return -1;
  }

  if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "SetRedPayloadType() RED registration in RTP/RTCP module failed");
    return -1;
  }
  return 0;
}

}  // namespace voe
}  // namespace webrtc