/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/channel.h"

#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/utility/interface/audio_frame_operations.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/utility.h"

#if defined(_WIN32)
#include <Qos.h>
#endif

namespace webrtc {
namespace voe {

WebRtc_Word32
Channel::SendData(FrameType frameType,
                  WebRtc_UWord8 payloadType,
                  WebRtc_UWord32 timeStamp,
                  const WebRtc_UWord8* payloadData,
                  WebRtc_UWord16 payloadSize,
                  const RTPFragmentationHeader* fragmentation)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
                 " payloadSize=%u, fragmentation=0x%x)",
                 frameType, payloadType, timeStamp, payloadSize, fragmentation);

    if (_includeAudioLevelIndication)
    {
        assert(_rtpAudioProc.get() != NULL);
        // Store current audio level in the RTP/RTCP module.
        // The level will be used in combination with voice-activity state
        // (frameType) to add an RTP header extension.
        _rtpRtcpModule->SetAudioLevel(_rtpAudioProc->level_estimator()->RMS());
    }

    // Push data from ACM to RTP/RTCP-module to deliver audio frame for
    // packetization.
    // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
    if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
                                         payloadType,
                                         timeStamp,
                                         // Leaving the time when this frame was
                                         // received from the capture device as
                                         // undefined for voice for now.
                                         -1,
                                         payloadData,
                                         payloadSize,
                                         fragmentation) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
            "Channel::SendData() failed to send data to RTP/RTCP module");
        return -1;
    }

    _lastLocalTimeStamp = timeStamp;
    _lastPayloadType = payloadType;

    return 0;
}
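// Send-path note: SendData() is the ACM's packetization callback (registered
// via RegisterTransportCallback(this) in Init()). A rough sketch of the
// resulting call chain; names outside this file are illustrative only:
//
//   encoded frame ready in ACM
//     -> Channel::SendData()
//     -> RtpRtcp::SendOutgoingData()
//     -> Channel::SendPacket() / Channel::SendRTCPPacket()
//     -> _transportPtr (socket transport or external webrtc::Transport)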

WebRtc_Word32
Channel::InFrameType(WebRtc_Word16 frameType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::InFrameType(frameType=%d)", frameType);

    CriticalSectionScoped cs(&_callbackCritSect);
    // 1 indicates speech
    _sendFrameType = (frameType == 1) ? 1 : 0;
    return 0;
}

WebRtc_Word32
Channel::OnRxVadDetected(const int vadDecision)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);

    CriticalSectionScoped cs(&_callbackCritSect);
    if (_rxVadObserverPtr)
    {
        _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
    }

    return 0;
}

int
Channel::SendPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendPacket(channel=%d, len=%d)", channel, len);

    if (_transportPtr == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() failed to send RTP packet due to"
                     " invalid transport object");
        return -1;
    }

    // Insert an extra RTP packet if the user has called the
    // InsertExtraRTPPacket() API.
    if (_insertExtraRTPPacket)
    {
        WebRtc_UWord8* rtpHdr = (WebRtc_UWord8*)data;
        WebRtc_UWord8 M_PT(0);
        if (_extraMarkerBit)
        {
            M_PT = 0x80;            // set the M-bit
        }
        M_PT += _extraPayloadType;  // set the payload type
        *(++rtpHdr) = M_PT;         // modify the M|PT-byte within the RTP header
        _insertExtraRTPPacket = false;  // insert one packet only
    }

    WebRtc_UWord8* bufferToSendPtr = (WebRtc_UWord8*)data;
    WebRtc_Word32 bufferLength = len;

    // Dump the RTP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const WebRtc_UWord8*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() RTP dump to output file failed");
    }

    // SRTP or external encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTPBufferPtr =
                    new WebRtc_UWord8[kVoiceEngineMaxIpPacketSizeBytes];
                memset(_encryptionRTPBufferPtr, 0,
                       kVoiceEngineMaxIpPacketSizeBytes);
            }

            // Perform encryption (SRTP or external)
            WebRtc_Word32 encryptedBufferLength = 0;
            _encryptionPtr->encrypt(_channelId,
                                    bufferToSendPtr,
                                    _encryptionRTPBufferPtr,
                                    bufferLength,
                                    (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED,
                    kTraceError, "Channel::SendPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        int n = _transportPtr->SendPacket(channel,
                                          bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }
}
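// Transport note: |_transportPtr| is either the engine's internal socket
// transport or a user-registered external transport. A minimal sketch of an
// external transport, assuming the webrtc::Transport interface mirrors the
// SendPacket()/SendRTCPPacket() signatures implemented by this class:
//
//   class PacketForwarder : public webrtc::Transport {  // hypothetical name
//    public:
//     virtual int SendPacket(int channel, const void* data, int len)
//     { /* hand the RTP packet to the application's socket */ return len; }
//     virtual int SendRTCPPacket(int channel, const void* data, int len)
//     { /* hand the RTCP packet to the application's socket */ return len; }
//   };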

int
Channel::SendRTCPPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() failed to send RTCP packet"
                         " due to invalid transport object");
            return -1;
        }
    }

    WebRtc_UWord8* bufferToSendPtr = (WebRtc_UWord8*)data;
    WebRtc_Word32 bufferLength = len;

    // Dump the RTCP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const WebRtc_UWord8*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendRTCPPacket() RTCP dump to output file failed");
    }

    // SRTP or external encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTCPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTCPBufferPtr =
                    new WebRtc_UWord8[kVoiceEngineMaxIpPacketSizeBytes];
            }

            // Perform encryption (SRTP or external).
            WebRtc_Word32 encryptedBufferLength = 0;
            _encryptionPtr->encrypt_rtcp(_channelId,
                                         bufferToSendPtr,
                                         _encryptionRTCPBufferPtr,
                                         bufferLength,
                                         (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED, kTraceError,
                    "Channel::SendRTCPPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTCPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            return -1;
        }
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }

    return len;
}

void
Channel::OnPlayTelephoneEvent(const WebRtc_Word32 id,
                              const WebRtc_UWord8 event,
                              const WebRtc_UWord16 lengthMs,
                              const WebRtc_UWord8 volume)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
                 " volume=%u)", id, event, lengthMs, volume);

    if (!_playOutbandDtmfEvent || (event > 15))
    {
        // Ignore callback since feedback is disabled or event is not a
        // Dtmf tone event.
        return;
    }

    assert(_outputMixerPtr != NULL);

    // Start playing out the Dtmf tone (if playout is enabled).
    // Reduce the tone length by 80 ms to reduce the risk of echo.
    _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
}

void
Channel::OnIncomingSSRCChanged(const WebRtc_Word32 id,
                               const WebRtc_UWord32 SSRC)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
                 id, SSRC);

    WebRtc_Word32 channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Reset RTP-module counters since a new incoming RTP stream is detected
    _rtpRtcpModule->ResetReceiveDataCountersRTP();
    _rtpRtcpModule->ResetStatisticsRTP();

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            // Send new SSRC to registered observer using callback
            _rtpObserverPtr->OnIncomingSSRCChanged(channel, SSRC);
        }
    }
}

void Channel::OnIncomingCSRCChanged(const WebRtc_Word32 id,
                                    const WebRtc_UWord32 CSRC,
                                    const bool added)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
                 id, CSRC, added);

    WebRtc_Word32 channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            _rtpObserverPtr->OnIncomingCSRCChanged(channel, CSRC, added);
        }
    }
}

void
Channel::OnApplicationDataReceived(const WebRtc_Word32 id,
                                   const WebRtc_UWord8 subType,
                                   const WebRtc_UWord32 name,
                                   const WebRtc_UWord16 length,
                                   const WebRtc_UWord8* data)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
                 " name=%u, length=%u)",
                 id, subType, name, length);

    WebRtc_Word32 channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtcpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtcpObserverPtr)
        {
            _rtcpObserverPtr->OnApplicationDataReceived(channel,
                                                        subType,
                                                        name,
                                                        data,
                                                        length);
        }
    }
}

WebRtc_Word32
Channel::OnInitializeDecoder(
    const WebRtc_Word32 id,
    const WebRtc_Word8 payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    const int frequency,
    const WebRtc_UWord8 channels,
    const WebRtc_UWord32 rate)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
                 "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
                 id, payloadType, payloadName, frequency, channels, rate);

    assert(VoEChannelId(id) == _channelId);

    CodecInst receiveCodec = {0};
    CodecInst dummyCodec = {0};

    receiveCodec.pltype = payloadType;
    receiveCodec.plfreq = frequency;
    receiveCodec.channels = channels;
    receiveCodec.rate = rate;
    strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

    _audioCodingModule.Codec(payloadName, &dummyCodec, frequency, channels);
    receiveCodec.pacsize = dummyCodec.pacsize;

    // Register the new codec to the ACM
    if (_audioCodingModule.RegisterReceiveCodec(receiveCodec) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "Channel::OnInitializeDecoder() invalid codec ("
                     "pt=%d, name=%s) received - 1", payloadType, payloadName);
        _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
        return -1;
    }

    return 0;
}
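// Decoder registration note: OnInitializeDecoder() mirrors what an application
// would do when registering a receive codec. A hypothetical CodecInst filled
// in the same way (all values illustrative only):
//
//   CodecInst inst = {0};
//   inst.pltype = 103;                 // payload type
//   inst.plfreq = 16000;               // sampling rate in Hz
//   inst.channels = 1;
//   inst.rate = 32000;                 // bit rate in bit/s
//   strncpy(inst.plname, "ISAC", RTP_PAYLOAD_NAME_SIZE - 1);
//   // inst.pacsize is looked up from the ACM, as done above.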

void
Channel::OnPacketTimeout(const WebRtc_Word32 id)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPacketTimeout(id=%d)", id);

    CriticalSectionScoped cs(_callbackCritSectPtr);
    if (_voiceEngineObserverPtr)
    {
        if (_receiving || _externalTransport)
        {
            WebRtc_Word32 channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Ensure that next OnReceivedPacket() callback will trigger
            // a VE_PACKET_RECEIPT_RESTARTED callback.
            _rtpPacketTimedOut = true;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnPacketTimeout() => "
                         "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
            _voiceEngineObserverPtr->CallbackOnError(channel,
                                                     VE_RECEIVE_PACKET_TIMEOUT);
        }
    }
}

void
Channel::OnReceivedPacket(const WebRtc_Word32 id,
                          const RtpRtcpPacketType packetType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPacket(id=%d, packetType=%d)",
                 id, packetType);

    assert(VoEChannelId(id) == _channelId);

    // Notify only for the case when we have restarted an RTP session.
    if (_rtpPacketTimedOut && (kPacketRtp == packetType))
    {
        CriticalSectionScoped cs(_callbackCritSectPtr);
        if (_voiceEngineObserverPtr)
        {
            WebRtc_Word32 channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Reset timeout mechanism
            _rtpPacketTimedOut = false;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnPacketTimeout() =>"
                         " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
            _voiceEngineObserverPtr->CallbackOnError(
                channel,
                VE_PACKET_RECEIPT_RESTARTED);
        }
    }
}

void
Channel::OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
                               const RTPAliveType alive)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (!_connectionObserver)
            return;
    }

    WebRtc_Word32 channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Use Alive as default to limit risk of false Dead detections
    bool isAlive(true);

    // Always mark the connection as Dead when the module reports kRtpDead
    if (kRtpDead == alive)
    {
        isAlive = false;
    }

    // It is possible that the connection is alive even if no RTP packet has
    // been received for a long time since the other side might use VAD/DTX
    // and a low SID-packet update rate.
    if ((kRtpNoRtp == alive) && _playing)
    {
        // Detect Alive for all NetEQ states except for the case when we are
        // in PLC_CNG state.
        // PLC_CNG <=> background noise only due to long expand or error.
        // Note that the case where the other side stops sending during CNG
        // state will be detected as Alive. Dead is not set until after
        // missing RTCP packets for at least twelve seconds (handled
        // internally by the RTP/RTCP module).
        isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
    }

    UpdateDeadOrAliveCounters(isAlive);

    // Send callback to the registered observer
    if (_connectionObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_connectionObserverPtr)
        {
            _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
        }
    }
}

WebRtc_Word32
Channel::OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
                               const WebRtc_UWord16 payloadSize,
                               const WebRtcRTPHeader* rtpHeader)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPayloadData(payloadSize=%d,"
                 " payloadType=%u, audioChannel=%u)",
                 payloadSize,
                 rtpHeader->header.payloadType,
                 rtpHeader->type.Audio.channel);

    _lastRemoteTimeStamp = rtpHeader->header.timestamp;

    if (!_playing)
    {
        // Avoid inserting into NetEQ when we are not playing. Count the
        // packet as discarded.
        WEBRTC_TRACE(kTraceStream, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "received packet is discarded since playing is not"
                     " activated");
        _numberOfDiscardedPackets++;
        return 0;
    }

    // Push the incoming payload (parsed and ready for decoding) into the ACM
    if (_audioCodingModule.IncomingPacket(payloadData,
                                          payloadSize,
                                          *rtpHeader) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
            "Channel::OnReceivedPayloadData() unable to push data to the ACM");
        return -1;
    }

    // Update the packet delay
    UpdatePacketDelay(rtpHeader->header.timestamp,
                      rtpHeader->header.sequenceNumber);

    return 0;
}
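// Receive-path note: OnReceivedPayloadData() is called by the RTP/RTCP module
// for every parsed payload; the ACM queues it in NetEQ until GetAudioFrame()
// below pulls decoded 10 ms frames for the output mixer. Rough chain
// (names outside this file are illustrative):
//
//   network -> RTP/RTCP module -> Channel::OnReceivedPayloadData()
//     -> AudioCodingModule::IncomingPacket() -> NetEQ
//     -> Channel::GetAudioFrame() -> output mixer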

WebRtc_Word32 Channel::GetAudioFrame(const WebRtc_Word32 id,
                                     AudioFrame& audioFrame)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetAudioFrame(id=%d)", id);

    // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
    if (_audioCodingModule.PlayoutData10Ms(audioFrame.sample_rate_hz_,
                                           &audioFrame) == -1)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
        // In all likelihood, the audio in this frame is garbage. We return an
        // error so that the audio mixer module doesn't add it to the mix. As
        // a result, it won't be played out and the actions skipped here are
        // irrelevant.
        return -1;
    }

    if (_RxVadDetection)
    {
        UpdateRxVadDetection(audioFrame);
    }

    // Convert module ID to internal VoE channel ID
    audioFrame.id_ = VoEChannelId(audioFrame.id_);
    // Store speech type for dead-or-alive detection
    _outputSpeechType = audioFrame.speech_type_;

    // Perform far-end AudioProcessing module processing on the received signal
    if (_rxApmIsEnabled)
    {
        ApmProcessRx(audioFrame);
    }

    // Output volume scaling
    if (_outputGain < 0.99f || _outputGain > 1.01f)
    {
        AudioFrameOperations::ScaleWithSat(_outputGain, audioFrame);
    }

    // Scale left and/or right channel(s) if stereo and master balance is
    // active

    if (_panLeft != 1.0f || _panRight != 1.0f)
    {
        if (audioFrame.num_channels_ == 1)
        {
            // Emulate stereo mode since panning is active.
            // The mono signal is copied to both left and right channels here.
            AudioFrameOperations::MonoToStereo(&audioFrame);
        }
        // For true stereo mode (when we are receiving a stereo signal), no
        // action is needed.

        // Do the panning operation (the audio frame contains stereo at this
        // stage)
        AudioFrameOperations::Scale(_panLeft, _panRight, audioFrame);
    }

    // Mix decoded PCM output with file if file mixing is enabled
    if (_outputFilePlaying)
    {
        MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
    }

    // Place channel in on-hold state (~muted) if on-hold is activated
    if (_outputIsOnHold)
    {
        AudioFrameOperations::Mute(audioFrame);
    }

    // External media
    if (_outputExternalMedia)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        const bool isStereo = (audioFrame.num_channels_ == 2);
        if (_outputExternalMediaCallbackPtr)
        {
            _outputExternalMediaCallbackPtr->Process(
                _channelId,
                kPlaybackPerChannel,
                (WebRtc_Word16*)audioFrame.data_,
                audioFrame.samples_per_channel_,
                audioFrame.sample_rate_hz_,
                isStereo);
        }
    }

    // Record playout if enabled
    {
        CriticalSectionScoped cs(&_fileCritSect);

        if (_outputFileRecording && _outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
        }
    }

    // Measure audio level (0-9)
    _outputAudioLevel.ComputeLevel(audioFrame);

    return 0;
}
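// For reference, the per-frame post-processing order applied in
// GetAudioFrame() above is: rx APM -> output gain -> panning -> file mixing
// -> on-hold mute -> external media callback -> playout recording
// -> output level measurement.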

WebRtc_Word32
Channel::NeededFrequency(const WebRtc_Word32 id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::NeededFrequency(id=%d)", id);

    int highestNeeded = 0;

    // Determine highest needed receive frequency
    WebRtc_Word32 receiveFrequency = _audioCodingModule.ReceiveFrequency();

    // Return the bigger of playout and receive frequency in the ACM.
    if (_audioCodingModule.PlayoutFrequency() > receiveFrequency)
    {
        highestNeeded = _audioCodingModule.PlayoutFrequency();
    }
    else
    {
        highestNeeded = receiveFrequency;
    }

    // Special case: if we're playing a file on the playout side
    // we take that frequency into consideration as well.
    // This is not needed on the sending side, since the codec will
    // limit the spectrum anyway.
    if (_outputFilePlaying)
    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_outputFilePlayerPtr && _outputFilePlaying)
        {
            if (_outputFilePlayerPtr->Frequency() > highestNeeded)
            {
                highestNeeded = _outputFilePlayerPtr->Frequency();
            }
        }
    }

    return (highestNeeded);
}

WebRtc_Word32
Channel::CreateChannel(Channel*& channel,
                       const WebRtc_Word32 channelId,
                       const WebRtc_UWord32 instanceId)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
                 "Channel::CreateChannel(channelId=%d, instanceId=%d)",
                 channelId, instanceId);

    channel = new Channel(channelId, instanceId);
    if (channel == NULL)
    {
        WEBRTC_TRACE(kTraceMemory, kTraceVoice,
                     VoEId(instanceId,channelId),
                     "Channel::CreateChannel() unable to allocate memory for"
                     " channel");
        return -1;
    }
    return 0;
}
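// Factory usage sketch (illustrative only; in the real engine the channel
// manager drives this, and SetEngineInformation() must be called before
// Init(), as Init() itself checks):
//
//   voe::Channel* channel = NULL;
//   if (voe::Channel::CreateChannel(channel, channelId, instanceId) == 0)
//   {
//       // channel->SetEngineInformation(...);
//       // channel->Init();
//   }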

void
Channel::PlayNotification(const WebRtc_Word32 id,
                          const WebRtc_UWord32 durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet
}

void
Channel::RecordNotification(const WebRtc_Word32 id,
                            const WebRtc_UWord32 durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet
}

void
Channel::PlayFileEnded(const WebRtc_Word32 id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayFileEnded(id=%d)", id);

    if (id == _inputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _inputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => input file player module is"
                     " shutdown");
    }
    else if (id == _outputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _outputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => output file player module is"
                     " shutdown");
    }
}

void
Channel::RecordFileEnded(const WebRtc_Word32 id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded(id=%d)", id);

    assert(id == _outputFileRecorderId);

    CriticalSectionScoped cs(&_fileCritSect);

    _outputFileRecording = false;
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                 VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded() => output file recorder module is"
                 " shutdown");
}

Channel::Channel(const WebRtc_Word32 channelId,
                 const WebRtc_UWord32 instanceId) :
    _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _instanceId(instanceId),
    _channelId(channelId),
    _audioCodingModule(*AudioCodingModule::Create(
        VoEModuleId(instanceId, channelId))),
    _rtpDumpIn(*RtpDump::CreateRtpDump()),
    _rtpDumpOut(*RtpDump::CreateRtpDump()),
    _outputAudioLevel(),
    _externalTransport(false),
    _inputFilePlayerPtr(NULL),
    _outputFilePlayerPtr(NULL),
    _outputFileRecorderPtr(NULL),
    // Avoid conflicts with other channels by adding 1024 - 1026;
    // we won't use as many as 1024 channels.
    _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
    _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
    _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
    _inputFilePlaying(false),
    _outputFilePlaying(false),
    _outputFileRecording(false),
    _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
    _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
    _inputExternalMedia(false),
    _outputExternalMedia(false),
    _inputExternalMediaCallbackPtr(NULL),
    _outputExternalMediaCallbackPtr(NULL),
    _encryptionRTPBufferPtr(NULL),
    _decryptionRTPBufferPtr(NULL),
    _encryptionRTCPBufferPtr(NULL),
    _decryptionRTCPBufferPtr(NULL),
    _timeStamp(0), // This is just an offset; the RTP module will add its own
                   // random offset.
    _sendTelephoneEventPayloadType(106),
    _playoutTimeStampRTP(0),
    _playoutTimeStampRTCP(0),
    _numberOfDiscardedPackets(0),
    _engineStatisticsPtr(NULL),
    _outputMixerPtr(NULL),
    _transmitMixerPtr(NULL),
    _moduleProcessThreadPtr(NULL),
    _audioDeviceModulePtr(NULL),
    _voiceEngineObserverPtr(NULL),
    _callbackCritSectPtr(NULL),
    _transportPtr(NULL),
    _encryptionPtr(NULL),
    _rtpAudioProc(NULL),
    _rxAudioProcessingModulePtr(NULL),
    _rxVadObserverPtr(NULL),
    _oldVadDecision(-1),
    _sendFrameType(0),
    _rtpObserverPtr(NULL),
    _rtcpObserverPtr(NULL),
    _outputIsOnHold(false),
    _externalPlayout(false),
    _externalMixing(false),
    _inputIsOnHold(false),
    _playing(false),
    _sending(false),
    _receiving(false),
    _mixFileWithMicrophone(false),
    _rtpObserver(false),
    _rtcpObserver(false),
    _mute(false),
    _panLeft(1.0f),
    _panRight(1.0f),
    _outputGain(1.0f),
    _encrypting(false),
    _decrypting(false),
    _playOutbandDtmfEvent(false),
    _playInbandDtmfEvent(false),
    _extraPayloadType(0),
    _insertExtraRTPPacket(false),
    _extraMarkerBit(false),
    _lastLocalTimeStamp(0),
    _lastRemoteTimeStamp(0),
    _lastPayloadType(0),
    _includeAudioLevelIndication(false),
    _rtpPacketTimedOut(false),
    _rtpPacketTimeOutIsEnabled(false),
    _rtpTimeOutSeconds(0),
    _connectionObserver(false),
    _connectionObserverPtr(NULL),
    _countAliveDetections(0),
    _countDeadDetections(0),
    _outputSpeechType(AudioFrame::kNormalSpeech),
    _averageDelayMs(0),
    _previousSequenceNumber(0),
    _previousTimestamp(0),
    _recPacketDelayMs(20),
    _RxVadDetection(false),
    _rxApmIsEnabled(false),
    _rxAgcIsEnabled(false),
    _rxNsIsEnabled(false)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Channel() - ctor");
    _inbandDtmfQueue.ResetDtmf();
    _inbandDtmfGenerator.Init();
    _outputAudioLevel.Clear();

    RtpRtcp::Configuration configuration;
    configuration.id = VoEModuleId(instanceId, channelId);
    configuration.audio = true;
    configuration.incoming_data = this;
    configuration.incoming_messages = this;
    configuration.outgoing_transport = this;
    configuration.rtcp_feedback = this;
    configuration.audio_messages = this;

    _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));

    // Create far end AudioProcessing Module
    _rxAudioProcessingModulePtr = AudioProcessing::Create(
        VoEModuleId(instanceId, channelId));
}
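// Constructor wiring note: the channel registers itself for every role in the
// RtpRtcp::Configuration above (incoming data and messages, outgoing
// transport, RTCP feedback, audio messages). This is why the RTP/RTCP module
// can call back into OnReceivedPayloadData(), SendPacket()/SendRTCPPacket(),
// OnInitializeDecoder() and OnPlayTelephoneEvent() on this object.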

Channel::~Channel()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::~Channel() - dtor");

    if (_outputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
    }
    if (_inputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kRecordingPerChannel);
    }
    StopSend();
    StopPlayout();

    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_inputFilePlayerPtr)
        {
            _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _inputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
            _inputFilePlayerPtr = NULL;
        }
        if (_outputFilePlayerPtr)
        {
            _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _outputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
            _outputFilePlayerPtr = NULL;
        }
        if (_outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
            _outputFileRecorderPtr->StopRecording();
            FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
            _outputFileRecorderPtr = NULL;
        }
    }

    // The order to safely shut down modules in a channel is:
    // 1. De-register callbacks in modules
    // 2. De-register modules in process thread
    // 3. Destroy modules
    if (_audioCodingModule.RegisterTransportCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register transport callback"
                     " (Audio coding module)");
    }
    if (_audioCodingModule.RegisterVADCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register VAD callback"
                     " (Audio coding module)");
    }
    // De-register modules in process thread
    if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to deregister RTP/RTCP module");
    }

    // Destroy modules
    AudioCodingModule::Destroy(&_audioCodingModule);
    if (_rxAudioProcessingModulePtr != NULL)
    {
        AudioProcessing::Destroy(_rxAudioProcessingModulePtr); // far end APM
        _rxAudioProcessingModulePtr = NULL;
    }

    // End of modules shutdown

    // Delete other objects
    RtpDump::DestroyRtpDump(&_rtpDumpIn);
    RtpDump::DestroyRtpDump(&_rtpDumpOut);
    delete [] _encryptionRTPBufferPtr;
    delete [] _decryptionRTPBufferPtr;
    delete [] _encryptionRTCPBufferPtr;
    delete [] _decryptionRTCPBufferPtr;
    delete &_callbackCritSect;
    delete &_fileCritSect;
}

WebRtc_Word32
Channel::Init()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Init()");

    // --- Initial sanity

    if ((_engineStatisticsPtr == NULL) ||
        (_moduleProcessThreadPtr == NULL))
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::Init() must call SetEngineInformation() first");
        return -1;
    }

    // --- Add modules to process thread (for periodic scheduling)

    const bool processThreadFail =
        ((_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0) ||
        false);
    if (processThreadFail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() modules not registered");
        return -1;
    }
    // --- ACM initialization

    if ((_audioCodingModule.InitializeReceiver() == -1) ||
#ifdef WEBRTC_CODEC_AVT
        // out-of-band Dtmf tones are played out by default
        (_audioCodingModule.SetDtmfPlayoutStatus(true) == -1) ||
#endif
        (_audioCodingModule.InitializeSender() == -1))
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "Channel::Init() unable to initialize the ACM - 1");
        return -1;
    }

    // --- RTP/RTCP module initialization

    // Ensure that RTCP is enabled by default for the created channel.
    // Note that the module will keep generating RTCP until it is explicitly
    // disabled by the user.
    // After StopListen (when no sockets exist), RTCP packets will no longer
    // be transmitted since the Transport object will then be invalid.

    const bool rtpRtcpFail =
        ((_rtpRtcpModule->SetTelephoneEventForwardToDecoder(true) == -1) ||
        // RTCP is enabled by default
        (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1));
    if (rtpRtcpFail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
            "Channel::Init() RTP/RTCP module not initialized");
        return -1;
    }

    // --- Register all permanent callbacks
    const bool fail =
        (_audioCodingModule.RegisterTransportCallback(this) == -1) ||
        (_audioCodingModule.RegisterVADCallback(this) == -1);

    if (fail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() callbacks not registered");
        return -1;
    }

    // --- Register all supported codecs to the receiving side of the
    //     RTP/RTCP module

    CodecInst codec;
    const WebRtc_UWord8 nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

    for (int idx = 0; idx < nSupportedCodecs; idx++)
    {
        // Open up the RTP/RTCP receiver for all supported codecs
        if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
            (_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() unable to register %s (%d/%d/%d/%d) "
                         "to RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }
        else
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() %s (%d/%d/%d/%d) has been added to "
                         "the RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }

        // Ensure that PCMU is used as default codec on the sending side
        if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
        {
            SetSendCodec(codec);
        }

        // Register default PT for outband 'telephone-event'
        if (!STR_CASE_CMP(codec.plname, "telephone-event"))
        {
            if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register outband "
                             "'telephone-event' (%d/%d) correctly",
                             codec.pltype, codec.plfreq);
            }
        }

        if (!STR_CASE_CMP(codec.plname, "CN"))
        {
            if ((_audioCodingModule.RegisterSendCodec(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1) ||
                (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register CN (%d/%d) "
                             "correctly - 1",
                             codec.pltype, codec.plfreq);
            }
        }
#ifdef WEBRTC_CODEC_RED
        // Register RED to the receiving side of the ACM.
        // We will not receive an OnInitializeDecoder() callback for RED.
        if (!STR_CASE_CMP(codec.plname, "RED"))
        {
            if (_audioCodingModule.RegisterReceiveCodec(codec) == -1)
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register RED (%d/%d) "
                             "correctly",
                             codec.pltype, codec.plfreq);
            }
        }
#endif
    }

    // Initialize the far end AP module
    // Using 8 kHz as initial Fs, the same as in transmission. Might be
    // changed at the first receiving audio.
    if (_rxAudioProcessingModulePtr == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_NO_MEMORY, kTraceCritical,
            "Channel::Init() failed to create the far-end AudioProcessing"
            " module");
        return -1;
    }

    if (_rxAudioProcessingModulePtr->set_sample_rate_hz(8000))
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the sample rate to 8K for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->set_num_channels(1, 1) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_SOUNDCARD_ERROR, kTraceWarning,
            "Init() failed to set channels for the primary audio stream");
    }

    if (_rxAudioProcessingModulePtr->high_pass_filter()->Enable(
        WEBRTC_VOICE_ENGINE_RX_HP_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the high-pass filter for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(
        (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction level for far-end"
            " AP module");
    }
    if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(
        WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction state for far-end"
            " AP module");
    }

    if (_rxAudioProcessingModulePtr->gain_control()->set_mode(
        (GainControl::Mode)WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC mode for far-end AP module");
    }
    if (_rxAudioProcessingModulePtr->gain_control()->Enable(
        WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC state for far-end AP module");
    }

    return 0;
}
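// Initialization order note: Init() assumes SetEngineInformation() has already
// been called (see the sanity check at the top). It then registers the
// RTP/RTCP module on the process thread, initializes the ACM and its
// callbacks, registers all supported receive codecs, and finally applies the
// default far-end (receive-side) AudioProcessing settings.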

WebRtc_Word32
Channel::SetEngineInformation(Statistics& engineStatistics,
                              OutputMixer& outputMixer,
                              voe::TransmitMixer& transmitMixer,
                              ProcessThread& moduleProcessThread,
                              AudioDeviceModule& audioDeviceModule,
                              VoiceEngineObserver* voiceEngineObserver,
                              CriticalSectionWrapper* callbackCritSect)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetEngineInformation()");
    _engineStatisticsPtr = &engineStatistics;
    _outputMixerPtr = &outputMixer;
    _transmitMixerPtr = &transmitMixer;
    _moduleProcessThreadPtr = &moduleProcessThread;
    _audioDeviceModulePtr = &audioDeviceModule;
    _voiceEngineObserverPtr = voiceEngineObserver;
    _callbackCritSectPtr = callbackCritSect;
    return 0;
}

WebRtc_Word32
Channel::UpdateLocalTimeStamp()
{
    _timeStamp += _audioFrame.samples_per_channel_;
    return 0;
}

WebRtc_Word32
Channel::StartPlayout()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartPlayout()");
    if (_playing)
    {
        return 0;
    }

    if (!_externalMixing) {
        // Add participant as a candidate for mixing.
        if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
                "StartPlayout() failed to add participant to mixer");
            return -1;
        }
    }

    _playing = true;

    if (RegisterFilePlayingToMixer() != 0)
        return -1;

    return 0;
}

WebRtc_Word32
Channel::StopPlayout()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StopPlayout()");
    if (!_playing)
    {
        return 0;
    }

    if (!_externalMixing) {
        // Remove participant as a candidate for mixing
        if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
                "StopPlayout() failed to remove participant from mixer");
            return -1;
        }
    }

    _playing = false;
    _outputAudioLevel.Clear();

    return 0;
}

WebRtc_Word32
Channel::StartSend()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartSend()");
    {
        // A lock is needed because |_sending| can be accessed or modified by
        // another thread at the same time.
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_sending)
        {
            return 0;
        }
        _sending = true;
    }

    if (_rtpRtcpModule->SetSendingStatus(true) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
            "StartSend() RTP/RTCP failed to start sending");
        CriticalSectionScoped cs(&_callbackCritSect);
        _sending = false;
        return -1;
    }

    return 0;
}

WebRtc_Word32
Channel::StopSend()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StopSend()");
    {
        // A lock is needed because |_sending| can be accessed or modified by
        // another thread at the same time.
        CriticalSectionScoped cs(&_callbackCritSect);

        if (!_sending)
        {
            return 0;
        }
        _sending = false;
    }

    // Reset sending SSRC and sequence number and trigger direct transmission
    // of RTCP BYE
    if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
        _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
            "StopSend() RTP/RTCP failed to stop sending");
    }

    return 0;
}
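// Streaming lifecycle sketch (illustrative; applications normally drive this
// through the VoEBase API rather than calling the channel directly):
//
//   channel->StartReceiving();  // accept incoming RTP
//   channel->StartPlayout();    // join the output mixer
//   channel->StartSend();       // enable RTP transmission
//   ...
//   channel->StopSend();
//   channel->StopPlayout();
//   channel->StopReceiving();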

WebRtc_Word32
Channel::StartReceiving()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartReceiving()");
    if (_receiving)
    {
        return 0;
    }
    _receiving = true;
    _numberOfDiscardedPackets = 0;
    return 0;
}

WebRtc_Word32
Channel::StopReceiving()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StopReceiving()");
    if (!_receiving)
    {
        return 0;
    }

    // Recover DTMF detection status.
    WebRtc_Word32 ret = _rtpRtcpModule->SetTelephoneEventForwardToDecoder(true);
    if (ret != 0) {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_OPERATION, kTraceWarning,
            "StopReceiving() failed to restore telephone-event status.");
    }
    RegisterReceiveCodecsToRTPModule();
    _receiving = false;
    return 0;
}

WebRtc_Word32
Channel::SetNetEQPlayoutMode(NetEqModes mode)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetNetEQPlayoutMode()");
    AudioPlayoutMode playoutMode(voice);
    switch (mode)
    {
        case kNetEqDefault:
            playoutMode = voice;
            break;
        case kNetEqStreaming:
            playoutMode = streaming;
            break;
        case kNetEqFax:
            playoutMode = fax;
            break;
        case kNetEqOff:
            playoutMode = off;
            break;
    }
    if (_audioCodingModule.SetPlayoutMode(playoutMode) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "SetNetEQPlayoutMode() failed to set playout mode");
        return -1;
    }
    return 0;
}
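// Playout mode mapping, for reference: kNetEqDefault -> voice,
// kNetEqStreaming -> streaming, kNetEqFax -> fax, kNetEqOff -> off.
// GetNetEQPlayoutMode() below applies the same mapping in reverse.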
1513
1514WebRtc_Word32
1515Channel::GetNetEQPlayoutMode(NetEqModes& mode)
1516{
1517 const AudioPlayoutMode playoutMode = _audioCodingModule.PlayoutMode();
1518 switch (playoutMode)
1519 {
1520 case voice:
1521 mode = kNetEqDefault;
1522 break;
1523 case streaming:
1524 mode = kNetEqStreaming;
1525 break;
1526 case fax:
1527 mode = kNetEqFax;
1528 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001529 case off:
1530 mode = kNetEqOff;
niklase@google.com470e71d2011-07-07 08:21:25 +00001531 }
1532 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
1533 VoEId(_instanceId,_channelId),
1534 "Channel::GetNetEQPlayoutMode() => mode=%u", mode);
1535 return 0;
1536}
1537
1538WebRtc_Word32
niklase@google.com470e71d2011-07-07 08:21:25 +00001539Channel::SetOnHoldStatus(bool enable, OnHoldModes mode)
1540{
1541 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1542 "Channel::SetOnHoldStatus()");
1543 if (mode == kHoldSendAndPlay)
1544 {
1545 _outputIsOnHold = enable;
1546 _inputIsOnHold = enable;
1547 }
1548 else if (mode == kHoldPlayOnly)
1549 {
1550 _outputIsOnHold = enable;
1551 }
1552    else if (mode == kHoldSendOnly)
1553 {
1554 _inputIsOnHold = enable;
1555 }
1556 return 0;
1557}
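
// Editorial usage sketch (hypothetical, not part of the original file):
// putting only the received audio on hold while continuing to send would be
//
//   channel->SetOnHoldStatus(true, kHoldPlayOnly);
//
// whereas kHoldSendAndPlay holds both directions and kHoldSendOnly only the
// captured/transmitted side.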
1558
1559WebRtc_Word32
1560Channel::GetOnHoldStatus(bool& enabled, OnHoldModes& mode)
1561{
1562 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1563 "Channel::GetOnHoldStatus()");
1564 enabled = (_outputIsOnHold || _inputIsOnHold);
1565 if (_outputIsOnHold && _inputIsOnHold)
1566 {
1567 mode = kHoldSendAndPlay;
1568 }
1569 else if (_outputIsOnHold && !_inputIsOnHold)
1570 {
1571 mode = kHoldPlayOnly;
1572 }
1573 else if (!_outputIsOnHold && _inputIsOnHold)
1574 {
1575 mode = kHoldSendOnly;
1576 }
1577 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1578 "Channel::GetOnHoldStatus() => enabled=%d, mode=%d",
1579 enabled, mode);
1580 return 0;
1581}
1582
1583WebRtc_Word32
1584Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
1585{
1586 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1587 "Channel::RegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001588 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001589
1590 if (_voiceEngineObserverPtr)
1591 {
1592 _engineStatisticsPtr->SetLastError(
1593 VE_INVALID_OPERATION, kTraceError,
1594 "RegisterVoiceEngineObserver() observer already enabled");
1595 return -1;
1596 }
1597 _voiceEngineObserverPtr = &observer;
1598 return 0;
1599}
1600
1601WebRtc_Word32
1602Channel::DeRegisterVoiceEngineObserver()
1603{
1604 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1605 "Channel::DeRegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001606 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001607
1608 if (!_voiceEngineObserverPtr)
1609 {
1610 _engineStatisticsPtr->SetLastError(
1611 VE_INVALID_OPERATION, kTraceWarning,
1612 "DeRegisterVoiceEngineObserver() observer already disabled");
1613 return 0;
1614 }
1615 _voiceEngineObserverPtr = NULL;
1616 return 0;
1617}
1618
1619WebRtc_Word32
niklase@google.com470e71d2011-07-07 08:21:25 +00001620Channel::GetSendCodec(CodecInst& codec)
1621{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001622 return (_audioCodingModule.SendCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001623}
1624
1625WebRtc_Word32
1626Channel::GetRecCodec(CodecInst& codec)
1627{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001628 return (_audioCodingModule.ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001629}
1630
1631WebRtc_Word32
1632Channel::SetSendCodec(const CodecInst& codec)
1633{
1634 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1635 "Channel::SetSendCodec()");
1636
1637 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1638 {
1639 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1640 "SetSendCodec() failed to register codec to ACM");
1641 return -1;
1642 }
1643
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001644 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001645 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001646 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1647 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001648 {
1649 WEBRTC_TRACE(
1650 kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1651 "SetSendCodec() failed to register codec to"
1652 " RTP/RTCP module");
1653 return -1;
1654 }
1655 }
1656
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001657 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001658 {
1659 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1660 "SetSendCodec() failed to set audio packet size");
1661 return -1;
1662 }
1663
1664 return 0;
1665}
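
// Editorial usage sketch (hypothetical, not part of the original file): a
// caller fills a CodecInst in the same field order as the dummyCodec used
// elsewhere in this file (pltype, plname, plfreq, pacsize, channels, rate)
// and hands it to the channel; the concrete values below are illustrative
// only.
//
//   CodecInst codec = {103, "ISAC", 16000, 480, 1, 32000};
//   if (channel->SetSendCodec(codec) != 0) {
//       // Registration in the ACM or RTP/RTCP module failed; see last error.
//   }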
1666
1667WebRtc_Word32
1668Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
1669{
1670 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1671 "Channel::SetVADStatus(mode=%d)", mode);
1672 // To disable VAD, DTX must be disabled too
1673 disableDTX = ((enableVAD == false) ? true : disableDTX);
1674 if (_audioCodingModule.SetVAD(!disableDTX, enableVAD, mode) != 0)
1675 {
1676 _engineStatisticsPtr->SetLastError(
1677 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1678 "SetVADStatus() failed to set VAD");
1679 return -1;
1680 }
1681 return 0;
1682}
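
// Editorial note (sketch, not original code): since disabling VAD forces DTX
// off as well, a caller that simply wants VAD/DTX switched off can pass any
// value for |disableDTX|, e.g.
//
//   channel->SetVADStatus(false, VADNormal, false);  // DTX ends up disabled
//
// where VADNormal is assumed to be one of the ACMVADMode values.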
1683
1684WebRtc_Word32
1685Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
1686{
1687 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1688 "Channel::GetVADStatus");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001689 if (_audioCodingModule.VAD(&disabledDTX, &enabledVAD, &mode) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001690 {
1691 _engineStatisticsPtr->SetLastError(
1692 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1693 "GetVADStatus() failed to get VAD status");
1694 return -1;
1695 }
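    // The ACM reports whether DTX is enabled; invert it so the out-parameter
    // matches the "disabled" convention used by this API.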
1696 disabledDTX = !disabledDTX;
1697 return 0;
1698}
1699
1700WebRtc_Word32
1701Channel::SetRecPayloadType(const CodecInst& codec)
1702{
1703 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1704 "Channel::SetRecPayloadType()");
1705
1706 if (_playing)
1707 {
1708 _engineStatisticsPtr->SetLastError(
1709 VE_ALREADY_PLAYING, kTraceError,
1710 "SetRecPayloadType() unable to set PT while playing");
1711 return -1;
1712 }
1713 if (_receiving)
1714 {
1715 _engineStatisticsPtr->SetLastError(
1716 VE_ALREADY_LISTENING, kTraceError,
1717 "SetRecPayloadType() unable to set PT while listening");
1718 return -1;
1719 }
1720
1721 if (codec.pltype == -1)
1722 {
1723 // De-register the selected codec (RTP/RTCP module and ACM)
1724
1725 WebRtc_Word8 pltype(-1);
1726 CodecInst rxCodec = codec;
1727
1728 // Get payload type for the given codec
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001729 _rtpRtcpModule->ReceivePayloadType(rxCodec, &pltype);
niklase@google.com470e71d2011-07-07 08:21:25 +00001730 rxCodec.pltype = pltype;
1731
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001732 if (_rtpRtcpModule->DeRegisterReceivePayload(pltype) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001733 {
1734 _engineStatisticsPtr->SetLastError(
1735 VE_RTP_RTCP_MODULE_ERROR,
1736 kTraceError,
1737 "SetRecPayloadType() RTP/RTCP-module deregistration "
1738 "failed");
1739 return -1;
1740 }
1741 if (_audioCodingModule.UnregisterReceiveCodec(rxCodec.pltype) != 0)
1742 {
1743 _engineStatisticsPtr->SetLastError(
1744 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1745 "SetRecPayloadType() ACM deregistration failed - 1");
1746 return -1;
1747 }
1748 return 0;
1749 }
1750
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001751 if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001752 {
1753 // First attempt to register failed => de-register and try again
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001754 _rtpRtcpModule->DeRegisterReceivePayload(codec.pltype);
1755 if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001756 {
1757 _engineStatisticsPtr->SetLastError(
1758 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1759 "SetRecPayloadType() RTP/RTCP-module registration failed");
1760 return -1;
1761 }
1762 }
1763 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1764 {
1765 _audioCodingModule.UnregisterReceiveCodec(codec.pltype);
1766 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1767 {
1768 _engineStatisticsPtr->SetLastError(
1769 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1770 "SetRecPayloadType() ACM registration failed - 1");
1771 return -1;
1772 }
1773 }
1774 return 0;
1775}
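
// Editorial usage sketch (hypothetical, not part of the original file): a
// pltype of -1 acts as a de-registration request, so removing a previously
// mapped receive codec looks like
//
//   CodecInst codec = previouslyRegisteredCodec;  // name/frequency must match
//   codec.pltype = -1;
//   channel->SetRecPayloadType(codec);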
1776
1777WebRtc_Word32
1778Channel::GetRecPayloadType(CodecInst& codec)
1779{
1780 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1781 "Channel::GetRecPayloadType()");
1782 WebRtc_Word8 payloadType(-1);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001783 if (_rtpRtcpModule->ReceivePayloadType(codec, &payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001784 {
1785 _engineStatisticsPtr->SetLastError(
henrika@webrtc.org37198002012-06-18 11:00:12 +00001786 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
niklase@google.com470e71d2011-07-07 08:21:25 +00001787 "GetRecPayloadType() failed to retrieve RX payload type");
1788 return -1;
1789 }
1790 codec.pltype = payloadType;
1791 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1792 "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
1793 return 0;
1794}
1795
1796WebRtc_Word32
1797Channel::SetAMREncFormat(AmrMode mode)
1798{
1799 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1800 "Channel::SetAMREncFormat()");
1801
1802 // ACM doesn't support AMR
1803 return -1;
1804}
1805
1806WebRtc_Word32
1807Channel::SetAMRDecFormat(AmrMode mode)
1808{
1809 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1810 "Channel::SetAMRDecFormat()");
1811
1812 // ACM doesn't support AMR
1813 return -1;
1814}
1815
1816WebRtc_Word32
1817Channel::SetAMRWbEncFormat(AmrMode mode)
1818{
1819 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1820 "Channel::SetAMRWbEncFormat()");
1821
1822 // ACM doesn't support AMR
1823 return -1;
1824
1825}
1826
1827WebRtc_Word32
1828Channel::SetAMRWbDecFormat(AmrMode mode)
1829{
1830 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1831 "Channel::SetAMRWbDecFormat()");
1832
1833 // ACM doesn't support AMR
1834 return -1;
1835}
1836
1837WebRtc_Word32
1838Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
1839{
1840 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1841 "Channel::SetSendCNPayloadType()");
1842
1843 CodecInst codec;
1844 WebRtc_Word32 samplingFreqHz(-1);
tina.legrand@webrtc.org45175852012-06-01 09:27:35 +00001845 const int kMono = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001846 if (frequency == kFreq32000Hz)
1847 samplingFreqHz = 32000;
1848 else if (frequency == kFreq16000Hz)
1849 samplingFreqHz = 16000;
1850
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001851 if (_audioCodingModule.Codec("CN", &codec, samplingFreqHz, kMono) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001852 {
1853 _engineStatisticsPtr->SetLastError(
1854 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1855 "SetSendCNPayloadType() failed to retrieve default CN codec "
1856 "settings");
1857 return -1;
1858 }
1859
1860 // Modify the payload type (must be set to dynamic range)
1861 codec.pltype = type;
1862
1863 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1864 {
1865 _engineStatisticsPtr->SetLastError(
1866 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1867 "SetSendCNPayloadType() failed to register CN to ACM");
1868 return -1;
1869 }
1870
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001871 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001872 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001873 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1874 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001875 {
1876 _engineStatisticsPtr->SetLastError(
1877 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1878 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1879 "module");
1880 return -1;
1881 }
1882 }
1883 return 0;
1884}
1885
1886WebRtc_Word32
1887Channel::SetISACInitTargetRate(int rateBps, bool useFixedFrameSize)
1888{
1889 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1890 "Channel::SetISACInitTargetRate()");
1891
1892 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001893 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001894 {
1895 _engineStatisticsPtr->SetLastError(
1896 VE_CODEC_ERROR, kTraceError,
1897 "SetISACInitTargetRate() failed to retrieve send codec");
1898 return -1;
1899 }
1900 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1901 {
1902 // This API is only valid if iSAC is setup to run in channel-adaptive
1903 // mode.
1904 // We do not validate the adaptive mode here. It is done later in the
1905 // ConfigISACBandwidthEstimator() API.
1906 _engineStatisticsPtr->SetLastError(
1907 VE_CODEC_ERROR, kTraceError,
1908 "SetISACInitTargetRate() send codec is not iSAC");
1909 return -1;
1910 }
1911
1912 WebRtc_UWord8 initFrameSizeMsec(0);
1913 if (16000 == sendCodec.plfreq)
1914 {
1915        // Note that 0 is a valid value and corresponds to "use default".
1916 if ((rateBps != 0 &&
1917 rateBps < kVoiceEngineMinIsacInitTargetRateBpsWb) ||
1918 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsWb))
1919 {
1920 _engineStatisticsPtr->SetLastError(
1921 VE_INVALID_ARGUMENT, kTraceError,
1922 "SetISACInitTargetRate() invalid target rate - 1");
1923 return -1;
1924 }
1925 // 30 or 60ms
1926 initFrameSizeMsec = (WebRtc_UWord8)(sendCodec.pacsize / 16);
1927 }
1928 else if (32000 == sendCodec.plfreq)
1929 {
1930 if ((rateBps != 0 &&
1931 rateBps < kVoiceEngineMinIsacInitTargetRateBpsSwb) ||
1932 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsSwb))
1933 {
1934 _engineStatisticsPtr->SetLastError(
1935 VE_INVALID_ARGUMENT, kTraceError,
1936 "SetISACInitTargetRate() invalid target rate - 2");
1937 return -1;
1938 }
1939 initFrameSizeMsec = (WebRtc_UWord8)(sendCodec.pacsize / 32); // 30ms
1940 }
1941
1942 if (_audioCodingModule.ConfigISACBandwidthEstimator(
1943 initFrameSizeMsec, rateBps, useFixedFrameSize) == -1)
1944 {
1945 _engineStatisticsPtr->SetLastError(
1946 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1947 "SetISACInitTargetRate() iSAC BWE config failed");
1948 return -1;
1949 }
1950
1951 return 0;
1952}
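
// Editorial note (not original code): the frame-size conversion above relies
// on pacsize being expressed in samples, so a wideband (16 kHz) codec with
// pacsize = 480 gives 480 / 16 = 30 ms and pacsize = 960 gives 60 ms, while
// the super-wideband (32 kHz) case divides by 32 and always yields 30 ms.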
1953
1954WebRtc_Word32
1955Channel::SetISACMaxRate(int rateBps)
1956{
1957 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1958 "Channel::SetISACMaxRate()");
1959
1960 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001961 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001962 {
1963 _engineStatisticsPtr->SetLastError(
1964 VE_CODEC_ERROR, kTraceError,
1965 "SetISACMaxRate() failed to retrieve send codec");
1966 return -1;
1967 }
1968 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1969 {
1970 // This API is only valid if iSAC is selected as sending codec.
1971 _engineStatisticsPtr->SetLastError(
1972 VE_CODEC_ERROR, kTraceError,
1973 "SetISACMaxRate() send codec is not iSAC");
1974 return -1;
1975 }
1976 if (16000 == sendCodec.plfreq)
1977 {
1978 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsWb) ||
1979 (rateBps > kVoiceEngineMaxIsacMaxRateBpsWb))
1980 {
1981 _engineStatisticsPtr->SetLastError(
1982 VE_INVALID_ARGUMENT, kTraceError,
1983 "SetISACMaxRate() invalid max rate - 1");
1984 return -1;
1985 }
1986 }
1987 else if (32000 == sendCodec.plfreq)
1988 {
1989 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsSwb) ||
1990 (rateBps > kVoiceEngineMaxIsacMaxRateBpsSwb))
1991 {
1992 _engineStatisticsPtr->SetLastError(
1993 VE_INVALID_ARGUMENT, kTraceError,
1994 "SetISACMaxRate() invalid max rate - 2");
1995 return -1;
1996 }
1997 }
1998 if (_sending)
1999 {
2000 _engineStatisticsPtr->SetLastError(
2001 VE_SENDING, kTraceError,
2002 "SetISACMaxRate() unable to set max rate while sending");
2003 return -1;
2004 }
2005
2006 // Set the maximum instantaneous rate of iSAC (works for both adaptive
2007 // and non-adaptive mode)
2008 if (_audioCodingModule.SetISACMaxRate(rateBps) == -1)
2009 {
2010 _engineStatisticsPtr->SetLastError(
2011 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2012 "SetISACMaxRate() failed to set max rate");
2013 return -1;
2014 }
2015
2016 return 0;
2017}
2018
2019WebRtc_Word32
2020Channel::SetISACMaxPayloadSize(int sizeBytes)
2021{
2022 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2023 "Channel::SetISACMaxPayloadSize()");
2024 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00002025 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00002026 {
2027 _engineStatisticsPtr->SetLastError(
2028 VE_CODEC_ERROR, kTraceError,
2029 "SetISACMaxPayloadSize() failed to retrieve send codec");
2030 return -1;
2031 }
2032 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
2033 {
2034 _engineStatisticsPtr->SetLastError(
2035 VE_CODEC_ERROR, kTraceError,
2036 "SetISACMaxPayloadSize() send codec is not iSAC");
2037 return -1;
2038 }
2039 if (16000 == sendCodec.plfreq)
2040 {
2041 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesWb) ||
2042 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesWb))
2043 {
2044 _engineStatisticsPtr->SetLastError(
2045 VE_INVALID_ARGUMENT, kTraceError,
2046 "SetISACMaxPayloadSize() invalid max payload - 1");
2047 return -1;
2048 }
2049 }
2050 else if (32000 == sendCodec.plfreq)
2051 {
2052 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesSwb) ||
2053 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb))
2054 {
2055 _engineStatisticsPtr->SetLastError(
2056 VE_INVALID_ARGUMENT, kTraceError,
2057 "SetISACMaxPayloadSize() invalid max payload - 2");
2058 return -1;
2059 }
2060 }
2061 if (_sending)
2062 {
2063 _engineStatisticsPtr->SetLastError(
2064 VE_SENDING, kTraceError,
2065            "SetISACMaxPayloadSize() unable to set max payload size while sending");
2066 return -1;
2067 }
2068
2069 if (_audioCodingModule.SetISACMaxPayloadSize(sizeBytes) == -1)
2070 {
2071 _engineStatisticsPtr->SetLastError(
2072 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2073 "SetISACMaxPayloadSize() failed to set max payload size");
2074 return -1;
2075 }
2076 return 0;
2077}
2078
2079WebRtc_Word32 Channel::RegisterExternalTransport(Transport& transport)
2080{
2081 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2082 "Channel::RegisterExternalTransport()");
2083
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002084 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002085
niklase@google.com470e71d2011-07-07 08:21:25 +00002086 if (_externalTransport)
2087 {
2088 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
2089 kTraceError,
2090 "RegisterExternalTransport() external transport already enabled");
2091 return -1;
2092 }
2093 _externalTransport = true;
2094 _transportPtr = &transport;
2095 return 0;
2096}
2097
2098WebRtc_Word32
2099Channel::DeRegisterExternalTransport()
2100{
2101 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2102 "Channel::DeRegisterExternalTransport()");
2103
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002104 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.org83661f52011-11-25 10:58:15 +00002105
niklase@google.com470e71d2011-07-07 08:21:25 +00002106 if (!_transportPtr)
2107 {
2108 _engineStatisticsPtr->SetLastError(
2109 VE_INVALID_OPERATION, kTraceWarning,
2110 "DeRegisterExternalTransport() external transport already "
2111 "disabled");
2112 return 0;
2113 }
2114 _externalTransport = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002115 _transportPtr = NULL;
2116 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2117                 "DeRegisterExternalTransport() external transport is disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002118 return 0;
2119}
2120
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002121WebRtc_Word32 Channel::ReceivedRTPPacket(const WebRtc_Word8* data,
2122 WebRtc_Word32 length) {
2123 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2124 "Channel::ReceivedRTPPacket()");
2125
2126 // Store playout timestamp for the received RTP packet
2127 WebRtc_UWord32 playoutTimestamp(0);
2128 if (GetPlayoutTimeStamp(playoutTimestamp) == 0) {
2129 _playoutTimeStampRTP = playoutTimestamp;
2130 }
2131
2132 // Dump the RTP packet to a file (if RTP dump is enabled).
2133 if (_rtpDumpIn.DumpPacket((const WebRtc_UWord8*)data,
2134 (WebRtc_UWord16)length) == -1) {
2135 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2136 VoEId(_instanceId,_channelId),
2137                 "Channel::ReceivedRTPPacket() RTP dump to input file failed");
2138 }
2139
2140 // Deliver RTP packet to RTP/RTCP module for parsing
2141 // The packet will be pushed back to the channel thru the
2142 // OnReceivedPayloadData callback so we don't push it to the ACM here
2143 if (_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data,
2144 (WebRtc_UWord16)length) == -1) {
2145 _engineStatisticsPtr->SetLastError(
2146 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2147        "Channel::ReceivedRTPPacket() RTP packet is invalid");
2148 }
2149 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002150}
2151
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002152WebRtc_Word32 Channel::ReceivedRTCPPacket(const WebRtc_Word8* data,
2153 WebRtc_Word32 length) {
2154 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2155 "Channel::ReceivedRTCPPacket()");
2156 // Store playout timestamp for the received RTCP packet
2157 WebRtc_UWord32 playoutTimestamp(0);
2158 if (GetPlayoutTimeStamp(playoutTimestamp) == 0) {
2159 _playoutTimeStampRTCP = playoutTimestamp;
2160 }
2161
2162 // Dump the RTCP packet to a file (if RTP dump is enabled).
2163 if (_rtpDumpIn.DumpPacket((const WebRtc_UWord8*)data,
2164 (WebRtc_UWord16)length) == -1) {
2165 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2166 VoEId(_instanceId,_channelId),
2167                 "Channel::ReceivedRTCPPacket() RTCP dump to input file failed");
2168 }
2169
2170 // Deliver RTCP packet to RTP/RTCP module for parsing
2171 if (_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data,
2172 (WebRtc_UWord16)length) == -1) {
2173 _engineStatisticsPtr->SetLastError(
2174 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2175        "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
2176 }
2177 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002178}
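
// Editorial usage sketch (hypothetical, not part of the original file): when
// an external transport is registered, the embedding application is assumed
// to own the sockets and to feed every received packet back into the channel:
//
//   channel->ReceivedRTPPacket(reinterpret_cast<const WebRtc_Word8*>(buf),
//                              length);
//   channel->ReceivedRTCPPacket(reinterpret_cast<const WebRtc_Word8*>(rtcpBuf),
//                               rtcpLength);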
2179
niklase@google.com470e71d2011-07-07 08:21:25 +00002180WebRtc_Word32
2181Channel::SetPacketTimeoutNotification(bool enable, int timeoutSeconds)
2182{
2183 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2184 "Channel::SetPacketTimeoutNotification()");
2185 if (enable)
2186 {
2187 const WebRtc_UWord32 RTPtimeoutMS = 1000*timeoutSeconds;
2188 const WebRtc_UWord32 RTCPtimeoutMS = 0;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002189 _rtpRtcpModule->SetPacketTimeout(RTPtimeoutMS, RTCPtimeoutMS);
niklase@google.com470e71d2011-07-07 08:21:25 +00002190 _rtpPacketTimeOutIsEnabled = true;
2191 _rtpTimeOutSeconds = timeoutSeconds;
2192 }
2193 else
2194 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002195 _rtpRtcpModule->SetPacketTimeout(0, 0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002196 _rtpPacketTimeOutIsEnabled = false;
2197 _rtpTimeOutSeconds = 0;
2198 }
2199 return 0;
2200}
2201
2202WebRtc_Word32
2203Channel::GetPacketTimeoutNotification(bool& enabled, int& timeoutSeconds)
2204{
2205 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2206 "Channel::GetPacketTimeoutNotification()");
2207 enabled = _rtpPacketTimeOutIsEnabled;
2208 if (enabled)
2209 {
2210 timeoutSeconds = _rtpTimeOutSeconds;
2211 }
2212 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
2213 "GetPacketTimeoutNotification() => enabled=%d,"
2214 " timeoutSeconds=%d",
2215 enabled, timeoutSeconds);
2216 return 0;
2217}
2218
2219WebRtc_Word32
2220Channel::RegisterDeadOrAliveObserver(VoEConnectionObserver& observer)
2221{
2222 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2223 "Channel::RegisterDeadOrAliveObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002224 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002225
2226 if (_connectionObserverPtr)
2227 {
2228 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION, kTraceError,
2229 "RegisterDeadOrAliveObserver() observer already enabled");
2230 return -1;
2231 }
2232
2233 _connectionObserverPtr = &observer;
2234 _connectionObserver = true;
2235
2236 return 0;
2237}
2238
2239WebRtc_Word32
2240Channel::DeRegisterDeadOrAliveObserver()
2241{
2242 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2243 "Channel::DeRegisterDeadOrAliveObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002244 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002245
2246 if (!_connectionObserverPtr)
2247 {
2248 _engineStatisticsPtr->SetLastError(
2249 VE_INVALID_OPERATION, kTraceWarning,
2250 "DeRegisterDeadOrAliveObserver() observer already disabled");
2251 return 0;
2252 }
2253
2254 _connectionObserver = false;
2255 _connectionObserverPtr = NULL;
2256
2257 return 0;
2258}
2259
2260WebRtc_Word32
2261Channel::SetPeriodicDeadOrAliveStatus(bool enable, int sampleTimeSeconds)
2262{
2263 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2264 "Channel::SetPeriodicDeadOrAliveStatus()");
2265 if (!_connectionObserverPtr)
2266 {
2267 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
2268 "SetPeriodicDeadOrAliveStatus() connection observer has"
2269 " not been registered");
2270 }
2271 if (enable)
2272 {
2273 ResetDeadOrAliveCounters();
2274 }
2275 bool enabled(false);
2276 WebRtc_UWord8 currentSampleTimeSec(0);
2277 // Store last state (will be used later if dead-or-alive is disabled).
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002278 _rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, currentSampleTimeSec);
niklase@google.com470e71d2011-07-07 08:21:25 +00002279 // Update the dead-or-alive state.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002280 if (_rtpRtcpModule->SetPeriodicDeadOrAliveStatus(
niklase@google.com470e71d2011-07-07 08:21:25 +00002281 enable, (WebRtc_UWord8)sampleTimeSeconds) != 0)
2282 {
2283 _engineStatisticsPtr->SetLastError(
2284 VE_RTP_RTCP_MODULE_ERROR,
2285 kTraceError,
2286 "SetPeriodicDeadOrAliveStatus() failed to set dead-or-alive "
2287 "status");
2288 return -1;
2289 }
2290 if (!enable)
2291 {
2292 // Restore last utilized sample time.
2293        // Without this, the sample time would always be reset to the default
2294        // (2 sec) each time dead-or-alive was disabled without a sample-time
2295        // parameter.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002296 _rtpRtcpModule->SetPeriodicDeadOrAliveStatus(enable,
niklase@google.com470e71d2011-07-07 08:21:25 +00002297 currentSampleTimeSec);
2298 }
2299 return 0;
2300}
2301
2302WebRtc_Word32
2303Channel::GetPeriodicDeadOrAliveStatus(bool& enabled, int& sampleTimeSeconds)
2304{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002305 _rtpRtcpModule->PeriodicDeadOrAliveStatus(
niklase@google.com470e71d2011-07-07 08:21:25 +00002306 enabled,
2307 (WebRtc_UWord8&)sampleTimeSeconds);
2308 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
2309 "GetPeriodicDeadOrAliveStatus() => enabled=%d,"
2310 " sampleTimeSeconds=%d",
2311 enabled, sampleTimeSeconds);
2312 return 0;
2313}
2314
niklase@google.com470e71d2011-07-07 08:21:25 +00002315int Channel::StartPlayingFileLocally(const char* fileName,
2316 const bool loop,
2317 const FileFormats format,
2318 const int startPosition,
2319 const float volumeScaling,
2320 const int stopPosition,
2321 const CodecInst* codecInst)
2322{
2323 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2324 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
2325 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
2326 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2327 startPosition, stopPosition);
2328
2329 if (_outputFilePlaying)
2330 {
2331 _engineStatisticsPtr->SetLastError(
2332 VE_ALREADY_PLAYING, kTraceError,
2333 "StartPlayingFileLocally() is already playing");
2334 return -1;
2335 }
2336
niklase@google.com470e71d2011-07-07 08:21:25 +00002337 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002338 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002339
2340 if (_outputFilePlayerPtr)
2341 {
2342 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2343 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2344 _outputFilePlayerPtr = NULL;
2345 }
2346
2347 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2348 _outputFilePlayerId, (const FileFormats)format);
2349
2350 if (_outputFilePlayerPtr == NULL)
2351 {
2352 _engineStatisticsPtr->SetLastError(
2353 VE_INVALID_ARGUMENT, kTraceError,
henrike@webrtc.org31d30702011-11-18 19:59:32 +00002354 "StartPlayingFileLocally() filePlayer format is not correct");
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002355 return -1;
2356 }
2357
2358 const WebRtc_UWord32 notificationTime(0);
2359
2360 if (_outputFilePlayerPtr->StartPlayingFile(
2361 fileName,
2362 loop,
2363 startPosition,
2364 volumeScaling,
2365 notificationTime,
2366 stopPosition,
2367 (const CodecInst*)codecInst) != 0)
2368 {
2369 _engineStatisticsPtr->SetLastError(
2370 VE_BAD_FILE, kTraceError,
2371 "StartPlayingFile() failed to start file playout");
2372 _outputFilePlayerPtr->StopPlayingFile();
2373 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2374 _outputFilePlayerPtr = NULL;
2375 return -1;
2376 }
2377 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2378 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002379 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002380
2381 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002382 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002383
2384 return 0;
2385}
2386
2387int Channel::StartPlayingFileLocally(InStream* stream,
2388 const FileFormats format,
2389 const int startPosition,
2390 const float volumeScaling,
2391 const int stopPosition,
2392 const CodecInst* codecInst)
2393{
2394 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2395 "Channel::StartPlayingFileLocally(format=%d,"
2396 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2397 format, volumeScaling, startPosition, stopPosition);
2398
2399 if(stream == NULL)
2400 {
2401 _engineStatisticsPtr->SetLastError(
2402 VE_BAD_FILE, kTraceError,
2403 "StartPlayingFileLocally() NULL as input stream");
2404 return -1;
2405 }
2406
2407
2408 if (_outputFilePlaying)
2409 {
2410 _engineStatisticsPtr->SetLastError(
2411 VE_ALREADY_PLAYING, kTraceError,
2412 "StartPlayingFileLocally() is already playing");
2413 return -1;
2414 }
2415
niklase@google.com470e71d2011-07-07 08:21:25 +00002416 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002417 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002418
2419 // Destroy the old instance
2420 if (_outputFilePlayerPtr)
2421 {
2422 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2423 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2424 _outputFilePlayerPtr = NULL;
2425 }
2426
2427 // Create the instance
2428 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2429 _outputFilePlayerId,
2430 (const FileFormats)format);
2431
2432 if (_outputFilePlayerPtr == NULL)
2433 {
2434 _engineStatisticsPtr->SetLastError(
2435 VE_INVALID_ARGUMENT, kTraceError,
2436            "StartPlayingFileLocally() filePlayer format is not correct");
2437 return -1;
2438 }
2439
2440 const WebRtc_UWord32 notificationTime(0);
2441
2442 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2443 volumeScaling,
2444 notificationTime,
2445 stopPosition, codecInst) != 0)
2446 {
2447 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2448 "StartPlayingFile() failed to "
2449 "start file playout");
2450 _outputFilePlayerPtr->StopPlayingFile();
2451 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2452 _outputFilePlayerPtr = NULL;
2453 return -1;
2454 }
2455 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2456 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002457 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002458
2459 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002460 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002461
niklase@google.com470e71d2011-07-07 08:21:25 +00002462 return 0;
2463}
2464
2465int Channel::StopPlayingFileLocally()
2466{
2467 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2468 "Channel::StopPlayingFileLocally()");
2469
2470 if (!_outputFilePlaying)
2471 {
2472 _engineStatisticsPtr->SetLastError(
2473 VE_INVALID_OPERATION, kTraceWarning,
2474            "StopPlayingFileLocally() is not playing");
2475 return 0;
2476 }
2477
niklase@google.com470e71d2011-07-07 08:21:25 +00002478 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002479 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002480
2481 if (_outputFilePlayerPtr->StopPlayingFile() != 0)
2482 {
2483 _engineStatisticsPtr->SetLastError(
2484 VE_STOP_RECORDING_FAILED, kTraceError,
2485 "StopPlayingFile() could not stop playing");
2486 return -1;
2487 }
2488 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2489 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2490 _outputFilePlayerPtr = NULL;
2491 _outputFilePlaying = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002492 }
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002493 // _fileCritSect cannot be taken while calling
2494    // SetAnonymousMixabilityStatus(). Refer to comments in
2495 // StartPlayingFileLocally(const char* ...) for more details.
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002496 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
2497 {
2498 _engineStatisticsPtr->SetLastError(
2499 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002500            "StopPlayingFile() failed to stop participant from playing as "
2501 "file in the mixer");
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002502 return -1;
2503 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002504
2505 return 0;
2506}
2507
2508int Channel::IsPlayingFileLocally() const
2509{
2510 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2511 "Channel::IsPlayingFileLocally()");
2512
2513 return (WebRtc_Word32)_outputFilePlaying;
2514}
2515
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002516int Channel::RegisterFilePlayingToMixer()
2517{
2518 // Return success for not registering for file playing to mixer if:
2519 // 1. playing file before playout is started on that channel.
2520 // 2. starting playout without file playing on that channel.
2521 if (!_playing || !_outputFilePlaying)
2522 {
2523 return 0;
2524 }
2525
2526 // |_fileCritSect| cannot be taken while calling
2527 // SetAnonymousMixabilityStatus() since as soon as the participant is added
2528 // frames can be pulled by the mixer. Since the frames are generated from
2529 // the file, _fileCritSect will be taken. This would result in a deadlock.
2530 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
2531 {
2532 CriticalSectionScoped cs(&_fileCritSect);
2533 _outputFilePlaying = false;
2534 _engineStatisticsPtr->SetLastError(
2535 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2536 "StartPlayingFile() failed to add participant as file to mixer");
2537 _outputFilePlayerPtr->StopPlayingFile();
2538 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2539 _outputFilePlayerPtr = NULL;
2540 return -1;
2541 }
2542
2543 return 0;
2544}
2545
niklase@google.com470e71d2011-07-07 08:21:25 +00002546int Channel::ScaleLocalFilePlayout(const float scale)
2547{
2548 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2549 "Channel::ScaleLocalFilePlayout(scale=%5.3f)", scale);
2550
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002551 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002552
2553 if (!_outputFilePlaying)
2554 {
2555 _engineStatisticsPtr->SetLastError(
2556 VE_INVALID_OPERATION, kTraceError,
2557            "ScaleLocalFilePlayout() is not playing");
2558 return -1;
2559 }
2560 if ((_outputFilePlayerPtr == NULL) ||
2561 (_outputFilePlayerPtr->SetAudioScaling(scale) != 0))
2562 {
2563 _engineStatisticsPtr->SetLastError(
2564 VE_BAD_ARGUMENT, kTraceError,
2565 "SetAudioScaling() failed to scale the playout");
2566 return -1;
2567 }
2568
2569 return 0;
2570}
2571
2572int Channel::GetLocalPlayoutPosition(int& positionMs)
2573{
2574 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2575 "Channel::GetLocalPlayoutPosition(position=?)");
2576
2577 WebRtc_UWord32 position;
2578
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002579 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002580
2581 if (_outputFilePlayerPtr == NULL)
2582 {
2583 _engineStatisticsPtr->SetLastError(
2584 VE_INVALID_OPERATION, kTraceError,
2585            "GetLocalPlayoutPosition() filePlayer instance does not exist");
2586 return -1;
2587 }
2588
2589 if (_outputFilePlayerPtr->GetPlayoutPosition(position) != 0)
2590 {
2591 _engineStatisticsPtr->SetLastError(
2592 VE_BAD_FILE, kTraceError,
2593 "GetLocalPlayoutPosition() failed");
2594 return -1;
2595 }
2596 positionMs = position;
2597
2598 return 0;
2599}
2600
2601int Channel::StartPlayingFileAsMicrophone(const char* fileName,
2602 const bool loop,
2603 const FileFormats format,
2604 const int startPosition,
2605 const float volumeScaling,
2606 const int stopPosition,
2607 const CodecInst* codecInst)
2608{
2609 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2610 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
2611 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
2612 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2613 startPosition, stopPosition);
2614
2615 if (_inputFilePlaying)
2616 {
2617 _engineStatisticsPtr->SetLastError(
2618 VE_ALREADY_PLAYING, kTraceWarning,
2619 "StartPlayingFileAsMicrophone() filePlayer is playing");
2620 return 0;
2621 }
2622
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002623 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002624
2625 // Destroy the old instance
2626 if (_inputFilePlayerPtr)
2627 {
2628 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2629 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2630 _inputFilePlayerPtr = NULL;
2631 }
2632
2633 // Create the instance
2634 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2635 _inputFilePlayerId, (const FileFormats)format);
2636
2637 if (_inputFilePlayerPtr == NULL)
2638 {
2639 _engineStatisticsPtr->SetLastError(
2640 VE_INVALID_ARGUMENT, kTraceError,
2641            "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2642 return -1;
2643 }
2644
2645 const WebRtc_UWord32 notificationTime(0);
2646
2647 if (_inputFilePlayerPtr->StartPlayingFile(
2648 fileName,
2649 loop,
2650 startPosition,
2651 volumeScaling,
2652 notificationTime,
2653 stopPosition,
2654 (const CodecInst*)codecInst) != 0)
2655 {
2656 _engineStatisticsPtr->SetLastError(
2657 VE_BAD_FILE, kTraceError,
2658 "StartPlayingFile() failed to start file playout");
2659 _inputFilePlayerPtr->StopPlayingFile();
2660 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2661 _inputFilePlayerPtr = NULL;
2662 return -1;
2663 }
2664 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2665 _inputFilePlaying = true;
2666
2667 return 0;
2668}
2669
2670int Channel::StartPlayingFileAsMicrophone(InStream* stream,
2671 const FileFormats format,
2672 const int startPosition,
2673 const float volumeScaling,
2674 const int stopPosition,
2675 const CodecInst* codecInst)
2676{
2677 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2678 "Channel::StartPlayingFileAsMicrophone(format=%d, "
2679 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2680 format, volumeScaling, startPosition, stopPosition);
2681
2682 if(stream == NULL)
2683 {
2684 _engineStatisticsPtr->SetLastError(
2685 VE_BAD_FILE, kTraceError,
2686 "StartPlayingFileAsMicrophone NULL as input stream");
2687 return -1;
2688 }
2689
2690 if (_inputFilePlaying)
2691 {
2692 _engineStatisticsPtr->SetLastError(
2693 VE_ALREADY_PLAYING, kTraceWarning,
2694 "StartPlayingFileAsMicrophone() is playing");
2695 return 0;
2696 }
2697
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002698 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002699
2700 // Destroy the old instance
2701 if (_inputFilePlayerPtr)
2702 {
2703 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2704 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2705 _inputFilePlayerPtr = NULL;
2706 }
2707
2708 // Create the instance
2709 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2710 _inputFilePlayerId, (const FileFormats)format);
2711
2712 if (_inputFilePlayerPtr == NULL)
2713 {
2714 _engineStatisticsPtr->SetLastError(
2715 VE_INVALID_ARGUMENT, kTraceError,
2716            "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2717 return -1;
2718 }
2719
2720 const WebRtc_UWord32 notificationTime(0);
2721
2722 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2723 volumeScaling, notificationTime,
2724 stopPosition, codecInst) != 0)
2725 {
2726 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2727 "StartPlayingFile() failed to start "
2728 "file playout");
2729 _inputFilePlayerPtr->StopPlayingFile();
2730 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2731 _inputFilePlayerPtr = NULL;
2732 return -1;
2733 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002734
niklase@google.com470e71d2011-07-07 08:21:25 +00002735 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2736 _inputFilePlaying = true;
2737
2738 return 0;
2739}
2740
2741int Channel::StopPlayingFileAsMicrophone()
2742{
2743 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2744 "Channel::StopPlayingFileAsMicrophone()");
2745
2746 if (!_inputFilePlaying)
2747 {
2748 _engineStatisticsPtr->SetLastError(
2749 VE_INVALID_OPERATION, kTraceWarning,
2750            "StopPlayingFileAsMicrophone() is not playing");
2751 return 0;
2752 }
2753
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002754 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002755 if (_inputFilePlayerPtr->StopPlayingFile() != 0)
2756 {
2757 _engineStatisticsPtr->SetLastError(
2758 VE_STOP_RECORDING_FAILED, kTraceError,
2759 "StopPlayingFile() could not stop playing");
2760 return -1;
2761 }
2762 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2763 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2764 _inputFilePlayerPtr = NULL;
2765 _inputFilePlaying = false;
2766
2767 return 0;
2768}
2769
2770int Channel::IsPlayingFileAsMicrophone() const
2771{
2772 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2773 "Channel::IsPlayingFileAsMicrophone()");
2774
2775 return _inputFilePlaying;
2776}
2777
2778int Channel::ScaleFileAsMicrophonePlayout(const float scale)
2779{
2780 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2781 "Channel::ScaleFileAsMicrophonePlayout(scale=%5.3f)", scale);
2782
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002783 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002784
2785 if (!_inputFilePlaying)
2786 {
2787 _engineStatisticsPtr->SetLastError(
2788 VE_INVALID_OPERATION, kTraceError,
2789            "ScaleFileAsMicrophonePlayout() is not playing");
2790 return -1;
2791 }
2792
2793 if ((_inputFilePlayerPtr == NULL) ||
2794 (_inputFilePlayerPtr->SetAudioScaling(scale) != 0))
2795 {
2796 _engineStatisticsPtr->SetLastError(
2797 VE_BAD_ARGUMENT, kTraceError,
2798 "SetAudioScaling() failed to scale playout");
2799 return -1;
2800 }
2801
2802 return 0;
2803}
2804
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00002805int Channel::StartRecordingPlayout(const char* fileName,
niklase@google.com470e71d2011-07-07 08:21:25 +00002806 const CodecInst* codecInst)
2807{
2808 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2809 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
2810
2811 if (_outputFileRecording)
2812 {
2813 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2814 "StartRecordingPlayout() is already recording");
2815 return 0;
2816 }
2817
2818 FileFormats format;
2819 const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
2820 CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2821
niklas.enbom@webrtc.org40197d72012-03-26 08:45:47 +00002822 if ((codecInst != NULL) &&
2823 ((codecInst->channels < 1) || (codecInst->channels > 2)))
niklase@google.com470e71d2011-07-07 08:21:25 +00002824 {
2825 _engineStatisticsPtr->SetLastError(
2826 VE_BAD_ARGUMENT, kTraceError,
2827 "StartRecordingPlayout() invalid compression");
2828 return(-1);
2829 }
2830 if(codecInst == NULL)
2831 {
2832 format = kFileFormatPcm16kHzFile;
2833 codecInst=&dummyCodec;
2834 }
2835 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2836 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2837 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2838 {
2839 format = kFileFormatWavFile;
2840 }
2841 else
2842 {
2843 format = kFileFormatCompressedFile;
2844 }
2845
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002846 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002847
2848 // Destroy the old instance
2849 if (_outputFileRecorderPtr)
2850 {
2851 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2852 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2853 _outputFileRecorderPtr = NULL;
2854 }
2855
2856 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2857 _outputFileRecorderId, (const FileFormats)format);
2858 if (_outputFileRecorderPtr == NULL)
2859 {
2860 _engineStatisticsPtr->SetLastError(
2861 VE_INVALID_ARGUMENT, kTraceError,
2862            "StartRecordingPlayout() fileRecorder format is not correct");
2863 return -1;
2864 }
2865
2866 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2867 fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
2868 {
2869 _engineStatisticsPtr->SetLastError(
2870 VE_BAD_FILE, kTraceError,
2871 "StartRecordingAudioFile() failed to start file recording");
2872 _outputFileRecorderPtr->StopRecording();
2873 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2874 _outputFileRecorderPtr = NULL;
2875 return -1;
2876 }
2877 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2878 _outputFileRecording = true;
2879
2880 return 0;
2881}
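
// Editorial note (not original code): passing codecInst == NULL above falls
// back to the 16 kHz linear PCM default (dummyCodec), L16/PCMU/PCMA requests
// are written as WAV files, and any other codec goes to a compressed file.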
2882
2883int Channel::StartRecordingPlayout(OutStream* stream,
2884 const CodecInst* codecInst)
2885{
2886 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2887 "Channel::StartRecordingPlayout()");
2888
2889 if (_outputFileRecording)
2890 {
2891 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2892 "StartRecordingPlayout() is already recording");
2893 return 0;
2894 }
2895
2896 FileFormats format;
2897 const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
2898 CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2899
2900 if (codecInst != NULL && codecInst->channels != 1)
2901 {
2902 _engineStatisticsPtr->SetLastError(
2903 VE_BAD_ARGUMENT, kTraceError,
2904 "StartRecordingPlayout() invalid compression");
2905 return(-1);
2906 }
2907 if(codecInst == NULL)
2908 {
2909 format = kFileFormatPcm16kHzFile;
2910 codecInst=&dummyCodec;
2911 }
2912 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2913 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2914 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2915 {
2916 format = kFileFormatWavFile;
2917 }
2918 else
2919 {
2920 format = kFileFormatCompressedFile;
2921 }
2922
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002923 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002924
2925 // Destroy the old instance
2926 if (_outputFileRecorderPtr)
2927 {
2928 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2929 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2930 _outputFileRecorderPtr = NULL;
2931 }
2932
2933 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2934 _outputFileRecorderId, (const FileFormats)format);
2935 if (_outputFileRecorderPtr == NULL)
2936 {
2937 _engineStatisticsPtr->SetLastError(
2938 VE_INVALID_ARGUMENT, kTraceError,
2939            "StartRecordingPlayout() fileRecorder format is not correct");
2940 return -1;
2941 }
2942
2943 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2944 notificationTime) != 0)
2945 {
2946 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2947 "StartRecordingPlayout() failed to "
2948 "start file recording");
2949 _outputFileRecorderPtr->StopRecording();
2950 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2951 _outputFileRecorderPtr = NULL;
2952 return -1;
2953 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002954
niklase@google.com470e71d2011-07-07 08:21:25 +00002955 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2956 _outputFileRecording = true;
2957
2958 return 0;
2959}
2960
2961int Channel::StopRecordingPlayout()
2962{
2963 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
2964 "Channel::StopRecordingPlayout()");
2965
2966 if (!_outputFileRecording)
2967 {
2968 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
2969                     "StopRecordingPlayout() is not recording");
2970 return -1;
2971 }
2972
2973
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002974 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002975
2976 if (_outputFileRecorderPtr->StopRecording() != 0)
2977 {
2978 _engineStatisticsPtr->SetLastError(
2979 VE_STOP_RECORDING_FAILED, kTraceError,
2980 "StopRecording() could not stop recording");
2981 return(-1);
2982 }
2983 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2984 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2985 _outputFileRecorderPtr = NULL;
2986 _outputFileRecording = false;
2987
2988 return 0;
2989}
2990
2991void
2992Channel::SetMixWithMicStatus(bool mix)
2993{
2994 _mixFileWithMicrophone=mix;
2995}
2996
2997int
2998Channel::GetSpeechOutputLevel(WebRtc_UWord32& level) const
2999{
3000 WebRtc_Word8 currentLevel = _outputAudioLevel.Level();
3001 level = static_cast<WebRtc_Word32> (currentLevel);
3002 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3003 VoEId(_instanceId,_channelId),
3004 "GetSpeechOutputLevel() => level=%u", level);
3005 return 0;
3006}
3007
3008int
3009Channel::GetSpeechOutputLevelFullRange(WebRtc_UWord32& level) const
3010{
3011 WebRtc_Word16 currentLevel = _outputAudioLevel.LevelFullRange();
3012 level = static_cast<WebRtc_Word32> (currentLevel);
3013 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3014 VoEId(_instanceId,_channelId),
3015 "GetSpeechOutputLevelFullRange() => level=%u", level);
3016 return 0;
3017}
3018
3019int
3020Channel::SetMute(bool enable)
3021{
3022 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3023 "Channel::SetMute(enable=%d)", enable);
3024 _mute = enable;
3025 return 0;
3026}
3027
3028bool
3029Channel::Mute() const
3030{
3031 return _mute;
3032}
3033
3034int
3035Channel::SetOutputVolumePan(float left, float right)
3036{
3037 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3038 "Channel::SetOutputVolumePan()");
3039 _panLeft = left;
3040 _panRight = right;
3041 return 0;
3042}
3043
3044int
3045Channel::GetOutputVolumePan(float& left, float& right) const
3046{
3047 left = _panLeft;
3048 right = _panRight;
3049 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3050 VoEId(_instanceId,_channelId),
3051 "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
3052 return 0;
3053}
3054
3055int
3056Channel::SetChannelOutputVolumeScaling(float scaling)
3057{
3058 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3059 "Channel::SetChannelOutputVolumeScaling()");
3060 _outputGain = scaling;
3061 return 0;
3062}
3063
3064int
3065Channel::GetChannelOutputVolumeScaling(float& scaling) const
3066{
3067 scaling = _outputGain;
3068 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3069 VoEId(_instanceId,_channelId),
3070 "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
3071 return 0;
3072}
3073
niklase@google.com470e71d2011-07-07 08:21:25 +00003074int
3075Channel::RegisterExternalEncryption(Encryption& encryption)
3076{
3077 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3078 "Channel::RegisterExternalEncryption()");
3079
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003080 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003081
3082 if (_encryptionPtr)
3083 {
3084 _engineStatisticsPtr->SetLastError(
3085 VE_INVALID_OPERATION, kTraceError,
3086 "RegisterExternalEncryption() encryption already enabled");
3087 return -1;
3088 }
3089
3090 _encryptionPtr = &encryption;
3091
3092 _decrypting = true;
3093 _encrypting = true;
3094
3095 return 0;
3096}
3097
3098int
3099Channel::DeRegisterExternalEncryption()
3100{
3101 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3102 "Channel::DeRegisterExternalEncryption()");
3103
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003104 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003105
3106 if (!_encryptionPtr)
3107 {
3108 _engineStatisticsPtr->SetLastError(
3109 VE_INVALID_OPERATION, kTraceWarning,
3110 "DeRegisterExternalEncryption() encryption already disabled");
3111 return 0;
3112 }
3113
3114 _decrypting = false;
3115 _encrypting = false;
3116
3117 _encryptionPtr = NULL;
3118
3119 return 0;
3120}
3121
3122int Channel::SendTelephoneEventOutband(unsigned char eventCode,
3123 int lengthMs, int attenuationDb,
3124 bool playDtmfEvent)
3125{
3126 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3127 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
3128 playDtmfEvent);
3129
3130 _playOutbandDtmfEvent = playDtmfEvent;
3131
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003132 if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
niklase@google.com470e71d2011-07-07 08:21:25 +00003133 attenuationDb) != 0)
3134 {
3135 _engineStatisticsPtr->SetLastError(
3136 VE_SEND_DTMF_FAILED,
3137 kTraceWarning,
3138 "SendTelephoneEventOutband() failed to send event");
3139 return -1;
3140 }
3141 return 0;
3142}
3143
3144int Channel::SendTelephoneEventInband(unsigned char eventCode,
3145 int lengthMs,
3146 int attenuationDb,
3147 bool playDtmfEvent)
3148{
3149 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3150 "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
3151 playDtmfEvent);
3152
3153 _playInbandDtmfEvent = playDtmfEvent;
3154 _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
3155
3156 return 0;
3157}
3158
3159int
3160Channel::SetDtmfPlayoutStatus(bool enable)
3161{
3162 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3163 "Channel::SetDtmfPlayoutStatus()");
3164 if (_audioCodingModule.SetDtmfPlayoutStatus(enable) != 0)
3165 {
3166 _engineStatisticsPtr->SetLastError(
3167 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
3168 "SetDtmfPlayoutStatus() failed to set Dtmf playout");
3169 return -1;
3170 }
3171 return 0;
3172}
3173
3174bool
3175Channel::DtmfPlayoutStatus() const
3176{
3177 return _audioCodingModule.DtmfPlayoutStatus();
3178}
3179
3180int
3181Channel::SetSendTelephoneEventPayloadType(unsigned char type)
3182{
3183 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3184 "Channel::SetSendTelephoneEventPayloadType()");
andrew@webrtc.orgf81f9f82011-08-19 22:56:22 +00003185 if (type > 127)
niklase@google.com470e71d2011-07-07 08:21:25 +00003186 {
3187 _engineStatisticsPtr->SetLastError(
3188 VE_INVALID_ARGUMENT, kTraceError,
3189 "SetSendTelephoneEventPayloadType() invalid type");
3190 return -1;
3191 }
pwestin@webrtc.org1da1ce02011-10-13 15:19:55 +00003192 CodecInst codec;
3193 codec.plfreq = 8000;
3194 codec.pltype = type;
3195 memcpy(codec.plname, "telephone-event", 16);
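    // Note: the RTP telephone-event payload (RFC 4733) is conventionally
    // registered with an 8 kHz clock rate, hence the fixed plfreq above.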
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003196 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003197 {
3198 _engineStatisticsPtr->SetLastError(
3199 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3200 "SetSendTelephoneEventPayloadType() failed to register send"
3201 "payload type");
3202 return -1;
3203 }
3204 _sendTelephoneEventPayloadType = type;
3205 return 0;
3206}
3207
3208int
3209Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
3210{
3211 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3212 "Channel::GetSendTelephoneEventPayloadType()");
3213 type = _sendTelephoneEventPayloadType;
3214 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3215 VoEId(_instanceId,_channelId),
3216 "GetSendTelephoneEventPayloadType() => type=%u", type);
3217 return 0;
3218}
3219
niklase@google.com470e71d2011-07-07 08:21:25 +00003220int
3221Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
3222{
3223 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3224 "Channel::UpdateRxVadDetection()");
3225
3226 int vadDecision = 1;
3227
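    // Map the frame's VAD activity flag to a binary decision
    // (1 = active speech, 0 = passive or unknown).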
andrew@webrtc.org63a50982012-05-02 23:56:37 +00003228 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive)? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003229
3230 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
3231 {
3232 OnRxVadDetected(vadDecision);
3233 _oldVadDecision = vadDecision;
3234 }
3235
3236 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3237 "Channel::UpdateRxVadDetection() => vadDecision=%d",
3238 vadDecision);
3239 return 0;
3240}
3241
3242int
3243Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
3244{
3245 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3246 "Channel::RegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003247 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003248
3249 if (_rxVadObserverPtr)
3250 {
3251 _engineStatisticsPtr->SetLastError(
3252 VE_INVALID_OPERATION, kTraceError,
3253 "RegisterRxVadObserver() observer already enabled");
3254 return -1;
3255 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003256 _rxVadObserverPtr = &observer;
3257 _RxVadDetection = true;
3258 return 0;
3259}
3260
3261int
3262Channel::DeRegisterRxVadObserver()
3263{
3264 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3265 "Channel::DeRegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003266 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003267
3268 if (!_rxVadObserverPtr)
3269 {
3270 _engineStatisticsPtr->SetLastError(
3271 VE_INVALID_OPERATION, kTraceWarning,
3272 "DeRegisterRxVadObserver() observer already disabled");
3273 return 0;
3274 }
3275 _rxVadObserverPtr = NULL;
3276 _RxVadDetection = false;
3277 return 0;
3278}
3279
3280int
3281Channel::VoiceActivityIndicator(int &activity)
3282{
3283 activity = _sendFrameType;
3284
3285 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3286 "Channel::VoiceActivityIndicator(indicator=%d)", activity);
3287 return 0;
3288}
3289
3290#ifdef WEBRTC_VOICE_ENGINE_AGC
3291
3292int
3293Channel::SetRxAgcStatus(const bool enable, const AgcModes mode)
3294{
3295 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3296 "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
3297 (int)enable, (int)mode);
3298
3299 GainControl::Mode agcMode(GainControl::kFixedDigital);
3300 switch (mode)
3301 {
3302 case kAgcDefault:
3303 agcMode = GainControl::kAdaptiveDigital;
3304 break;
3305 case kAgcUnchanged:
3306 agcMode = _rxAudioProcessingModulePtr->gain_control()->mode();
3307 break;
3308 case kAgcFixedDigital:
3309 agcMode = GainControl::kFixedDigital;
3310 break;
3311 case kAgcAdaptiveDigital:
3312            agcMode = GainControl::kAdaptiveDigital;
3313 break;
3314 default:
3315 _engineStatisticsPtr->SetLastError(
3316 VE_INVALID_ARGUMENT, kTraceError,
3317 "SetRxAgcStatus() invalid Agc mode");
3318 return -1;
3319 }
3320
3321 if (_rxAudioProcessingModulePtr->gain_control()->set_mode(agcMode) != 0)
3322 {
3323 _engineStatisticsPtr->SetLastError(
3324 VE_APM_ERROR, kTraceError,
3325 "SetRxAgcStatus() failed to set Agc mode");
3326 return -1;
3327 }
3328 if (_rxAudioProcessingModulePtr->gain_control()->Enable(enable) != 0)
3329 {
3330 _engineStatisticsPtr->SetLastError(
3331 VE_APM_ERROR, kTraceError,
3332 "SetRxAgcStatus() failed to set Agc state");
3333 return -1;
3334 }
3335
3336 _rxAgcIsEnabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00003337    _rxApmIsEnabled = (_rxAgcIsEnabled || _rxNsIsEnabled);
3338
3339 return 0;
3340}
3341
3342int
3343Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
3344{
3345 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3346 "Channel::GetRxAgcStatus(enable=?, mode=?)");
3347
3348 bool enable = _rxAudioProcessingModulePtr->gain_control()->is_enabled();
3349 GainControl::Mode agcMode =
3350 _rxAudioProcessingModulePtr->gain_control()->mode();
3351
3352 enabled = enable;
3353
3354 switch (agcMode)
3355 {
3356 case GainControl::kFixedDigital:
3357 mode = kAgcFixedDigital;
3358 break;
3359 case GainControl::kAdaptiveDigital:
3360 mode = kAgcAdaptiveDigital;
3361 break;
3362 default:
3363 _engineStatisticsPtr->SetLastError(
3364 VE_APM_ERROR, kTraceError,
3365 "GetRxAgcStatus() invalid Agc mode");
3366 return -1;
3367 }
3368
3369 return 0;
3370}
3371
3372int
3373Channel::SetRxAgcConfig(const AgcConfig config)
3374{
3375 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3376 "Channel::SetRxAgcConfig()");
3377
3378 if (_rxAudioProcessingModulePtr->gain_control()->set_target_level_dbfs(
3379 config.targetLeveldBOv) != 0)
3380 {
3381 _engineStatisticsPtr->SetLastError(
3382 VE_APM_ERROR, kTraceError,
3383 "SetRxAgcConfig() failed to set target peak |level|"
3384 "(or envelope) of the Agc");
3385 return -1;
3386 }
3387 if (_rxAudioProcessingModulePtr->gain_control()->set_compression_gain_db(
3388 config.digitalCompressionGaindB) != 0)
3389 {
3390 _engineStatisticsPtr->SetLastError(
3391 VE_APM_ERROR, kTraceError,
3392 "SetRxAgcConfig() failed to set the range in |gain| the"
3393 " digital compression stage may apply");
3394 return -1;
3395 }
3396 if (_rxAudioProcessingModulePtr->gain_control()->enable_limiter(
3397 config.limiterEnable) != 0)
3398 {
3399 _engineStatisticsPtr->SetLastError(
3400 VE_APM_ERROR, kTraceError,
3401 "SetRxAgcConfig() failed to set hard limiter to the signal");
3402 return -1;
3403 }
3404
3405 return 0;
3406}
3407
3408int
3409Channel::GetRxAgcConfig(AgcConfig& config)
3410{
3411 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3412 "Channel::GetRxAgcConfig(config=%?)");
3413
3414 config.targetLeveldBOv =
3415 _rxAudioProcessingModulePtr->gain_control()->target_level_dbfs();
3416 config.digitalCompressionGaindB =
3417 _rxAudioProcessingModulePtr->gain_control()->compression_gain_db();
3418 config.limiterEnable =
3419 _rxAudioProcessingModulePtr->gain_control()->is_limiter_enabled();
3420
3421 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3422 VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
3423 "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
3424 " limiterEnable=%d",
3425 config.targetLeveldBOv,
3426 config.digitalCompressionGaindB,
3427 config.limiterEnable);
3428
3429 return 0;
3430}
3431
3432#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
3433
3434#ifdef WEBRTC_VOICE_ENGINE_NR
3435
3436int
3437Channel::SetRxNsStatus(const bool enable, const NsModes mode)
3438{
3439 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3440 "Channel::SetRxNsStatus(enable=%d, mode=%d)",
3441 (int)enable, (int)mode);
3442
3443 NoiseSuppression::Level nsLevel(
3444 (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE);
3445 switch (mode)
3446 {
3447
3448 case kNsDefault:
3449 nsLevel = (NoiseSuppression::Level)
3450 WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE;
3451 break;
3452 case kNsUnchanged:
3453 nsLevel = _rxAudioProcessingModulePtr->noise_suppression()->level();
3454 break;
3455 case kNsConference:
3456 nsLevel = NoiseSuppression::kHigh;
3457 break;
3458 case kNsLowSuppression:
3459 nsLevel = NoiseSuppression::kLow;
3460 break;
3461 case kNsModerateSuppression:
3462 nsLevel = NoiseSuppression::kModerate;
3463 break;
3464 case kNsHighSuppression:
3465 nsLevel = NoiseSuppression::kHigh;
3466 break;
3467 case kNsVeryHighSuppression:
3468 nsLevel = NoiseSuppression::kVeryHigh;
3469 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003470 }
3471
3472 if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(nsLevel)
3473 != 0)
3474 {
3475 _engineStatisticsPtr->SetLastError(
3476 VE_APM_ERROR, kTraceError,
3477 "SetRxAgcStatus() failed to set Ns level");
3478 return -1;
3479 }
3480 if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(enable) != 0)
3481 {
3482 _engineStatisticsPtr->SetLastError(
3483 VE_APM_ERROR, kTraceError,
3484 "SetRxAgcStatus() failed to set Agc state");
3485 return -1;
3486 }
3487
3488 _rxNsIsEnabled = enable;
3489    _rxApmIsEnabled = (_rxAgcIsEnabled || _rxNsIsEnabled);
3490
3491 return 0;
3492}
3493
3494int
3495Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
3496{
3497 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3498 "Channel::GetRxNsStatus(enable=?, mode=?)");
3499
3500 bool enable =
3501 _rxAudioProcessingModulePtr->noise_suppression()->is_enabled();
3502 NoiseSuppression::Level ncLevel =
3503 _rxAudioProcessingModulePtr->noise_suppression()->level();
3504
3505 enabled = enable;
3506
3507 switch (ncLevel)
3508 {
3509 case NoiseSuppression::kLow:
3510 mode = kNsLowSuppression;
3511 break;
3512 case NoiseSuppression::kModerate:
3513 mode = kNsModerateSuppression;
3514 break;
3515 case NoiseSuppression::kHigh:
3516 mode = kNsHighSuppression;
3517 break;
3518 case NoiseSuppression::kVeryHigh:
3519 mode = kNsVeryHighSuppression;
3520 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003521 }
3522
3523 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3524 VoEId(_instanceId,_channelId),
3525 "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
3526 return 0;
3527}
3528
3529#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
3530
3531int
3532Channel::RegisterRTPObserver(VoERTPObserver& observer)
3533{
3534 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3535 "Channel::RegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003536 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003537
3538 if (_rtpObserverPtr)
3539 {
3540 _engineStatisticsPtr->SetLastError(
3541 VE_INVALID_OPERATION, kTraceError,
3542 "RegisterRTPObserver() observer already enabled");
3543 return -1;
3544 }
3545
3546 _rtpObserverPtr = &observer;
3547 _rtpObserver = true;
3548
3549 return 0;
3550}
3551
3552int
3553Channel::DeRegisterRTPObserver()
3554{
3555 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3556 "Channel::DeRegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003557 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003558
3559 if (!_rtpObserverPtr)
3560 {
3561 _engineStatisticsPtr->SetLastError(
3562 VE_INVALID_OPERATION, kTraceWarning,
3563 "DeRegisterRTPObserver() observer already disabled");
3564 return 0;
3565 }
3566
3567 _rtpObserver = false;
3568 _rtpObserverPtr = NULL;
3569
3570 return 0;
3571}
3572
3573int
3574Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
3575{
3576 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3577 "Channel::RegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003578 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003579
3580 if (_rtcpObserverPtr)
3581 {
3582 _engineStatisticsPtr->SetLastError(
3583 VE_INVALID_OPERATION, kTraceError,
3584 "RegisterRTCPObserver() observer already enabled");
3585 return -1;
3586 }
3587
3588 _rtcpObserverPtr = &observer;
3589 _rtcpObserver = true;
3590
3591 return 0;
3592}
3593
3594int
3595Channel::DeRegisterRTCPObserver()
3596{
3597 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3598 "Channel::DeRegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003599 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003600
3601 if (!_rtcpObserverPtr)
3602 {
3603 _engineStatisticsPtr->SetLastError(
3604 VE_INVALID_OPERATION, kTraceWarning,
3605 "DeRegisterRTCPObserver() observer already disabled");
3606 return 0;
3607 }
3608
3609 _rtcpObserver = false;
3610 _rtcpObserverPtr = NULL;
3611
3612 return 0;
3613}
3614
3615int
3616Channel::SetLocalSSRC(unsigned int ssrc)
3617{
3618 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3619 "Channel::SetLocalSSRC()");
3620 if (_sending)
3621 {
3622 _engineStatisticsPtr->SetLastError(
3623 VE_ALREADY_SENDING, kTraceError,
3624 "SetLocalSSRC() already sending");
3625 return -1;
3626 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003627 if (_rtpRtcpModule->SetSSRC(ssrc) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003628 {
3629 _engineStatisticsPtr->SetLastError(
3630 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3631 "SetLocalSSRC() failed to set SSRC");
3632 return -1;
3633 }
3634 return 0;
3635}
3636
3637int
3638Channel::GetLocalSSRC(unsigned int& ssrc)
3639{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003640 ssrc = _rtpRtcpModule->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003641 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3642 VoEId(_instanceId,_channelId),
3643 "GetLocalSSRC() => ssrc=%lu", ssrc);
3644 return 0;
3645}
3646
3647int
3648Channel::GetRemoteSSRC(unsigned int& ssrc)
3649{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003650 ssrc = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003651 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3652 VoEId(_instanceId,_channelId),
3653 "GetRemoteSSRC() => ssrc=%lu", ssrc);
3654 return 0;
3655}
3656
3657int
3658Channel::GetRemoteCSRCs(unsigned int arrCSRC[15])
3659{
3660 if (arrCSRC == NULL)
3661 {
3662 _engineStatisticsPtr->SetLastError(
3663 VE_INVALID_ARGUMENT, kTraceError,
3664 "GetRemoteCSRCs() invalid array argument");
3665 return -1;
3666 }
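    // An RTP header carries at most 15 CSRCs (the 4-bit CC field, RFC 3550),
    // which is why the public API uses a fixed array of that size.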
3667 WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize];
3668 WebRtc_Word32 CSRCs(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003669 CSRCs = _rtpRtcpModule->CSRCs(arrOfCSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00003670 if (CSRCs > 0)
3671 {
3672 memcpy(arrCSRC, arrOfCSRC, CSRCs * sizeof(WebRtc_UWord32));
3673 for (int i = 0; i < (int) CSRCs; i++)
3674 {
3675 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3676 VoEId(_instanceId, _channelId),
3677 "GetRemoteCSRCs() => arrCSRC[%d]=%lu", i, arrCSRC[i]);
3678 }
3679 } else
3680 {
3681 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3682 VoEId(_instanceId, _channelId),
3683 "GetRemoteCSRCs() => list is empty!");
3684 }
3685 return CSRCs;
3686}
3687
3688int
3689Channel::SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID)
3690{
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00003691 if (_rtpAudioProc.get() == NULL)
3692 {
3693 _rtpAudioProc.reset(AudioProcessing::Create(VoEModuleId(_instanceId,
3694 _channelId)));
3695 if (_rtpAudioProc.get() == NULL)
3696 {
3697 _engineStatisticsPtr->SetLastError(VE_NO_MEMORY, kTraceCritical,
3698 "Failed to create AudioProcessing");
3699 return -1;
3700 }
3701 }
3702
3703 if (_rtpAudioProc->level_estimator()->Enable(enable) !=
3704 AudioProcessing::kNoError)
3705 {
3706 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceWarning,
3707 "Failed to enable AudioProcessing::level_estimator()");
3708 }
3709
niklase@google.com470e71d2011-07-07 08:21:25 +00003710 _includeAudioLevelIndication = enable;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003711 return _rtpRtcpModule->SetRTPAudioLevelIndicationStatus(enable, ID);
niklase@google.com470e71d2011-07-07 08:21:25 +00003712}
3713int
3714Channel::GetRTPAudioLevelIndicationStatus(bool& enabled, unsigned char& ID)
3715{
3716 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3717 VoEId(_instanceId,_channelId),
3718 "GetRTPAudioLevelIndicationStatus() => enabled=%d, ID=%u",
3719 enabled, ID);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003720 return _rtpRtcpModule->GetRTPAudioLevelIndicationStatus(enabled, ID);
niklase@google.com470e71d2011-07-07 08:21:25 +00003721}
3722
3723int
3724Channel::SetRTCPStatus(bool enable)
3725{
3726 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3727 "Channel::SetRTCPStatus()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003728 if (_rtpRtcpModule->SetRTCPStatus(enable ?
niklase@google.com470e71d2011-07-07 08:21:25 +00003729 kRtcpCompound : kRtcpOff) != 0)
3730 {
3731 _engineStatisticsPtr->SetLastError(
3732 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3733 "SetRTCPStatus() failed to set RTCP status");
3734 return -1;
3735 }
3736 return 0;
3737}
3738
3739int
3740Channel::GetRTCPStatus(bool& enabled)
3741{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003742 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003743 enabled = (method != kRtcpOff);
3744 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3745 VoEId(_instanceId,_channelId),
3746 "GetRTCPStatus() => enabled=%d", enabled);
3747 return 0;
3748}
3749
3750int
3751Channel::SetRTCP_CNAME(const char cName[256])
3752{
3753 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3754 "Channel::SetRTCP_CNAME()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003755 if (_rtpRtcpModule->SetCNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003756 {
3757 _engineStatisticsPtr->SetLastError(
3758 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3759 "SetRTCP_CNAME() failed to set RTCP CNAME");
3760 return -1;
3761 }
3762 return 0;
3763}
3764
3765int
3766Channel::GetRTCP_CNAME(char cName[256])
3767{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003768 if (_rtpRtcpModule->CNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003769 {
3770 _engineStatisticsPtr->SetLastError(
3771 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3772 "GetRTCP_CNAME() failed to retrieve RTCP CNAME");
3773 return -1;
3774 }
3775 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3776 VoEId(_instanceId, _channelId),
3777 "GetRTCP_CNAME() => cName=%s", cName);
3778 return 0;
3779}
3780
3781int
3782Channel::GetRemoteRTCP_CNAME(char cName[256])
3783{
3784 if (cName == NULL)
3785 {
3786 _engineStatisticsPtr->SetLastError(
3787 VE_INVALID_ARGUMENT, kTraceError,
3788 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
3789 return -1;
3790 }
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00003791 char cname[RTCP_CNAME_SIZE];
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003792 const WebRtc_UWord32 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
3793 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003794 {
3795 _engineStatisticsPtr->SetLastError(
3796 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
3797 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
3798 return -1;
3799 }
3800 strcpy(cName, cname);
3801 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3802 VoEId(_instanceId, _channelId),
3803 "GetRemoteRTCP_CNAME() => cName=%s", cName);
3804 return 0;
3805}
3806
3807int
3808Channel::GetRemoteRTCPData(
3809 unsigned int& NTPHigh,
3810 unsigned int& NTPLow,
3811 unsigned int& timestamp,
3812 unsigned int& playoutTimestamp,
3813 unsigned int* jitter,
3814 unsigned short* fractionLost)
3815{
3816 // --- Information from sender info in received Sender Reports
3817
3818 RTCPSenderInfo senderInfo;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003819 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003820 {
3821 _engineStatisticsPtr->SetLastError(
3822 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003823 "GetRemoteRTCPData() failed to retrieve sender info for remote "
niklase@google.com470e71d2011-07-07 08:21:25 +00003824 "side");
3825 return -1;
3826 }
3827
3828 // We only utilize 12 out of 20 bytes in the sender info (ignores packet
3829 // and octet count)
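    // (For reference, the SR sender info is 20 bytes: an 8-byte NTP
    // timestamp, a 4-byte RTP timestamp, a 4-byte packet count and a
    // 4-byte octet count; see RFC 3550, section 6.4.1.)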
3830 NTPHigh = senderInfo.NTPseconds;
3831 NTPLow = senderInfo.NTPfraction;
3832 timestamp = senderInfo.RTPtimeStamp;
3833
3834 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3835 VoEId(_instanceId, _channelId),
3836 "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
3837 "timestamp=%lu",
3838 NTPHigh, NTPLow, timestamp);
3839
3840 // --- Locally derived information
3841
3842 // This value is updated on each incoming RTCP packet (0 when no packet
3843 // has been received)
3844 playoutTimestamp = _playoutTimeStampRTCP;
3845
3846 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3847 VoEId(_instanceId, _channelId),
3848 "GetRemoteRTCPData() => playoutTimestamp=%lu",
3849 _playoutTimeStampRTCP);
3850
3851 if (NULL != jitter || NULL != fractionLost)
3852 {
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003853 // Get all RTCP receiver report blocks that have been received on this
3854 // channel. If we receive RTP packets from a remote source we know the
3855        // remote SSRC and use the report block from that source.
3856 // Otherwise use the first report block.
3857 std::vector<RTCPReportBlock> remote_stats;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003858 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003859 remote_stats.empty()) {
3860 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3861 VoEId(_instanceId, _channelId),
3862 "GetRemoteRTCPData() failed to measure statistics due"
3863 " to lack of received RTP and/or RTCP packets");
3864 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003865 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003866
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003867 WebRtc_UWord32 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003868 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
3869 for (; it != remote_stats.end(); ++it) {
3870 if (it->remoteSSRC == remoteSSRC)
3871 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003872 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003873
3874 if (it == remote_stats.end()) {
3875 // If we have not received any RTCP packets from this SSRC it probably
3876 // means that we have not received any RTP packets.
3877 // Use the first received report block instead.
3878 it = remote_stats.begin();
3879 remoteSSRC = it->remoteSSRC;
niklase@google.com470e71d2011-07-07 08:21:25 +00003880 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003881
xians@webrtc.org79af7342012-01-31 12:22:14 +00003882 if (jitter) {
3883 *jitter = it->jitter;
3884 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3885 VoEId(_instanceId, _channelId),
3886 "GetRemoteRTCPData() => jitter = %lu", *jitter);
3887 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003888
xians@webrtc.org79af7342012-01-31 12:22:14 +00003889 if (fractionLost) {
3890 *fractionLost = it->fractionLost;
3891 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3892 VoEId(_instanceId, _channelId),
3893 "GetRemoteRTCPData() => fractionLost = %lu",
3894 *fractionLost);
3895 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003896 }
3897 return 0;
3898}
3899
3900int
3901Channel::SendApplicationDefinedRTCPPacket(const unsigned char subType,
3902 unsigned int name,
3903 const char* data,
3904 unsigned short dataLengthInBytes)
3905{
3906 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3907 "Channel::SendApplicationDefinedRTCPPacket()");
3908 if (!_sending)
3909 {
3910 _engineStatisticsPtr->SetLastError(
3911 VE_NOT_SENDING, kTraceError,
3912 "SendApplicationDefinedRTCPPacket() not sending");
3913 return -1;
3914 }
3915 if (NULL == data)
3916 {
3917 _engineStatisticsPtr->SetLastError(
3918 VE_INVALID_ARGUMENT, kTraceError,
3919 "SendApplicationDefinedRTCPPacket() invalid data value");
3920 return -1;
3921 }
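    // RTCP APP packets must carry application-dependent data in multiples
    // of 32 bits (RFC 3550, section 6.7), hence the length check below.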
3922 if (dataLengthInBytes % 4 != 0)
3923 {
3924 _engineStatisticsPtr->SetLastError(
3925 VE_INVALID_ARGUMENT, kTraceError,
3926 "SendApplicationDefinedRTCPPacket() invalid length value");
3927 return -1;
3928 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003929 RTCPMethod status = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003930 if (status == kRtcpOff)
3931 {
3932 _engineStatisticsPtr->SetLastError(
3933 VE_RTCP_ERROR, kTraceError,
3934 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
3935 return -1;
3936 }
3937
3938 // Create and schedule the RTCP APP packet for transmission
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003939 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
niklase@google.com470e71d2011-07-07 08:21:25 +00003940 subType,
3941 name,
3942 (const unsigned char*) data,
3943 dataLengthInBytes) != 0)
3944 {
3945 _engineStatisticsPtr->SetLastError(
3946 VE_SEND_ERROR, kTraceError,
3947 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
3948 return -1;
3949 }
3950 return 0;
3951}
3952
3953int
3954Channel::GetRTPStatistics(
3955 unsigned int& averageJitterMs,
3956 unsigned int& maxJitterMs,
3957 unsigned int& discardedPackets)
3958{
3959 WebRtc_UWord8 fraction_lost(0);
3960 WebRtc_UWord32 cum_lost(0);
3961 WebRtc_UWord32 ext_max(0);
3962 WebRtc_UWord32 jitter(0);
3963 WebRtc_UWord32 max_jitter(0);
3964
3965    // The jitter statistic is updated for each received RTP packet and is
3966 // based on received packets.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003967 if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
niklase@google.com470e71d2011-07-07 08:21:25 +00003968 &cum_lost,
3969 &ext_max,
3970 &jitter,
3971 &max_jitter) != 0)
3972 {
3973 _engineStatisticsPtr->SetLastError(
3974 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003975 "GetRTPStatistics() failed to read RTP statistics from the "
niklase@google.com470e71d2011-07-07 08:21:25 +00003976 "RTP/RTCP module");
3977 }
3978
3979 const WebRtc_Word32 playoutFrequency =
3980 _audioCodingModule.PlayoutFrequency();
3981 if (playoutFrequency > 0)
3982 {
3983 // Scale RTP statistics given the current playout frequency
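        // The interarrival jitter is reported in RTP timestamp units
        // (RFC 3550, section 6.4.1); dividing by samples-per-millisecond
        // converts it to milliseconds.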
3984 maxJitterMs = max_jitter / (playoutFrequency / 1000);
3985 averageJitterMs = jitter / (playoutFrequency / 1000);
3986 }
3987
3988 discardedPackets = _numberOfDiscardedPackets;
3989
3990 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3991 VoEId(_instanceId, _channelId),
3992 "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003993 " discardedPackets = %lu)",
niklase@google.com470e71d2011-07-07 08:21:25 +00003994 averageJitterMs, maxJitterMs, discardedPackets);
3995 return 0;
3996}
3997
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00003998int Channel::GetRemoteRTCPSenderInfo(SenderInfo* sender_info) {
3999 if (sender_info == NULL) {
4000 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
4001 "GetRemoteRTCPSenderInfo() invalid sender_info.");
4002 return -1;
4003 }
4004
4005 // Get the sender info from the latest received RTCP Sender Report.
4006 RTCPSenderInfo rtcp_sender_info;
4007 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_sender_info) != 0) {
4008 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4009 "GetRemoteRTCPSenderInfo() failed to read RTCP SR sender info.");
4010 return -1;
4011 }
4012
4013 sender_info->NTP_timestamp_high = rtcp_sender_info.NTPseconds;
4014 sender_info->NTP_timestamp_low = rtcp_sender_info.NTPfraction;
4015 sender_info->RTP_timestamp = rtcp_sender_info.RTPtimeStamp;
4016 sender_info->sender_packet_count = rtcp_sender_info.sendPacketCount;
4017 sender_info->sender_octet_count = rtcp_sender_info.sendOctetCount;
4018 return 0;
4019}
4020
4021int Channel::GetRemoteRTCPReportBlocks(
4022 std::vector<ReportBlock>* report_blocks) {
4023 if (report_blocks == NULL) {
4024 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
4025 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
4026 return -1;
4027 }
4028
4029 // Get the report blocks from the latest received RTCP Sender or Receiver
4030 // Report. Each element in the vector contains the sender's SSRC and a
4031 // report block according to RFC 3550.
4032 std::vector<RTCPReportBlock> rtcp_report_blocks;
4033 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
4034 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4035 "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
4036 return -1;
4037 }
4038
4039 if (rtcp_report_blocks.empty())
4040 return 0;
4041
4042 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
4043 for (; it != rtcp_report_blocks.end(); ++it) {
4044 ReportBlock report_block;
4045 report_block.sender_SSRC = it->remoteSSRC;
4046 report_block.source_SSRC = it->sourceSSRC;
4047 report_block.fraction_lost = it->fractionLost;
4048 report_block.cumulative_num_packets_lost = it->cumulativeLost;
4049 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
4050 report_block.interarrival_jitter = it->jitter;
4051 report_block.last_SR_timestamp = it->lastSR;
4052 report_block.delay_since_last_SR = it->delaySinceLastSR;
4053 report_blocks->push_back(report_block);
4054 }
4055 return 0;
4056}
4057
niklase@google.com470e71d2011-07-07 08:21:25 +00004058int
4059Channel::GetRTPStatistics(CallStatistics& stats)
4060{
4061 WebRtc_UWord8 fraction_lost(0);
4062 WebRtc_UWord32 cum_lost(0);
4063 WebRtc_UWord32 ext_max(0);
4064 WebRtc_UWord32 jitter(0);
4065 WebRtc_UWord32 max_jitter(0);
4066
4067 // --- Part one of the final structure (four values)
4068
4069    // The jitter statistic is updated for each received RTP packet and is
4070 // based on received packets.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004071 if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
niklase@google.com470e71d2011-07-07 08:21:25 +00004072 &cum_lost,
4073 &ext_max,
4074 &jitter,
4075 &max_jitter) != 0)
4076 {
4077 _engineStatisticsPtr->SetLastError(
4078 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
4079 "GetRTPStatistics() failed to read RTP statistics from the "
4080 "RTP/RTCP module");
4081 }
4082
4083 stats.fractionLost = fraction_lost;
4084 stats.cumulativeLost = cum_lost;
4085 stats.extendedMax = ext_max;
4086 stats.jitterSamples = jitter;
4087
4088 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4089 VoEId(_instanceId, _channelId),
4090 "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004091 " extendedMax=%lu, jitterSamples=%li)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004092 stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
4093 stats.jitterSamples);
4094
4095 // --- Part two of the final structure (one value)
4096
4097 WebRtc_UWord16 RTT(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004098 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00004099 if (method == kRtcpOff)
4100 {
4101 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4102 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004103 "GetRTPStatistics() RTCP is disabled => valid RTT "
niklase@google.com470e71d2011-07-07 08:21:25 +00004104 "measurements cannot be retrieved");
4105 } else
4106 {
4107 // The remote SSRC will be zero if no RTP packet has been received.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004108 WebRtc_UWord32 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004109 if (remoteSSRC > 0)
4110 {
4111 WebRtc_UWord16 avgRTT(0);
4112 WebRtc_UWord16 maxRTT(0);
4113 WebRtc_UWord16 minRTT(0);
4114
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004115 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT)
niklase@google.com470e71d2011-07-07 08:21:25 +00004116 != 0)
4117 {
4118 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4119 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004120 "GetRTPStatistics() failed to retrieve RTT from "
niklase@google.com470e71d2011-07-07 08:21:25 +00004121 "the RTP/RTCP module");
4122 }
4123 } else
4124 {
4125 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4126 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004127 "GetRTPStatistics() failed to measure RTT since no "
niklase@google.com470e71d2011-07-07 08:21:25 +00004128 "RTP packets have been received yet");
4129 }
4130 }
4131
4132 stats.rttMs = static_cast<int> (RTT);
4133
4134 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4135 VoEId(_instanceId, _channelId),
4136 "GetRTPStatistics() => rttMs=%d", stats.rttMs);
4137
4138 // --- Part three of the final structure (four values)
4139
4140 WebRtc_UWord32 bytesSent(0);
4141 WebRtc_UWord32 packetsSent(0);
4142 WebRtc_UWord32 bytesReceived(0);
4143 WebRtc_UWord32 packetsReceived(0);
4144
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004145 if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
niklase@google.com470e71d2011-07-07 08:21:25 +00004146 &packetsSent,
4147 &bytesReceived,
4148 &packetsReceived) != 0)
4149 {
4150 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4151 VoEId(_instanceId, _channelId),
4152 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004153 " output will not be complete");
niklase@google.com470e71d2011-07-07 08:21:25 +00004154 }
4155
4156 stats.bytesSent = bytesSent;
4157 stats.packetsSent = packetsSent;
4158 stats.bytesReceived = bytesReceived;
4159 stats.packetsReceived = packetsReceived;
4160
4161 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4162 VoEId(_instanceId, _channelId),
4163 "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004164 " bytesReceived=%d, packetsReceived=%d)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004165 stats.bytesSent, stats.packetsSent, stats.bytesReceived,
4166 stats.packetsReceived);
4167
4168 return 0;
4169}
4170
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004171int Channel::SetFECStatus(bool enable, int redPayloadtype) {
4172 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4173 "Channel::SetFECStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00004174
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00004175 if (enable) {
4176 if (redPayloadtype < 0 || redPayloadtype > 127) {
4177 _engineStatisticsPtr->SetLastError(
4178 VE_PLTYPE_ERROR, kTraceError,
4179 "SetFECStatus() invalid RED payload type");
4180 return -1;
4181 }
4182
4183 if (SetRedPayloadType(redPayloadtype) < 0) {
4184 _engineStatisticsPtr->SetLastError(
4185 VE_CODEC_ERROR, kTraceError,
4186 "SetSecondarySendCodec() Failed to register RED ACM");
4187 return -1;
4188 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004189 }
niklase@google.com470e71d2011-07-07 08:21:25 +00004190
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004191 if (_audioCodingModule.SetFECStatus(enable) != 0) {
4192 _engineStatisticsPtr->SetLastError(
4193 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4194 "SetFECStatus() failed to set FEC state in the ACM");
4195 return -1;
4196 }
4197 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004198}
4199
4200int
4201Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
4202{
4203 enabled = _audioCodingModule.FECStatus();
4204 if (enabled)
4205 {
4206 WebRtc_Word8 payloadType(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004207 if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004208 {
4209 _engineStatisticsPtr->SetLastError(
4210 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4211 "GetFECStatus() failed to retrieve RED PT from RTP/RTCP "
4212 "module");
4213 return -1;
4214        }
        redPayloadtype = payloadType;  // Report the RED payload type to the caller.
4215 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4216 VoEId(_instanceId, _channelId),
4217 "GetFECStatus() => enabled=%d, redPayloadtype=%d",
4218 enabled, redPayloadtype);
4219 return 0;
4220 }
4221 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4222 VoEId(_instanceId, _channelId),
4223 "GetFECStatus() => enabled=%d", enabled);
4224 return 0;
4225}
4226
4227int
niklase@google.com470e71d2011-07-07 08:21:25 +00004228Channel::StartRTPDump(const char fileNameUTF8[1024],
4229 RTPDirections direction)
4230{
4231 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4232 "Channel::StartRTPDump()");
4233 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4234 {
4235 _engineStatisticsPtr->SetLastError(
4236 VE_INVALID_ARGUMENT, kTraceError,
4237 "StartRTPDump() invalid RTP direction");
4238 return -1;
4239 }
4240 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4241 &_rtpDumpIn : &_rtpDumpOut;
4242 if (rtpDumpPtr == NULL)
4243 {
4244 assert(false);
4245 return -1;
4246 }
4247 if (rtpDumpPtr->IsActive())
4248 {
4249 rtpDumpPtr->Stop();
4250 }
4251 if (rtpDumpPtr->Start(fileNameUTF8) != 0)
4252 {
4253 _engineStatisticsPtr->SetLastError(
4254 VE_BAD_FILE, kTraceError,
4255 "StartRTPDump() failed to create file");
4256 return -1;
4257 }
4258 return 0;
4259}
4260
4261int
4262Channel::StopRTPDump(RTPDirections direction)
4263{
4264 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4265 "Channel::StopRTPDump()");
4266 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4267 {
4268 _engineStatisticsPtr->SetLastError(
4269 VE_INVALID_ARGUMENT, kTraceError,
4270 "StopRTPDump() invalid RTP direction");
4271 return -1;
4272 }
4273 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4274 &_rtpDumpIn : &_rtpDumpOut;
4275 if (rtpDumpPtr == NULL)
4276 {
4277 assert(false);
4278 return -1;
4279 }
4280 if (!rtpDumpPtr->IsActive())
4281 {
4282 return 0;
4283 }
4284 return rtpDumpPtr->Stop();
4285}
4286
4287bool
4288Channel::RTPDumpIsActive(RTPDirections direction)
4289{
4290 if ((direction != kRtpIncoming) &&
4291 (direction != kRtpOutgoing))
4292 {
4293 _engineStatisticsPtr->SetLastError(
4294 VE_INVALID_ARGUMENT, kTraceError,
4295 "RTPDumpIsActive() invalid RTP direction");
4296 return false;
4297 }
4298 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4299 &_rtpDumpIn : &_rtpDumpOut;
4300 return rtpDumpPtr->IsActive();
4301}
4302
4303int
4304Channel::InsertExtraRTPPacket(unsigned char payloadType,
4305 bool markerBit,
4306 const char* payloadData,
4307 unsigned short payloadSize)
4308{
4309 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4310 "Channel::InsertExtraRTPPacket()");
4311 if (payloadType > 127)
4312 {
4313 _engineStatisticsPtr->SetLastError(
4314 VE_INVALID_PLTYPE, kTraceError,
4315 "InsertExtraRTPPacket() invalid payload type");
4316 return -1;
4317 }
4318 if (payloadData == NULL)
4319 {
4320 _engineStatisticsPtr->SetLastError(
4321 VE_INVALID_ARGUMENT, kTraceError,
4322 "InsertExtraRTPPacket() invalid payload data");
4323 return -1;
4324 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004325 if (payloadSize > _rtpRtcpModule->MaxDataPayloadLength())
niklase@google.com470e71d2011-07-07 08:21:25 +00004326 {
4327 _engineStatisticsPtr->SetLastError(
4328 VE_INVALID_ARGUMENT, kTraceError,
4329 "InsertExtraRTPPacket() invalid payload size");
4330 return -1;
4331 }
4332 if (!_sending)
4333 {
4334 _engineStatisticsPtr->SetLastError(
4335 VE_NOT_SENDING, kTraceError,
4336 "InsertExtraRTPPacket() not sending");
4337 return -1;
4338 }
4339
4340 // Create extra RTP packet by calling RtpRtcp::SendOutgoingData().
4341 // Transport::SendPacket() will be called by the module when the RTP packet
4342 // is created.
4343 // The call to SendOutgoingData() does *not* modify the timestamp and
4344 // payloadtype to ensure that the RTP module generates a valid RTP packet
4345 // (user might utilize a non-registered payload type).
4346 // The marker bit and payload type will be replaced just before the actual
4347 // transmission, i.e., the actual modification is done *after* the RTP
4348 // module has delivered its RTP packet back to the VoE.
4349 // We will use the stored values above when the packet is modified
4350 // (see Channel::SendPacket()).
4351
4352 _extraPayloadType = payloadType;
4353 _extraMarkerBit = markerBit;
4354 _insertExtraRTPPacket = true;
4355
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004356 if (_rtpRtcpModule->SendOutgoingData(kAudioFrameSpeech,
niklase@google.com470e71d2011-07-07 08:21:25 +00004357 _lastPayloadType,
4358 _lastLocalTimeStamp,
stefan@webrtc.orgddfdfed2012-07-03 13:21:22 +00004359 // Leaving the time when this frame was
4360 // received from the capture device as
4361 // undefined for voice for now.
4362 -1,
niklase@google.com470e71d2011-07-07 08:21:25 +00004363 (const WebRtc_UWord8*) payloadData,
4364 payloadSize) != 0)
4365 {
4366 _engineStatisticsPtr->SetLastError(
4367 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4368 "InsertExtraRTPPacket() failed to send extra RTP packet");
4369 return -1;
4370 }
4371
4372 return 0;
4373}
4374
4375WebRtc_UWord32
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004376Channel::Demultiplex(const AudioFrame& audioFrame)
niklase@google.com470e71d2011-07-07 08:21:25 +00004377{
4378 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004379 "Channel::Demultiplex()");
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004380 _audioFrame.CopyFrom(audioFrame);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004381 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004382 return 0;
4383}
4384
4385WebRtc_UWord32
xians@google.com0b0665a2011-08-08 08:18:44 +00004386Channel::PrepareEncodeAndSend(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004387{
4388 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4389 "Channel::PrepareEncodeAndSend()");
4390
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004391 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004392 {
4393 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4394 "Channel::PrepareEncodeAndSend() invalid audio frame");
4395 return -1;
4396 }
4397
4398 if (_inputFilePlaying)
4399 {
4400 MixOrReplaceAudioWithFile(mixingFrequency);
4401 }
4402
4403 if (_mute)
4404 {
4405 AudioFrameOperations::Mute(_audioFrame);
4406 }
4407
4408 if (_inputExternalMedia)
4409 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004410 CriticalSectionScoped cs(&_callbackCritSect);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004411 const bool isStereo = (_audioFrame.num_channels_ == 2);
niklase@google.com470e71d2011-07-07 08:21:25 +00004412 if (_inputExternalMediaCallbackPtr)
4413 {
4414 _inputExternalMediaCallbackPtr->Process(
4415 _channelId,
4416 kRecordingPerChannel,
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004417 (WebRtc_Word16*)_audioFrame.data_,
4418 _audioFrame.samples_per_channel_,
4419 _audioFrame.sample_rate_hz_,
niklase@google.com470e71d2011-07-07 08:21:25 +00004420 isStereo);
4421 }
4422 }
4423
4424 InsertInbandDtmfTone();
4425
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004426 if (_includeAudioLevelIndication)
4427 {
4428 assert(_rtpAudioProc.get() != NULL);
4429
4430 // Check if settings need to be updated.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004431 if (_rtpAudioProc->sample_rate_hz() != _audioFrame.sample_rate_hz_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004432 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004433 if (_rtpAudioProc->set_sample_rate_hz(_audioFrame.sample_rate_hz_) !=
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004434 AudioProcessing::kNoError)
4435 {
4436 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4437 VoEId(_instanceId, _channelId),
4438 "Error setting AudioProcessing sample rate");
4439 return -1;
4440 }
4441 }
4442
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004443 if (_rtpAudioProc->num_input_channels() != _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004444 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004445 if (_rtpAudioProc->set_num_channels(_audioFrame.num_channels_,
4446 _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004447 != AudioProcessing::kNoError)
4448 {
4449 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4450 VoEId(_instanceId, _channelId),
4451 "Error setting AudioProcessing channels");
4452 return -1;
4453 }
4454 }
4455
4456 // Performs level analysis only; does not affect the signal.
4457 _rtpAudioProc->ProcessStream(&_audioFrame);
4458 }
4459
niklase@google.com470e71d2011-07-07 08:21:25 +00004460 return 0;
4461}
4462
4463WebRtc_UWord32
4464Channel::EncodeAndSend()
4465{
4466 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4467 "Channel::EncodeAndSend()");
4468
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004469 assert(_audioFrame.num_channels_ <= 2);
4470 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004471 {
4472 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4473 "Channel::EncodeAndSend() invalid audio frame");
4474 return -1;
4475 }
4476
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004477 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004478
4479    // --- Add 10 ms of raw (PCM) audio data to the ACM encoder.
4480
4481 // The ACM resamples internally.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004482 _audioFrame.timestamp_ = _timeStamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00004483 if (_audioCodingModule.Add10MsData((AudioFrame&)_audioFrame) != 0)
4484 {
4485 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
4486 "Channel::EncodeAndSend() ACM encoding failed");
4487 return -1;
4488 }
4489
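    // Advance the local RTP timestamp by one frame; for a 10 ms frame this
    // equals sample_rate_hz / 100 samples per channel.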
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004490 _timeStamp += _audioFrame.samples_per_channel_;
niklase@google.com470e71d2011-07-07 08:21:25 +00004491
4492 // --- Encode if complete frame is ready
4493
4494 // This call will trigger AudioPacketizationCallback::SendData if encoding
4495 // is done and payload is ready for packetization and transmission.
4496 return _audioCodingModule.Process();
4497}
4498
4499int Channel::RegisterExternalMediaProcessing(
4500 ProcessingTypes type,
4501 VoEMediaProcess& processObject)
4502{
4503 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4504 "Channel::RegisterExternalMediaProcessing()");
4505
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004506 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004507
4508 if (kPlaybackPerChannel == type)
4509 {
4510 if (_outputExternalMediaCallbackPtr)
4511 {
4512 _engineStatisticsPtr->SetLastError(
4513 VE_INVALID_OPERATION, kTraceError,
4514 "Channel::RegisterExternalMediaProcessing() "
4515 "output external media already enabled");
4516 return -1;
4517 }
4518 _outputExternalMediaCallbackPtr = &processObject;
4519 _outputExternalMedia = true;
4520 }
4521 else if (kRecordingPerChannel == type)
4522 {
4523 if (_inputExternalMediaCallbackPtr)
4524 {
4525 _engineStatisticsPtr->SetLastError(
4526 VE_INVALID_OPERATION, kTraceError,
4527 "Channel::RegisterExternalMediaProcessing() "
4528 "output external media already enabled");
4529 return -1;
4530 }
4531 _inputExternalMediaCallbackPtr = &processObject;
4532 _inputExternalMedia = true;
4533 }
4534 return 0;
4535}
4536
4537int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
4538{
4539 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4540 "Channel::DeRegisterExternalMediaProcessing()");
4541
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004542 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004543
4544 if (kPlaybackPerChannel == type)
4545 {
4546 if (!_outputExternalMediaCallbackPtr)
4547 {
4548 _engineStatisticsPtr->SetLastError(
4549 VE_INVALID_OPERATION, kTraceWarning,
4550 "Channel::DeRegisterExternalMediaProcessing() "
4551 "output external media already disabled");
4552 return 0;
4553 }
4554 _outputExternalMedia = false;
4555 _outputExternalMediaCallbackPtr = NULL;
4556 }
4557 else if (kRecordingPerChannel == type)
4558 {
4559 if (!_inputExternalMediaCallbackPtr)
4560 {
4561 _engineStatisticsPtr->SetLastError(
4562 VE_INVALID_OPERATION, kTraceWarning,
4563 "Channel::DeRegisterExternalMediaProcessing() "
4564 "input external media already disabled");
4565 return 0;
4566 }
4567 _inputExternalMedia = false;
4568 _inputExternalMediaCallbackPtr = NULL;
4569 }
4570
4571 return 0;
4572}
4573
roosa@google.com1b60ceb2012-12-12 23:00:29 +00004574int Channel::SetExternalMixing(bool enabled) {
4575 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4576 "Channel::SetExternalMixing(enabled=%d)", enabled);
4577
4578 if (_playing)
4579 {
4580 _engineStatisticsPtr->SetLastError(
4581 VE_INVALID_OPERATION, kTraceError,
4582 "Channel::SetExternalMixing() "
4583 "external mixing cannot be changed while playing.");
4584 return -1;
4585 }
4586
4587 _externalMixing = enabled;
4588
4589 return 0;
4590}
4591
niklase@google.com470e71d2011-07-07 08:21:25 +00004592int
4593Channel::ResetRTCPStatistics()
4594{
4595 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4596 "Channel::ResetRTCPStatistics()");
4597 WebRtc_UWord32 remoteSSRC(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004598 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
4599 return _rtpRtcpModule->ResetRTT(remoteSSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00004600}
4601
4602int
4603Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
4604{
4605 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4606 "Channel::GetRoundTripTimeSummary()");
4607 // Override default module outputs for the case when RTCP is disabled.
4608 // This is done to ensure that we are backward compatible with the
4609 // VoiceEngine where we did not use RTP/RTCP module.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004610 if (!_rtpRtcpModule->RTCP())
niklase@google.com470e71d2011-07-07 08:21:25 +00004611 {
4612 delaysMs.min = -1;
4613 delaysMs.max = -1;
4614 delaysMs.average = -1;
4615 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4616 "Channel::GetRoundTripTimeSummary() RTCP is disabled =>"
4617 " valid RTT measurements cannot be retrieved");
4618 return 0;
4619 }
4620
4621 WebRtc_UWord32 remoteSSRC;
4622 WebRtc_UWord16 RTT;
4623 WebRtc_UWord16 avgRTT;
4624 WebRtc_UWord16 maxRTT;
4625 WebRtc_UWord16 minRTT;
4626 // The remote SSRC will be zero if no RTP packet has been received.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004627 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004628 if (remoteSSRC == 0)
4629 {
4630 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4631 "Channel::GetRoundTripTimeSummary() unable to measure RTT"
4632 " since no RTP packet has been received yet");
4633 }
4634
4635 // Retrieve RTT statistics from the RTP/RTCP module for the specified
4636 // channel and SSRC. The SSRC is required to parse out the correct source
4637 // in conference scenarios.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004638 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT,&maxRTT) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004639 {
4640 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4641 "GetRoundTripTimeSummary unable to retrieve RTT values"
4642 " from the RTCP layer");
4643 delaysMs.min = -1; delaysMs.max = -1; delaysMs.average = -1;
4644 }
4645 else
4646 {
4647 delaysMs.min = minRTT;
4648 delaysMs.max = maxRTT;
4649 delaysMs.average = avgRTT;
4650 }
4651 return 0;
4652}
4653
4654int
4655Channel::GetNetworkStatistics(NetworkStatistics& stats)
4656{
4657 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4658 "Channel::GetNetworkStatistics()");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00004659 ACMNetworkStatistics acm_stats;
4660 int return_value = _audioCodingModule.NetworkStatistics(&acm_stats);
4661 if (return_value >= 0) {
4662 memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
4663 }
4664 return return_value;
niklase@google.com470e71d2011-07-07 08:21:25 +00004665}
4666
4667int
niklase@google.com470e71d2011-07-07 08:21:25 +00004668Channel::GetDelayEstimate(int& delayMs) const
4669{
4670 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4671 "Channel::GetDelayEstimate()");
4672 delayMs = (_averageDelayMs + 5) / 10 + _recPacketDelayMs;
4673 return 0;
4674}
4675
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +00004676int Channel::SetInitialPlayoutDelay(int delay_ms)
4677{
4678 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4679 "Channel::SetInitialPlayoutDelay()");
4680 if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
4681 (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
4682 {
4683 _engineStatisticsPtr->SetLastError(
4684 VE_INVALID_ARGUMENT, kTraceError,
4685 "SetInitialPlayoutDelay() invalid min delay");
4686 return -1;
4687 }
4688 if (_audioCodingModule.SetInitialPlayoutDelay(delay_ms) != 0)
4689 {
4690 _engineStatisticsPtr->SetLastError(
4691 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4692 "SetInitialPlayoutDelay() failed to set min playout delay");
4693 return -1;
4694 }
4695 return 0;
4696}
4697
4698
niklase@google.com470e71d2011-07-07 08:21:25 +00004699int
4700Channel::SetMinimumPlayoutDelay(int delayMs)
4701{
4702 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4703 "Channel::SetMinimumPlayoutDelay()");
4704 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
4705 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
4706 {
4707 _engineStatisticsPtr->SetLastError(
4708 VE_INVALID_ARGUMENT, kTraceError,
4709 "SetMinimumPlayoutDelay() invalid min delay");
4710 return -1;
4711 }
4712 if (_audioCodingModule.SetMinimumPlayoutDelay(delayMs) != 0)
4713 {
4714 _engineStatisticsPtr->SetLastError(
4715 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4716 "SetMinimumPlayoutDelay() failed to set min playout delay");
4717 return -1;
4718 }
4719 return 0;
4720}
4721
4722int
4723Channel::GetPlayoutTimestamp(unsigned int& timestamp)
4724{
4725 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4726 "Channel::GetPlayoutTimestamp()");
4727 WebRtc_UWord32 playoutTimestamp(0);
4728 if (GetPlayoutTimeStamp(playoutTimestamp) != 0)
4729 {
4730 _engineStatisticsPtr->SetLastError(
4731 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4732 "GetPlayoutTimestamp() failed to retrieve timestamp");
4733 return -1;
4734 }
4735 timestamp = playoutTimestamp;
4736 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4737 VoEId(_instanceId,_channelId),
4738 "GetPlayoutTimestamp() => timestamp=%u", timestamp);
4739 return 0;
4740}
4741
4742int
4743Channel::SetInitTimestamp(unsigned int timestamp)
4744{
4745 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4746 "Channel::SetInitTimestamp()");
4747 if (_sending)
4748 {
4749 _engineStatisticsPtr->SetLastError(
4750 VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
4751 return -1;
4752 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004753 if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004754 {
4755 _engineStatisticsPtr->SetLastError(
4756 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4757 "SetInitTimestamp() failed to set timestamp");
4758 return -1;
4759 }
4760 return 0;
4761}
4762
4763int
4764Channel::SetInitSequenceNumber(short sequenceNumber)
4765{
4766 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4767 "Channel::SetInitSequenceNumber()");
4768 if (_sending)
4769 {
4770 _engineStatisticsPtr->SetLastError(
4771 VE_SENDING, kTraceError,
4772 "SetInitSequenceNumber() already sending");
4773 return -1;
4774 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004775 if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004776 {
4777 _engineStatisticsPtr->SetLastError(
4778 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4779 "SetInitSequenceNumber() failed to set sequence number");
4780 return -1;
4781 }
4782 return 0;
4783}
4784
int
Channel::GetRtpRtcp(RtpRtcp* &rtpRtcpModule) const
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetRtpRtcp()");
    rtpRtcpModule = _rtpRtcpModule.get();
    return 0;
}

// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
// a shared helper.
WebRtc_Word32
Channel::MixOrReplaceAudioWithFile(const int mixingFrequency)
{
    scoped_array<WebRtc_Word16> fileBuffer(new WebRtc_Word16[640]);
    int fileSamples(0);

    {
        CriticalSectionScoped cs(&_fileCritSect);

        if (_inputFilePlayerPtr == NULL)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::MixOrReplaceAudioWithFile() fileplayer"
                         " doesn't exist");
            return -1;
        }

        if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
                                                      fileSamples,
                                                      mixingFrequency) == -1)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::MixOrReplaceAudioWithFile() file mixing "
                         "failed");
            return -1;
        }
        if (fileSamples == 0)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::MixOrReplaceAudioWithFile() file is ended");
            return 0;
        }
    }

    assert(_audioFrame.samples_per_channel_ == fileSamples);

    if (_mixFileWithMicrophone)
    {
        // Currently file stream is always mono.
        // TODO(xians): Change the code when FilePlayer supports real stereo.
        Utility::MixWithSat(_audioFrame.data_,
                            _audioFrame.num_channels_,
                            fileBuffer.get(),
                            1,
                            fileSamples);
    }
    else
    {
        // Replace ACM audio with file.
        // Currently file stream is always mono.
        // TODO(xians): Change the code when FilePlayer supports real stereo.
        _audioFrame.UpdateFrame(_channelId,
                                -1,
                                fileBuffer.get(),
                                fileSamples,
                                mixingFrequency,
                                AudioFrame::kNormalSpeech,
                                AudioFrame::kVadUnknown,
                                1);
    }
    return 0;
}

WebRtc_Word32
Channel::MixAudioWithFile(AudioFrame& audioFrame,
                          const int mixingFrequency)
{
    assert(mixingFrequency <= 32000);

    scoped_array<WebRtc_Word16> fileBuffer(new WebRtc_Word16[640]);
    int fileSamples(0);

    {
        CriticalSectionScoped cs(&_fileCritSect);

        if (_outputFilePlayerPtr == NULL)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::MixAudioWithFile() file mixing failed");
            return -1;
        }

        // We should get the frequency we ask for.
        if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
                                                       fileSamples,
                                                       mixingFrequency) == -1)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::MixAudioWithFile() file mixing failed");
            return -1;
        }
    }

    if (audioFrame.samples_per_channel_ == fileSamples)
    {
        // Currently file stream is always mono.
        // TODO(xians): Change the code when FilePlayer supports real stereo.
        Utility::MixWithSat(audioFrame.data_,
                            audioFrame.num_channels_,
                            fileBuffer.get(),
                            1,
                            fileSamples);
    }
    else
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
                     "fileSamples(%d)",
                     audioFrame.samples_per_channel_, fileSamples);
        return -1;
    }

    return 0;
}

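// InsertInbandDtmfTone() below overwrites the current 10 ms audio frame with a
// locally generated DTMF tone: it dequeues the next pending event once the
// minimum separation time has passed, regenerates the tone whenever the mixing
// frequency changes, and copies the mono tone sample into every channel of
// _audioFrame. Illustrative example, not taken from this file: for a stereo
// frame, tone sample s is written to both data_[2*s] and data_[2*s + 1].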
int
Channel::InsertInbandDtmfTone()
{
    // Check if we should start a new tone.
    if (_inbandDtmfQueue.PendingDtmf() &&
        !_inbandDtmfGenerator.IsAddingTone() &&
        _inbandDtmfGenerator.DelaySinceLastTone() >
        kMinTelephoneEventSeparationMs)
    {
        WebRtc_Word8 eventCode(0);
        WebRtc_UWord16 lengthMs(0);
        WebRtc_UWord8 attenuationDb(0);

        eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
        _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
        if (_playInbandDtmfEvent)
        {
            // Add tone to output mixer using a reduced length to minimize
            // risk of echo.
            _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
                                          attenuationDb);
        }
    }

    if (_inbandDtmfGenerator.IsAddingTone())
    {
        WebRtc_UWord16 frequency(0);
        _inbandDtmfGenerator.GetSampleRate(frequency);

        if (frequency != _audioFrame.sample_rate_hz_)
        {
            // Update sample rate of Dtmf tone since the mixing frequency
            // has changed.
            _inbandDtmfGenerator.SetSampleRate(
                (WebRtc_UWord16) (_audioFrame.sample_rate_hz_));
            // Reset the tone to be added taking the new sample rate into
            // account.
            _inbandDtmfGenerator.ResetTone();
        }

        WebRtc_Word16 toneBuffer[320];
        WebRtc_UWord16 toneSamples(0);
        // Get 10ms tone segment and set time since last tone to zero.
        if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::EncodeAndSend() inserting Dtmf failed");
            return -1;
        }

        // Replace mixed audio with DTMF tone.
        for (int sample = 0;
             sample < _audioFrame.samples_per_channel_;
             sample++)
        {
            for (int channel = 0;
                 channel < _audioFrame.num_channels_;
                 channel++)
            {
                const int index = sample * _audioFrame.num_channels_ + channel;
                _audioFrame.data_[index] = toneBuffer[sample];
            }
        }

        assert(_audioFrame.samples_per_channel_ == toneSamples);
    }
    else
    {
        // Add 10ms to "delay-since-last-tone" counter.
        _inbandDtmfGenerator.UpdateDelaySinceLastTone();
    }
    return 0;
}

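// GetPlayoutTimeStamp() below estimates the RTP timestamp of the audio that is
// currently being played out: it takes the ACM's playout timestamp and
// subtracts the audio device's playout delay converted to RTP clock ticks,
// using the payload's RTP clock rate (8 kHz for G722, 48 kHz for Opus,
// otherwise the ACM playout frequency). Illustrative arithmetic, not taken
// from this file: with a 16 kHz clock and delayMS = 60, the subtraction is
// 60 * (16000 / 1000) = 960 ticks.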
WebRtc_Word32
Channel::GetPlayoutTimeStamp(WebRtc_UWord32& playoutTimestamp)
{
    WebRtc_UWord32 timestamp(0);
    CodecInst currRecCodec;

    if (_audioCodingModule.PlayoutTimestamp(&timestamp) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::GetPlayoutTimeStamp() failed to read playout"
                     " timestamp from the ACM");
        return -1;
    }

    WebRtc_UWord16 delayMS(0);
    if (_audioDeviceModulePtr->PlayoutDelay(&delayMS) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::GetPlayoutTimeStamp() failed to read playout"
                     " delay from the ADM");
        return -1;
    }

    WebRtc_Word32 playoutFrequency = _audioCodingModule.PlayoutFrequency();
    if (_audioCodingModule.ReceiveCodec(&currRecCodec) == 0) {
        if (STR_CASE_CMP("G722", currRecCodec.plname) == 0) {
            playoutFrequency = 8000;
        } else if (STR_CASE_CMP("opus", currRecCodec.plname) == 0) {
            playoutFrequency = 48000;
        }
    }
    timestamp -= (delayMS * (playoutFrequency/1000));

    playoutTimestamp = timestamp;

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetPlayoutTimeStamp() => playoutTimestamp = %lu",
                 playoutTimestamp);
    return 0;
}

void
Channel::ResetDeadOrAliveCounters()
{
    _countDeadDetections = 0;
    _countAliveDetections = 0;
}

void
Channel::UpdateDeadOrAliveCounters(bool alive)
{
    if (alive)
        _countAliveDetections++;
    else
        _countDeadDetections++;
}

int
Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
{
    bool enabled;
    WebRtc_UWord8 timeSec;

    _rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, timeSec);
    if (!enabled)
        return (-1);

    countDead = static_cast<int> (_countDeadDetections);
    countAlive = static_cast<int> (_countAliveDetections);
    return 0;
}

WebRtc_Word32
Channel::SendPacketRaw(const void *data, int len, bool RTCP)
{
    if (_transportPtr == NULL)
    {
        return -1;
    }
    if (!RTCP)
    {
        return _transportPtr->SendPacket(_channelId, data, len);
    }
    else
    {
        return _transportPtr->SendRTCPPacket(_channelId, data, len);
    }
}

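// UpdatePacketDelay() below maintains two receive-side delay estimates from
// each incoming packet's RTP header: an exponentially smoothed average of the
// difference between the packet timestamp and the last playout timestamp
// (stored as 10x ms to reduce rounding error), and the inter-packet delay for
// consecutive sequence numbers (kept only when it falls in the 10-60 ms
// range). Illustrative arithmetic, not taken from this file: with
// _averageDelayMs == 700 (i.e. 70.0 ms) and timeStampDiffMs == 80, the update
// (700 * 7 + 10 * 80 + 4) >> 3 yields 713, i.e. about 71.3 ms.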
WebRtc_Word32
Channel::UpdatePacketDelay(const WebRtc_UWord32 timestamp,
                           const WebRtc_UWord16 sequenceNumber)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
                 timestamp, sequenceNumber);

    WebRtc_Word32 rtpReceiveFrequency(0);

    // Get frequency of last received payload.
    rtpReceiveFrequency = _audioCodingModule.ReceiveFrequency();

    CodecInst currRecCodec;
    if (_audioCodingModule.ReceiveCodec(&currRecCodec) == 0) {
        if (STR_CASE_CMP("G722", currRecCodec.plname) == 0) {
            // Even though the actual sampling rate for G.722 audio is
            // 16,000 Hz, the RTP clock rate for the G722 payload format is
            // 8,000 Hz because that value was erroneously assigned in
            // RFC 1890 and must remain unchanged for backward compatibility.
            rtpReceiveFrequency = 8000;
        } else if (STR_CASE_CMP("opus", currRecCodec.plname) == 0) {
            // We are resampling Opus internally to 32,000 Hz until all our
            // DSP routines can operate at 48,000 Hz, but the RTP clock
            // rate for the Opus payload format is standardized to 48,000 Hz,
            // because that is the maximum supported decoding sampling rate.
            rtpReceiveFrequency = 48000;
        }
    }

    const WebRtc_UWord32 timeStampDiff = timestamp - _playoutTimeStampRTP;
    WebRtc_UWord32 timeStampDiffMs(0);

    if (timeStampDiff > 0)
    {
        switch (rtpReceiveFrequency) {
          case 8000:
            timeStampDiffMs = static_cast<WebRtc_UWord32>(timeStampDiff >> 3);
            break;
          case 16000:
            timeStampDiffMs = static_cast<WebRtc_UWord32>(timeStampDiff >> 4);
            break;
          case 32000:
            timeStampDiffMs = static_cast<WebRtc_UWord32>(timeStampDiff >> 5);
            break;
          case 48000:
            timeStampDiffMs = static_cast<WebRtc_UWord32>(timeStampDiff / 48);
            break;
          default:
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId, _channelId),
                         "Channel::UpdatePacketDelay() invalid sample rate");
            timeStampDiffMs = 0;
            return -1;
        }
        if (timeStampDiffMs > (2 * kVoiceEngineMaxMinPlayoutDelayMs))
        {
            timeStampDiffMs = 0;
        }

        if (_averageDelayMs == 0)
        {
            _averageDelayMs = timeStampDiffMs * 10;
        }
        else
        {
            // Filter average delay value using exponential filter (alpha is
            // 7/8). We derive 10*_averageDelayMs here (reduces risk of
            // rounding error) and compensate for it in GetDelayEstimate()
            // later. Adding 4/8 results in correct rounding.
            _averageDelayMs = ((_averageDelayMs*7 + 10*timeStampDiffMs + 4)>>3);
        }

        if (sequenceNumber - _previousSequenceNumber == 1)
        {
            WebRtc_UWord16 packetDelayMs = 0;
            switch (rtpReceiveFrequency) {
              case 8000:
                packetDelayMs = static_cast<WebRtc_UWord16>(
                    (timestamp - _previousTimestamp) >> 3);
                break;
              case 16000:
                packetDelayMs = static_cast<WebRtc_UWord16>(
                    (timestamp - _previousTimestamp) >> 4);
                break;
              case 32000:
                packetDelayMs = static_cast<WebRtc_UWord16>(
                    (timestamp - _previousTimestamp) >> 5);
                break;
              case 48000:
                packetDelayMs = static_cast<WebRtc_UWord16>(
                    (timestamp - _previousTimestamp) / 48);
                break;
            }

            if (packetDelayMs >= 10 && packetDelayMs <= 60)
                _recPacketDelayMs = packetDelayMs;
        }
    }

    _previousSequenceNumber = sequenceNumber;
    _previousTimestamp = timestamp;

    return 0;
}

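// RegisterReceiveCodecsToRTPModule() below walks every codec known to the ACM
// and registers it as a receive payload in the RTP/RTCP module, so incoming
// payload types can be mapped back to codecs; failures are logged but do not
// abort the loop.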
void
Channel::RegisterReceiveCodecsToRTPModule()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RegisterReceiveCodecsToRTPModule()");

    CodecInst codec;
    const WebRtc_UWord8 nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

    for (int idx = 0; idx < nSupportedCodecs; idx++)
    {
        // Open up the RTP/RTCP receiver for all supported codecs
        if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
            (_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
        {
            WEBRTC_TRACE(
                kTraceWarning,
                kTraceVoice,
                VoEId(_instanceId, _channelId),
                "Channel::RegisterReceiveCodecsToRTPModule() unable"
                " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
                codec.plname, codec.pltype, codec.plfreq,
                codec.channels, codec.rate);
        }
        else
        {
            WEBRTC_TRACE(
                kTraceInfo,
                kTraceVoice,
                VoEId(_instanceId, _channelId),
                "Channel::RegisterReceiveCodecsToRTPModule() %s "
                "(%d/%d/%d/%d) has been added to the RTP/RTCP "
                "receiver",
                codec.plname, codec.pltype, codec.plfreq,
                codec.channels, codec.rate);
        }
    }
}

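// ApmProcessRx() below runs the receive-side AudioProcessing module on a
// decoded frame before playout. The frame's sample rate and channel count are
// (re)registered on every call; configuration or processing errors are only
// logged and the function still returns 0, so playout is never blocked here.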
int Channel::ApmProcessRx(AudioFrame& frame) {
  AudioProcessing* audioproc = _rxAudioProcessingModulePtr;
  // Register the (possibly new) frame parameters.
  if (audioproc->set_sample_rate_hz(frame.sample_rate_hz_) != 0) {
    LOG_FERR1(LS_WARNING, set_sample_rate_hz, frame.sample_rate_hz_);
  }
  if (audioproc->set_num_channels(frame.num_channels_,
                                  frame.num_channels_) != 0) {
    LOG_FERR1(LS_WARNING, set_num_channels, frame.num_channels_);
  }
  if (audioproc->ProcessStream(&frame) != 0) {
    LOG_FERR0(LS_WARNING, ProcessStream);
  }
  return 0;
}

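// SetSecondarySendCodec() below enables sending a secondary encoding alongside
// the primary one, carried in RED: the RED payload type is registered in both
// the ACM and the RTP/RTCP module via SetRedPayloadType() further down, and
// |codec| is then registered as the secondary send codec in the ACM.
// Hypothetical usage sketch, illustrative only and not part of this file:
//
//   CodecInst secondary;  // e.g. filled in via AudioCodingModule::Codec()
//   if (channel->SetSecondarySendCodec(secondary, 127) != 0) {
//     // RED unsupported or registration failed; check the last error.
//   }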
int Channel::SetSecondarySendCodec(const CodecInst& codec,
                                   int red_payload_type) {
  // Sanity check for payload type.
  if (red_payload_type < 0 || red_payload_type > 127) {
    _engineStatisticsPtr->SetLastError(
        VE_PLTYPE_ERROR, kTraceError,
        "SetSecondarySendCodec() invalid RED payload type");
    return -1;
  }

  if (SetRedPayloadType(red_payload_type) < 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSecondarySendCodec() Failed to register RED ACM");
    return -1;
  }
  if (_audioCodingModule.RegisterSecondarySendCodec(codec) < 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSecondarySendCodec() Failed to register secondary send codec in "
        "ACM");
    return -1;
  }

  return 0;
}

void Channel::RemoveSecondarySendCodec() {
  _audioCodingModule.UnregisterSecondarySendCodec();
}

int Channel::GetSecondarySendCodec(CodecInst* codec) {
  if (_audioCodingModule.SecondarySendCodec(codec) < 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "GetSecondarySendCodec() Failed to get secondary send codec from ACM");
    return -1;
  }
  return 0;
}

// Assuming this method is called with a valid payload type.
int Channel::SetRedPayloadType(int red_payload_type) {
  CodecInst codec;
  bool found_red = false;

  // Get default RED settings from the ACM database.
  const int num_codecs = AudioCodingModule::NumberOfCodecs();
  for (int idx = 0; idx < num_codecs; idx++) {
    _audioCodingModule.Codec(idx, &codec);
    if (!STR_CASE_CMP(codec.plname, "RED")) {
      found_red = true;
      break;
    }
  }

  if (!found_red) {
    _engineStatisticsPtr->SetLastError(
        VE_CODEC_ERROR, kTraceError,
        "SetRedPayloadType() RED is not supported");
    return -1;
  }

  codec.pltype = red_payload_type;
  if (_audioCodingModule.RegisterSendCodec(codec) < 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetRedPayloadType() RED registration in ACM module failed");
    return -1;
  }

  if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "SetRedPayloadType() RED registration in RTP/RTCP module failed");
    return -1;
  }
  return 0;
}

}  // namespace voe
}  // namespace webrtc