niklase@google.com470e71d2011-07-07 08:21:25 +00001/*
henrika@webrtc.org2919e952012-01-31 08:45:03 +00002 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
niklase@google.com470e71d2011-07-07 08:21:25 +00003 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000011#include "webrtc/voice_engine/channel.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000012
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000013#include "webrtc/modules/audio_device/include/audio_device.h"
14#include "webrtc/modules/audio_processing/include/audio_processing.h"
15#include "webrtc/modules/utility/interface/audio_frame_operations.h"
16#include "webrtc/modules/utility/interface/process_thread.h"
17#include "webrtc/modules/utility/interface/rtp_dump.h"
18#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
19#include "webrtc/system_wrappers/interface/logging.h"
20#include "webrtc/system_wrappers/interface/trace.h"
21#include "webrtc/voice_engine/include/voe_base.h"
22#include "webrtc/voice_engine/include/voe_external_media.h"
23#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
24#include "webrtc/voice_engine/output_mixer.h"
25#include "webrtc/voice_engine/statistics.h"
26#include "webrtc/voice_engine/transmit_mixer.h"
27#include "webrtc/voice_engine/utility.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000028
29#if defined(_WIN32)
30#include <Qos.h>
31#endif
32
andrew@webrtc.org50419b02012-11-14 19:07:54 +000033namespace webrtc {
34namespace voe {
niklase@google.com470e71d2011-07-07 08:21:25 +000035
36WebRtc_Word32
37Channel::SendData(FrameType frameType,
38 WebRtc_UWord8 payloadType,
39 WebRtc_UWord32 timeStamp,
40 const WebRtc_UWord8* payloadData,
41 WebRtc_UWord16 payloadSize,
42 const RTPFragmentationHeader* fragmentation)
43{
44 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
45 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
46 " payloadSize=%u, fragmentation=0x%x)",
47 frameType, payloadType, timeStamp, payloadSize, fragmentation);
48
49 if (_includeAudioLevelIndication)
50 {
andrew@webrtc.org755b04a2011-11-15 16:57:56 +000051 assert(_rtpAudioProc.get() != NULL);
niklase@google.com470e71d2011-07-07 08:21:25 +000052 // Store current audio level in the RTP/RTCP module.
53 // The level will be used in combination with voice-activity state
54 // (frameType) to add an RTP header extension.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +000055 _rtpRtcpModule->SetAudioLevel(_rtpAudioProc->level_estimator()->RMS());
niklase@google.com470e71d2011-07-07 08:21:25 +000056 }
57
58 // Push data from ACM to RTP/RTCP-module to deliver audio frame for
59 // packetization.
60 // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +000061 if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
niklase@google.com470e71d2011-07-07 08:21:25 +000062 payloadType,
63 timeStamp,
stefan@webrtc.orgddfdfed2012-07-03 13:21:22 +000064 // Leaving the time when this frame was
65 // received from the capture device as
66 // undefined for voice for now.
67 -1,
niklase@google.com470e71d2011-07-07 08:21:25 +000068 payloadData,
69 payloadSize,
70 fragmentation) == -1)
71 {
72 _engineStatisticsPtr->SetLastError(
73 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
74 "Channel::SendData() failed to send data to RTP/RTCP module");
75 return -1;
76 }
77
78 _lastLocalTimeStamp = timeStamp;
79 _lastPayloadType = payloadType;
80
81 return 0;
82}
83
84WebRtc_Word32
85Channel::InFrameType(WebRtc_Word16 frameType)
86{
87 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
88 "Channel::InFrameType(frameType=%d)", frameType);
89
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +000090 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +000091 // 1 indicates speech
92 _sendFrameType = (frameType == 1) ? 1 : 0;
93 return 0;
94}
95
niklase@google.com470e71d2011-07-07 08:21:25 +000096WebRtc_Word32
97Channel::OnRxVadDetected(const int vadDecision)
98{
99 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
100 "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);
101
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000102 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000103 if (_rxVadObserverPtr)
104 {
105 _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
106 }
107
108 return 0;
109}
110
111int
112Channel::SendPacket(int channel, const void *data, int len)
113{
114 channel = VoEChannelId(channel);
115 assert(channel == _channelId);
116
117 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
118 "Channel::SendPacket(channel=%d, len=%d)", channel, len);
119
120 if (_transportPtr == NULL)
121 {
122 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
123 "Channel::SendPacket() failed to send RTP packet due to"
124 " invalid transport object");
125 return -1;
126 }
127
128 // Insert an extra RTP packet if the user has called the
129 // InsertExtraRTPPacket() API.
130 if (_insertExtraRTPPacket)
131 {
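        // The second byte of an RTP header carries the marker bit (MSB)
        // followed by the 7-bit payload type (RFC 3550), so only that byte
        // is rewritten below.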
132 WebRtc_UWord8* rtpHdr = (WebRtc_UWord8*)data;
133 WebRtc_UWord8 M_PT(0);
134 if (_extraMarkerBit)
135 {
136 M_PT = 0x80; // set the M-bit
137 }
138 M_PT += _extraPayloadType; // set the payload type
139 *(++rtpHdr) = M_PT; // modify the M|PT-byte within the RTP header
140 _insertExtraRTPPacket = false; // insert one packet only
141 }
142
143 WebRtc_UWord8* bufferToSendPtr = (WebRtc_UWord8*)data;
144 WebRtc_Word32 bufferLength = len;
145
146 // Dump the RTP packet to a file (if RTP dump is enabled).
147 if (_rtpDumpOut.DumpPacket((const WebRtc_UWord8*)data, len) == -1)
148 {
149 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
150 VoEId(_instanceId,_channelId),
151 "Channel::SendPacket() RTP dump to output file failed");
152 }
153
154 // SRTP or External encryption
155 if (_encrypting)
156 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000157 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000158
159 if (_encryptionPtr)
160 {
161 if (!_encryptionRTPBufferPtr)
162 {
163 // Allocate memory for encryption buffer one time only
164 _encryptionRTPBufferPtr =
165 new WebRtc_UWord8[kVoiceEngineMaxIpPacketSizeBytes];
xians@webrtc.org51253502012-10-25 13:58:02 +0000166 memset(_encryptionRTPBufferPtr, 0,
167 kVoiceEngineMaxIpPacketSizeBytes);
niklase@google.com470e71d2011-07-07 08:21:25 +0000168 }
169
170 // Perform encryption (SRTP or external)
171 WebRtc_Word32 encryptedBufferLength = 0;
172 _encryptionPtr->encrypt(_channelId,
173 bufferToSendPtr,
174 _encryptionRTPBufferPtr,
175 bufferLength,
176 (int*)&encryptedBufferLength);
177 if (encryptedBufferLength <= 0)
178 {
179 _engineStatisticsPtr->SetLastError(
180 VE_ENCRYPTION_FAILED,
181 kTraceError, "Channel::SendPacket() encryption failed");
182 return -1;
183 }
184
185 // Replace default data buffer with encrypted buffer
186 bufferToSendPtr = _encryptionRTPBufferPtr;
187 bufferLength = encryptedBufferLength;
188 }
189 }
190
191 // Packet transmission using WebRtc socket transport
192 if (!_externalTransport)
193 {
194 int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
195 bufferLength);
196 if (n < 0)
197 {
198 WEBRTC_TRACE(kTraceError, kTraceVoice,
199 VoEId(_instanceId,_channelId),
200 "Channel::SendPacket() RTP transmission using WebRtc"
201 " sockets failed");
202 return -1;
203 }
204 return n;
205 }
206
207 // Packet transmission using external transport
208 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000209 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000210
211 int n = _transportPtr->SendPacket(channel,
212 bufferToSendPtr,
213 bufferLength);
214 if (n < 0)
215 {
216 WEBRTC_TRACE(kTraceError, kTraceVoice,
217 VoEId(_instanceId,_channelId),
218 "Channel::SendPacket() RTP transmission using external"
219 " transport failed");
220 return -1;
221 }
222 return n;
223 }
224}
225
226int
227Channel::SendRTCPPacket(int channel, const void *data, int len)
228{
229 channel = VoEChannelId(channel);
230 assert(channel == _channelId);
231
232 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
233 "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);
234
niklase@google.com470e71d2011-07-07 08:21:25 +0000235 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000236 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.org83661f52011-11-25 10:58:15 +0000237 if (_transportPtr == NULL)
238 {
239 WEBRTC_TRACE(kTraceError, kTraceVoice,
240 VoEId(_instanceId,_channelId),
241 "Channel::SendRTCPPacket() failed to send RTCP packet"
242 " due to invalid transport object");
243 return -1;
244 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000245 }
246
247 WebRtc_UWord8* bufferToSendPtr = (WebRtc_UWord8*)data;
248 WebRtc_Word32 bufferLength = len;
249
250 // Dump the RTCP packet to a file (if RTP dump is enabled).
251 if (_rtpDumpOut.DumpPacket((const WebRtc_UWord8*)data, len) == -1)
252 {
253 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
254 VoEId(_instanceId,_channelId),
255 "Channel::SendPacket() RTCP dump to output file failed");
256 }
257
258 // SRTP or External encryption
259 if (_encrypting)
260 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000261 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000262
263 if (_encryptionPtr)
264 {
265 if (!_encryptionRTCPBufferPtr)
266 {
267 // Allocate memory for encryption buffer one time only
268 _encryptionRTCPBufferPtr =
269 new WebRtc_UWord8[kVoiceEngineMaxIpPacketSizeBytes];
270 }
271
272 // Perform encryption (SRTP or external).
273 WebRtc_Word32 encryptedBufferLength = 0;
274 _encryptionPtr->encrypt_rtcp(_channelId,
275 bufferToSendPtr,
276 _encryptionRTCPBufferPtr,
277 bufferLength,
278 (int*)&encryptedBufferLength);
279 if (encryptedBufferLength <= 0)
280 {
281 _engineStatisticsPtr->SetLastError(
282 VE_ENCRYPTION_FAILED, kTraceError,
283 "Channel::SendRTCPPacket() encryption failed");
284 return -1;
285 }
286
287 // Replace default data buffer with encrypted buffer
288 bufferToSendPtr = _encryptionRTCPBufferPtr;
289 bufferLength = encryptedBufferLength;
290 }
291 }
292
293 // Packet transmission using WebRtc socket transport
294 if (!_externalTransport)
295 {
296 int n = _transportPtr->SendRTCPPacket(channel,
297 bufferToSendPtr,
298 bufferLength);
299 if (n < 0)
300 {
301 WEBRTC_TRACE(kTraceInfo, kTraceVoice,
302 VoEId(_instanceId,_channelId),
303 "Channel::SendRTCPPacket() transmission using WebRtc"
304 " sockets failed");
305 return -1;
306 }
307 return n;
308 }
309
310 // Packet transmission using external transport
311 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000312 CriticalSectionScoped cs(&_callbackCritSect);
henrike@webrtc.orgde727ab2012-11-18 18:49:13 +0000313 if (_transportPtr == NULL)
314 {
315 return -1;
316 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000317 int n = _transportPtr->SendRTCPPacket(channel,
318 bufferToSendPtr,
319 bufferLength);
320 if (n < 0)
321 {
322 WEBRTC_TRACE(kTraceInfo, kTraceVoice,
323 VoEId(_instanceId,_channelId),
324 "Channel::SendRTCPPacket() transmission using external"
325 " transport failed");
326 return -1;
327 }
328 return n;
329 }
330
331 return len;
332}
333
334void
niklase@google.com470e71d2011-07-07 08:21:25 +0000335Channel::OnPlayTelephoneEvent(const WebRtc_Word32 id,
336 const WebRtc_UWord8 event,
337 const WebRtc_UWord16 lengthMs,
338 const WebRtc_UWord8 volume)
339{
340 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
341 "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +0000342 " volume=%u)", id, event, lengthMs, volume);
niklase@google.com470e71d2011-07-07 08:21:25 +0000343
344 if (!_playOutbandDtmfEvent || (event > 15))
345 {
346 // Ignore callback since feedback is disabled or event is not a
347 // Dtmf tone event.
348 return;
349 }
350
351 assert(_outputMixerPtr != NULL);
352
353 // Start playing out the Dtmf tone (if playout is enabled).
354 // Reduce the length of the tone by 80 ms to reduce the risk of echo.
355 _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
356}
357
358void
359Channel::OnIncomingSSRCChanged(const WebRtc_Word32 id,
360 const WebRtc_UWord32 SSRC)
361{
362 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
363 "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
364 id, SSRC);
365
366 WebRtc_Word32 channel = VoEChannelId(id);
367 assert(channel == _channelId);
368
369 // Reset RTP-module counters since a new incoming RTP stream is detected
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +0000370 _rtpRtcpModule->ResetReceiveDataCountersRTP();
371 _rtpRtcpModule->ResetStatisticsRTP();
niklase@google.com470e71d2011-07-07 08:21:25 +0000372
373 if (_rtpObserver)
374 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000375 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000376
377 if (_rtpObserverPtr)
378 {
379 // Send new SSRC to registered observer using callback
380 _rtpObserverPtr->OnIncomingSSRCChanged(channel, SSRC);
381 }
382 }
383}
384
385void Channel::OnIncomingCSRCChanged(const WebRtc_Word32 id,
386 const WebRtc_UWord32 CSRC,
387 const bool added)
388{
389 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
390 "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
391 id, CSRC, added);
392
393 WebRtc_Word32 channel = VoEChannelId(id);
394 assert(channel == _channelId);
395
396 if (_rtpObserver)
397 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000398 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000399
400 if (_rtpObserverPtr)
401 {
402 _rtpObserverPtr->OnIncomingCSRCChanged(channel, CSRC, added);
403 }
404 }
405}
406
407void
408Channel::OnApplicationDataReceived(const WebRtc_Word32 id,
409 const WebRtc_UWord8 subType,
410 const WebRtc_UWord32 name,
411 const WebRtc_UWord16 length,
412 const WebRtc_UWord8* data)
413{
414 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
415 "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
416 " name=%u, length=%u)",
417 id, subType, name, length);
418
419 WebRtc_Word32 channel = VoEChannelId(id);
420 assert(channel == _channelId);
421
422 if (_rtcpObserver)
423 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000424 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000425
426 if (_rtcpObserverPtr)
427 {
428 _rtcpObserverPtr->OnApplicationDataReceived(channel,
429 subType,
430 name,
431 data,
432 length);
433 }
434 }
435}
436
437WebRtc_Word32
438Channel::OnInitializeDecoder(
439 const WebRtc_Word32 id,
440 const WebRtc_Word8 payloadType,
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +0000441 const char payloadName[RTP_PAYLOAD_NAME_SIZE],
xians@google.com0b0665a2011-08-08 08:18:44 +0000442 const int frequency,
niklase@google.com470e71d2011-07-07 08:21:25 +0000443 const WebRtc_UWord8 channels,
444 const WebRtc_UWord32 rate)
445{
446 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
447 "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
448 "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
449 id, payloadType, payloadName, frequency, channels, rate);
450
andrew@webrtc.orgceb148c2011-08-23 17:53:54 +0000451 assert(VoEChannelId(id) == _channelId);
niklase@google.com470e71d2011-07-07 08:21:25 +0000452
henrika@webrtc.orgf75901f2012-01-16 08:45:42 +0000453 CodecInst receiveCodec = {0};
454 CodecInst dummyCodec = {0};
niklase@google.com470e71d2011-07-07 08:21:25 +0000455
456 receiveCodec.pltype = payloadType;
niklase@google.com470e71d2011-07-07 08:21:25 +0000457 receiveCodec.plfreq = frequency;
458 receiveCodec.channels = channels;
459 receiveCodec.rate = rate;
henrika@webrtc.orgf75901f2012-01-16 08:45:42 +0000460 strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +0000461
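    // Look up the matching codec in the ACM so that a valid packet size
    // (pacsize) can be copied into the receive codec; it is not signaled
    // in the callback itself.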
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +0000462 _audioCodingModule.Codec(payloadName, &dummyCodec, frequency, channels);
niklase@google.com470e71d2011-07-07 08:21:25 +0000463 receiveCodec.pacsize = dummyCodec.pacsize;
464
465 // Register the new codec to the ACM
466 if (_audioCodingModule.RegisterReceiveCodec(receiveCodec) == -1)
467 {
468 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
andrew@webrtc.orgceb148c2011-08-23 17:53:54 +0000469 VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +0000470 "Channel::OnInitializeDecoder() invalid codec ("
471 "pt=%d, name=%s) received - 1", payloadType, payloadName);
472 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
473 return -1;
474 }
475
476 return 0;
477}
478
479void
480Channel::OnPacketTimeout(const WebRtc_Word32 id)
481{
482 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
483 "Channel::OnPacketTimeout(id=%d)", id);
484
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000485 CriticalSectionScoped cs(_callbackCritSectPtr);
niklase@google.com470e71d2011-07-07 08:21:25 +0000486 if (_voiceEngineObserverPtr)
487 {
488 if (_receiving || _externalTransport)
489 {
490 WebRtc_Word32 channel = VoEChannelId(id);
491 assert(channel == _channelId);
492 // Ensure that next OnReceivedPacket() callback will trigger
493 // a VE_PACKET_RECEIPT_RESTARTED callback.
494 _rtpPacketTimedOut = true;
495 // Deliver callback to the observer
496 WEBRTC_TRACE(kTraceInfo, kTraceVoice,
497 VoEId(_instanceId,_channelId),
498 "Channel::OnPacketTimeout() => "
499 "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
500 _voiceEngineObserverPtr->CallbackOnError(channel,
501 VE_RECEIVE_PACKET_TIMEOUT);
502 }
503 }
504}
505
506void
507Channel::OnReceivedPacket(const WebRtc_Word32 id,
508 const RtpRtcpPacketType packetType)
509{
510 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
511 "Channel::OnReceivedPacket(id=%d, packetType=%d)",
512 id, packetType);
513
andrew@webrtc.orgceb148c2011-08-23 17:53:54 +0000514 assert(VoEChannelId(id) == _channelId);
niklase@google.com470e71d2011-07-07 08:21:25 +0000515
516 // Notify only for the case when we have restarted an RTP session.
517 if (_rtpPacketTimedOut && (kPacketRtp == packetType))
518 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000519 CriticalSectionScoped cs(_callbackCritSectPtr);
niklase@google.com470e71d2011-07-07 08:21:25 +0000520 if (_voiceEngineObserverPtr)
521 {
522 WebRtc_Word32 channel = VoEChannelId(id);
523 assert(channel == _channelId);
524 // Reset timeout mechanism
525 _rtpPacketTimedOut = false;
526 // Deliver callback to the observer
527 WEBRTC_TRACE(kTraceInfo, kTraceVoice,
528 VoEId(_instanceId,_channelId),
529 "Channel::OnPacketTimeout() =>"
530 " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
531 _voiceEngineObserverPtr->CallbackOnError(
532 channel,
533 VE_PACKET_RECEIPT_RESTARTED);
534 }
535 }
536}
537
538void
539Channel::OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
540 const RTPAliveType alive)
541{
542 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
543 "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);
544
545 if (!_connectionObserver)
546 return;
547
548 WebRtc_Word32 channel = VoEChannelId(id);
549 assert(channel == _channelId);
550
551 // Use Alive as default to limit risk of false Dead detections
552 bool isAlive(true);
553
554 // Always mark the connection as Dead when the module reports kRtpDead
555 if (kRtpDead == alive)
556 {
557 isAlive = false;
558 }
559
560 // It is possible that the connection is alive even if no RTP packet has
561 // been received for a long time since the other side might use VAD/DTX
562 // and a low SID-packet update rate.
563 if ((kRtpNoRtp == alive) && _playing)
564 {
565 // Detect Alive for all NetEQ states except for the case when we are
566 // in PLC_CNG state.
567 // PLC_CNG <=> background noise only due to long expand or error.
568 // Note that the case where the other side stops sending during CNG
569 // state will be detected as Alive. Dead is not set until RTCP packets
570 // have been missing for at least twelve seconds (handled internally
571 // by the RTP/RTCP module).
572 isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
573 }
574
575 UpdateDeadOrAliveCounters(isAlive);
576
577 // Send callback to the registered observer
578 if (_connectionObserver)
579 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000580 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000581 if (_connectionObserverPtr)
582 {
583 _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
584 }
585 }
586}
587
588WebRtc_Word32
589Channel::OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
590 const WebRtc_UWord16 payloadSize,
591 const WebRtcRTPHeader* rtpHeader)
592{
593 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
594 "Channel::OnReceivedPayloadData(payloadSize=%d,"
595 " payloadType=%u, audioChannel=%u)",
596 payloadSize,
597 rtpHeader->header.payloadType,
598 rtpHeader->type.Audio.channel);
599
roosa@google.com0870f022012-12-12 21:31:41 +0000600 _lastRemoteTimeStamp = rtpHeader->header.timestamp;
601
niklase@google.com470e71d2011-07-07 08:21:25 +0000602 if (!_playing)
603 {
604 // Avoid inserting into NetEQ when we are not playing. Count the
605 // packet as discarded.
606 WEBRTC_TRACE(kTraceStream, kTraceVoice,
607 VoEId(_instanceId, _channelId),
608 "received packet is discarded since playing is not"
609 " activated");
610 _numberOfDiscardedPackets++;
611 return 0;
612 }
613
614 // Push the incoming payload (parsed and ready for decoding) into the ACM
tina.legrand@webrtc.org16b6b902012-04-12 11:02:38 +0000615 if (_audioCodingModule.IncomingPacket(payloadData,
niklase@google.com470e71d2011-07-07 08:21:25 +0000616 payloadSize,
617 *rtpHeader) != 0)
618 {
619 _engineStatisticsPtr->SetLastError(
620 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
621 "Channel::OnReceivedPayloadData() unable to push data to the ACM");
622 return -1;
623 }
624
625 // Update the packet delay
626 UpdatePacketDelay(rtpHeader->header.timestamp,
627 rtpHeader->header.sequenceNumber);
628
629 return 0;
630}
631
632WebRtc_Word32 Channel::GetAudioFrame(const WebRtc_Word32 id,
633 AudioFrame& audioFrame)
634{
635 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
636 "Channel::GetAudioFrame(id=%d)", id);
637
638 // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000639 if (_audioCodingModule.PlayoutData10Ms(audioFrame.sample_rate_hz_,
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +0000640 &audioFrame) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +0000641 {
642 WEBRTC_TRACE(kTraceError, kTraceVoice,
643 VoEId(_instanceId,_channelId),
644 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
andrew@webrtc.org7859e102012-01-13 00:30:11 +0000645 // In all likelihood, the audio in this frame is garbage. We return an
646 // error so that the audio mixer module doesn't add it to the mix. As
647 // a result, it won't be played out and the actions skipped here are
648 // irrelevant.
649 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +0000650 }
651
652 if (_RxVadDetection)
653 {
654 UpdateRxVadDetection(audioFrame);
655 }
656
657 // Convert module ID to internal VoE channel ID
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000658 audioFrame.id_ = VoEChannelId(audioFrame.id_);
niklase@google.com470e71d2011-07-07 08:21:25 +0000659 // Store speech type for dead-or-alive detection
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000660 _outputSpeechType = audioFrame.speech_type_;
niklase@google.com470e71d2011-07-07 08:21:25 +0000661
662 // Perform far-end AudioProcessing module processing on the received signal
663 if (_rxApmIsEnabled)
664 {
665 ApmProcessRx(audioFrame);
666 }
667
668 // Output volume scaling
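    // Scaling is skipped when the gain is within roughly 1% of unity,
    // which avoids touching every sample for a no-op adjustment.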
669 if (_outputGain < 0.99f || _outputGain > 1.01f)
670 {
671 AudioFrameOperations::ScaleWithSat(_outputGain, audioFrame);
672 }
673
674 // Scale left and/or right channel(s) if stereo and master balance is
675 // active
676
677 if (_panLeft != 1.0f || _panRight != 1.0f)
678 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000679 if (audioFrame.num_channels_ == 1)
niklase@google.com470e71d2011-07-07 08:21:25 +0000680 {
681 // Emulate stereo mode since panning is active.
682 // The mono signal is copied to both left and right channels here.
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +0000683 AudioFrameOperations::MonoToStereo(&audioFrame);
niklase@google.com470e71d2011-07-07 08:21:25 +0000684 }
685 // For true stereo mode (when we are receiving a stereo signal), no
686 // action is needed.
687
688 // Do the panning operation (the audio frame contains stereo at this
689 // stage)
690 AudioFrameOperations::Scale(_panLeft, _panRight, audioFrame);
691 }
692
693 // Mix decoded PCM output with file if file mixing is enabled
694 if (_outputFilePlaying)
695 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000696 MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
niklase@google.com470e71d2011-07-07 08:21:25 +0000697 }
698
699 // Place channel in on-hold state (~muted) if on-hold is activated
700 if (_outputIsOnHold)
701 {
702 AudioFrameOperations::Mute(audioFrame);
703 }
704
705 // External media
706 if (_outputExternalMedia)
707 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000708 CriticalSectionScoped cs(&_callbackCritSect);
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000709 const bool isStereo = (audioFrame.num_channels_ == 2);
niklase@google.com470e71d2011-07-07 08:21:25 +0000710 if (_outputExternalMediaCallbackPtr)
711 {
712 _outputExternalMediaCallbackPtr->Process(
713 _channelId,
714 kPlaybackPerChannel,
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000715 (WebRtc_Word16*)audioFrame.data_,
716 audioFrame.samples_per_channel_,
717 audioFrame.sample_rate_hz_,
niklase@google.com470e71d2011-07-07 08:21:25 +0000718 isStereo);
719 }
720 }
721
722 // Record playout if enabled
723 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000724 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000725
726 if (_outputFileRecording && _outputFileRecorderPtr)
727 {
niklas.enbom@webrtc.org5398d952012-03-26 08:11:25 +0000728 _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
niklase@google.com470e71d2011-07-07 08:21:25 +0000729 }
730 }
731
732 // Measure audio level (0-9)
733 _outputAudioLevel.ComputeLevel(audioFrame);
734
735 return 0;
736}
737
738WebRtc_Word32
739Channel::NeededFrequency(const WebRtc_Word32 id)
740{
741 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
742 "Channel::NeededFrequency(id=%d)", id);
743
744 int highestNeeded = 0;
745
746 // Determine highest needed receive frequency
747 WebRtc_Word32 receiveFrequency = _audioCodingModule.ReceiveFrequency();
748
749 // Return the bigger of playout and receive frequency in the ACM.
750 if (_audioCodingModule.PlayoutFrequency() > receiveFrequency)
751 {
752 highestNeeded = _audioCodingModule.PlayoutFrequency();
753 }
754 else
755 {
756 highestNeeded = receiveFrequency;
757 }
758
759 // Special case: if we're playing a file on the playout side, we take
760 // that frequency into consideration as well.
761 // This is not needed on the sending side, since the codec will
762 // limit the spectrum anyway.
763 if (_outputFilePlaying)
764 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000765 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000766 if (_outputFilePlayerPtr && _outputFilePlaying)
767 {
768 if (_outputFilePlayerPtr->Frequency() > highestNeeded)
769 {
770 highestNeeded = _outputFilePlayerPtr->Frequency();
771 }
772 }
773 }
774
775 return(highestNeeded);
776}
777
niklase@google.com470e71d2011-07-07 08:21:25 +0000778WebRtc_Word32
779Channel::CreateChannel(Channel*& channel,
780 const WebRtc_Word32 channelId,
781 const WebRtc_UWord32 instanceId)
782{
783 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
784 "Channel::CreateChannel(channelId=%d, instanceId=%d)",
785 channelId, instanceId);
786
787 channel = new Channel(channelId, instanceId);
788 if (channel == NULL)
789 {
790 WEBRTC_TRACE(kTraceMemory, kTraceVoice,
791 VoEId(instanceId,channelId),
792 "Channel::CreateChannel() unable to allocate memory for"
793 " channel");
794 return -1;
795 }
796 return 0;
797}
798
799void
800Channel::PlayNotification(const WebRtc_Word32 id,
801 const WebRtc_UWord32 durationMs)
802{
803 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
804 "Channel::PlayNotification(id=%d, durationMs=%d)",
805 id, durationMs);
806
807 // Not implemented yet.
808}
809
810void
811Channel::RecordNotification(const WebRtc_Word32 id,
812 const WebRtc_UWord32 durationMs)
813{
814 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
815 "Channel::RecordNotification(id=%d, durationMs=%d)",
816 id, durationMs);
817
818 // Not implemented yet.
819}
820
821void
822Channel::PlayFileEnded(const WebRtc_Word32 id)
823{
824 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
825 "Channel::PlayFileEnded(id=%d)", id);
826
827 if (id == _inputFilePlayerId)
828 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000829 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000830
831 _inputFilePlaying = false;
832 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
833 VoEId(_instanceId,_channelId),
834 "Channel::PlayFileEnded() => input file player module is"
835 " shutdown");
836 }
837 else if (id == _outputFilePlayerId)
838 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000839 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000840
841 _outputFilePlaying = false;
842 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
843 VoEId(_instanceId,_channelId),
844 "Channel::PlayFileEnded() => output file player module is"
845 " shutdown");
846 }
847}
848
849void
850Channel::RecordFileEnded(const WebRtc_Word32 id)
851{
852 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
853 "Channel::RecordFileEnded(id=%d)", id);
854
855 assert(id == _outputFileRecorderId);
856
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000857 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000858
859 _outputFileRecording = false;
860 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
861 VoEId(_instanceId,_channelId),
862 "Channel::RecordFileEnded() => output file recorder module is"
863 " shutdown");
864}
865
866Channel::Channel(const WebRtc_Word32 channelId,
867 const WebRtc_UWord32 instanceId) :
868 _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
869 _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
niklase@google.com470e71d2011-07-07 08:21:25 +0000870 _instanceId(instanceId),
xians@google.com22963ab2011-08-03 12:40:23 +0000871 _channelId(channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +0000872 _audioCodingModule(*AudioCodingModule::Create(
xians@google.com22963ab2011-08-03 12:40:23 +0000873 VoEModuleId(instanceId, channelId))),
niklase@google.com470e71d2011-07-07 08:21:25 +0000874#ifdef WEBRTC_SRTP
875 _srtpModule(*SrtpModule::CreateSrtpModule(VoEModuleId(instanceId,
876 channelId))),
877#endif
878 _rtpDumpIn(*RtpDump::CreateRtpDump()),
879 _rtpDumpOut(*RtpDump::CreateRtpDump()),
niklase@google.com470e71d2011-07-07 08:21:25 +0000880 _outputAudioLevel(),
niklase@google.com470e71d2011-07-07 08:21:25 +0000881 _externalTransport(false),
niklase@google.com470e71d2011-07-07 08:21:25 +0000882 _inputFilePlayerPtr(NULL),
883 _outputFilePlayerPtr(NULL),
884 _outputFileRecorderPtr(NULL),
885 // Avoid conflicts with other channels by adding 1024 - 1026;
886 // we won't use as many as 1024 channels.
887 _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
888 _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
889 _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
890 _inputFilePlaying(false),
891 _outputFilePlaying(false),
892 _outputFileRecording(false),
xians@google.com22963ab2011-08-03 12:40:23 +0000893 _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
894 _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
niklase@google.com470e71d2011-07-07 08:21:25 +0000895 _inputExternalMedia(false),
xians@google.com22963ab2011-08-03 12:40:23 +0000896 _outputExternalMedia(false),
niklase@google.com470e71d2011-07-07 08:21:25 +0000897 _inputExternalMediaCallbackPtr(NULL),
898 _outputExternalMediaCallbackPtr(NULL),
xians@google.com22963ab2011-08-03 12:40:23 +0000899 _encryptionRTPBufferPtr(NULL),
900 _decryptionRTPBufferPtr(NULL),
901 _encryptionRTCPBufferPtr(NULL),
902 _decryptionRTCPBufferPtr(NULL),
903 _timeStamp(0), // This is just an offset; the RTP module will add its own random offset
904 _sendTelephoneEventPayloadType(106),
905 _playoutTimeStampRTP(0),
906 _playoutTimeStampRTCP(0),
907 _numberOfDiscardedPackets(0),
908 _engineStatisticsPtr(NULL),
henrika@webrtc.org2919e952012-01-31 08:45:03 +0000909 _outputMixerPtr(NULL),
910 _transmitMixerPtr(NULL),
xians@google.com22963ab2011-08-03 12:40:23 +0000911 _moduleProcessThreadPtr(NULL),
912 _audioDeviceModulePtr(NULL),
913 _voiceEngineObserverPtr(NULL),
914 _callbackCritSectPtr(NULL),
915 _transportPtr(NULL),
916 _encryptionPtr(NULL),
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000917 _rtpAudioProc(NULL),
xians@google.com22963ab2011-08-03 12:40:23 +0000918 _rxAudioProcessingModulePtr(NULL),
xians@google.com22963ab2011-08-03 12:40:23 +0000919 _rxVadObserverPtr(NULL),
920 _oldVadDecision(-1),
921 _sendFrameType(0),
niklase@google.com470e71d2011-07-07 08:21:25 +0000922 _rtpObserverPtr(NULL),
923 _rtcpObserverPtr(NULL),
xians@google.com22963ab2011-08-03 12:40:23 +0000924 _outputIsOnHold(false),
925 _externalPlayout(false),
roosa@google.com1b60ceb2012-12-12 23:00:29 +0000926 _externalMixing(false),
xians@google.com22963ab2011-08-03 12:40:23 +0000927 _inputIsOnHold(false),
928 _playing(false),
929 _sending(false),
930 _receiving(false),
931 _mixFileWithMicrophone(false),
932 _rtpObserver(false),
933 _rtcpObserver(false),
niklase@google.com470e71d2011-07-07 08:21:25 +0000934 _mute(false),
935 _panLeft(1.0f),
936 _panRight(1.0f),
937 _outputGain(1.0f),
xians@google.com22963ab2011-08-03 12:40:23 +0000938 _encrypting(false),
939 _decrypting(false),
niklase@google.com470e71d2011-07-07 08:21:25 +0000940 _playOutbandDtmfEvent(false),
941 _playInbandDtmfEvent(false),
niklase@google.com470e71d2011-07-07 08:21:25 +0000942 _extraPayloadType(0),
943 _insertExtraRTPPacket(false),
944 _extraMarkerBit(false),
945 _lastLocalTimeStamp(0),
roosa@google.com0870f022012-12-12 21:31:41 +0000946 _lastRemoteTimeStamp(0),
niklase@google.com470e71d2011-07-07 08:21:25 +0000947 _lastPayloadType(0),
xians@google.com22963ab2011-08-03 12:40:23 +0000948 _includeAudioLevelIndication(false),
niklase@google.com470e71d2011-07-07 08:21:25 +0000949 _rtpPacketTimedOut(false),
950 _rtpPacketTimeOutIsEnabled(false),
951 _rtpTimeOutSeconds(0),
952 _connectionObserver(false),
953 _connectionObserverPtr(NULL),
954 _countAliveDetections(0),
955 _countDeadDetections(0),
956 _outputSpeechType(AudioFrame::kNormalSpeech),
957 _averageDelayMs(0),
958 _previousSequenceNumber(0),
959 _previousTimestamp(0),
960 _recPacketDelayMs(20),
961 _RxVadDetection(false),
962 _rxApmIsEnabled(false),
963 _rxAgcIsEnabled(false),
xians@google.com22963ab2011-08-03 12:40:23 +0000964 _rxNsIsEnabled(false)
niklase@google.com470e71d2011-07-07 08:21:25 +0000965{
966 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
967 "Channel::Channel() - ctor");
968 _inbandDtmfQueue.ResetDtmf();
969 _inbandDtmfGenerator.Init();
970 _outputAudioLevel.Clear();
971
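    // Configure the RTP/RTCP module so that all of its callbacks (incoming
    // data and messages, outgoing transport, RTCP feedback and audio
    // messages) are routed back to this Channel instance.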
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +0000972 RtpRtcp::Configuration configuration;
973 configuration.id = VoEModuleId(instanceId, channelId);
974 configuration.audio = true;
975 configuration.incoming_data = this;
976 configuration.incoming_messages = this;
977 configuration.outgoing_transport = this;
978 configuration.rtcp_feedback = this;
979 configuration.audio_messages = this;
980
981 _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
982
niklase@google.com470e71d2011-07-07 08:21:25 +0000983 // Create far end AudioProcessing Module
984 _rxAudioProcessingModulePtr = AudioProcessing::Create(
985 VoEModuleId(instanceId, channelId));
986}
987
988Channel::~Channel()
989{
990 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
991 "Channel::~Channel() - dtor");
992
993 if (_outputExternalMedia)
994 {
995 DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
996 }
997 if (_inputExternalMedia)
998 {
999 DeRegisterExternalMediaProcessing(kRecordingPerChannel);
1000 }
1001 StopSend();
niklase@google.com470e71d2011-07-07 08:21:25 +00001002 StopPlayout();
1003
1004 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001005 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001006 if (_inputFilePlayerPtr)
1007 {
1008 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1009 _inputFilePlayerPtr->StopPlayingFile();
1010 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1011 _inputFilePlayerPtr = NULL;
1012 }
1013 if (_outputFilePlayerPtr)
1014 {
1015 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1016 _outputFilePlayerPtr->StopPlayingFile();
1017 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1018 _outputFilePlayerPtr = NULL;
1019 }
1020 if (_outputFileRecorderPtr)
1021 {
1022 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
1023 _outputFileRecorderPtr->StopRecording();
1024 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
1025 _outputFileRecorderPtr = NULL;
1026 }
1027 }
1028
1029 // The order to safely shutdown modules in a channel is:
1030 // 1. De-register callbacks in modules
1031 // 2. De-register modules in process thread
1032 // 3. Destroy modules
niklase@google.com470e71d2011-07-07 08:21:25 +00001033 if (_audioCodingModule.RegisterTransportCallback(NULL) == -1)
1034 {
1035 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1036 VoEId(_instanceId,_channelId),
1037 "~Channel() failed to de-register transport callback"
1038 " (Audio coding module)");
1039 }
1040 if (_audioCodingModule.RegisterVADCallback(NULL) == -1)
1041 {
1042 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1043 VoEId(_instanceId,_channelId),
1044 "~Channel() failed to de-register VAD callback"
1045 " (Audio coding module)");
1046 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001047 // De-register modules in process thread
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001048 if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001049 {
1050 WEBRTC_TRACE(kTraceInfo, kTraceVoice,
1051 VoEId(_instanceId,_channelId),
1052 "~Channel() failed to deregister RTP/RTCP module");
1053 }
1054
1055 // Destroy modules
niklase@google.com470e71d2011-07-07 08:21:25 +00001056 AudioCodingModule::Destroy(&_audioCodingModule);
1057#ifdef WEBRTC_SRTP
1058 SrtpModule::DestroySrtpModule(&_srtpModule);
1059#endif
1060 if (_rxAudioProcessingModulePtr != NULL)
1061 {
1062 AudioProcessing::Destroy(_rxAudioProcessingModulePtr); // far end APM
1063 _rxAudioProcessingModulePtr = NULL;
1064 }
1065
1066 // End of modules shutdown
1067
1068 // Delete other objects
1069 RtpDump::DestroyRtpDump(&_rtpDumpIn);
1070 RtpDump::DestroyRtpDump(&_rtpDumpOut);
1071 delete [] _encryptionRTPBufferPtr;
1072 delete [] _decryptionRTPBufferPtr;
1073 delete [] _encryptionRTCPBufferPtr;
1074 delete [] _decryptionRTCPBufferPtr;
1075 delete &_callbackCritSect;
niklase@google.com470e71d2011-07-07 08:21:25 +00001076 delete &_fileCritSect;
1077}
1078
1079WebRtc_Word32
1080Channel::Init()
1081{
1082 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1083 "Channel::Init()");
1084
1085 // --- Initial sanity
1086
1087 if ((_engineStatisticsPtr == NULL) ||
1088 (_moduleProcessThreadPtr == NULL))
1089 {
1090 WEBRTC_TRACE(kTraceError, kTraceVoice,
1091 VoEId(_instanceId,_channelId),
1092 "Channel::Init() must call SetEngineInformation() first");
1093 return -1;
1094 }
1095
1096 // --- Add modules to the process thread (for periodic scheduling)
1097
1098 const bool processThreadFail =
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001099 ((_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0) ||
niklase@google.com470e71d2011-07-07 08:21:25 +00001100 false);
niklase@google.com470e71d2011-07-07 08:21:25 +00001101 if (processThreadFail)
1102 {
1103 _engineStatisticsPtr->SetLastError(
1104 VE_CANNOT_INIT_CHANNEL, kTraceError,
1105 "Channel::Init() modules not registered");
1106 return -1;
1107 }
pwestin@webrtc.orgc450a192012-01-04 15:00:12 +00001108 // --- ACM initialization
niklase@google.com470e71d2011-07-07 08:21:25 +00001109
1110 if ((_audioCodingModule.InitializeReceiver() == -1) ||
1111#ifdef WEBRTC_CODEC_AVT
1112 // out-of-band Dtmf tones are played out by default
1113 (_audioCodingModule.SetDtmfPlayoutStatus(true) == -1) ||
1114#endif
niklase@google.com470e71d2011-07-07 08:21:25 +00001115 (_audioCodingModule.InitializeSender() == -1))
1116 {
1117 _engineStatisticsPtr->SetLastError(
1118 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1119 "Channel::Init() unable to initialize the ACM - 1");
1120 return -1;
1121 }
1122
1123 // --- RTP/RTCP module initialization
1124
1125 // Ensure that RTCP is enabled by default for the created channel.
1126 // Note that the module will keep generating RTCP until it is explicitly
1127 // disabled by the user.
1128 // After StopListen (when no sockets exist), RTCP packets will no longer
1129 // be transmitted since the Transport object will then be invalid.
1130
1131 const bool rtpRtcpFail =
turaj@webrtc.orgb7edd062013-03-12 22:27:27 +00001132 ((_rtpRtcpModule->SetTelephoneEventForwardToDecoder(true) == -1) ||
niklase@google.com470e71d2011-07-07 08:21:25 +00001133 // RTCP is enabled by default
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001134 (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1));
niklase@google.com470e71d2011-07-07 08:21:25 +00001135 if (rtpRtcpFail)
1136 {
1137 _engineStatisticsPtr->SetLastError(
1138 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1139 "Channel::Init() RTP/RTCP module not initialized");
1140 return -1;
1141 }
1142
1143 // --- Register all permanent callbacks
niklase@google.com470e71d2011-07-07 08:21:25 +00001144 const bool fail =
niklase@google.com470e71d2011-07-07 08:21:25 +00001145 (_audioCodingModule.RegisterTransportCallback(this) == -1) ||
1146 (_audioCodingModule.RegisterVADCallback(this) == -1);
1147
1148 if (fail)
1149 {
1150 _engineStatisticsPtr->SetLastError(
1151 VE_CANNOT_INIT_CHANNEL, kTraceError,
1152 "Channel::Init() callbacks not registered");
1153 return -1;
1154 }
1155
1156 // --- Register all supported codecs to the receiving side of the
1157 // RTP/RTCP module
1158
1159 CodecInst codec;
1160 const WebRtc_UWord8 nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
1161
1162 for (int idx = 0; idx < nSupportedCodecs; idx++)
1163 {
1164 // Open up the RTP/RTCP receiver for all supported codecs
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001165 if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001166 (_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
niklase@google.com470e71d2011-07-07 08:21:25 +00001167 {
1168 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1169 VoEId(_instanceId,_channelId),
1170 "Channel::Init() unable to register %s (%d/%d/%d/%d) "
1171 "to RTP/RTCP receiver",
1172 codec.plname, codec.pltype, codec.plfreq,
1173 codec.channels, codec.rate);
1174 }
1175 else
1176 {
1177 WEBRTC_TRACE(kTraceInfo, kTraceVoice,
1178 VoEId(_instanceId,_channelId),
1179 "Channel::Init() %s (%d/%d/%d/%d) has been added to "
1180 "the RTP/RTCP receiver",
1181 codec.plname, codec.pltype, codec.plfreq,
1182 codec.channels, codec.rate);
1183 }
1184
1185 // Ensure that PCMU is used as default codec on the sending side
tina.legrand@webrtc.org45175852012-06-01 09:27:35 +00001186 if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
niklase@google.com470e71d2011-07-07 08:21:25 +00001187 {
1188 SetSendCodec(codec);
1189 }
1190
1191 // Register default PT for outband 'telephone-event'
1192 if (!STR_CASE_CMP(codec.plname, "telephone-event"))
1193 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001194 if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
niklase@google.com470e71d2011-07-07 08:21:25 +00001195 (_audioCodingModule.RegisterReceiveCodec(codec) == -1))
1196 {
1197 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1198 VoEId(_instanceId,_channelId),
1199 "Channel::Init() failed to register outband "
1200 "'telephone-event' (%d/%d) correctly",
1201 codec.pltype, codec.plfreq);
1202 }
1203 }
1204
1205 if (!STR_CASE_CMP(codec.plname, "CN"))
1206 {
1207 if ((_audioCodingModule.RegisterSendCodec(codec) == -1) ||
1208 (_audioCodingModule.RegisterReceiveCodec(codec) == -1) ||
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001209 (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
niklase@google.com470e71d2011-07-07 08:21:25 +00001210 {
1211 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1212 VoEId(_instanceId,_channelId),
1213 "Channel::Init() failed to register CN (%d/%d) "
1214 "correctly - 1",
1215 codec.pltype, codec.plfreq);
1216 }
1217 }
1218#ifdef WEBRTC_CODEC_RED
1219 // Register RED to the receiving side of the ACM.
1220 // We will not receive an OnInitializeDecoder() callback for RED.
1221 if (!STR_CASE_CMP(codec.plname, "RED"))
1222 {
1223 if (_audioCodingModule.RegisterReceiveCodec(codec) == -1)
1224 {
1225 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1226 VoEId(_instanceId,_channelId),
1227 "Channel::Init() failed to register RED (%d/%d) "
1228 "correctly",
1229 codec.pltype, codec.plfreq);
1230 }
1231 }
1232#endif
1233 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001234 // Initialize the far end AP module
1235 // Using 8 kHz as the initial Fs, the same as in transmission. It may be
1236 // changed when the first audio is received.
1237 if (_rxAudioProcessingModulePtr == NULL)
1238 {
1239 _engineStatisticsPtr->SetLastError(
1240 VE_NO_MEMORY, kTraceCritical,
1241 "Channel::Init() failed to create the far-end AudioProcessing"
1242 " module");
1243 return -1;
1244 }
1245
niklase@google.com470e71d2011-07-07 08:21:25 +00001246 if (_rxAudioProcessingModulePtr->set_sample_rate_hz(8000))
1247 {
1248 _engineStatisticsPtr->SetLastError(
1249 VE_APM_ERROR, kTraceWarning,
1250 "Channel::Init() failed to set the sample rate to 8K for"
1251 " far-end AP module");
1252 }
1253
1254 if (_rxAudioProcessingModulePtr->set_num_channels(1, 1) != 0)
1255 {
1256 _engineStatisticsPtr->SetLastError(
1257 VE_SOUNDCARD_ERROR, kTraceWarning,
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00001258 "Init() failed to set channels for the primary audio stream");
niklase@google.com470e71d2011-07-07 08:21:25 +00001259 }
1260
1261 if (_rxAudioProcessingModulePtr->high_pass_filter()->Enable(
1262 WEBRTC_VOICE_ENGINE_RX_HP_DEFAULT_STATE) != 0)
1263 {
1264 _engineStatisticsPtr->SetLastError(
1265 VE_APM_ERROR, kTraceWarning,
1266 "Channel::Init() failed to set the high-pass filter for"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00001267 " far-end AP module");
niklase@google.com470e71d2011-07-07 08:21:25 +00001268 }
1269
1270 if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(
1271 (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE) != 0)
1272 {
1273 _engineStatisticsPtr->SetLastError(
1274 VE_APM_ERROR, kTraceWarning,
1275 "Init() failed to set noise reduction level for far-end"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00001276 " AP module");
niklase@google.com470e71d2011-07-07 08:21:25 +00001277 }
1278 if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(
1279 WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_STATE) != 0)
1280 {
1281 _engineStatisticsPtr->SetLastError(
1282 VE_APM_ERROR, kTraceWarning,
1283 "Init() failed to set noise reduction state for far-end"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00001284 " AP module");
niklase@google.com470e71d2011-07-07 08:21:25 +00001285 }
1286
1287 if (_rxAudioProcessingModulePtr->gain_control()->set_mode(
1288 (GainControl::Mode)WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_MODE) != 0)
1289 {
1290 _engineStatisticsPtr->SetLastError(
1291 VE_APM_ERROR, kTraceWarning,
1292 "Init() failed to set AGC mode for far-end AP module");
1293 }
1294 if (_rxAudioProcessingModulePtr->gain_control()->Enable(
1295 WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_STATE) != 0)
1296 {
1297 _engineStatisticsPtr->SetLastError(
1298 VE_APM_ERROR, kTraceWarning,
1299 "Init() failed to set AGC state for far-end AP module");
1300 }
1301
1302 return 0;
1303}
1304
1305WebRtc_Word32
1306Channel::SetEngineInformation(Statistics& engineStatistics,
1307 OutputMixer& outputMixer,
1308 voe::TransmitMixer& transmitMixer,
1309 ProcessThread& moduleProcessThread,
1310 AudioDeviceModule& audioDeviceModule,
1311 VoiceEngineObserver* voiceEngineObserver,
1312 CriticalSectionWrapper* callbackCritSect)
1313{
1314 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1315 "Channel::SetEngineInformation()");
1316 _engineStatisticsPtr = &engineStatistics;
1317 _outputMixerPtr = &outputMixer;
1318 _transmitMixerPtr = &transmitMixer;
1319 _moduleProcessThreadPtr = &moduleProcessThread;
1320 _audioDeviceModulePtr = &audioDeviceModule;
1321 _voiceEngineObserverPtr = voiceEngineObserver;
1322 _callbackCritSectPtr = callbackCritSect;
1323 return 0;
1324}
1325
1326WebRtc_Word32
1327Channel::UpdateLocalTimeStamp()
1328{
1329
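    // Advance the local RTP timestamp by the number of samples in the last
    // processed audio frame; RTP audio timestamps tick at the sample rate.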
andrew@webrtc.org63a50982012-05-02 23:56:37 +00001330 _timeStamp += _audioFrame.samples_per_channel_;
niklase@google.com470e71d2011-07-07 08:21:25 +00001331 return 0;
1332}
1333
1334WebRtc_Word32
1335Channel::StartPlayout()
1336{
1337 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1338 "Channel::StartPlayout()");
1339 if (_playing)
1340 {
1341 return 0;
1342 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00001343
1344 if (!_externalMixing) {
1345 // Add the participant as a candidate for mixing.
1346 if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
1347 {
1348 _engineStatisticsPtr->SetLastError(
1349 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1350 "StartPlayout() failed to add participant to mixer");
1351 return -1;
1352 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001353 }
1354
1355 _playing = true;
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001356
1357 if (RegisterFilePlayingToMixer() != 0)
1358 return -1;
1359
niklase@google.com470e71d2011-07-07 08:21:25 +00001360 return 0;
1361}
1362
1363WebRtc_Word32
1364Channel::StopPlayout()
1365{
1366 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1367 "Channel::StopPlayout()");
1368 if (!_playing)
1369 {
1370 return 0;
1371 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00001372
1373 if (!_externalMixing) {
1374 // Remove the participant as a candidate for mixing.
1375 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
1376 {
1377 _engineStatisticsPtr->SetLastError(
1378 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1379 "StopPlayout() failed to remove participant from mixer");
1380 return -1;
1381 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001382 }
1383
1384 _playing = false;
1385 _outputAudioLevel.Clear();
1386
1387 return 0;
1388}
1389
1390WebRtc_Word32
1391Channel::StartSend()
1392{
1393 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1394 "Channel::StartSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001395 {
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001396 // A lock is needed because |_sending| can be accessed or modified by
1397 // another thread at the same time.
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001398 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001399
1400 if (_sending)
1401 {
1402 return 0;
1403 }
1404 _sending = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00001405 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001406
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001407 if (_rtpRtcpModule->SetSendingStatus(true) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001408 {
1409 _engineStatisticsPtr->SetLastError(
1410 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1411 "StartSend() RTP/RTCP failed to start sending");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001412 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001413 _sending = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00001414 return -1;
1415 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001416
niklase@google.com470e71d2011-07-07 08:21:25 +00001417 return 0;
1418}
1419
1420WebRtc_Word32
1421Channel::StopSend()
1422{
1423 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1424 "Channel::StopSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001425 {
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001426 // A lock is needed because |_sending| can be accessed or modified by
1427 // another thread at the same time.
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001428 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001429
1430 if (!_sending)
1431 {
1432 return 0;
1433 }
1434 _sending = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00001435 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001436
niklase@google.com470e71d2011-07-07 08:21:25 +00001437 // Reset sending SSRC and sequence number and triggers direct transmission
1438 // of RTCP BYE
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001439 if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
1440 _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001441 {
1442 _engineStatisticsPtr->SetLastError(
1443 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1444 "StartSend() RTP/RTCP failed to stop sending");
1445 }
1446
niklase@google.com470e71d2011-07-07 08:21:25 +00001447 return 0;
1448}
1449
1450WebRtc_Word32
1451Channel::StartReceiving()
1452{
1453 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1454 "Channel::StartReceiving()");
1455 if (_receiving)
1456 {
1457 return 0;
1458 }
1459 // If external transport is used, we will only initialize/set the variables
1460 // after this section, since we are not using the WebRtc transport but
1461 // still need to keep track of, e.g., whether we are receiving.
niklase@google.com470e71d2011-07-07 08:21:25 +00001462 _receiving = true;
1463 _numberOfDiscardedPackets = 0;
1464 return 0;
1465}
1466
1467WebRtc_Word32
1468Channel::StopReceiving()
1469{
1470 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1471 "Channel::StopReceiving()");
1472 if (!_receiving)
1473 {
1474 return 0;
1475 }
henrika@webrtc.orgaf71f0e2011-12-05 07:02:22 +00001476 // Recover DTMF detection status.
turaj@webrtc.orgb7edd062013-03-12 22:27:27 +00001477 WebRtc_Word32 ret = _rtpRtcpModule->SetTelephoneEventForwardToDecoder(true);
henrika@webrtc.orgaf71f0e2011-12-05 07:02:22 +00001478 if (ret != 0) {
1479 _engineStatisticsPtr->SetLastError(
1480 VE_INVALID_OPERATION, kTraceWarning,
1481 "StopReceiving() failed to restore telephone-event status.");
1482 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001483 RegisterReceiveCodecsToRTPModule();
1484 _receiving = false;
1485 return 0;
1486}
1487
niklase@google.com470e71d2011-07-07 08:21:25 +00001488WebRtc_Word32
1489Channel::SetNetEQPlayoutMode(NetEqModes mode)
1490{
1491 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1492 "Channel::SetNetEQPlayoutMode()");
1493 AudioPlayoutMode playoutMode(voice);
1494 switch (mode)
1495 {
1496 case kNetEqDefault:
1497 playoutMode = voice;
1498 break;
1499 case kNetEqStreaming:
1500 playoutMode = streaming;
1501 break;
1502 case kNetEqFax:
1503 playoutMode = fax;
1504 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001505 case kNetEqOff:
1506 playoutMode = off;
1507 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00001508 }
1509 if (_audioCodingModule.SetPlayoutMode(playoutMode) != 0)
1510 {
1511 _engineStatisticsPtr->SetLastError(
1512 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1513 "SetNetEQPlayoutMode() failed to set playout mode");
1514 return -1;
1515 }
1516 return 0;
1517}
1518
1519WebRtc_Word32
1520Channel::GetNetEQPlayoutMode(NetEqModes& mode)
1521{
1522 const AudioPlayoutMode playoutMode = _audioCodingModule.PlayoutMode();
1523 switch (playoutMode)
1524 {
1525 case voice:
1526 mode = kNetEqDefault;
1527 break;
1528 case streaming:
1529 mode = kNetEqStreaming;
1530 break;
1531 case fax:
1532 mode = kNetEqFax;
1533 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001534 case off:
1535 mode = kNetEqOff;
niklase@google.com470e71d2011-07-07 08:21:25 +00001536 }
1537 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
1538 VoEId(_instanceId,_channelId),
1539 "Channel::GetNetEQPlayoutMode() => mode=%u", mode);
1540 return 0;
1541}
1542
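// Puts the channel on (or takes it off) hold. kHoldSendAndPlay affects both
// directions, kHoldPlayOnly only the playout side, and kHoldSendOnly only
// the sending side.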
1543WebRtc_Word32
niklase@google.com470e71d2011-07-07 08:21:25 +00001544Channel::SetOnHoldStatus(bool enable, OnHoldModes mode)
1545{
1546 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1547 "Channel::SetOnHoldStatus()");
1548 if (mode == kHoldSendAndPlay)
1549 {
1550 _outputIsOnHold = enable;
1551 _inputIsOnHold = enable;
1552 }
1553 else if (mode == kHoldPlayOnly)
1554 {
1555 _outputIsOnHold = enable;
1556 }
    else if (mode == kHoldSendOnly)
1558 {
1559 _inputIsOnHold = enable;
1560 }
1561 return 0;
1562}
1563
1564WebRtc_Word32
1565Channel::GetOnHoldStatus(bool& enabled, OnHoldModes& mode)
1566{
1567 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1568 "Channel::GetOnHoldStatus()");
1569 enabled = (_outputIsOnHold || _inputIsOnHold);
1570 if (_outputIsOnHold && _inputIsOnHold)
1571 {
1572 mode = kHoldSendAndPlay;
1573 }
1574 else if (_outputIsOnHold && !_inputIsOnHold)
1575 {
1576 mode = kHoldPlayOnly;
1577 }
1578 else if (!_outputIsOnHold && _inputIsOnHold)
1579 {
1580 mode = kHoldSendOnly;
1581 }
1582 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1583 "Channel::GetOnHoldStatus() => enabled=%d, mode=%d",
1584 enabled, mode);
1585 return 0;
1586}
1587
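// Registers a single VoiceEngineObserver for callbacks from this channel;
// registration fails if an observer is already installed.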
1588WebRtc_Word32
1589Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
1590{
1591 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1592 "Channel::RegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001593 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001594
1595 if (_voiceEngineObserverPtr)
1596 {
1597 _engineStatisticsPtr->SetLastError(
1598 VE_INVALID_OPERATION, kTraceError,
1599 "RegisterVoiceEngineObserver() observer already enabled");
1600 return -1;
1601 }
1602 _voiceEngineObserverPtr = &observer;
1603 return 0;
1604}
1605
1606WebRtc_Word32
1607Channel::DeRegisterVoiceEngineObserver()
1608{
1609 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1610 "Channel::DeRegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001611 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001612
1613 if (!_voiceEngineObserverPtr)
1614 {
1615 _engineStatisticsPtr->SetLastError(
1616 VE_INVALID_OPERATION, kTraceWarning,
1617 "DeRegisterVoiceEngineObserver() observer already disabled");
1618 return 0;
1619 }
1620 _voiceEngineObserverPtr = NULL;
1621 return 0;
1622}
1623
1624WebRtc_Word32
niklase@google.com470e71d2011-07-07 08:21:25 +00001625Channel::GetSendCodec(CodecInst& codec)
1626{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001627 return (_audioCodingModule.SendCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001628}
1629
1630WebRtc_Word32
1631Channel::GetRecCodec(CodecInst& codec)
1632{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001633 return (_audioCodingModule.ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001634}
1635
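// Registers |codec| as the send codec in both the ACM and the RTP/RTCP
// module. A failed RTP/RTCP registration is retried once after de-registering
// the payload type, and the audio packet size is updated as well.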
1636WebRtc_Word32
1637Channel::SetSendCodec(const CodecInst& codec)
1638{
1639 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1640 "Channel::SetSendCodec()");
1641
1642 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1643 {
1644 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1645 "SetSendCodec() failed to register codec to ACM");
1646 return -1;
1647 }
1648
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001649 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001650 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001651 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1652 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001653 {
1654 WEBRTC_TRACE(
1655 kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1656 "SetSendCodec() failed to register codec to"
1657 " RTP/RTCP module");
1658 return -1;
1659 }
1660 }
1661
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001662 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001663 {
1664 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1665 "SetSendCodec() failed to set audio packet size");
1666 return -1;
1667 }
1668
1669 return 0;
1670}
1671
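// Updates the VAD/DTX settings in the ACM. Since DTX cannot be active without
// VAD, DTX is forced off whenever VAD is disabled.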
1672WebRtc_Word32
1673Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
1674{
1675 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1676 "Channel::SetVADStatus(mode=%d)", mode);
1677 // To disable VAD, DTX must be disabled too
1678 disableDTX = ((enableVAD == false) ? true : disableDTX);
1679 if (_audioCodingModule.SetVAD(!disableDTX, enableVAD, mode) != 0)
1680 {
1681 _engineStatisticsPtr->SetLastError(
1682 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1683 "SetVADStatus() failed to set VAD");
1684 return -1;
1685 }
1686 return 0;
1687}
1688
1689WebRtc_Word32
1690Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
1691{
1692 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1693 "Channel::GetVADStatus");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001694 if (_audioCodingModule.VAD(&disabledDTX, &enabledVAD, &mode) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001695 {
1696 _engineStatisticsPtr->SetLastError(
1697 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1698 "GetVADStatus() failed to get VAD status");
1699 return -1;
1700 }
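    // The ACM reports whether DTX is enabled; VoE exposes the inverted flag.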
1701 disabledDTX = !disabledDTX;
1702 return 0;
1703}
1704
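// Registers the receive payload type in the RTP/RTCP module and the ACM, or
// de-registers it when codec.pltype is -1. Not allowed while the channel is
// playing or listening.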
1705WebRtc_Word32
1706Channel::SetRecPayloadType(const CodecInst& codec)
1707{
1708 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1709 "Channel::SetRecPayloadType()");
1710
1711 if (_playing)
1712 {
1713 _engineStatisticsPtr->SetLastError(
1714 VE_ALREADY_PLAYING, kTraceError,
1715 "SetRecPayloadType() unable to set PT while playing");
1716 return -1;
1717 }
1718 if (_receiving)
1719 {
1720 _engineStatisticsPtr->SetLastError(
1721 VE_ALREADY_LISTENING, kTraceError,
1722 "SetRecPayloadType() unable to set PT while listening");
1723 return -1;
1724 }
1725
1726 if (codec.pltype == -1)
1727 {
1728 // De-register the selected codec (RTP/RTCP module and ACM)
1729
1730 WebRtc_Word8 pltype(-1);
1731 CodecInst rxCodec = codec;
1732
1733 // Get payload type for the given codec
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001734 _rtpRtcpModule->ReceivePayloadType(rxCodec, &pltype);
niklase@google.com470e71d2011-07-07 08:21:25 +00001735 rxCodec.pltype = pltype;
1736
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001737 if (_rtpRtcpModule->DeRegisterReceivePayload(pltype) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001738 {
1739 _engineStatisticsPtr->SetLastError(
1740 VE_RTP_RTCP_MODULE_ERROR,
1741 kTraceError,
1742 "SetRecPayloadType() RTP/RTCP-module deregistration "
1743 "failed");
1744 return -1;
1745 }
1746 if (_audioCodingModule.UnregisterReceiveCodec(rxCodec.pltype) != 0)
1747 {
1748 _engineStatisticsPtr->SetLastError(
1749 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1750 "SetRecPayloadType() ACM deregistration failed - 1");
1751 return -1;
1752 }
1753 return 0;
1754 }
1755
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001756 if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001757 {
1758 // First attempt to register failed => de-register and try again
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001759 _rtpRtcpModule->DeRegisterReceivePayload(codec.pltype);
1760 if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001761 {
1762 _engineStatisticsPtr->SetLastError(
1763 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1764 "SetRecPayloadType() RTP/RTCP-module registration failed");
1765 return -1;
1766 }
1767 }
1768 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1769 {
1770 _audioCodingModule.UnregisterReceiveCodec(codec.pltype);
1771 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1772 {
1773 _engineStatisticsPtr->SetLastError(
1774 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1775 "SetRecPayloadType() ACM registration failed - 1");
1776 return -1;
1777 }
1778 }
1779 return 0;
1780}
1781
1782WebRtc_Word32
1783Channel::GetRecPayloadType(CodecInst& codec)
1784{
1785 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1786 "Channel::GetRecPayloadType()");
1787 WebRtc_Word8 payloadType(-1);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001788 if (_rtpRtcpModule->ReceivePayloadType(codec, &payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001789 {
1790 _engineStatisticsPtr->SetLastError(
henrika@webrtc.org37198002012-06-18 11:00:12 +00001791 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
niklase@google.com470e71d2011-07-07 08:21:25 +00001792 "GetRecPayloadType() failed to retrieve RX payload type");
1793 return -1;
1794 }
1795 codec.pltype = payloadType;
1796 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1797 "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
1798 return 0;
1799}
1800
1801WebRtc_Word32
1802Channel::SetAMREncFormat(AmrMode mode)
1803{
1804 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1805 "Channel::SetAMREncFormat()");
1806
1807 // ACM doesn't support AMR
1808 return -1;
1809}
1810
1811WebRtc_Word32
1812Channel::SetAMRDecFormat(AmrMode mode)
1813{
1814 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1815 "Channel::SetAMRDecFormat()");
1816
1817 // ACM doesn't support AMR
1818 return -1;
1819}
1820
1821WebRtc_Word32
1822Channel::SetAMRWbEncFormat(AmrMode mode)
1823{
1824 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1825 "Channel::SetAMRWbEncFormat()");
1826
1827 // ACM doesn't support AMR
1828 return -1;
1829
1830}
1831
1832WebRtc_Word32
1833Channel::SetAMRWbDecFormat(AmrMode mode)
1834{
1835 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1836 "Channel::SetAMRWbDecFormat()");
1837
1838 // ACM doesn't support AMR
1839 return -1;
1840}
1841
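// Registers comfort noise (CN) with a caller-supplied dynamic payload type
// for the requested payload frequency (16 or 32 kHz) in both the ACM and the
// RTP/RTCP module.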
1842WebRtc_Word32
1843Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
1844{
1845 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1846 "Channel::SetSendCNPayloadType()");
1847
1848 CodecInst codec;
1849 WebRtc_Word32 samplingFreqHz(-1);
tina.legrand@webrtc.org45175852012-06-01 09:27:35 +00001850 const int kMono = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001851 if (frequency == kFreq32000Hz)
1852 samplingFreqHz = 32000;
1853 else if (frequency == kFreq16000Hz)
1854 samplingFreqHz = 16000;
1855
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001856 if (_audioCodingModule.Codec("CN", &codec, samplingFreqHz, kMono) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001857 {
1858 _engineStatisticsPtr->SetLastError(
1859 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1860 "SetSendCNPayloadType() failed to retrieve default CN codec "
1861 "settings");
1862 return -1;
1863 }
1864
1865 // Modify the payload type (must be set to dynamic range)
1866 codec.pltype = type;
1867
1868 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1869 {
1870 _engineStatisticsPtr->SetLastError(
1871 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1872 "SetSendCNPayloadType() failed to register CN to ACM");
1873 return -1;
1874 }
1875
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001876 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001877 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001878 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1879 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001880 {
1881 _engineStatisticsPtr->SetLastError(
1882 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1883 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1884 "module");
1885 return -1;
1886 }
1887 }
1888 return 0;
1889}
1890
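// Configures the initial target rate of the iSAC bandwidth estimator. Only
// valid when iSAC is the current send codec; the allowed rate range depends
// on whether wideband (16 kHz) or super-wideband (32 kHz) iSAC is used, and
// 0 selects the default rate.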
1891WebRtc_Word32
1892Channel::SetISACInitTargetRate(int rateBps, bool useFixedFrameSize)
1893{
1894 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1895 "Channel::SetISACInitTargetRate()");
1896
1897 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001898 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001899 {
1900 _engineStatisticsPtr->SetLastError(
1901 VE_CODEC_ERROR, kTraceError,
1902 "SetISACInitTargetRate() failed to retrieve send codec");
1903 return -1;
1904 }
1905 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1906 {
        // This API is only valid if iSAC is set up to run in channel-adaptive
        // mode.
        // We do not validate the adaptive mode here; that is done later in
        // the ConfigISACBandwidthEstimator() API.
1911 _engineStatisticsPtr->SetLastError(
1912 VE_CODEC_ERROR, kTraceError,
1913 "SetISACInitTargetRate() send codec is not iSAC");
1914 return -1;
1915 }
1916
1917 WebRtc_UWord8 initFrameSizeMsec(0);
1918 if (16000 == sendCodec.plfreq)
1919 {
        // Note that 0 is a valid value and corresponds to "use default".
1921 if ((rateBps != 0 &&
1922 rateBps < kVoiceEngineMinIsacInitTargetRateBpsWb) ||
1923 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsWb))
1924 {
1925 _engineStatisticsPtr->SetLastError(
1926 VE_INVALID_ARGUMENT, kTraceError,
1927 "SetISACInitTargetRate() invalid target rate - 1");
1928 return -1;
1929 }
1930 // 30 or 60ms
1931 initFrameSizeMsec = (WebRtc_UWord8)(sendCodec.pacsize / 16);
1932 }
1933 else if (32000 == sendCodec.plfreq)
1934 {
1935 if ((rateBps != 0 &&
1936 rateBps < kVoiceEngineMinIsacInitTargetRateBpsSwb) ||
1937 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsSwb))
1938 {
1939 _engineStatisticsPtr->SetLastError(
1940 VE_INVALID_ARGUMENT, kTraceError,
1941 "SetISACInitTargetRate() invalid target rate - 2");
1942 return -1;
1943 }
1944 initFrameSizeMsec = (WebRtc_UWord8)(sendCodec.pacsize / 32); // 30ms
1945 }
1946
1947 if (_audioCodingModule.ConfigISACBandwidthEstimator(
1948 initFrameSizeMsec, rateBps, useFixedFrameSize) == -1)
1949 {
1950 _engineStatisticsPtr->SetLastError(
1951 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1952 "SetISACInitTargetRate() iSAC BWE config failed");
1953 return -1;
1954 }
1955
1956 return 0;
1957}
1958
1959WebRtc_Word32
1960Channel::SetISACMaxRate(int rateBps)
1961{
1962 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1963 "Channel::SetISACMaxRate()");
1964
1965 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001966 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001967 {
1968 _engineStatisticsPtr->SetLastError(
1969 VE_CODEC_ERROR, kTraceError,
1970 "SetISACMaxRate() failed to retrieve send codec");
1971 return -1;
1972 }
1973 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1974 {
1975 // This API is only valid if iSAC is selected as sending codec.
1976 _engineStatisticsPtr->SetLastError(
1977 VE_CODEC_ERROR, kTraceError,
1978 "SetISACMaxRate() send codec is not iSAC");
1979 return -1;
1980 }
1981 if (16000 == sendCodec.plfreq)
1982 {
1983 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsWb) ||
1984 (rateBps > kVoiceEngineMaxIsacMaxRateBpsWb))
1985 {
1986 _engineStatisticsPtr->SetLastError(
1987 VE_INVALID_ARGUMENT, kTraceError,
1988 "SetISACMaxRate() invalid max rate - 1");
1989 return -1;
1990 }
1991 }
1992 else if (32000 == sendCodec.plfreq)
1993 {
1994 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsSwb) ||
1995 (rateBps > kVoiceEngineMaxIsacMaxRateBpsSwb))
1996 {
1997 _engineStatisticsPtr->SetLastError(
1998 VE_INVALID_ARGUMENT, kTraceError,
1999 "SetISACMaxRate() invalid max rate - 2");
2000 return -1;
2001 }
2002 }
2003 if (_sending)
2004 {
2005 _engineStatisticsPtr->SetLastError(
2006 VE_SENDING, kTraceError,
2007 "SetISACMaxRate() unable to set max rate while sending");
2008 return -1;
2009 }
2010
2011 // Set the maximum instantaneous rate of iSAC (works for both adaptive
2012 // and non-adaptive mode)
2013 if (_audioCodingModule.SetISACMaxRate(rateBps) == -1)
2014 {
2015 _engineStatisticsPtr->SetLastError(
2016 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2017 "SetISACMaxRate() failed to set max rate");
2018 return -1;
2019 }
2020
2021 return 0;
2022}
2023
2024WebRtc_Word32
2025Channel::SetISACMaxPayloadSize(int sizeBytes)
2026{
2027 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2028 "Channel::SetISACMaxPayloadSize()");
2029 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00002030 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00002031 {
2032 _engineStatisticsPtr->SetLastError(
2033 VE_CODEC_ERROR, kTraceError,
2034 "SetISACMaxPayloadSize() failed to retrieve send codec");
2035 return -1;
2036 }
2037 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
2038 {
2039 _engineStatisticsPtr->SetLastError(
2040 VE_CODEC_ERROR, kTraceError,
2041 "SetISACMaxPayloadSize() send codec is not iSAC");
2042 return -1;
2043 }
2044 if (16000 == sendCodec.plfreq)
2045 {
2046 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesWb) ||
2047 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesWb))
2048 {
2049 _engineStatisticsPtr->SetLastError(
2050 VE_INVALID_ARGUMENT, kTraceError,
2051 "SetISACMaxPayloadSize() invalid max payload - 1");
2052 return -1;
2053 }
2054 }
2055 else if (32000 == sendCodec.plfreq)
2056 {
2057 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesSwb) ||
2058 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb))
2059 {
2060 _engineStatisticsPtr->SetLastError(
2061 VE_INVALID_ARGUMENT, kTraceError,
2062 "SetISACMaxPayloadSize() invalid max payload - 2");
2063 return -1;
2064 }
2065 }
2066 if (_sending)
2067 {
2068 _engineStatisticsPtr->SetLastError(
2069 VE_SENDING, kTraceError,
            "SetISACMaxPayloadSize() unable to set max payload size while"
            " sending");
2071 return -1;
2072 }
2073
2074 if (_audioCodingModule.SetISACMaxPayloadSize(sizeBytes) == -1)
2075 {
2076 _engineStatisticsPtr->SetLastError(
2077 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2078 "SetISACMaxPayloadSize() failed to set max payload size");
2079 return -1;
2080 }
2081 return 0;
2082}
2083
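// Enables external transport: the supplied Transport object is used for all
// outgoing packets instead of the built-in socket transport, and incoming
// packets must be injected via ReceivedRTPPacket()/ReceivedRTCPPacket().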
2084WebRtc_Word32 Channel::RegisterExternalTransport(Transport& transport)
2085{
2086 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2087 "Channel::RegisterExternalTransport()");
2088
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002089 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002090
niklase@google.com470e71d2011-07-07 08:21:25 +00002091 if (_externalTransport)
2092 {
2093 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
2094 kTraceError,
2095 "RegisterExternalTransport() external transport already enabled");
2096 return -1;
2097 }
2098 _externalTransport = true;
2099 _transportPtr = &transport;
2100 return 0;
2101}
2102
2103WebRtc_Word32
2104Channel::DeRegisterExternalTransport()
2105{
2106 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2107 "Channel::DeRegisterExternalTransport()");
2108
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002109 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.org83661f52011-11-25 10:58:15 +00002110
niklase@google.com470e71d2011-07-07 08:21:25 +00002111 if (!_transportPtr)
2112 {
2113 _engineStatisticsPtr->SetLastError(
2114 VE_INVALID_OPERATION, kTraceWarning,
2115 "DeRegisterExternalTransport() external transport already "
2116 "disabled");
2117 return 0;
2118 }
2119 _externalTransport = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002120 _transportPtr = NULL;
2121 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "DeRegisterExternalTransport() external transport disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002123 return 0;
2124}
2125
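// Entry point for RTP packets delivered by the application when external
// transport is used. The packet is dumped to file (if RTP dump is enabled)
// and handed to the RTP/RTCP module for parsing; the payload reaches the ACM
// through the OnReceivedPayloadData() callback. A rough usage sketch from the
// application side (illustrative only; assumes a VoENetwork interface pointer
// obtained from the engine):
//   network->RegisterExternalTransport(channel, myTransport);
//   network->ReceivedRTPPacket(channel, data, length);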
2126WebRtc_Word32
2127Channel::ReceivedRTPPacket(const WebRtc_Word8* data, WebRtc_Word32 length)
2128{
pwestin@webrtc.org361bac72013-03-13 17:52:42 +00002129 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2130 "Channel::ReceivedRTPPacket()");
2131
2132 // Store playout timestamp for the received RTP packet
2133 // to be used for upcoming delay estimations
2134 WebRtc_UWord32 playoutTimestamp(0);
2135 if (GetPlayoutTimeStamp(playoutTimestamp) == 0)
2136 {
2137 _playoutTimeStampRTP = playoutTimestamp;
2138 }
2139 // Dump the RTP packet to a file (if RTP dump is enabled).
2140 if (_rtpDumpIn.DumpPacket((const WebRtc_UWord8*)data,
2141 (WebRtc_UWord16)length) == -1)
2142 {
2143 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2144 VoEId(_instanceId,_channelId),
                     "Channel::ReceivedRTPPacket() RTP dump to input file"
                     " failed");
2146 }
2147
2148 // Deliver RTP packet to RTP/RTCP module for parsing
    // The packet will be pushed back to the channel through the
    // OnReceivedPayloadData() callback, so we do not push it to the ACM here.
2151 if (_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data,
2152 (WebRtc_UWord16)length) == -1)
2153 {
2154 _engineStatisticsPtr->SetLastError(
2155 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
            "Channel::ReceivedRTPPacket() RTP packet is invalid");
2157 }
2158 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002159}
2160
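// Same as ReceivedRTPPacket() but for RTCP: the packet is optionally dumped
// and then parsed by the RTP/RTCP module; the latest playout timestamp is
// stored for use by the GetRemoteRTCPData API.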
2161WebRtc_Word32
2162Channel::ReceivedRTCPPacket(const WebRtc_Word8* data, WebRtc_Word32 length)
2163{
pwestin@webrtc.org361bac72013-03-13 17:52:42 +00002164 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2165 "Channel::ReceivedRTCPPacket()");
2166
2167 // Store playout timestamp for the received RTCP packet
2168 // which will be read by the GetRemoteRTCPData API
2169 WebRtc_UWord32 playoutTimestamp(0);
2170 if (GetPlayoutTimeStamp(playoutTimestamp) == 0)
2171 {
2172 _playoutTimeStampRTCP = playoutTimestamp;
2173 }
2174
2175 // Dump the RTCP packet to a file (if RTP dump is enabled).
2176 if (_rtpDumpIn.DumpPacket((const WebRtc_UWord8*)data,
2177 (WebRtc_UWord16)length) == -1)
2178 {
2179 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2180 VoEId(_instanceId,_channelId),
                     "Channel::ReceivedRTCPPacket() RTCP dump to input file"
                     " failed");
2182 }
2183
2184 // Deliver RTCP packet to RTP/RTCP module for parsing
2185 if (_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data,
2186 (WebRtc_UWord16)length) == -1)
2187 {
2188 _engineStatisticsPtr->SetLastError(
2189 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
            "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
2191 }
2192 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002193}
2194
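// Enables or disables RTP receive-timeout detection in the RTP/RTCP module.
// timeoutSeconds is converted to milliseconds; the RTCP timeout is left
// disabled.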
niklase@google.com470e71d2011-07-07 08:21:25 +00002195WebRtc_Word32
2196Channel::SetPacketTimeoutNotification(bool enable, int timeoutSeconds)
2197{
2198 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2199 "Channel::SetPacketTimeoutNotification()");
2200 if (enable)
2201 {
2202 const WebRtc_UWord32 RTPtimeoutMS = 1000*timeoutSeconds;
2203 const WebRtc_UWord32 RTCPtimeoutMS = 0;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002204 _rtpRtcpModule->SetPacketTimeout(RTPtimeoutMS, RTCPtimeoutMS);
niklase@google.com470e71d2011-07-07 08:21:25 +00002205 _rtpPacketTimeOutIsEnabled = true;
2206 _rtpTimeOutSeconds = timeoutSeconds;
2207 }
2208 else
2209 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002210 _rtpRtcpModule->SetPacketTimeout(0, 0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002211 _rtpPacketTimeOutIsEnabled = false;
2212 _rtpTimeOutSeconds = 0;
2213 }
2214 return 0;
2215}
2216
2217WebRtc_Word32
2218Channel::GetPacketTimeoutNotification(bool& enabled, int& timeoutSeconds)
2219{
2220 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2221 "Channel::GetPacketTimeoutNotification()");
2222 enabled = _rtpPacketTimeOutIsEnabled;
2223 if (enabled)
2224 {
2225 timeoutSeconds = _rtpTimeOutSeconds;
2226 }
2227 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
2228 "GetPacketTimeoutNotification() => enabled=%d,"
2229 " timeoutSeconds=%d",
2230 enabled, timeoutSeconds);
2231 return 0;
2232}
2233
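// Registers the connection observer that receives periodic dead-or-alive
// callbacks; only one observer can be installed at a time.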
2234WebRtc_Word32
2235Channel::RegisterDeadOrAliveObserver(VoEConnectionObserver& observer)
2236{
2237 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2238 "Channel::RegisterDeadOrAliveObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002239 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002240
2241 if (_connectionObserverPtr)
2242 {
2243 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION, kTraceError,
2244 "RegisterDeadOrAliveObserver() observer already enabled");
2245 return -1;
2246 }
2247
2248 _connectionObserverPtr = &observer;
2249 _connectionObserver = true;
2250
2251 return 0;
2252}
2253
2254WebRtc_Word32
2255Channel::DeRegisterDeadOrAliveObserver()
2256{
2257 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2258 "Channel::DeRegisterDeadOrAliveObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002259 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002260
2261 if (!_connectionObserverPtr)
2262 {
2263 _engineStatisticsPtr->SetLastError(
2264 VE_INVALID_OPERATION, kTraceWarning,
2265 "DeRegisterDeadOrAliveObserver() observer already disabled");
2266 return 0;
2267 }
2268
2269 _connectionObserver = false;
2270 _connectionObserverPtr = NULL;
2271
2272 return 0;
2273}
2274
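// Turns periodic dead-or-alive monitoring on or off. When disabling, the
// previously used sample time is restored so the RTP/RTCP module does not
// fall back to its 2-second default.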
2275WebRtc_Word32
2276Channel::SetPeriodicDeadOrAliveStatus(bool enable, int sampleTimeSeconds)
2277{
2278 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2279 "Channel::SetPeriodicDeadOrAliveStatus()");
2280 if (!_connectionObserverPtr)
2281 {
2282 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
2283 "SetPeriodicDeadOrAliveStatus() connection observer has"
2284 " not been registered");
2285 }
2286 if (enable)
2287 {
2288 ResetDeadOrAliveCounters();
2289 }
2290 bool enabled(false);
2291 WebRtc_UWord8 currentSampleTimeSec(0);
2292 // Store last state (will be used later if dead-or-alive is disabled).
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002293 _rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, currentSampleTimeSec);
niklase@google.com470e71d2011-07-07 08:21:25 +00002294 // Update the dead-or-alive state.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002295 if (_rtpRtcpModule->SetPeriodicDeadOrAliveStatus(
niklase@google.com470e71d2011-07-07 08:21:25 +00002296 enable, (WebRtc_UWord8)sampleTimeSeconds) != 0)
2297 {
2298 _engineStatisticsPtr->SetLastError(
2299 VE_RTP_RTCP_MODULE_ERROR,
2300 kTraceError,
2301 "SetPeriodicDeadOrAliveStatus() failed to set dead-or-alive "
2302 "status");
2303 return -1;
2304 }
2305 if (!enable)
2306 {
2307 // Restore last utilized sample time.
        // Without this, the sample time would be reset to the default
        // (2 seconds) each time dead-or-alive was disabled without a
        // sample-time parameter.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002311 _rtpRtcpModule->SetPeriodicDeadOrAliveStatus(enable,
niklase@google.com470e71d2011-07-07 08:21:25 +00002312 currentSampleTimeSec);
2313 }
2314 return 0;
2315}
2316
2317WebRtc_Word32
2318Channel::GetPeriodicDeadOrAliveStatus(bool& enabled, int& sampleTimeSeconds)
2319{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00002320 _rtpRtcpModule->PeriodicDeadOrAliveStatus(
niklase@google.com470e71d2011-07-07 08:21:25 +00002321 enabled,
2322 (WebRtc_UWord8&)sampleTimeSeconds);
2323 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
2324 "GetPeriodicDeadOrAliveStatus() => enabled=%d,"
2325 " sampleTimeSeconds=%d",
2326 enabled, sampleTimeSeconds);
2327 return 0;
2328}
2329
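// Starts local playout of a file that is mixed anonymously into this
// channel's output. The FilePlayer is created under _fileCritSect, while the
// mixer registration is deferred to RegisterFilePlayingToMixer() to avoid a
// deadlock (see the comments in that function).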
niklase@google.com470e71d2011-07-07 08:21:25 +00002330int Channel::StartPlayingFileLocally(const char* fileName,
2331 const bool loop,
2332 const FileFormats format,
2333 const int startPosition,
2334 const float volumeScaling,
2335 const int stopPosition,
2336 const CodecInst* codecInst)
2337{
2338 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2339 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
2340 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
2341 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2342 startPosition, stopPosition);
2343
2344 if (_outputFilePlaying)
2345 {
2346 _engineStatisticsPtr->SetLastError(
2347 VE_ALREADY_PLAYING, kTraceError,
2348 "StartPlayingFileLocally() is already playing");
2349 return -1;
2350 }
2351
niklase@google.com470e71d2011-07-07 08:21:25 +00002352 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002353 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002354
2355 if (_outputFilePlayerPtr)
2356 {
2357 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2358 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2359 _outputFilePlayerPtr = NULL;
2360 }
2361
2362 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2363 _outputFilePlayerId, (const FileFormats)format);
2364
2365 if (_outputFilePlayerPtr == NULL)
2366 {
2367 _engineStatisticsPtr->SetLastError(
2368 VE_INVALID_ARGUMENT, kTraceError,
henrike@webrtc.org31d30702011-11-18 19:59:32 +00002369 "StartPlayingFileLocally() filePlayer format is not correct");
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002370 return -1;
2371 }
2372
2373 const WebRtc_UWord32 notificationTime(0);
2374
2375 if (_outputFilePlayerPtr->StartPlayingFile(
2376 fileName,
2377 loop,
2378 startPosition,
2379 volumeScaling,
2380 notificationTime,
2381 stopPosition,
2382 (const CodecInst*)codecInst) != 0)
2383 {
2384 _engineStatisticsPtr->SetLastError(
2385 VE_BAD_FILE, kTraceError,
2386 "StartPlayingFile() failed to start file playout");
2387 _outputFilePlayerPtr->StopPlayingFile();
2388 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2389 _outputFilePlayerPtr = NULL;
2390 return -1;
2391 }
2392 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2393 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002394 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002395
2396 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002397 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002398
2399 return 0;
2400}
2401
2402int Channel::StartPlayingFileLocally(InStream* stream,
2403 const FileFormats format,
2404 const int startPosition,
2405 const float volumeScaling,
2406 const int stopPosition,
2407 const CodecInst* codecInst)
2408{
2409 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2410 "Channel::StartPlayingFileLocally(format=%d,"
2411 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2412 format, volumeScaling, startPosition, stopPosition);
2413
2414 if(stream == NULL)
2415 {
2416 _engineStatisticsPtr->SetLastError(
2417 VE_BAD_FILE, kTraceError,
2418 "StartPlayingFileLocally() NULL as input stream");
2419 return -1;
2420 }
2421
2422
2423 if (_outputFilePlaying)
2424 {
2425 _engineStatisticsPtr->SetLastError(
2426 VE_ALREADY_PLAYING, kTraceError,
2427 "StartPlayingFileLocally() is already playing");
2428 return -1;
2429 }
2430
niklase@google.com470e71d2011-07-07 08:21:25 +00002431 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002432 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002433
2434 // Destroy the old instance
2435 if (_outputFilePlayerPtr)
2436 {
2437 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2438 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2439 _outputFilePlayerPtr = NULL;
2440 }
2441
2442 // Create the instance
2443 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2444 _outputFilePlayerId,
2445 (const FileFormats)format);
2446
2447 if (_outputFilePlayerPtr == NULL)
2448 {
2449 _engineStatisticsPtr->SetLastError(
2450 VE_INVALID_ARGUMENT, kTraceError,
            "StartPlayingFileLocally() filePlayer format is not correct");
2452 return -1;
2453 }
2454
2455 const WebRtc_UWord32 notificationTime(0);
2456
2457 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2458 volumeScaling,
2459 notificationTime,
2460 stopPosition, codecInst) != 0)
2461 {
2462 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2463 "StartPlayingFile() failed to "
2464 "start file playout");
2465 _outputFilePlayerPtr->StopPlayingFile();
2466 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2467 _outputFilePlayerPtr = NULL;
2468 return -1;
2469 }
2470 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2471 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002472 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002473
2474 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002475 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002476
niklase@google.com470e71d2011-07-07 08:21:25 +00002477 return 0;
2478}
2479
2480int Channel::StopPlayingFileLocally()
2481{
2482 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2483 "Channel::StopPlayingFileLocally()");
2484
2485 if (!_outputFilePlaying)
2486 {
2487 _engineStatisticsPtr->SetLastError(
2488 VE_INVALID_OPERATION, kTraceWarning,
            "StopPlayingFileLocally() is not playing");
2490 return 0;
2491 }
2492
niklase@google.com470e71d2011-07-07 08:21:25 +00002493 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002494 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002495
2496 if (_outputFilePlayerPtr->StopPlayingFile() != 0)
2497 {
2498 _engineStatisticsPtr->SetLastError(
2499 VE_STOP_RECORDING_FAILED, kTraceError,
2500 "StopPlayingFile() could not stop playing");
2501 return -1;
2502 }
2503 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2504 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2505 _outputFilePlayerPtr = NULL;
2506 _outputFilePlaying = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002507 }
    // _fileCritSect cannot be taken while calling
    // SetAnonymousMixabilityStatus(). Refer to the comments in
    // RegisterFilePlayingToMixer() for more details.
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002511 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
2512 {
2513 _engineStatisticsPtr->SetLastError(
2514 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
            "StopPlayingFile() failed to stop participant from playing as"
            " a file in the mixer");
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002517 return -1;
2518 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002519
2520 return 0;
2521}
2522
2523int Channel::IsPlayingFileLocally() const
2524{
2525 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2526 "Channel::IsPlayingFileLocally()");
2527
2528 return (WebRtc_Word32)_outputFilePlaying;
2529}
2530
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002531int Channel::RegisterFilePlayingToMixer()
2532{
    // Return success without registering the file playing to the mixer if:
    // 1. the file starts playing before playout is started on this channel.
    // 2. playout is started without a file playing on this channel.
2536 if (!_playing || !_outputFilePlaying)
2537 {
2538 return 0;
2539 }
2540
2541 // |_fileCritSect| cannot be taken while calling
2542 // SetAnonymousMixabilityStatus() since as soon as the participant is added
2543 // frames can be pulled by the mixer. Since the frames are generated from
2544 // the file, _fileCritSect will be taken. This would result in a deadlock.
2545 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
2546 {
2547 CriticalSectionScoped cs(&_fileCritSect);
2548 _outputFilePlaying = false;
2549 _engineStatisticsPtr->SetLastError(
2550 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2551 "StartPlayingFile() failed to add participant as file to mixer");
2552 _outputFilePlayerPtr->StopPlayingFile();
2553 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2554 _outputFilePlayerPtr = NULL;
2555 return -1;
2556 }
2557
2558 return 0;
2559}
2560
niklase@google.com470e71d2011-07-07 08:21:25 +00002561int Channel::ScaleLocalFilePlayout(const float scale)
2562{
2563 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2564 "Channel::ScaleLocalFilePlayout(scale=%5.3f)", scale);
2565
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002566 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002567
2568 if (!_outputFilePlaying)
2569 {
2570 _engineStatisticsPtr->SetLastError(
2571 VE_INVALID_OPERATION, kTraceError,
            "ScaleLocalFilePlayout() is not playing");
2573 return -1;
2574 }
2575 if ((_outputFilePlayerPtr == NULL) ||
2576 (_outputFilePlayerPtr->SetAudioScaling(scale) != 0))
2577 {
2578 _engineStatisticsPtr->SetLastError(
2579 VE_BAD_ARGUMENT, kTraceError,
2580 "SetAudioScaling() failed to scale the playout");
2581 return -1;
2582 }
2583
2584 return 0;
2585}
2586
2587int Channel::GetLocalPlayoutPosition(int& positionMs)
2588{
2589 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2590 "Channel::GetLocalPlayoutPosition(position=?)");
2591
2592 WebRtc_UWord32 position;
2593
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002594 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002595
2596 if (_outputFilePlayerPtr == NULL)
2597 {
2598 _engineStatisticsPtr->SetLastError(
2599 VE_INVALID_OPERATION, kTraceError,
            "GetLocalPlayoutPosition() filePlayer instance does not exist");
2601 return -1;
2602 }
2603
2604 if (_outputFilePlayerPtr->GetPlayoutPosition(position) != 0)
2605 {
2606 _engineStatisticsPtr->SetLastError(
2607 VE_BAD_FILE, kTraceError,
2608 "GetLocalPlayoutPosition() failed");
2609 return -1;
2610 }
2611 positionMs = position;
2612
2613 return 0;
2614}
2615
2616int Channel::StartPlayingFileAsMicrophone(const char* fileName,
2617 const bool loop,
2618 const FileFormats format,
2619 const int startPosition,
2620 const float volumeScaling,
2621 const int stopPosition,
2622 const CodecInst* codecInst)
2623{
2624 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2625 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
2626 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
2627 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2628 startPosition, stopPosition);
2629
2630 if (_inputFilePlaying)
2631 {
2632 _engineStatisticsPtr->SetLastError(
2633 VE_ALREADY_PLAYING, kTraceWarning,
2634 "StartPlayingFileAsMicrophone() filePlayer is playing");
2635 return 0;
2636 }
2637
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002638 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002639
2640 // Destroy the old instance
2641 if (_inputFilePlayerPtr)
2642 {
2643 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2644 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2645 _inputFilePlayerPtr = NULL;
2646 }
2647
2648 // Create the instance
2649 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2650 _inputFilePlayerId, (const FileFormats)format);
2651
2652 if (_inputFilePlayerPtr == NULL)
2653 {
2654 _engineStatisticsPtr->SetLastError(
2655 VE_INVALID_ARGUMENT, kTraceError,
            "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2657 return -1;
2658 }
2659
2660 const WebRtc_UWord32 notificationTime(0);
2661
2662 if (_inputFilePlayerPtr->StartPlayingFile(
2663 fileName,
2664 loop,
2665 startPosition,
2666 volumeScaling,
2667 notificationTime,
2668 stopPosition,
2669 (const CodecInst*)codecInst) != 0)
2670 {
2671 _engineStatisticsPtr->SetLastError(
2672 VE_BAD_FILE, kTraceError,
2673 "StartPlayingFile() failed to start file playout");
2674 _inputFilePlayerPtr->StopPlayingFile();
2675 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2676 _inputFilePlayerPtr = NULL;
2677 return -1;
2678 }
2679 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2680 _inputFilePlaying = true;
2681
2682 return 0;
2683}
2684
2685int Channel::StartPlayingFileAsMicrophone(InStream* stream,
2686 const FileFormats format,
2687 const int startPosition,
2688 const float volumeScaling,
2689 const int stopPosition,
2690 const CodecInst* codecInst)
2691{
2692 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2693 "Channel::StartPlayingFileAsMicrophone(format=%d, "
2694 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2695 format, volumeScaling, startPosition, stopPosition);
2696
2697 if(stream == NULL)
2698 {
2699 _engineStatisticsPtr->SetLastError(
2700 VE_BAD_FILE, kTraceError,
2701 "StartPlayingFileAsMicrophone NULL as input stream");
2702 return -1;
2703 }
2704
2705 if (_inputFilePlaying)
2706 {
2707 _engineStatisticsPtr->SetLastError(
2708 VE_ALREADY_PLAYING, kTraceWarning,
2709 "StartPlayingFileAsMicrophone() is playing");
2710 return 0;
2711 }
2712
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002713 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002714
2715 // Destroy the old instance
2716 if (_inputFilePlayerPtr)
2717 {
2718 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2719 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2720 _inputFilePlayerPtr = NULL;
2721 }
2722
2723 // Create the instance
2724 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2725 _inputFilePlayerId, (const FileFormats)format);
2726
2727 if (_inputFilePlayerPtr == NULL)
2728 {
2729 _engineStatisticsPtr->SetLastError(
2730 VE_INVALID_ARGUMENT, kTraceError,
            "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2732 return -1;
2733 }
2734
2735 const WebRtc_UWord32 notificationTime(0);
2736
2737 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2738 volumeScaling, notificationTime,
2739 stopPosition, codecInst) != 0)
2740 {
2741 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2742 "StartPlayingFile() failed to start "
2743 "file playout");
2744 _inputFilePlayerPtr->StopPlayingFile();
2745 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2746 _inputFilePlayerPtr = NULL;
2747 return -1;
2748 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002749
niklase@google.com470e71d2011-07-07 08:21:25 +00002750 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2751 _inputFilePlaying = true;
2752
2753 return 0;
2754}
2755
2756int Channel::StopPlayingFileAsMicrophone()
2757{
2758 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2759 "Channel::StopPlayingFileAsMicrophone()");
2760
2761 if (!_inputFilePlaying)
2762 {
2763 _engineStatisticsPtr->SetLastError(
2764 VE_INVALID_OPERATION, kTraceWarning,
            "StopPlayingFileAsMicrophone() is not playing");
2766 return 0;
2767 }
2768
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002769 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002770 if (_inputFilePlayerPtr->StopPlayingFile() != 0)
2771 {
2772 _engineStatisticsPtr->SetLastError(
2773 VE_STOP_RECORDING_FAILED, kTraceError,
2774 "StopPlayingFile() could not stop playing");
2775 return -1;
2776 }
2777 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2778 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2779 _inputFilePlayerPtr = NULL;
2780 _inputFilePlaying = false;
2781
2782 return 0;
2783}
2784
2785int Channel::IsPlayingFileAsMicrophone() const
2786{
2787 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2788 "Channel::IsPlayingFileAsMicrophone()");
2789
2790 return _inputFilePlaying;
2791}
2792
2793int Channel::ScaleFileAsMicrophonePlayout(const float scale)
2794{
2795 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2796 "Channel::ScaleFileAsMicrophonePlayout(scale=%5.3f)", scale);
2797
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002798 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002799
2800 if (!_inputFilePlaying)
2801 {
2802 _engineStatisticsPtr->SetLastError(
2803 VE_INVALID_OPERATION, kTraceError,
            "ScaleFileAsMicrophonePlayout() is not playing");
2805 return -1;
2806 }
2807
2808 if ((_inputFilePlayerPtr == NULL) ||
2809 (_inputFilePlayerPtr->SetAudioScaling(scale) != 0))
2810 {
2811 _engineStatisticsPtr->SetLastError(
2812 VE_BAD_ARGUMENT, kTraceError,
2813 "SetAudioScaling() failed to scale playout");
2814 return -1;
2815 }
2816
2817 return 0;
2818}
2819
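// Records the channel's playout signal to a file. L16/PCMU/PCMA are written
// as WAV, a NULL codecInst selects 16 kHz linear PCM, and other codecs
// produce a compressed file.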
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00002820int Channel::StartRecordingPlayout(const char* fileName,
niklase@google.com470e71d2011-07-07 08:21:25 +00002821 const CodecInst* codecInst)
2822{
2823 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2824 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
2825
2826 if (_outputFileRecording)
2827 {
2828 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2829 "StartRecordingPlayout() is already recording");
2830 return 0;
2831 }
2832
2833 FileFormats format;
2834 const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
2835 CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2836
niklas.enbom@webrtc.org40197d72012-03-26 08:45:47 +00002837 if ((codecInst != NULL) &&
2838 ((codecInst->channels < 1) || (codecInst->channels > 2)))
niklase@google.com470e71d2011-07-07 08:21:25 +00002839 {
2840 _engineStatisticsPtr->SetLastError(
2841 VE_BAD_ARGUMENT, kTraceError,
2842 "StartRecordingPlayout() invalid compression");
2843 return(-1);
2844 }
2845 if(codecInst == NULL)
2846 {
2847 format = kFileFormatPcm16kHzFile;
2848 codecInst=&dummyCodec;
2849 }
2850 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2851 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2852 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2853 {
2854 format = kFileFormatWavFile;
2855 }
2856 else
2857 {
2858 format = kFileFormatCompressedFile;
2859 }
2860
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002861 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002862
2863 // Destroy the old instance
2864 if (_outputFileRecorderPtr)
2865 {
2866 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2867 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2868 _outputFileRecorderPtr = NULL;
2869 }
2870
2871 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2872 _outputFileRecorderId, (const FileFormats)format);
2873 if (_outputFileRecorderPtr == NULL)
2874 {
2875 _engineStatisticsPtr->SetLastError(
2876 VE_INVALID_ARGUMENT, kTraceError,
            "StartRecordingPlayout() fileRecorder format is not correct");
2878 return -1;
2879 }
2880
2881 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2882 fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
2883 {
2884 _engineStatisticsPtr->SetLastError(
2885 VE_BAD_FILE, kTraceError,
2886 "StartRecordingAudioFile() failed to start file recording");
2887 _outputFileRecorderPtr->StopRecording();
2888 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2889 _outputFileRecorderPtr = NULL;
2890 return -1;
2891 }
2892 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2893 _outputFileRecording = true;
2894
2895 return 0;
2896}
2897
2898int Channel::StartRecordingPlayout(OutStream* stream,
2899 const CodecInst* codecInst)
2900{
2901 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2902 "Channel::StartRecordingPlayout()");
2903
2904 if (_outputFileRecording)
2905 {
2906 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2907 "StartRecordingPlayout() is already recording");
2908 return 0;
2909 }
2910
2911 FileFormats format;
2912 const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
2913 CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2914
2915 if (codecInst != NULL && codecInst->channels != 1)
2916 {
2917 _engineStatisticsPtr->SetLastError(
2918 VE_BAD_ARGUMENT, kTraceError,
2919 "StartRecordingPlayout() invalid compression");
2920 return(-1);
2921 }
2922 if(codecInst == NULL)
2923 {
2924 format = kFileFormatPcm16kHzFile;
2925 codecInst=&dummyCodec;
2926 }
2927 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2928 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2929 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2930 {
2931 format = kFileFormatWavFile;
2932 }
2933 else
2934 {
2935 format = kFileFormatCompressedFile;
2936 }
2937
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002938 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002939
2940 // Destroy the old instance
2941 if (_outputFileRecorderPtr)
2942 {
2943 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2944 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2945 _outputFileRecorderPtr = NULL;
2946 }
2947
2948 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2949 _outputFileRecorderId, (const FileFormats)format);
2950 if (_outputFileRecorderPtr == NULL)
2951 {
2952 _engineStatisticsPtr->SetLastError(
2953 VE_INVALID_ARGUMENT, kTraceError,
            "StartRecordingPlayout() fileRecorder format is not correct");
2955 return -1;
2956 }
2957
2958 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2959 notificationTime) != 0)
2960 {
2961 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2962 "StartRecordingPlayout() failed to "
2963 "start file recording");
2964 _outputFileRecorderPtr->StopRecording();
2965 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2966 _outputFileRecorderPtr = NULL;
2967 return -1;
2968 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002969
niklase@google.com470e71d2011-07-07 08:21:25 +00002970 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2971 _outputFileRecording = true;
2972
2973 return 0;
2974}
2975
2976int Channel::StopRecordingPlayout()
2977{
2978 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
2979 "Channel::StopRecordingPlayout()");
2980
2981 if (!_outputFileRecording)
2982 {
2983 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
                     "StopRecordingPlayout() is not recording");
2985 return -1;
2986 }
2987
2988
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002989 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002990
2991 if (_outputFileRecorderPtr->StopRecording() != 0)
2992 {
2993 _engineStatisticsPtr->SetLastError(
2994 VE_STOP_RECORDING_FAILED, kTraceError,
2995 "StopRecording() could not stop recording");
2996 return(-1);
2997 }
2998 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2999 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
3000 _outputFileRecorderPtr = NULL;
3001 _outputFileRecording = false;
3002
3003 return 0;
3004}
3005
3006void
3007Channel::SetMixWithMicStatus(bool mix)
3008{
3009 _mixFileWithMicrophone=mix;
3010}
3011
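// Reports the most recent output (far-end) speech level; the FullRange
// variant below reports the level on the full 16-bit sample scale.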
3012int
3013Channel::GetSpeechOutputLevel(WebRtc_UWord32& level) const
3014{
3015 WebRtc_Word8 currentLevel = _outputAudioLevel.Level();
3016 level = static_cast<WebRtc_Word32> (currentLevel);
3017 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3018 VoEId(_instanceId,_channelId),
3019 "GetSpeechOutputLevel() => level=%u", level);
3020 return 0;
3021}
3022
3023int
3024Channel::GetSpeechOutputLevelFullRange(WebRtc_UWord32& level) const
3025{
3026 WebRtc_Word16 currentLevel = _outputAudioLevel.LevelFullRange();
3027 level = static_cast<WebRtc_Word32> (currentLevel);
3028 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3029 VoEId(_instanceId,_channelId),
3030 "GetSpeechOutputLevelFullRange() => level=%u", level);
3031 return 0;
3032}
3033
3034int
3035Channel::SetMute(bool enable)
3036{
3037 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3038 "Channel::SetMute(enable=%d)", enable);
3039 _mute = enable;
3040 return 0;
3041}
3042
3043bool
3044Channel::Mute() const
3045{
3046 return _mute;
3047}
3048
3049int
3050Channel::SetOutputVolumePan(float left, float right)
3051{
3052 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3053 "Channel::SetOutputVolumePan()");
3054 _panLeft = left;
3055 _panRight = right;
3056 return 0;
3057}
3058
3059int
3060Channel::GetOutputVolumePan(float& left, float& right) const
3061{
3062 left = _panLeft;
3063 right = _panRight;
3064 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3065 VoEId(_instanceId,_channelId),
3066 "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
3067 return 0;
3068}
3069
3070int
3071Channel::SetChannelOutputVolumeScaling(float scaling)
3072{
3073 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3074 "Channel::SetChannelOutputVolumeScaling()");
3075 _outputGain = scaling;
3076 return 0;
3077}
3078
3079int
3080Channel::GetChannelOutputVolumeScaling(float& scaling) const
3081{
3082 scaling = _outputGain;
3083 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3084 VoEId(_instanceId,_channelId),
3085 "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
3086 return 0;
3087}
3088
3089#ifdef WEBRTC_SRTP
3090
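// Enables SRTP protection of outgoing packets (WEBRTC_SRTP builds only). Key
// and tag lengths are validated against the VoiceEngine limits before the
// SRTP module is configured; the SRTP module is also installed as the
// encryption callback.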
3091int
3092Channel::EnableSRTPSend(
3093 CipherTypes cipherType,
3094 int cipherKeyLength,
3095 AuthenticationTypes authType,
3096 int authKeyLength,
3097 int authTagLength,
3098 SecurityLevels level,
3099 const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
3100 bool useForRTCP)
3101{
3102 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3103 "Channel::EnableSRTPSend()");
3104
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003105 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003106
3107 if (_encrypting)
3108 {
3109 _engineStatisticsPtr->SetLastError(
3110 VE_INVALID_OPERATION, kTraceWarning,
3111 "EnableSRTPSend() encryption already enabled");
3112 return -1;
3113 }
3114
3115 if (key == NULL)
3116 {
3117 _engineStatisticsPtr->SetLastError(
3118 VE_INVALID_ARGUMENT, kTraceWarning,
3119 "EnableSRTPSend() invalid key string");
3120 return -1;
3121 }
3122
3123 if (((kEncryption == level ||
3124 kEncryptionAndAuthentication == level) &&
3125 (cipherKeyLength < kVoiceEngineMinSrtpEncryptLength ||
3126 cipherKeyLength > kVoiceEngineMaxSrtpEncryptLength)) ||
3127 ((kAuthentication == level ||
3128 kEncryptionAndAuthentication == level) &&
3129 kAuthHmacSha1 == authType &&
3130 (authKeyLength > kVoiceEngineMaxSrtpAuthSha1Length ||
3131 authTagLength > kVoiceEngineMaxSrtpAuthSha1Length)) ||
3132 ((kAuthentication == level ||
3133 kEncryptionAndAuthentication == level) &&
3134 kAuthNull == authType &&
3135 (authKeyLength > kVoiceEngineMaxSrtpKeyAuthNullLength ||
3136 authTagLength > kVoiceEngineMaxSrtpTagAuthNullLength)))
3137 {
3138 _engineStatisticsPtr->SetLastError(
3139 VE_INVALID_ARGUMENT, kTraceError,
3140 "EnableSRTPSend() invalid key length(s)");
3141 return -1;
3142 }
3143
3144
3145 if (_srtpModule.EnableSRTPEncrypt(
3146 !useForRTCP,
3147 (SrtpModule::CipherTypes)cipherType,
3148 cipherKeyLength,
3149 (SrtpModule::AuthenticationTypes)authType,
3150 authKeyLength, authTagLength,
3151 (SrtpModule::SecurityLevels)level,
3152 key) == -1)
3153 {
3154 _engineStatisticsPtr->SetLastError(
3155 VE_SRTP_ERROR, kTraceError,
3156 "EnableSRTPSend() failed to enable SRTP encryption");
3157 return -1;
3158 }
3159
3160 if (_encryptionPtr == NULL)
3161 {
3162 _encryptionPtr = &_srtpModule;
3163 }
3164 _encrypting = true;
3165
3166 return 0;
3167}
3168
3169int
3170Channel::DisableSRTPSend()
3171{
3172 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3173 "Channel::DisableSRTPSend()");
3174
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003175 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003176
3177 if (!_encrypting)
3178 {
3179 _engineStatisticsPtr->SetLastError(
3180 VE_INVALID_OPERATION, kTraceWarning,
3181 "DisableSRTPSend() SRTP encryption already disabled");
3182 return 0;
3183 }
3184
3185 _encrypting = false;
3186
3187 if (_srtpModule.DisableSRTPEncrypt() == -1)
3188 {
3189 _engineStatisticsPtr->SetLastError(
3190 VE_SRTP_ERROR, kTraceError,
3191 "DisableSRTPSend() failed to disable SRTP encryption");
3192 return -1;
3193 }
3194
3195 if (!_srtpModule.SRTPDecrypt() && !_srtpModule.SRTPEncrypt())
3196 {
3197 // Both directions are disabled
3198 _encryptionPtr = NULL;
3199 }
3200
3201 return 0;
3202}
3203
3204int
3205Channel::EnableSRTPReceive(
3206 CipherTypes cipherType,
3207 int cipherKeyLength,
3208 AuthenticationTypes authType,
3209 int authKeyLength,
3210 int authTagLength,
3211 SecurityLevels level,
3212 const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
3213 bool useForRTCP)
3214{
3215 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3216 "Channel::EnableSRTPReceive()");
3217
3218    CriticalSectionScoped cs(&_callbackCritSect);
3219
3220 if (_decrypting)
3221 {
3222 _engineStatisticsPtr->SetLastError(
3223 VE_INVALID_OPERATION, kTraceWarning,
3224 "EnableSRTPReceive() SRTP decryption already enabled");
3225 return -1;
3226 }
3227
3228 if (key == NULL)
3229 {
3230 _engineStatisticsPtr->SetLastError(
3231 VE_INVALID_ARGUMENT, kTraceWarning,
3232 "EnableSRTPReceive() invalid key string");
3233 return -1;
3234 }
3235
3236 if ((((kEncryption == level) ||
3237 (kEncryptionAndAuthentication == level)) &&
3238 ((cipherKeyLength < kVoiceEngineMinSrtpEncryptLength) ||
3239 (cipherKeyLength > kVoiceEngineMaxSrtpEncryptLength))) ||
3240 (((kAuthentication == level) ||
3241 (kEncryptionAndAuthentication == level)) &&
3242 (kAuthHmacSha1 == authType) &&
3243 ((authKeyLength > kVoiceEngineMaxSrtpAuthSha1Length) ||
3244 (authTagLength > kVoiceEngineMaxSrtpAuthSha1Length))) ||
3245 (((kAuthentication == level) ||
3246 (kEncryptionAndAuthentication == level)) &&
3247 (kAuthNull == authType) &&
3248 ((authKeyLength > kVoiceEngineMaxSrtpKeyAuthNullLength) ||
3249 (authTagLength > kVoiceEngineMaxSrtpTagAuthNullLength))))
3250 {
3251 _engineStatisticsPtr->SetLastError(
3252 VE_INVALID_ARGUMENT, kTraceError,
3253 "EnableSRTPReceive() invalid key length(s)");
3254 return -1;
3255 }
3256
3257 if (_srtpModule.EnableSRTPDecrypt(
3258 !useForRTCP,
3259 (SrtpModule::CipherTypes)cipherType,
3260 cipherKeyLength,
3261 (SrtpModule::AuthenticationTypes)authType,
3262 authKeyLength,
3263 authTagLength,
3264 (SrtpModule::SecurityLevels)level,
3265 key) == -1)
3266 {
3267 _engineStatisticsPtr->SetLastError(
3268 VE_SRTP_ERROR, kTraceError,
3269 "EnableSRTPReceive() failed to enable SRTP decryption");
3270 return -1;
3271 }
3272
3273 if (_encryptionPtr == NULL)
3274 {
3275 _encryptionPtr = &_srtpModule;
3276 }
3277
3278 _decrypting = true;
3279
3280 return 0;
3281}
3282
3283int
3284Channel::DisableSRTPReceive()
3285{
3286 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3287 "Channel::DisableSRTPReceive()");
3288
3289    CriticalSectionScoped cs(&_callbackCritSect);
3290
3291 if (!_decrypting)
3292 {
3293 _engineStatisticsPtr->SetLastError(
3294 VE_INVALID_OPERATION, kTraceWarning,
3295 "DisableSRTPReceive() SRTP decryption already disabled");
3296 return 0;
3297 }
3298
3299 _decrypting = false;
3300
3301 if (_srtpModule.DisableSRTPDecrypt() == -1)
3302 {
3303 _engineStatisticsPtr->SetLastError(
3304 VE_SRTP_ERROR, kTraceError,
3305 "DisableSRTPReceive() failed to disable SRTP decryption");
3306 return -1;
3307 }
3308
3309 if (!_srtpModule.SRTPDecrypt() && !_srtpModule.SRTPEncrypt())
3310 {
3311 _encryptionPtr = NULL;
3312 }
3313
3314 return 0;
3315}
3316
3317#endif
3318
3319int
3320Channel::RegisterExternalEncryption(Encryption& encryption)
3321{
3322 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3323 "Channel::RegisterExternalEncryption()");
3324
3325    CriticalSectionScoped cs(&_callbackCritSect);
3326
3327 if (_encryptionPtr)
3328 {
3329 _engineStatisticsPtr->SetLastError(
3330 VE_INVALID_OPERATION, kTraceError,
3331 "RegisterExternalEncryption() encryption already enabled");
3332 return -1;
3333 }
3334
3335 _encryptionPtr = &encryption;
3336
3337 _decrypting = true;
3338 _encrypting = true;
3339
3340 return 0;
3341}
3342
3343int
3344Channel::DeRegisterExternalEncryption()
3345{
3346 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3347 "Channel::DeRegisterExternalEncryption()");
3348
3349    CriticalSectionScoped cs(&_callbackCritSect);
3350
3351 if (!_encryptionPtr)
3352 {
3353 _engineStatisticsPtr->SetLastError(
3354 VE_INVALID_OPERATION, kTraceWarning,
3355 "DeRegisterExternalEncryption() encryption already disabled");
3356 return 0;
3357 }
3358
3359 _decrypting = false;
3360 _encrypting = false;
3361
3362 _encryptionPtr = NULL;
3363
3364 return 0;
3365}
3366
3367int Channel::SendTelephoneEventOutband(unsigned char eventCode,
3368 int lengthMs, int attenuationDb,
3369 bool playDtmfEvent)
3370{
3371 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3372 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
3373 playDtmfEvent);
3374
3375 _playOutbandDtmfEvent = playDtmfEvent;
3376
3377    if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
3378 attenuationDb) != 0)
3379 {
3380 _engineStatisticsPtr->SetLastError(
3381 VE_SEND_DTMF_FAILED,
3382 kTraceWarning,
3383 "SendTelephoneEventOutband() failed to send event");
3384 return -1;
3385 }
3386 return 0;
3387}
3388
3389int Channel::SendTelephoneEventInband(unsigned char eventCode,
3390 int lengthMs,
3391 int attenuationDb,
3392 bool playDtmfEvent)
3393{
3394 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3395 "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
3396 playDtmfEvent);
3397
3398 _playInbandDtmfEvent = playDtmfEvent;
3399 _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
3400
3401 return 0;
3402}
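
// Illustrative sketch (not part of the original file): out-of-band events are
// handed to the RTP/RTCP module above and sent as RFC 4733 (RFC 2833 style)
// telephone-event packets, while in-band events are queued here and later
// mixed into the outgoing audio by InsertInbandDtmfTone(). A hypothetical
// caller sending DTMF digit 5 for 160 ms at -10 dB attenuation:
//
//   channel.SendTelephoneEventOutband(5, 160, 10, true);  // RTP event packets
//   channel.SendTelephoneEventInband(5, 160, 10, true);   // tone in the audio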
3403
3404int
3405Channel::SetDtmfPlayoutStatus(bool enable)
3406{
3407 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3408 "Channel::SetDtmfPlayoutStatus()");
3409 if (_audioCodingModule.SetDtmfPlayoutStatus(enable) != 0)
3410 {
3411 _engineStatisticsPtr->SetLastError(
3412 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
3413 "SetDtmfPlayoutStatus() failed to set Dtmf playout");
3414 return -1;
3415 }
3416 return 0;
3417}
3418
3419bool
3420Channel::DtmfPlayoutStatus() const
3421{
3422 return _audioCodingModule.DtmfPlayoutStatus();
3423}
3424
3425int
3426Channel::SetSendTelephoneEventPayloadType(unsigned char type)
3427{
3428 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3429 "Channel::SetSendTelephoneEventPayloadType()");
3430    if (type > 127)
3431    {
3432 _engineStatisticsPtr->SetLastError(
3433 VE_INVALID_ARGUMENT, kTraceError,
3434 "SetSendTelephoneEventPayloadType() invalid type");
3435 return -1;
3436 }
3437    CodecInst codec;
3438 codec.plfreq = 8000;
3439 codec.pltype = type;
3440 memcpy(codec.plname, "telephone-event", 16);
3441    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
3442    {
3443 _engineStatisticsPtr->SetLastError(
3444 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3445            "SetSendTelephoneEventPayloadType() failed to register send "
3446 "payload type");
3447 return -1;
3448 }
3449 _sendTelephoneEventPayloadType = type;
3450 return 0;
3451}
3452
3453int
3454Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
3455{
3456 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3457 "Channel::GetSendTelephoneEventPayloadType()");
3458 type = _sendTelephoneEventPayloadType;
3459 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3460 VoEId(_instanceId,_channelId),
3461 "GetSendTelephoneEventPayloadType() => type=%u", type);
3462 return 0;
3463}
3464
3465int
3466Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
3467{
3468 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3469 "Channel::UpdateRxVadDetection()");
3470
3471 int vadDecision = 1;
3472
3473    vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive)? 1 : 0;
3474
3475 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
3476 {
3477 OnRxVadDetected(vadDecision);
3478 _oldVadDecision = vadDecision;
3479 }
3480
3481 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3482 "Channel::UpdateRxVadDetection() => vadDecision=%d",
3483 vadDecision);
3484 return 0;
3485}
3486
3487int
3488Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
3489{
3490 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3491 "Channel::RegisterRxVadObserver()");
3492    CriticalSectionScoped cs(&_callbackCritSect);
3493
3494 if (_rxVadObserverPtr)
3495 {
3496 _engineStatisticsPtr->SetLastError(
3497 VE_INVALID_OPERATION, kTraceError,
3498 "RegisterRxVadObserver() observer already enabled");
3499 return -1;
3500 }
3501    _rxVadObserverPtr = &observer;
3502 _RxVadDetection = true;
3503 return 0;
3504}
3505
3506int
3507Channel::DeRegisterRxVadObserver()
3508{
3509 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3510 "Channel::DeRegisterRxVadObserver()");
3511    CriticalSectionScoped cs(&_callbackCritSect);
3512
3513 if (!_rxVadObserverPtr)
3514 {
3515 _engineStatisticsPtr->SetLastError(
3516 VE_INVALID_OPERATION, kTraceWarning,
3517 "DeRegisterRxVadObserver() observer already disabled");
3518 return 0;
3519 }
3520 _rxVadObserverPtr = NULL;
3521 _RxVadDetection = false;
3522 return 0;
3523}
3524
3525int
3526Channel::VoiceActivityIndicator(int &activity)
3527{
3528 activity = _sendFrameType;
3529
3530 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3531 "Channel::VoiceActivityIndicator(indicator=%d)", activity);
3532 return 0;
3533}
3534
3535#ifdef WEBRTC_VOICE_ENGINE_AGC
3536
3537int
3538Channel::SetRxAgcStatus(const bool enable, const AgcModes mode)
3539{
3540 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3541 "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
3542 (int)enable, (int)mode);
3543
3544 GainControl::Mode agcMode(GainControl::kFixedDigital);
3545 switch (mode)
3546 {
3547 case kAgcDefault:
3548 agcMode = GainControl::kAdaptiveDigital;
3549 break;
3550 case kAgcUnchanged:
3551 agcMode = _rxAudioProcessingModulePtr->gain_control()->mode();
3552 break;
3553 case kAgcFixedDigital:
3554 agcMode = GainControl::kFixedDigital;
3555 break;
3556 case kAgcAdaptiveDigital:
3557            agcMode = GainControl::kAdaptiveDigital;
3558 break;
3559 default:
3560 _engineStatisticsPtr->SetLastError(
3561 VE_INVALID_ARGUMENT, kTraceError,
3562 "SetRxAgcStatus() invalid Agc mode");
3563 return -1;
3564 }
3565
3566 if (_rxAudioProcessingModulePtr->gain_control()->set_mode(agcMode) != 0)
3567 {
3568 _engineStatisticsPtr->SetLastError(
3569 VE_APM_ERROR, kTraceError,
3570 "SetRxAgcStatus() failed to set Agc mode");
3571 return -1;
3572 }
3573 if (_rxAudioProcessingModulePtr->gain_control()->Enable(enable) != 0)
3574 {
3575 _engineStatisticsPtr->SetLastError(
3576 VE_APM_ERROR, kTraceError,
3577 "SetRxAgcStatus() failed to set Agc state");
3578 return -1;
3579 }
3580
3581 _rxAgcIsEnabled = enable;
3582    _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
3583
3584 return 0;
3585}
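
// Illustrative sketch (not part of the original file): receive-side ("Rx") AGC
// runs in the channel-local AudioProcessing instance, so a hypothetical caller
// enables and tunes it per channel, independently of the send-side AGC. The
// AgcConfig initializer order below is an assumption for illustration only.
//
//   channel.SetRxAgcStatus(true, kAgcAdaptiveDigital);  // digital-only modes
//   AgcConfig cfg = { 3, 9, true };  // targetLeveldBOv, compression dB, limiter
//   channel.SetRxAgcConfig(cfg);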
3586
3587int
3588Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
3589{
3590 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3591 "Channel::GetRxAgcStatus(enable=?, mode=?)");
3592
3593 bool enable = _rxAudioProcessingModulePtr->gain_control()->is_enabled();
3594 GainControl::Mode agcMode =
3595 _rxAudioProcessingModulePtr->gain_control()->mode();
3596
3597 enabled = enable;
3598
3599 switch (agcMode)
3600 {
3601 case GainControl::kFixedDigital:
3602 mode = kAgcFixedDigital;
3603 break;
3604 case GainControl::kAdaptiveDigital:
3605 mode = kAgcAdaptiveDigital;
3606 break;
3607 default:
3608 _engineStatisticsPtr->SetLastError(
3609 VE_APM_ERROR, kTraceError,
3610 "GetRxAgcStatus() invalid Agc mode");
3611 return -1;
3612 }
3613
3614 return 0;
3615}
3616
3617int
3618Channel::SetRxAgcConfig(const AgcConfig config)
3619{
3620 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3621 "Channel::SetRxAgcConfig()");
3622
3623 if (_rxAudioProcessingModulePtr->gain_control()->set_target_level_dbfs(
3624 config.targetLeveldBOv) != 0)
3625 {
3626 _engineStatisticsPtr->SetLastError(
3627 VE_APM_ERROR, kTraceError,
3628            "SetRxAgcConfig() failed to set target peak |level| "
3629 "(or envelope) of the Agc");
3630 return -1;
3631 }
3632 if (_rxAudioProcessingModulePtr->gain_control()->set_compression_gain_db(
3633 config.digitalCompressionGaindB) != 0)
3634 {
3635 _engineStatisticsPtr->SetLastError(
3636 VE_APM_ERROR, kTraceError,
3637 "SetRxAgcConfig() failed to set the range in |gain| the"
3638 " digital compression stage may apply");
3639 return -1;
3640 }
3641 if (_rxAudioProcessingModulePtr->gain_control()->enable_limiter(
3642 config.limiterEnable) != 0)
3643 {
3644 _engineStatisticsPtr->SetLastError(
3645 VE_APM_ERROR, kTraceError,
3646 "SetRxAgcConfig() failed to set hard limiter to the signal");
3647 return -1;
3648 }
3649
3650 return 0;
3651}
3652
3653int
3654Channel::GetRxAgcConfig(AgcConfig& config)
3655{
3656 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3657 "Channel::GetRxAgcConfig(config=%?)");
3658
3659 config.targetLeveldBOv =
3660 _rxAudioProcessingModulePtr->gain_control()->target_level_dbfs();
3661 config.digitalCompressionGaindB =
3662 _rxAudioProcessingModulePtr->gain_control()->compression_gain_db();
3663 config.limiterEnable =
3664 _rxAudioProcessingModulePtr->gain_control()->is_limiter_enabled();
3665
3666 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3667 VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
3668 "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
3669 " limiterEnable=%d",
3670 config.targetLeveldBOv,
3671 config.digitalCompressionGaindB,
3672 config.limiterEnable);
3673
3674 return 0;
3675}
3676
3677#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
3678
3679#ifdef WEBRTC_VOICE_ENGINE_NR
3680
3681int
3682Channel::SetRxNsStatus(const bool enable, const NsModes mode)
3683{
3684 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3685 "Channel::SetRxNsStatus(enable=%d, mode=%d)",
3686 (int)enable, (int)mode);
3687
3688 NoiseSuppression::Level nsLevel(
3689 (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE);
3690 switch (mode)
3691 {
3692
3693 case kNsDefault:
3694 nsLevel = (NoiseSuppression::Level)
3695 WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE;
3696 break;
3697 case kNsUnchanged:
3698 nsLevel = _rxAudioProcessingModulePtr->noise_suppression()->level();
3699 break;
3700 case kNsConference:
3701 nsLevel = NoiseSuppression::kHigh;
3702 break;
3703 case kNsLowSuppression:
3704 nsLevel = NoiseSuppression::kLow;
3705 break;
3706 case kNsModerateSuppression:
3707 nsLevel = NoiseSuppression::kModerate;
3708 break;
3709 case kNsHighSuppression:
3710 nsLevel = NoiseSuppression::kHigh;
3711 break;
3712 case kNsVeryHighSuppression:
3713 nsLevel = NoiseSuppression::kVeryHigh;
3714 break;
3715    }
3716
3717 if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(nsLevel)
3718 != 0)
3719 {
3720 _engineStatisticsPtr->SetLastError(
3721 VE_APM_ERROR, kTraceError,
3722            "SetRxNsStatus() failed to set NS level");
3723 return -1;
3724 }
3725 if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(enable) != 0)
3726 {
3727 _engineStatisticsPtr->SetLastError(
3728 VE_APM_ERROR, kTraceError,
3729            "SetRxNsStatus() failed to set NS state");
3730 return -1;
3731 }
3732
3733 _rxNsIsEnabled = enable;
3734 _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
3735
3736 return 0;
3737}
3738
3739int
3740Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
3741{
3742 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3743 "Channel::GetRxNsStatus(enable=?, mode=?)");
3744
3745 bool enable =
3746 _rxAudioProcessingModulePtr->noise_suppression()->is_enabled();
3747 NoiseSuppression::Level ncLevel =
3748 _rxAudioProcessingModulePtr->noise_suppression()->level();
3749
3750 enabled = enable;
3751
3752 switch (ncLevel)
3753 {
3754 case NoiseSuppression::kLow:
3755 mode = kNsLowSuppression;
3756 break;
3757 case NoiseSuppression::kModerate:
3758 mode = kNsModerateSuppression;
3759 break;
3760 case NoiseSuppression::kHigh:
3761 mode = kNsHighSuppression;
3762 break;
3763 case NoiseSuppression::kVeryHigh:
3764 mode = kNsVeryHighSuppression;
3765 break;
3766    }
3767
3768 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3769 VoEId(_instanceId,_channelId),
3770 "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
3771 return 0;
3772}
3773
3774#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
3775
3776int
3777Channel::RegisterRTPObserver(VoERTPObserver& observer)
3778{
3779 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3780 "Channel::RegisterRTPObserver()");
3781    CriticalSectionScoped cs(&_callbackCritSect);
3782
3783 if (_rtpObserverPtr)
3784 {
3785 _engineStatisticsPtr->SetLastError(
3786 VE_INVALID_OPERATION, kTraceError,
3787 "RegisterRTPObserver() observer already enabled");
3788 return -1;
3789 }
3790
3791 _rtpObserverPtr = &observer;
3792 _rtpObserver = true;
3793
3794 return 0;
3795}
3796
3797int
3798Channel::DeRegisterRTPObserver()
3799{
3800 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3801 "Channel::DeRegisterRTPObserver()");
3802    CriticalSectionScoped cs(&_callbackCritSect);
3803
3804 if (!_rtpObserverPtr)
3805 {
3806 _engineStatisticsPtr->SetLastError(
3807 VE_INVALID_OPERATION, kTraceWarning,
3808 "DeRegisterRTPObserver() observer already disabled");
3809 return 0;
3810 }
3811
3812 _rtpObserver = false;
3813 _rtpObserverPtr = NULL;
3814
3815 return 0;
3816}
3817
3818int
3819Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
3820{
3821 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3822 "Channel::RegisterRTCPObserver()");
3823    CriticalSectionScoped cs(&_callbackCritSect);
3824
3825 if (_rtcpObserverPtr)
3826 {
3827 _engineStatisticsPtr->SetLastError(
3828 VE_INVALID_OPERATION, kTraceError,
3829 "RegisterRTCPObserver() observer already enabled");
3830 return -1;
3831 }
3832
3833 _rtcpObserverPtr = &observer;
3834 _rtcpObserver = true;
3835
3836 return 0;
3837}
3838
3839int
3840Channel::DeRegisterRTCPObserver()
3841{
3842 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3843 "Channel::DeRegisterRTCPObserver()");
3844    CriticalSectionScoped cs(&_callbackCritSect);
3845
3846 if (!_rtcpObserverPtr)
3847 {
3848 _engineStatisticsPtr->SetLastError(
3849 VE_INVALID_OPERATION, kTraceWarning,
3850 "DeRegisterRTCPObserver() observer already disabled");
3851 return 0;
3852 }
3853
3854 _rtcpObserver = false;
3855 _rtcpObserverPtr = NULL;
3856
3857 return 0;
3858}
3859
3860int
3861Channel::SetLocalSSRC(unsigned int ssrc)
3862{
3863 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3864 "Channel::SetLocalSSRC()");
3865 if (_sending)
3866 {
3867 _engineStatisticsPtr->SetLastError(
3868 VE_ALREADY_SENDING, kTraceError,
3869 "SetLocalSSRC() already sending");
3870 return -1;
3871 }
3872    if (_rtpRtcpModule->SetSSRC(ssrc) != 0)
3873    {
3874 _engineStatisticsPtr->SetLastError(
3875 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3876 "SetLocalSSRC() failed to set SSRC");
3877 return -1;
3878 }
3879 return 0;
3880}
3881
3882int
3883Channel::GetLocalSSRC(unsigned int& ssrc)
3884{
3885    ssrc = _rtpRtcpModule->SSRC();
3886    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3887 VoEId(_instanceId,_channelId),
3888 "GetLocalSSRC() => ssrc=%lu", ssrc);
3889 return 0;
3890}
3891
3892int
3893Channel::GetRemoteSSRC(unsigned int& ssrc)
3894{
3895    ssrc = _rtpRtcpModule->RemoteSSRC();
3896    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3897 VoEId(_instanceId,_channelId),
3898 "GetRemoteSSRC() => ssrc=%lu", ssrc);
3899 return 0;
3900}
3901
3902int
3903Channel::GetRemoteCSRCs(unsigned int arrCSRC[15])
3904{
3905 if (arrCSRC == NULL)
3906 {
3907 _engineStatisticsPtr->SetLastError(
3908 VE_INVALID_ARGUMENT, kTraceError,
3909 "GetRemoteCSRCs() invalid array argument");
3910 return -1;
3911 }
3912 WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize];
3913 WebRtc_Word32 CSRCs(0);
3914    CSRCs = _rtpRtcpModule->CSRCs(arrOfCSRC);
3915    if (CSRCs > 0)
3916 {
3917 memcpy(arrCSRC, arrOfCSRC, CSRCs * sizeof(WebRtc_UWord32));
3918 for (int i = 0; i < (int) CSRCs; i++)
3919 {
3920 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3921 VoEId(_instanceId, _channelId),
3922 "GetRemoteCSRCs() => arrCSRC[%d]=%lu", i, arrCSRC[i]);
3923 }
3924 } else
3925 {
3926 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3927 VoEId(_instanceId, _channelId),
3928 "GetRemoteCSRCs() => list is empty!");
3929 }
3930 return CSRCs;
3931}
3932
3933int
3934Channel::SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID)
3935{
3936    if (_rtpAudioProc.get() == NULL)
3937 {
3938 _rtpAudioProc.reset(AudioProcessing::Create(VoEModuleId(_instanceId,
3939 _channelId)));
3940 if (_rtpAudioProc.get() == NULL)
3941 {
3942 _engineStatisticsPtr->SetLastError(VE_NO_MEMORY, kTraceCritical,
3943 "Failed to create AudioProcessing");
3944 return -1;
3945 }
3946 }
3947
3948 if (_rtpAudioProc->level_estimator()->Enable(enable) !=
3949 AudioProcessing::kNoError)
3950 {
3951 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceWarning,
3952 "Failed to enable AudioProcessing::level_estimator()");
3953 }
3954
3955    _includeAudioLevelIndication = enable;
3956    return _rtpRtcpModule->SetRTPAudioLevelIndicationStatus(enable, ID);
3957}
3958int
3959Channel::GetRTPAudioLevelIndicationStatus(bool& enabled, unsigned char& ID)
3960{
3961 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3962 VoEId(_instanceId,_channelId),
3963 "GetRTPAudioLevelIndicationStatus() => enabled=%d, ID=%u",
3964 enabled, ID);
3965    return _rtpRtcpModule->GetRTPAudioLevelIndicationStatus(enabled, ID);
3966}
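
// Illustrative note (not part of the original file): enabling the audio-level
// indication lazily creates a channel-local AudioProcessing instance and turns
// on its level estimator, whose output is used for the RTP header extension
// registered under the given ID. A hypothetical caller:
//
//   channel.SetRTPAudioLevelIndicationStatus(true, 1);  // extension ID 1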
3967
3968int
3969Channel::SetRTCPStatus(bool enable)
3970{
3971 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3972 "Channel::SetRTCPStatus()");
3973    if (_rtpRtcpModule->SetRTCPStatus(enable ?
3974 kRtcpCompound : kRtcpOff) != 0)
3975 {
3976 _engineStatisticsPtr->SetLastError(
3977 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3978 "SetRTCPStatus() failed to set RTCP status");
3979 return -1;
3980 }
3981 return 0;
3982}
3983
3984int
3985Channel::GetRTCPStatus(bool& enabled)
3986{
3987    RTCPMethod method = _rtpRtcpModule->RTCP();
3988    enabled = (method != kRtcpOff);
3989 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3990 VoEId(_instanceId,_channelId),
3991 "GetRTCPStatus() => enabled=%d", enabled);
3992 return 0;
3993}
3994
3995int
3996Channel::SetRTCP_CNAME(const char cName[256])
3997{
3998 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3999 "Channel::SetRTCP_CNAME()");
4000    if (_rtpRtcpModule->SetCNAME(cName) != 0)
4001    {
4002 _engineStatisticsPtr->SetLastError(
4003 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4004 "SetRTCP_CNAME() failed to set RTCP CNAME");
4005 return -1;
4006 }
4007 return 0;
4008}
4009
4010int
4011Channel::GetRTCP_CNAME(char cName[256])
4012{
4013    if (_rtpRtcpModule->CNAME(cName) != 0)
4014    {
4015 _engineStatisticsPtr->SetLastError(
4016 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4017 "GetRTCP_CNAME() failed to retrieve RTCP CNAME");
4018 return -1;
4019 }
4020 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4021 VoEId(_instanceId, _channelId),
4022 "GetRTCP_CNAME() => cName=%s", cName);
4023 return 0;
4024}
4025
4026int
4027Channel::GetRemoteRTCP_CNAME(char cName[256])
4028{
4029 if (cName == NULL)
4030 {
4031 _engineStatisticsPtr->SetLastError(
4032 VE_INVALID_ARGUMENT, kTraceError,
4033 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
4034 return -1;
4035 }
4036    char cname[RTCP_CNAME_SIZE];
4037    const WebRtc_UWord32 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
4038    if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
4039    {
4040 _engineStatisticsPtr->SetLastError(
4041 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
4042 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
4043 return -1;
4044 }
4045 strcpy(cName, cname);
4046 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4047 VoEId(_instanceId, _channelId),
4048 "GetRemoteRTCP_CNAME() => cName=%s", cName);
4049 return 0;
4050}
4051
4052int
4053Channel::GetRemoteRTCPData(
4054 unsigned int& NTPHigh,
4055 unsigned int& NTPLow,
4056 unsigned int& timestamp,
4057 unsigned int& playoutTimestamp,
4058 unsigned int* jitter,
4059 unsigned short* fractionLost)
4060{
4061 // --- Information from sender info in received Sender Reports
4062
4063 RTCPSenderInfo senderInfo;
4064    if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
4065    {
4066 _engineStatisticsPtr->SetLastError(
4067 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4068            "GetRemoteRTCPData() failed to retrieve sender info for remote "
4069            "side");
4070 return -1;
4071 }
4072
4073 // We only utilize 12 out of 20 bytes in the sender info (ignores packet
4074 // and octet count)
4075 NTPHigh = senderInfo.NTPseconds;
4076 NTPLow = senderInfo.NTPfraction;
4077 timestamp = senderInfo.RTPtimeStamp;
4078
4079 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4080 VoEId(_instanceId, _channelId),
4081 "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
4082 "timestamp=%lu",
4083 NTPHigh, NTPLow, timestamp);
4084
4085 // --- Locally derived information
4086
4087 // This value is updated on each incoming RTCP packet (0 when no packet
4088 // has been received)
4089 playoutTimestamp = _playoutTimeStampRTCP;
4090
4091 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4092 VoEId(_instanceId, _channelId),
4093 "GetRemoteRTCPData() => playoutTimestamp=%lu",
4094 _playoutTimeStampRTCP);
4095
4096 if (NULL != jitter || NULL != fractionLost)
4097 {
4098        // Get all RTCP receiver report blocks that have been received on this
4099 // channel. If we receive RTP packets from a remote source we know the
4100 // remote SSRC and use the report block from him.
4101 // Otherwise use the first report block.
4102 std::vector<RTCPReportBlock> remote_stats;
4103        if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
4104            remote_stats.empty()) {
4105 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4106 VoEId(_instanceId, _channelId),
4107 "GetRemoteRTCPData() failed to measure statistics due"
4108 " to lack of received RTP and/or RTCP packets");
4109 return -1;
4110        }
4111
4112        WebRtc_UWord32 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
4113        std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
4114 for (; it != remote_stats.end(); ++it) {
4115 if (it->remoteSSRC == remoteSSRC)
4116 break;
4117        }
4118
4119 if (it == remote_stats.end()) {
4120 // If we have not received any RTCP packets from this SSRC it probably
4121 // means that we have not received any RTP packets.
4122 // Use the first received report block instead.
4123 it = remote_stats.begin();
4124 remoteSSRC = it->remoteSSRC;
4125        }
4126
4127        if (jitter) {
4128 *jitter = it->jitter;
4129 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4130 VoEId(_instanceId, _channelId),
4131 "GetRemoteRTCPData() => jitter = %lu", *jitter);
4132 }
4133
4134        if (fractionLost) {
4135 *fractionLost = it->fractionLost;
4136 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4137 VoEId(_instanceId, _channelId),
4138 "GetRemoteRTCPData() => fractionLost = %lu",
4139 *fractionLost);
4140 }
4141    }
4142 return 0;
4143}
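
// Worked note (not part of the original file): NTPHigh/NTPLow come straight
// from the last received Sender Report, so a hypothetical consumer could
// express that timestamp in milliseconds as
//
//   double ntp_ms = NTPHigh * 1000.0 + (NTPLow / 4294967296.0) * 1000.0;
//
// The report-block loop above prefers the block whose remoteSSRC matches the
// sender we are actually receiving RTP from, and only falls back to the first
// block when no matching SSRC is found.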
4144
4145int
4146Channel::SendApplicationDefinedRTCPPacket(const unsigned char subType,
4147 unsigned int name,
4148 const char* data,
4149 unsigned short dataLengthInBytes)
4150{
4151 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4152 "Channel::SendApplicationDefinedRTCPPacket()");
4153 if (!_sending)
4154 {
4155 _engineStatisticsPtr->SetLastError(
4156 VE_NOT_SENDING, kTraceError,
4157 "SendApplicationDefinedRTCPPacket() not sending");
4158 return -1;
4159 }
4160 if (NULL == data)
4161 {
4162 _engineStatisticsPtr->SetLastError(
4163 VE_INVALID_ARGUMENT, kTraceError,
4164 "SendApplicationDefinedRTCPPacket() invalid data value");
4165 return -1;
4166 }
4167 if (dataLengthInBytes % 4 != 0)
4168 {
4169 _engineStatisticsPtr->SetLastError(
4170 VE_INVALID_ARGUMENT, kTraceError,
4171 "SendApplicationDefinedRTCPPacket() invalid length value");
4172 return -1;
4173 }
4174    RTCPMethod status = _rtpRtcpModule->RTCP();
4175    if (status == kRtcpOff)
4176 {
4177 _engineStatisticsPtr->SetLastError(
4178 VE_RTCP_ERROR, kTraceError,
4179 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
4180 return -1;
4181 }
4182
4183 // Create and schedule the RTCP APP packet for transmission
4184    if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
4185 subType,
4186 name,
4187 (const unsigned char*) data,
4188 dataLengthInBytes) != 0)
4189 {
4190 _engineStatisticsPtr->SetLastError(
4191 VE_SEND_ERROR, kTraceError,
4192 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
4193 return -1;
4194 }
4195 return 0;
4196}
4197
4198int
4199Channel::GetRTPStatistics(
4200 unsigned int& averageJitterMs,
4201 unsigned int& maxJitterMs,
4202 unsigned int& discardedPackets)
4203{
4204 WebRtc_UWord8 fraction_lost(0);
4205 WebRtc_UWord32 cum_lost(0);
4206 WebRtc_UWord32 ext_max(0);
4207 WebRtc_UWord32 jitter(0);
4208 WebRtc_UWord32 max_jitter(0);
4209
4210 // The jitter statistics is updated for each received RTP packet and is
4211 // based on received packets.
4212    if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
4213 &cum_lost,
4214 &ext_max,
4215 &jitter,
4216 &max_jitter) != 0)
4217 {
4218 _engineStatisticsPtr->SetLastError(
4219 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
4220            "GetRTPStatistics() failed to read RTP statistics from the "
4221            "RTP/RTCP module");
4222 }
4223
4224 const WebRtc_Word32 playoutFrequency =
4225 _audioCodingModule.PlayoutFrequency();
4226 if (playoutFrequency > 0)
4227 {
4228 // Scale RTP statistics given the current playout frequency
4229 maxJitterMs = max_jitter / (playoutFrequency / 1000);
4230 averageJitterMs = jitter / (playoutFrequency / 1000);
4231 }
4232
4233 discardedPackets = _numberOfDiscardedPackets;
4234
4235 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4236 VoEId(_instanceId, _channelId),
4237 "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
4238                 " discardedPackets = %lu)",
4239                 averageJitterMs, maxJitterMs, discardedPackets);
4240 return 0;
4241}
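
// Worked example (not part of the original file): the interarrival jitter
// reported by the RTP/RTCP module is expressed in RTP timestamp units, so
// dividing by (playoutFrequency / 1000) converts it to milliseconds. For
// instance, a jitter of 320 units at a 16000 Hz playout frequency maps to
// 320 / 16 = 20 ms.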
4242
4243int Channel::GetRemoteRTCPSenderInfo(SenderInfo* sender_info) {
4244 if (sender_info == NULL) {
4245 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
4246 "GetRemoteRTCPSenderInfo() invalid sender_info.");
4247 return -1;
4248 }
4249
4250 // Get the sender info from the latest received RTCP Sender Report.
4251 RTCPSenderInfo rtcp_sender_info;
4252 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_sender_info) != 0) {
4253 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4254 "GetRemoteRTCPSenderInfo() failed to read RTCP SR sender info.");
4255 return -1;
4256 }
4257
4258 sender_info->NTP_timestamp_high = rtcp_sender_info.NTPseconds;
4259 sender_info->NTP_timestamp_low = rtcp_sender_info.NTPfraction;
4260 sender_info->RTP_timestamp = rtcp_sender_info.RTPtimeStamp;
4261 sender_info->sender_packet_count = rtcp_sender_info.sendPacketCount;
4262 sender_info->sender_octet_count = rtcp_sender_info.sendOctetCount;
4263 return 0;
4264}
4265
4266int Channel::GetRemoteRTCPReportBlocks(
4267 std::vector<ReportBlock>* report_blocks) {
4268 if (report_blocks == NULL) {
4269 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
4270 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
4271 return -1;
4272 }
4273
4274 // Get the report blocks from the latest received RTCP Sender or Receiver
4275 // Report. Each element in the vector contains the sender's SSRC and a
4276 // report block according to RFC 3550.
4277 std::vector<RTCPReportBlock> rtcp_report_blocks;
4278 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
4279 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4280 "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
4281 return -1;
4282 }
4283
4284 if (rtcp_report_blocks.empty())
4285 return 0;
4286
4287 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
4288 for (; it != rtcp_report_blocks.end(); ++it) {
4289 ReportBlock report_block;
4290 report_block.sender_SSRC = it->remoteSSRC;
4291 report_block.source_SSRC = it->sourceSSRC;
4292 report_block.fraction_lost = it->fractionLost;
4293 report_block.cumulative_num_packets_lost = it->cumulativeLost;
4294 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
4295 report_block.interarrival_jitter = it->jitter;
4296 report_block.last_SR_timestamp = it->lastSR;
4297 report_block.delay_since_last_SR = it->delaySinceLastSR;
4298 report_blocks->push_back(report_block);
4299 }
4300 return 0;
4301}
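
// Note (not part of the original file): fraction_lost in each report block is
// the RFC 3550 8-bit fixed-point fraction (lost / expected, scaled by 256), so
// a hypothetical consumer would convert it with fraction_lost / 256.0 rather
// than reading it as a percentage.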
4302
4303int
4304Channel::GetRTPStatistics(CallStatistics& stats)
4305{
4306 WebRtc_UWord8 fraction_lost(0);
4307 WebRtc_UWord32 cum_lost(0);
4308 WebRtc_UWord32 ext_max(0);
4309 WebRtc_UWord32 jitter(0);
4310 WebRtc_UWord32 max_jitter(0);
4311
4312 // --- Part one of the final structure (four values)
4313
4314 // The jitter statistics is updated for each received RTP packet and is
4315 // based on received packets.
4316    if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
4317 &cum_lost,
4318 &ext_max,
4319 &jitter,
4320 &max_jitter) != 0)
4321 {
4322 _engineStatisticsPtr->SetLastError(
4323 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
4324 "GetRTPStatistics() failed to read RTP statistics from the "
4325 "RTP/RTCP module");
4326 }
4327
4328 stats.fractionLost = fraction_lost;
4329 stats.cumulativeLost = cum_lost;
4330 stats.extendedMax = ext_max;
4331 stats.jitterSamples = jitter;
4332
4333 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4334 VoEId(_instanceId, _channelId),
4335 "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
4336                 " extendedMax=%lu, jitterSamples=%li)",
4337                 stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
4338 stats.jitterSamples);
4339
4340 // --- Part two of the final structure (one value)
4341
4342 WebRtc_UWord16 RTT(0);
4343    RTCPMethod method = _rtpRtcpModule->RTCP();
4344    if (method == kRtcpOff)
4345 {
4346 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4347 VoEId(_instanceId, _channelId),
4348                     "GetRTPStatistics() RTCP is disabled => valid RTT "
4349                     "measurements cannot be retrieved");
4350 } else
4351 {
4352 // The remote SSRC will be zero if no RTP packet has been received.
4353        WebRtc_UWord32 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
4354        if (remoteSSRC > 0)
4355 {
4356 WebRtc_UWord16 avgRTT(0);
4357 WebRtc_UWord16 maxRTT(0);
4358 WebRtc_UWord16 minRTT(0);
4359
4360            if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT)
4361 != 0)
4362 {
4363 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4364 VoEId(_instanceId, _channelId),
4365                             "GetRTPStatistics() failed to retrieve RTT from "
4366                             "the RTP/RTCP module");
4367 }
4368 } else
4369 {
4370 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4371 VoEId(_instanceId, _channelId),
4372                         "GetRTPStatistics() failed to measure RTT since no "
4373                         "RTP packets have been received yet");
4374 }
4375 }
4376
4377 stats.rttMs = static_cast<int> (RTT);
4378
4379 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4380 VoEId(_instanceId, _channelId),
4381 "GetRTPStatistics() => rttMs=%d", stats.rttMs);
4382
4383 // --- Part three of the final structure (four values)
4384
4385 WebRtc_UWord32 bytesSent(0);
4386 WebRtc_UWord32 packetsSent(0);
4387 WebRtc_UWord32 bytesReceived(0);
4388 WebRtc_UWord32 packetsReceived(0);
4389
4390    if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
4391 &packetsSent,
4392 &bytesReceived,
4393 &packetsReceived) != 0)
4394 {
4395 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4396 VoEId(_instanceId, _channelId),
4397 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
4398                 " output will not be complete");
4399    }
4400
4401 stats.bytesSent = bytesSent;
4402 stats.packetsSent = packetsSent;
4403 stats.bytesReceived = bytesReceived;
4404 stats.packetsReceived = packetsReceived;
4405
4406 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4407 VoEId(_instanceId, _channelId),
4408 "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
4409                 " bytesReceived=%d, packetsReceived=%d)",
4410                 stats.bytesSent, stats.packetsSent, stats.bytesReceived,
4411 stats.packetsReceived);
4412
4413 return 0;
4414}
4415
4416int Channel::SetFECStatus(bool enable, int redPayloadtype) {
4417 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4418 "Channel::SetFECStatus()");
4419
4420  if (enable) {
4421 if (redPayloadtype < 0 || redPayloadtype > 127) {
4422 _engineStatisticsPtr->SetLastError(
4423 VE_PLTYPE_ERROR, kTraceError,
4424 "SetFECStatus() invalid RED payload type");
4425 return -1;
4426 }
4427
4428 if (SetRedPayloadType(redPayloadtype) < 0) {
4429 _engineStatisticsPtr->SetLastError(
4430 VE_CODEC_ERROR, kTraceError,
4431        "SetFECStatus() failed to register RED payload type in the ACM");
4432 return -1;
4433 }
4434  }
4435
4436  if (_audioCodingModule.SetFECStatus(enable) != 0) {
4437 _engineStatisticsPtr->SetLastError(
4438 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4439 "SetFECStatus() failed to set FEC state in the ACM");
4440 return -1;
4441 }
4442 return 0;
4443}
4444
4445int
4446Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
4447{
4448 enabled = _audioCodingModule.FECStatus();
4449 if (enabled)
4450 {
4451 WebRtc_Word8 payloadType(0);
4452        if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
4453        {
4454 _engineStatisticsPtr->SetLastError(
4455 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4456 "GetFECStatus() failed to retrieve RED PT from RTP/RTCP "
4457 "module");
4458 return -1;
4459        }
        // Report the RED payload type retrieved above (was previously left unset).
        redPayloadtype = payloadType;
4460 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4461 VoEId(_instanceId, _channelId),
4462 "GetFECStatus() => enabled=%d, redPayloadtype=%d",
4463 enabled, redPayloadtype);
4464 return 0;
4465 }
4466 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4467 VoEId(_instanceId, _channelId),
4468 "GetFECStatus() => enabled=%d", enabled);
4469 return 0;
4470}
4471
4472int
4473Channel::StartRTPDump(const char fileNameUTF8[1024],
4474 RTPDirections direction)
4475{
4476 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4477 "Channel::StartRTPDump()");
4478 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4479 {
4480 _engineStatisticsPtr->SetLastError(
4481 VE_INVALID_ARGUMENT, kTraceError,
4482 "StartRTPDump() invalid RTP direction");
4483 return -1;
4484 }
4485 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4486 &_rtpDumpIn : &_rtpDumpOut;
4487 if (rtpDumpPtr == NULL)
4488 {
4489 assert(false);
4490 return -1;
4491 }
4492 if (rtpDumpPtr->IsActive())
4493 {
4494 rtpDumpPtr->Stop();
4495 }
4496 if (rtpDumpPtr->Start(fileNameUTF8) != 0)
4497 {
4498 _engineStatisticsPtr->SetLastError(
4499 VE_BAD_FILE, kTraceError,
4500 "StartRTPDump() failed to create file");
4501 return -1;
4502 }
4503 return 0;
4504}
4505
4506int
4507Channel::StopRTPDump(RTPDirections direction)
4508{
4509 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4510 "Channel::StopRTPDump()");
4511 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4512 {
4513 _engineStatisticsPtr->SetLastError(
4514 VE_INVALID_ARGUMENT, kTraceError,
4515 "StopRTPDump() invalid RTP direction");
4516 return -1;
4517 }
4518 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4519 &_rtpDumpIn : &_rtpDumpOut;
4520 if (rtpDumpPtr == NULL)
4521 {
4522 assert(false);
4523 return -1;
4524 }
4525 if (!rtpDumpPtr->IsActive())
4526 {
4527 return 0;
4528 }
4529 return rtpDumpPtr->Stop();
4530}
4531
4532bool
4533Channel::RTPDumpIsActive(RTPDirections direction)
4534{
4535 if ((direction != kRtpIncoming) &&
4536 (direction != kRtpOutgoing))
4537 {
4538 _engineStatisticsPtr->SetLastError(
4539 VE_INVALID_ARGUMENT, kTraceError,
4540 "RTPDumpIsActive() invalid RTP direction");
4541 return false;
4542 }
4543 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4544 &_rtpDumpIn : &_rtpDumpOut;
4545 return rtpDumpPtr->IsActive();
4546}
4547
4548int
4549Channel::InsertExtraRTPPacket(unsigned char payloadType,
4550 bool markerBit,
4551 const char* payloadData,
4552 unsigned short payloadSize)
4553{
4554 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4555 "Channel::InsertExtraRTPPacket()");
4556 if (payloadType > 127)
4557 {
4558 _engineStatisticsPtr->SetLastError(
4559 VE_INVALID_PLTYPE, kTraceError,
4560 "InsertExtraRTPPacket() invalid payload type");
4561 return -1;
4562 }
4563 if (payloadData == NULL)
4564 {
4565 _engineStatisticsPtr->SetLastError(
4566 VE_INVALID_ARGUMENT, kTraceError,
4567 "InsertExtraRTPPacket() invalid payload data");
4568 return -1;
4569 }
4570    if (payloadSize > _rtpRtcpModule->MaxDataPayloadLength())
4571    {
4572 _engineStatisticsPtr->SetLastError(
4573 VE_INVALID_ARGUMENT, kTraceError,
4574 "InsertExtraRTPPacket() invalid payload size");
4575 return -1;
4576 }
4577 if (!_sending)
4578 {
4579 _engineStatisticsPtr->SetLastError(
4580 VE_NOT_SENDING, kTraceError,
4581 "InsertExtraRTPPacket() not sending");
4582 return -1;
4583 }
4584
4585 // Create extra RTP packet by calling RtpRtcp::SendOutgoingData().
4586 // Transport::SendPacket() will be called by the module when the RTP packet
4587 // is created.
4588 // The call to SendOutgoingData() does *not* modify the timestamp and
4589 // payloadtype to ensure that the RTP module generates a valid RTP packet
4590 // (user might utilize a non-registered payload type).
4591 // The marker bit and payload type will be replaced just before the actual
4592 // transmission, i.e., the actual modification is done *after* the RTP
4593 // module has delivered its RTP packet back to the VoE.
4594 // We will use the stored values above when the packet is modified
4595 // (see Channel::SendPacket()).
4596
4597 _extraPayloadType = payloadType;
4598 _extraMarkerBit = markerBit;
4599 _insertExtraRTPPacket = true;
4600
4601    if (_rtpRtcpModule->SendOutgoingData(kAudioFrameSpeech,
4602 _lastPayloadType,
4603 _lastLocalTimeStamp,
4604            // Leaving the time when this frame was
4605 // received from the capture device as
4606 // undefined for voice for now.
4607 -1,
4608 (const WebRtc_UWord8*) payloadData,
4609 payloadSize) != 0)
4610 {
4611 _engineStatisticsPtr->SetLastError(
4612 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4613 "InsertExtraRTPPacket() failed to send extra RTP packet");
4614 return -1;
4615 }
4616
4617 return 0;
4618}
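
// Illustrative sketch (not part of the original file): because the stored
// marker bit and payload type are patched into the packet later, in
// Channel::SendPacket(), a hypothetical caller only needs to be actively
// sending and to respect MaxDataPayloadLength(). The payload type value 110
// below is arbitrary (any value <= 127 accepted above).
//
//   const char probe[4] = {0, 0, 0, 0};
//   channel.InsertExtraRTPPacket(110, true, probe, sizeof(probe));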
4619
4620WebRtc_UWord32
4621Channel::Demultiplex(const AudioFrame& audioFrame)
4622{
4623 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4624 "Channel::Demultiplex()");
4625    _audioFrame.CopyFrom(audioFrame);
4626    _audioFrame.id_ = _channelId;
4627    return 0;
4628}
4629
4630WebRtc_UWord32
4631Channel::PrepareEncodeAndSend(int mixingFrequency)
4632{
4633 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4634 "Channel::PrepareEncodeAndSend()");
4635
4636    if (_audioFrame.samples_per_channel_ == 0)
4637    {
4638 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4639 "Channel::PrepareEncodeAndSend() invalid audio frame");
4640 return -1;
4641 }
4642
4643 if (_inputFilePlaying)
4644 {
4645 MixOrReplaceAudioWithFile(mixingFrequency);
4646 }
4647
4648 if (_mute)
4649 {
4650 AudioFrameOperations::Mute(_audioFrame);
4651 }
4652
4653 if (_inputExternalMedia)
4654 {
4655        CriticalSectionScoped cs(&_callbackCritSect);
4656        const bool isStereo = (_audioFrame.num_channels_ == 2);
4657        if (_inputExternalMediaCallbackPtr)
4658 {
4659 _inputExternalMediaCallbackPtr->Process(
4660 _channelId,
4661 kRecordingPerChannel,
4662                (WebRtc_Word16*)_audioFrame.data_,
4663 _audioFrame.samples_per_channel_,
4664 _audioFrame.sample_rate_hz_,
4665 isStereo);
4666 }
4667 }
4668
4669 InsertInbandDtmfTone();
4670
4671    if (_includeAudioLevelIndication)
4672 {
4673 assert(_rtpAudioProc.get() != NULL);
4674
4675 // Check if settings need to be updated.
4676        if (_rtpAudioProc->sample_rate_hz() != _audioFrame.sample_rate_hz_)
4677        {
4678            if (_rtpAudioProc->set_sample_rate_hz(_audioFrame.sample_rate_hz_) !=
4679 AudioProcessing::kNoError)
4680 {
4681 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4682 VoEId(_instanceId, _channelId),
4683 "Error setting AudioProcessing sample rate");
4684 return -1;
4685 }
4686 }
4687
4688        if (_rtpAudioProc->num_input_channels() != _audioFrame.num_channels_)
4689        {
4690            if (_rtpAudioProc->set_num_channels(_audioFrame.num_channels_,
4691 _audioFrame.num_channels_)
4692 != AudioProcessing::kNoError)
4693 {
4694 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4695 VoEId(_instanceId, _channelId),
4696 "Error setting AudioProcessing channels");
4697 return -1;
4698 }
4699 }
4700
4701 // Performs level analysis only; does not affect the signal.
4702 _rtpAudioProc->ProcessStream(&_audioFrame);
4703 }
4704
4705    return 0;
4706}
4707
4708WebRtc_UWord32
4709Channel::EncodeAndSend()
4710{
4711 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4712 "Channel::EncodeAndSend()");
4713
4714    assert(_audioFrame.num_channels_ <= 2);
4715    if (_audioFrame.samples_per_channel_ == 0)
4716    {
4717 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4718 "Channel::EncodeAndSend() invalid audio frame");
4719 return -1;
4720 }
4721
4722    _audioFrame.id_ = _channelId;
4723
4724 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
4725
4726 // The ACM resamples internally.
4727    _audioFrame.timestamp_ = _timeStamp;
4728    if (_audioCodingModule.Add10MsData((AudioFrame&)_audioFrame) != 0)
4729 {
4730 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
4731 "Channel::EncodeAndSend() ACM encoding failed");
4732 return -1;
4733 }
4734
4735    _timeStamp += _audioFrame.samples_per_channel_;
4736
4737 // --- Encode if complete frame is ready
4738
4739 // This call will trigger AudioPacketizationCallback::SendData if encoding
4740 // is done and payload is ready for packetization and transmission.
4741 return _audioCodingModule.Process();
4742}
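
// Worked note (not part of the original file): _timeStamp advances by
// samples_per_channel_ per 10 ms frame, so at a 16000 Hz input rate the RTP
// timestamp grows by 160 per call to EncodeAndSend(), matching the 10 ms
// cadence expected by Add10MsData().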
4743
4744int Channel::RegisterExternalMediaProcessing(
4745 ProcessingTypes type,
4746 VoEMediaProcess& processObject)
4747{
4748 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4749 "Channel::RegisterExternalMediaProcessing()");
4750
4751    CriticalSectionScoped cs(&_callbackCritSect);
4752
4753 if (kPlaybackPerChannel == type)
4754 {
4755 if (_outputExternalMediaCallbackPtr)
4756 {
4757 _engineStatisticsPtr->SetLastError(
4758 VE_INVALID_OPERATION, kTraceError,
4759 "Channel::RegisterExternalMediaProcessing() "
4760 "output external media already enabled");
4761 return -1;
4762 }
4763 _outputExternalMediaCallbackPtr = &processObject;
4764 _outputExternalMedia = true;
4765 }
4766 else if (kRecordingPerChannel == type)
4767 {
4768 if (_inputExternalMediaCallbackPtr)
4769 {
4770 _engineStatisticsPtr->SetLastError(
4771 VE_INVALID_OPERATION, kTraceError,
4772 "Channel::RegisterExternalMediaProcessing() "
4773                "input external media already enabled");
4774 return -1;
4775 }
4776 _inputExternalMediaCallbackPtr = &processObject;
4777 _inputExternalMedia = true;
4778 }
4779 return 0;
4780}
4781
4782int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
4783{
4784 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4785 "Channel::DeRegisterExternalMediaProcessing()");
4786
4787    CriticalSectionScoped cs(&_callbackCritSect);
4788
4789 if (kPlaybackPerChannel == type)
4790 {
4791 if (!_outputExternalMediaCallbackPtr)
4792 {
4793 _engineStatisticsPtr->SetLastError(
4794 VE_INVALID_OPERATION, kTraceWarning,
4795 "Channel::DeRegisterExternalMediaProcessing() "
4796 "output external media already disabled");
4797 return 0;
4798 }
4799 _outputExternalMedia = false;
4800 _outputExternalMediaCallbackPtr = NULL;
4801 }
4802 else if (kRecordingPerChannel == type)
4803 {
4804 if (!_inputExternalMediaCallbackPtr)
4805 {
4806 _engineStatisticsPtr->SetLastError(
4807 VE_INVALID_OPERATION, kTraceWarning,
4808 "Channel::DeRegisterExternalMediaProcessing() "
4809 "input external media already disabled");
4810 return 0;
4811 }
4812 _inputExternalMedia = false;
4813 _inputExternalMediaCallbackPtr = NULL;
4814 }
4815
4816 return 0;
4817}
4818
4819int Channel::SetExternalMixing(bool enabled) {
4820 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4821 "Channel::SetExternalMixing(enabled=%d)", enabled);
4822
4823 if (_playing)
4824 {
4825 _engineStatisticsPtr->SetLastError(
4826 VE_INVALID_OPERATION, kTraceError,
4827 "Channel::SetExternalMixing() "
4828 "external mixing cannot be changed while playing.");
4829 return -1;
4830 }
4831
4832 _externalMixing = enabled;
4833
4834 return 0;
4835}
4836
4837int
4838Channel::ResetRTCPStatistics()
4839{
4840 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4841 "Channel::ResetRTCPStatistics()");
4842 WebRtc_UWord32 remoteSSRC(0);
4843    remoteSSRC = _rtpRtcpModule->RemoteSSRC();
4844    return _rtpRtcpModule->ResetRTT(remoteSSRC);
4845}
4846
4847int
4848Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
4849{
4850 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4851 "Channel::GetRoundTripTimeSummary()");
4852 // Override default module outputs for the case when RTCP is disabled.
4853    // This is done to ensure backward compatibility with older versions of
4854    // VoiceEngine that did not use the RTP/RTCP module.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004855 if (!_rtpRtcpModule->RTCP())
niklase@google.com470e71d2011-07-07 08:21:25 +00004856 {
4857 delaysMs.min = -1;
4858 delaysMs.max = -1;
4859 delaysMs.average = -1;
4860 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4861 "Channel::GetRoundTripTimeSummary() RTCP is disabled =>"
4862 " valid RTT measurements cannot be retrieved");
4863 return 0;
4864 }
4865
4866 WebRtc_UWord32 remoteSSRC;
4867 WebRtc_UWord16 RTT;
4868 WebRtc_UWord16 avgRTT;
4869 WebRtc_UWord16 maxRTT;
4870 WebRtc_UWord16 minRTT;
4871 // The remote SSRC will be zero if no RTP packet has been received.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004872 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004873 if (remoteSSRC == 0)
4874 {
4875 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4876 "Channel::GetRoundTripTimeSummary() unable to measure RTT"
4877 " since no RTP packet has been received yet");
4878 }
4879
4880 // Retrieve RTT statistics from the RTP/RTCP module for the specified
4881 // channel and SSRC. The SSRC is required to parse out the correct source
4882 // in conference scenarios.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004883 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT,&maxRTT) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004884 {
4885 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4886 "GetRoundTripTimeSummary unable to retrieve RTT values"
4887 " from the RTCP layer");
4888 delaysMs.min = -1; delaysMs.max = -1; delaysMs.average = -1;
4889 }
4890 else
4891 {
4892 delaysMs.min = minRTT;
4893 delaysMs.max = maxRTT;
4894 delaysMs.average = avgRTT;
4895 }
4896 return 0;
4897}
4898
4899int
4900Channel::GetNetworkStatistics(NetworkStatistics& stats)
4901{
4902 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4903 "Channel::GetNetworkStatistics()");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00004904 ACMNetworkStatistics acm_stats;
4905 int return_value = _audioCodingModule.NetworkStatistics(&acm_stats);
4906 if (return_value >= 0) {
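    // Note: this copy assumes that NetworkStatistics mirrors the field layout
    // of ACMNetworkStatistics.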
4907 memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
4908 }
4909 return return_value;
niklase@google.com470e71d2011-07-07 08:21:25 +00004910}
4911
4912int
niklase@google.com470e71d2011-07-07 08:21:25 +00004913Channel::GetDelayEstimate(int& delayMs) const
4914{
4915 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4916 "Channel::GetDelayEstimate()");
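    // _averageDelayMs is stored as 10x the delay in milliseconds (see
    // UpdatePacketDelay()); adding 5 before dividing by 10 rounds to the
    // nearest millisecond.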
4917 delayMs = (_averageDelayMs + 5) / 10 + _recPacketDelayMs;
4918 return 0;
4919}
4920
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +00004921int Channel::SetInitialPlayoutDelay(int delay_ms)
4922{
4923 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4924 "Channel::SetInitialPlayoutDelay()");
4925 if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
4926 (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
4927 {
4928 _engineStatisticsPtr->SetLastError(
4929 VE_INVALID_ARGUMENT, kTraceError,
4930            "SetInitialPlayoutDelay() invalid initial delay");
4931 return -1;
4932 }
4933 if (_audioCodingModule.SetInitialPlayoutDelay(delay_ms) != 0)
4934 {
4935 _engineStatisticsPtr->SetLastError(
4936 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4937            "SetInitialPlayoutDelay() failed to set initial playout delay");
4938 return -1;
4939 }
4940 return 0;
4941}
4942
4943
niklase@google.com470e71d2011-07-07 08:21:25 +00004944int
4945Channel::SetMinimumPlayoutDelay(int delayMs)
4946{
4947 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4948 "Channel::SetMinimumPlayoutDelay()");
4949 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
4950 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
4951 {
4952 _engineStatisticsPtr->SetLastError(
4953 VE_INVALID_ARGUMENT, kTraceError,
4954 "SetMinimumPlayoutDelay() invalid min delay");
4955 return -1;
4956 }
4957 if (_audioCodingModule.SetMinimumPlayoutDelay(delayMs) != 0)
4958 {
4959 _engineStatisticsPtr->SetLastError(
4960 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4961 "SetMinimumPlayoutDelay() failed to set min playout delay");
4962 return -1;
4963 }
4964 return 0;
4965}
4966
4967int
4968Channel::GetPlayoutTimestamp(unsigned int& timestamp)
4969{
4970 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4971 "Channel::GetPlayoutTimestamp()");
4972 WebRtc_UWord32 playoutTimestamp(0);
4973 if (GetPlayoutTimeStamp(playoutTimestamp) != 0)
4974 {
4975 _engineStatisticsPtr->SetLastError(
4976 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4977 "GetPlayoutTimestamp() failed to retrieve timestamp");
4978 return -1;
4979 }
4980 timestamp = playoutTimestamp;
4981 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4982 VoEId(_instanceId,_channelId),
4983 "GetPlayoutTimestamp() => timestamp=%u", timestamp);
4984 return 0;
4985}
4986
4987int
4988Channel::SetInitTimestamp(unsigned int timestamp)
4989{
4990 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4991 "Channel::SetInitTimestamp()");
4992 if (_sending)
4993 {
4994 _engineStatisticsPtr->SetLastError(
4995 VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
4996 return -1;
4997 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004998 if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004999 {
5000 _engineStatisticsPtr->SetLastError(
5001 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
5002 "SetInitTimestamp() failed to set timestamp");
5003 return -1;
5004 }
5005 return 0;
5006}
5007
5008int
5009Channel::SetInitSequenceNumber(short sequenceNumber)
5010{
5011 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
5012 "Channel::SetInitSequenceNumber()");
5013 if (_sending)
5014 {
5015 _engineStatisticsPtr->SetLastError(
5016 VE_SENDING, kTraceError,
5017 "SetInitSequenceNumber() already sending");
5018 return -1;
5019 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00005020 if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00005021 {
5022 _engineStatisticsPtr->SetLastError(
5023 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
5024 "SetInitSequenceNumber() failed to set sequence number");
5025 return -1;
5026 }
5027 return 0;
5028}
5029
5030int
5031Channel::GetRtpRtcp(RtpRtcp* &rtpRtcpModule) const
5032{
5033 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
5034 "Channel::GetRtpRtcp()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00005035 rtpRtcpModule = _rtpRtcpModule.get();
niklase@google.com470e71d2011-07-07 08:21:25 +00005036 return 0;
5037}
5038
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005039// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
5040// a shared helper.
niklase@google.com470e71d2011-07-07 08:21:25 +00005041WebRtc_Word32
xians@google.com0b0665a2011-08-08 08:18:44 +00005042Channel::MixOrReplaceAudioWithFile(const int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00005043{
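    // Scratch buffer for 10 ms of (mono) file audio; 640 samples is large
    // enough for any mixing frequency used here.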
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005044 scoped_array<WebRtc_Word16> fileBuffer(new WebRtc_Word16[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005045 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005046
5047 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00005048 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00005049
5050 if (_inputFilePlayerPtr == NULL)
5051 {
5052 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5053 VoEId(_instanceId, _channelId),
5054 "Channel::MixOrReplaceAudioWithFile() fileplayer"
5055                         " doesn't exist");
5056 return -1;
5057 }
5058
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005059 if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00005060 fileSamples,
5061 mixingFrequency) == -1)
5062 {
5063 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5064 VoEId(_instanceId, _channelId),
5065 "Channel::MixOrReplaceAudioWithFile() file mixing "
5066 "failed");
5067 return -1;
5068 }
5069 if (fileSamples == 0)
5070 {
5071 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5072 VoEId(_instanceId, _channelId),
5073                         "Channel::MixOrReplaceAudioWithFile() file has ended");
5074 return 0;
5075 }
5076 }
5077
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005078 assert(_audioFrame.samples_per_channel_ == fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005079
5080 if (_mixFileWithMicrophone)
5081 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005082 // Currently file stream is always mono.
5083 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005084 Utility::MixWithSat(_audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005085 _audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005086 fileBuffer.get(),
5087 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005088 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005089 }
5090 else
5091 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005092 // Replace ACM audio with file.
5093 // Currently file stream is always mono.
5094 // TODO(xians): Change the code when FilePlayer supports real stereo.
niklase@google.com470e71d2011-07-07 08:21:25 +00005095 _audioFrame.UpdateFrame(_channelId,
5096 -1,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005097 fileBuffer.get(),
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005098 fileSamples,
niklase@google.com470e71d2011-07-07 08:21:25 +00005099 mixingFrequency,
5100 AudioFrame::kNormalSpeech,
5101 AudioFrame::kVadUnknown,
5102 1);
5103
5104 }
5105 return 0;
5106}
5107
5108WebRtc_Word32
5109Channel::MixAudioWithFile(AudioFrame& audioFrame,
xians@google.com0b0665a2011-08-08 08:18:44 +00005110 const int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00005111{
5112 assert(mixingFrequency <= 32000);
5113
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005114 scoped_array<WebRtc_Word16> fileBuffer(new WebRtc_Word16[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005115 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005116
5117 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00005118 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00005119
5120 if (_outputFilePlayerPtr == NULL)
5121 {
5122 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5123 VoEId(_instanceId, _channelId),
5124                         "Channel::MixAudioWithFile() file player doesn't exist");
5125 return -1;
5126 }
5127
5128 // We should get the frequency we ask for.
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005129 if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00005130 fileSamples,
5131 mixingFrequency) == -1)
5132 {
5133 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5134 VoEId(_instanceId, _channelId),
5135 "Channel::MixAudioWithFile() file mixing failed");
5136 return -1;
5137 }
5138 }
5139
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005140 if (audioFrame.samples_per_channel_ == fileSamples)
niklase@google.com470e71d2011-07-07 08:21:25 +00005141 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005142 // Currently file stream is always mono.
5143 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005144 Utility::MixWithSat(audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005145 audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00005146 fileBuffer.get(),
5147 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00005148 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005149 }
5150 else
5151 {
5152 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005153 "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
niklase@google.com470e71d2011-07-07 08:21:25 +00005154 "fileSamples(%d)",
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005155 audioFrame.samples_per_channel_, fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005156 return -1;
5157 }
5158
5159 return 0;
5160}
5161
5162int
5163Channel::InsertInbandDtmfTone()
5164{
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005165 // Check if we should start a new tone.
niklase@google.com470e71d2011-07-07 08:21:25 +00005166 if (_inbandDtmfQueue.PendingDtmf() &&
5167 !_inbandDtmfGenerator.IsAddingTone() &&
5168 _inbandDtmfGenerator.DelaySinceLastTone() >
5169 kMinTelephoneEventSeparationMs)
5170 {
5171 WebRtc_Word8 eventCode(0);
5172 WebRtc_UWord16 lengthMs(0);
5173 WebRtc_UWord8 attenuationDb(0);
5174
5175 eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
5176 _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
5177 if (_playInbandDtmfEvent)
5178 {
5179 // Add tone to output mixer using a reduced length to minimize
5180 // risk of echo.
5181 _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
5182 attenuationDb);
5183 }
5184 }
5185
5186 if (_inbandDtmfGenerator.IsAddingTone())
5187 {
5188 WebRtc_UWord16 frequency(0);
5189 _inbandDtmfGenerator.GetSampleRate(frequency);
5190
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005191 if (frequency != _audioFrame.sample_rate_hz_)
niklase@google.com470e71d2011-07-07 08:21:25 +00005192 {
5193            // Update the sample rate of the DTMF tone since the mixing
5194            // frequency has changed.
5195 _inbandDtmfGenerator.SetSampleRate(
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005196 (WebRtc_UWord16) (_audioFrame.sample_rate_hz_));
niklase@google.com470e71d2011-07-07 08:21:25 +00005197 // Reset the tone to be added taking the new sample rate into
5198 // account.
5199 _inbandDtmfGenerator.ResetTone();
5200 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005201
niklase@google.com470e71d2011-07-07 08:21:25 +00005202 WebRtc_Word16 toneBuffer[320];
5203 WebRtc_UWord16 toneSamples(0);
5204 // Get 10ms tone segment and set time since last tone to zero
5205 if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
5206 {
5207 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5208 VoEId(_instanceId, _channelId),
5209                         "Channel::InsertInbandDtmfTone() inserting DTMF tone failed");
5210 return -1;
5211 }
5212
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005213 // Replace mixed audio with DTMF tone.
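        // The tone is generated in mono, so the same sample is written to
        // every channel of the (possibly stereo) audio frame.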
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005214 for (int sample = 0;
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005215 sample < _audioFrame.samples_per_channel_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005216 sample++)
5217 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005218 for (int channel = 0;
5219 channel < _audioFrame.num_channels_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005220 channel++)
5221 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005222 const int index = sample * _audioFrame.num_channels_ + channel;
5223 _audioFrame.data_[index] = toneBuffer[sample];
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005224 }
5225 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005226
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005227 assert(_audioFrame.samples_per_channel_ == toneSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005228 } else
5229 {
5230 // Add 10ms to "delay-since-last-tone" counter
5231 _inbandDtmfGenerator.UpdateDelaySinceLastTone();
5232 }
5233 return 0;
5234}
5235
5236WebRtc_Word32
5237Channel::GetPlayoutTimeStamp(WebRtc_UWord32& playoutTimestamp)
5238{
5239 WebRtc_UWord32 timestamp(0);
5240 CodecInst currRecCodec;
5241
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005242 if (_audioCodingModule.PlayoutTimestamp(&timestamp) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00005243 {
5244 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
5245 "Channel::GetPlayoutTimeStamp() failed to read playout"
5246 " timestamp from the ACM");
5247 return -1;
5248 }
5249
5250 WebRtc_UWord16 delayMS(0);
5251 if (_audioDeviceModulePtr->PlayoutDelay(&delayMS) == -1)
5252 {
5253 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
5254 "Channel::GetPlayoutTimeStamp() failed to read playout"
5255 " delay from the ADM");
5256 return -1;
5257 }
5258
5259 WebRtc_Word32 playoutFrequency = _audioCodingModule.PlayoutFrequency();
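    // For G.722 and Opus the RTP clock rate (8 kHz and 48 kHz respectively)
    // differs from the sampling rate reported by the ACM, so override the
    // frequency used for the timestamp conversion (see UpdatePacketDelay()).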
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005260 if (_audioCodingModule.ReceiveCodec(&currRecCodec) == 0) {
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +00005261 if (STR_CASE_CMP("G722", currRecCodec.plname) == 0) {
5262 playoutFrequency = 8000;
5263 } else if (STR_CASE_CMP("opus", currRecCodec.plname) == 0) {
5264 playoutFrequency = 48000;
5265 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005266 }
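    // Subtract the playout delay, converted from milliseconds to RTP
    // timestamp ticks, so that the returned timestamp reflects what is
    // currently being played out rather than the latest decoded frame.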
5267 timestamp -= (delayMS * (playoutFrequency/1000));
5268
5269 playoutTimestamp = timestamp;
5270
5271 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
5272 "Channel::GetPlayoutTimeStamp() => playoutTimestamp = %lu",
5273 playoutTimestamp);
5274 return 0;
5275}
5276
5277void
5278Channel::ResetDeadOrAliveCounters()
5279{
5280 _countDeadDetections = 0;
5281 _countAliveDetections = 0;
5282}
5283
5284void
5285Channel::UpdateDeadOrAliveCounters(bool alive)
5286{
5287 if (alive)
5288 _countAliveDetections++;
5289 else
5290 _countDeadDetections++;
5291}
5292
5293int
5294Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
5295{
5296 bool enabled;
5297 WebRtc_UWord8 timeSec;
5298
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00005299 _rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, timeSec);
niklase@google.com470e71d2011-07-07 08:21:25 +00005300 if (!enabled)
5301 return (-1);
5302
5303 countDead = static_cast<int> (_countDeadDetections);
5304 countAlive = static_cast<int> (_countAliveDetections);
5305 return 0;
5306}
5307
5308WebRtc_Word32
5309Channel::SendPacketRaw(const void *data, int len, bool RTCP)
5310{
5311 if (_transportPtr == NULL)
5312 {
5313 return -1;
5314 }
5315 if (!RTCP)
5316 {
5317 return _transportPtr->SendPacket(_channelId, data, len);
5318 }
5319 else
5320 {
5321 return _transportPtr->SendRTCPPacket(_channelId, data, len);
5322 }
5323}
5324
5325WebRtc_Word32
5326Channel::UpdatePacketDelay(const WebRtc_UWord32 timestamp,
5327 const WebRtc_UWord16 sequenceNumber)
5328{
5329 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
5330 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
5331 timestamp, sequenceNumber);
5332
5333 WebRtc_Word32 rtpReceiveFrequency(0);
5334
5335 // Get frequency of last received payload
5336 rtpReceiveFrequency = _audioCodingModule.ReceiveFrequency();
5337
5338 CodecInst currRecCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005339 if (_audioCodingModule.ReceiveCodec(&currRecCodec) == 0) {
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +00005340 if (STR_CASE_CMP("G722", currRecCodec.plname) == 0) {
5341 // Even though the actual sampling rate for G.722 audio is
5342 // 16,000 Hz, the RTP clock rate for the G722 payload format is
5343 // 8,000 Hz because that value was erroneously assigned in
5344 // RFC 1890 and must remain unchanged for backward compatibility.
5345 rtpReceiveFrequency = 8000;
5346 } else if (STR_CASE_CMP("opus", currRecCodec.plname) == 0) {
5347 // We are resampling Opus internally to 32,000 Hz until all our
5348 // DSP routines can operate at 48,000 Hz, but the RTP clock
5349 // rate for the Opus payload format is standardized to 48,000 Hz,
5350 // because that is the maximum supported decoding sampling rate.
5351 rtpReceiveFrequency = 48000;
5352 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005353 }
5354
5355 const WebRtc_UWord32 timeStampDiff = timestamp - _playoutTimeStampRTP;
5356 WebRtc_UWord32 timeStampDiffMs(0);
5357
5358 if (timeStampDiff > 0)
5359 {
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +00005360 switch (rtpReceiveFrequency) {
5361 case 8000:
5362 timeStampDiffMs = static_cast<WebRtc_UWord32>(timeStampDiff >> 3);
5363 break;
5364 case 16000:
5365 timeStampDiffMs = static_cast<WebRtc_UWord32>(timeStampDiff >> 4);
5366 break;
5367 case 32000:
5368 timeStampDiffMs = static_cast<WebRtc_UWord32>(timeStampDiff >> 5);
5369 break;
5370 case 48000:
5371 timeStampDiffMs = static_cast<WebRtc_UWord32>(timeStampDiff / 48);
5372 break;
5373 default:
5374 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5375 VoEId(_instanceId, _channelId),
5376 "Channel::UpdatePacketDelay() invalid sample rate");
5377 timeStampDiffMs = 0;
5378 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00005379 }
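        // Guard against unreasonable values (e.g. after a timestamp jump);
        // anything above twice the maximum minimum-playout delay is ignored.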
niklas.enbom@webrtc.org218c5422013-01-17 22:25:49 +00005380 if (timeStampDiffMs > (2 * kVoiceEngineMaxMinPlayoutDelayMs))
niklase@google.com470e71d2011-07-07 08:21:25 +00005381 {
5382 timeStampDiffMs = 0;
5383 }
5384
5385 if (_averageDelayMs == 0)
5386 {
niklas.enbom@webrtc.org218c5422013-01-17 22:25:49 +00005387 _averageDelayMs = timeStampDiffMs * 10;
niklase@google.com470e71d2011-07-07 08:21:25 +00005388 }
5389 else
5390 {
5391 // Filter average delay value using exponential filter (alpha is
5392 // 7/8). We derive 10*_averageDelayMs here (reduces risk of
5393 // rounding error) and compensate for it in GetDelayEstimate()
5394 // later. Adding 4/8 results in correct rounding.
5395 _averageDelayMs = ((_averageDelayMs*7 + 10*timeStampDiffMs + 4)>>3);
5396 }
5397
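        // Only update the receive-packet delay when packets arrive in order
        // with no gap; otherwise the timestamp difference is not a valid
        // inter-packet spacing.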
5398 if (sequenceNumber - _previousSequenceNumber == 1)
5399 {
5400 WebRtc_UWord16 packetDelayMs = 0;
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +00005401 switch (rtpReceiveFrequency) {
5402 case 8000:
5403 packetDelayMs = static_cast<WebRtc_UWord16>(
niklase@google.com470e71d2011-07-07 08:21:25 +00005404 (timestamp - _previousTimestamp) >> 3);
5405 break;
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +00005406 case 16000:
5407 packetDelayMs = static_cast<WebRtc_UWord16>(
niklase@google.com470e71d2011-07-07 08:21:25 +00005408 (timestamp - _previousTimestamp) >> 4);
5409 break;
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +00005410 case 32000:
5411 packetDelayMs = static_cast<WebRtc_UWord16>(
niklase@google.com470e71d2011-07-07 08:21:25 +00005412 (timestamp - _previousTimestamp) >> 5);
5413 break;
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +00005414 case 48000:
5415 packetDelayMs = static_cast<WebRtc_UWord16>(
5416 (timestamp - _previousTimestamp) / 48);
5417 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00005418 }
5419
5420 if (packetDelayMs >= 10 && packetDelayMs <= 60)
5421 _recPacketDelayMs = packetDelayMs;
5422 }
5423 }
5424
5425 _previousSequenceNumber = sequenceNumber;
5426 _previousTimestamp = timestamp;
5427
5428 return 0;
5429}
5430
5431void
5432Channel::RegisterReceiveCodecsToRTPModule()
5433{
5434 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
5435 "Channel::RegisterReceiveCodecsToRTPModule()");
5436
5437
5438 CodecInst codec;
5439 const WebRtc_UWord8 nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
5440
5441 for (int idx = 0; idx < nSupportedCodecs; idx++)
5442 {
5443 // Open up the RTP/RTCP receiver for all supported codecs
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005444 if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00005445 (_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
niklase@google.com470e71d2011-07-07 08:21:25 +00005446 {
5447 WEBRTC_TRACE(
5448 kTraceWarning,
5449 kTraceVoice,
5450 VoEId(_instanceId, _channelId),
5451 "Channel::RegisterReceiveCodecsToRTPModule() unable"
5452 " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
5453 codec.plname, codec.pltype, codec.plfreq,
5454 codec.channels, codec.rate);
5455 }
5456 else
5457 {
5458 WEBRTC_TRACE(
5459 kTraceInfo,
5460 kTraceVoice,
5461 VoEId(_instanceId, _channelId),
5462 "Channel::RegisterReceiveCodecsToRTPModule() %s "
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00005463 "(%d/%d/%d/%d) has been added to the RTP/RTCP "
niklase@google.com470e71d2011-07-07 08:21:25 +00005464 "receiver",
5465 codec.plname, codec.pltype, codec.plfreq,
5466 codec.channels, codec.rate);
5467 }
5468 }
5469}
5470
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005471int Channel::ApmProcessRx(AudioFrame& frame) {
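  // Errors from the receive-side APM are logged as warnings but are not
  // treated as fatal; the frame is forwarded regardless and 0 is returned.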
5472 AudioProcessing* audioproc = _rxAudioProcessingModulePtr;
5473 // Register the (possibly new) frame parameters.
5474 if (audioproc->set_sample_rate_hz(frame.sample_rate_hz_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005475 LOG_FERR1(LS_WARNING, set_sample_rate_hz, frame.sample_rate_hz_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005476 }
5477 if (audioproc->set_num_channels(frame.num_channels_,
5478 frame.num_channels_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005479 LOG_FERR1(LS_WARNING, set_num_channels, frame.num_channels_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005480 }
5481 if (audioproc->ProcessStream(&frame) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005482 LOG_FERR0(LS_WARNING, ProcessStream);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005483 }
5484 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00005485}
5486
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005487int Channel::SetSecondarySendCodec(const CodecInst& codec,
5488 int red_payload_type) {
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00005489 // Sanity check for payload type.
5490 if (red_payload_type < 0 || red_payload_type > 127) {
5491 _engineStatisticsPtr->SetLastError(
5492 VE_PLTYPE_ERROR, kTraceError,
5493        "SetSecondarySendCodec() invalid RED payload type");
5494 return -1;
5495 }
5496
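  // Register RED with the ACM and the RTP/RTCP module before adding the
  // secondary encoder to the ACM.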
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005497 if (SetRedPayloadType(red_payload_type) < 0) {
5498 _engineStatisticsPtr->SetLastError(
5499 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5500        "SetSecondarySendCodec() Failed to register RED in ACM");
5501 return -1;
5502 }
5503 if (_audioCodingModule.RegisterSecondarySendCodec(codec) < 0) {
5504 _engineStatisticsPtr->SetLastError(
5505 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5506 "SetSecondarySendCodec() Failed to register secondary send codec in "
5507 "ACM");
5508 return -1;
5509 }
5510
5511 return 0;
5512}
5513
5514void Channel::RemoveSecondarySendCodec() {
5515 _audioCodingModule.UnregisterSecondarySendCodec();
5516}
5517
5518int Channel::GetSecondarySendCodec(CodecInst* codec) {
5519 if (_audioCodingModule.SecondarySendCodec(codec) < 0) {
5520 _engineStatisticsPtr->SetLastError(
5521 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5522        "GetSecondarySendCodec() Failed to get secondary send codec from ACM");
5523 return -1;
5524 }
5525 return 0;
5526}
5527
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00005528// Assumes that this method is called with a valid payload type.
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005529int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005530 CodecInst codec;
5531 bool found_red = false;
5532
5533 // Get default RED settings from the ACM database
5534 const int num_codecs = AudioCodingModule::NumberOfCodecs();
5535 for (int idx = 0; idx < num_codecs; idx++) {
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005536 _audioCodingModule.Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005537 if (!STR_CASE_CMP(codec.plname, "RED")) {
5538 found_red = true;
5539 break;
5540 }
5541 }
5542
5543 if (!found_red) {
5544 _engineStatisticsPtr->SetLastError(
5545 VE_CODEC_ERROR, kTraceError,
5546 "SetRedPayloadType() RED is not supported");
5547 return -1;
5548 }
5549
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00005550 codec.pltype = red_payload_type;
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005551 if (_audioCodingModule.RegisterSendCodec(codec) < 0) {
5552 _engineStatisticsPtr->SetLastError(
5553 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5554 "SetRedPayloadType() RED registration in ACM module failed");
5555 return -1;
5556 }
5557
5558 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
5559 _engineStatisticsPtr->SetLastError(
5560 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
5561 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
5562 return -1;
5563 }
5564 return 0;
5565}
5566
niklase@google.com470e71d2011-07-07 08:21:25 +00005567} // namespace voe
niklase@google.com470e71d2011-07-07 08:21:25 +00005568} // namespace webrtc