/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/channel.h"

#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/utility/interface/audio_frame_operations.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/utility.h"

#if defined(_WIN32)
#include <Qos.h>
#endif

namespace webrtc {
namespace voe {

int32_t
Channel::SendData(FrameType frameType,
                  uint8_t payloadType,
                  uint32_t timeStamp,
                  const uint8_t* payloadData,
                  uint16_t payloadSize,
                  const RTPFragmentationHeader* fragmentation)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
                 " payloadSize=%u, fragmentation=0x%x)",
                 frameType, payloadType, timeStamp, payloadSize, fragmentation);

    if (_includeAudioLevelIndication)
    {
        assert(_rtpAudioProc.get() != NULL);
        // Store current audio level in the RTP/RTCP module.
        // The level will be used in combination with voice-activity state
        // (frameType) to add an RTP header extension
        _rtpRtcpModule->SetAudioLevel(_rtpAudioProc->level_estimator()->RMS());
    }

    // Push data from ACM to RTP/RTCP-module to deliver audio frame for
    // packetization.
    // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
    if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
                                         payloadType,
                                         timeStamp,
                                         // Leaving the time when this frame was
                                         // received from the capture device as
                                         // undefined for voice for now.
                                         -1,
                                         payloadData,
                                         payloadSize,
                                         fragmentation) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
            "Channel::SendData() failed to send data to RTP/RTCP module");
        return -1;
    }

    _lastLocalTimeStamp = timeStamp;
    _lastPayloadType = payloadType;

    return 0;
}

int32_t
Channel::InFrameType(int16_t frameType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::InFrameType(frameType=%d)", frameType);

    CriticalSectionScoped cs(&_callbackCritSect);
    // 1 indicates speech
    _sendFrameType = (frameType == 1) ? 1 : 0;
    return 0;
}

int32_t
Channel::OnRxVadDetected(int vadDecision)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);

    CriticalSectionScoped cs(&_callbackCritSect);
    if (_rxVadObserverPtr)
    {
        _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
    }

    return 0;
}

int
Channel::SendPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendPacket(channel=%d, len=%d)", channel, len);

    if (_transportPtr == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() failed to send RTP packet due to"
                     " invalid transport object");
        return -1;
    }

    // Insert an extra RTP packet if the user has called the
    // InsertExtraRTPPacket() API.
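    // The second byte of an RTP header holds the marker bit (MSB) together
    // with the 7-bit payload type, which is why only that byte is rewritten
    // below.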
    if (_insertExtraRTPPacket)
    {
        uint8_t* rtpHdr = (uint8_t*)data;
        uint8_t M_PT(0);
        if (_extraMarkerBit)
        {
            M_PT = 0x80;            // set the M-bit
        }
        M_PT += _extraPayloadType;  // set the payload type
        *(++rtpHdr) = M_PT;         // modify the M|PT-byte within the RTP header
        _insertExtraRTPPacket = false;  // insert one packet only
    }

    uint8_t* bufferToSendPtr = (uint8_t*)data;
    int32_t bufferLength = len;

    // Dump the RTP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() RTP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTPBufferPtr =
                    new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
                memset(_encryptionRTPBufferPtr, 0,
                       kVoiceEngineMaxIpPacketSizeBytes);
            }

            // Perform encryption (SRTP or external)
            int32_t encryptedBufferLength = 0;
            _encryptionPtr->encrypt(_channelId,
                                    bufferToSendPtr,
                                    _encryptionRTPBufferPtr,
                                    bufferLength,
                                    (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED,
                    kTraceError, "Channel::SendPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        int n = _transportPtr->SendPacket(channel,
                                          bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }
}

int
Channel::SendRTCPPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() failed to send RTCP packet"
                         " due to invalid transport object");
            return -1;
        }
    }

    uint8_t* bufferToSendPtr = (uint8_t*)data;
    int32_t bufferLength = len;

    // Dump the RTCP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendRTCPPacket() RTCP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTCPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTCPBufferPtr =
                    new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
            }

            // Perform encryption (SRTP or external).
            int32_t encryptedBufferLength = 0;
            _encryptionPtr->encrypt_rtcp(_channelId,
                                         bufferToSendPtr,
                                         _encryptionRTCPBufferPtr,
                                         bufferLength,
                                         (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED, kTraceError,
                    "Channel::SendRTCPPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTCPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            return -1;
        }
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }

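    // Not reached: both transport branches above return.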
    return len;
}

void
Channel::OnPlayTelephoneEvent(int32_t id,
                              uint8_t event,
                              uint16_t lengthMs,
                              uint8_t volume)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
                 " volume=%u)", id, event, lengthMs, volume);

    if (!_playOutbandDtmfEvent || (event > 15))
    {
        // Ignore callback since feedback is disabled or event is not a
        // Dtmf tone event.
        return;
    }

    assert(_outputMixerPtr != NULL);

    // Start playing out the Dtmf tone (if playout is enabled).
    // Reduce the length of the tone by 80 ms to reduce the risk of echo.
    _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
}

void
Channel::OnIncomingSSRCChanged(int32_t id,
                               uint32_t SSRC)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
                 id, SSRC);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Reset RTP-module counters since a new incoming RTP stream is detected
    rtp_receive_statistics_->ResetDataCounters();
    rtp_receive_statistics_->ResetStatistics();

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            // Send new SSRC to registered observer using callback
            _rtpObserverPtr->OnIncomingSSRCChanged(channel, SSRC);
        }
    }
}

void Channel::OnIncomingCSRCChanged(int32_t id,
                                    uint32_t CSRC,
                                    bool added)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
                 id, CSRC, added);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            _rtpObserverPtr->OnIncomingCSRCChanged(channel, CSRC, added);
        }
    }
}

void Channel::OnResetStatistics() {
  rtp_receive_statistics_->ResetStatistics();
}

void
Channel::OnApplicationDataReceived(int32_t id,
                                   uint8_t subType,
                                   uint32_t name,
                                   uint16_t length,
                                   const uint8_t* data)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
                 " name=%u, length=%u)",
                 id, subType, name, length);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtcpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtcpObserverPtr)
        {
            _rtcpObserverPtr->OnApplicationDataReceived(channel,
                                                        subType,
                                                        name,
                                                        data,
                                                        length);
        }
    }
}

int32_t
Channel::OnInitializeDecoder(
    int32_t id,
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    uint8_t channels,
    uint32_t rate)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
                 "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
                 id, payloadType, payloadName, frequency, channels, rate);

    assert(VoEChannelId(id) == _channelId);

    CodecInst receiveCodec = {0};
    CodecInst dummyCodec = {0};

    receiveCodec.pltype = payloadType;
    receiveCodec.plfreq = frequency;
    receiveCodec.channels = channels;
    receiveCodec.rate = rate;
    strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

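    // Look up the ACM's matching codec entry to obtain the default packet size.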
    _audioCodingModule.Codec(payloadName, &dummyCodec, frequency, channels);
    receiveCodec.pacsize = dummyCodec.pacsize;

    // Register the new codec to the ACM
    if (_audioCodingModule.RegisterReceiveCodec(receiveCodec) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "Channel::OnInitializeDecoder() invalid codec ("
                     "pt=%d, name=%s) received - 1", payloadType, payloadName);
        _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
        return -1;
    }

    return 0;
}

void
Channel::OnPacketTimeout(int32_t id)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPacketTimeout(id=%d)", id);

    CriticalSectionScoped cs(_callbackCritSectPtr);
    if (_voiceEngineObserverPtr)
    {
        if (_receiving || _externalTransport)
        {
            int32_t channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Ensure that next OnReceivedPacket() callback will trigger
            // a VE_PACKET_RECEIPT_RESTARTED callback.
            _rtpPacketTimedOut = true;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnPacketTimeout() => "
                         "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
            _voiceEngineObserverPtr->CallbackOnError(channel,
                                                     VE_RECEIVE_PACKET_TIMEOUT);
        }
    }
}

void
Channel::OnReceivedPacket(int32_t id,
                          RtpRtcpPacketType packetType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPacket(id=%d, packetType=%d)",
                 id, packetType);

    assert(VoEChannelId(id) == _channelId);

    // Notify only for the case when we have restarted an RTP session.
    if (_rtpPacketTimedOut && (kPacketRtp == packetType))
    {
        CriticalSectionScoped cs(_callbackCritSectPtr);
        if (_voiceEngineObserverPtr)
        {
            int32_t channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Reset timeout mechanism
            _rtpPacketTimedOut = false;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnPacketTimeout() =>"
                         " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
            _voiceEngineObserverPtr->CallbackOnError(
                channel,
                VE_PACKET_RECEIPT_RESTARTED);
        }
    }
}

void
Channel::OnPeriodicDeadOrAlive(int32_t id,
                               RTPAliveType alive)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (!_connectionObserver)
            return;
    }

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Use Alive as default to limit risk of false Dead detections
    bool isAlive(true);

    // Always mark the connection as Dead when the module reports kRtpDead
    if (kRtpDead == alive)
    {
        isAlive = false;
    }

    // It is possible that the connection is alive even if no RTP packet has
    // been received for a long time, since the other side might use VAD/DTX
    // and a low SID-packet update rate.
    if ((kRtpNoRtp == alive) && _playing)
    {
        // Detect Alive for all NetEQ states except for the case when we are
        // in PLC_CNG state.
        // PLC_CNG <=> background noise only due to long expand or error.
        // Note that the case where the other side stops sending during CNG
        // state will be detected as Alive. Dead is not set until after
        // missing RTCP packets for at least twelve seconds (handled
        // internally by the RTP/RTCP module).
        isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
    }

    UpdateDeadOrAliveCounters(isAlive);

    // Send callback to the registered observer
    if (_connectionObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_connectionObserverPtr)
        {
            _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
        }
    }
}

int32_t
Channel::OnReceivedPayloadData(const uint8_t* payloadData,
                               uint16_t payloadSize,
                               const WebRtcRTPHeader* rtpHeader)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPayloadData(payloadSize=%d,"
                 " payloadType=%u, audioChannel=%u)",
                 payloadSize,
                 rtpHeader->header.payloadType,
                 rtpHeader->type.Audio.channel);

    _lastRemoteTimeStamp = rtpHeader->header.timestamp;

    if (!_playing)
    {
        // Avoid inserting into NetEQ when we are not playing. Count the
        // packet as discarded.
        WEBRTC_TRACE(kTraceStream, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "received packet is discarded since playing is not"
                     " activated");
        _numberOfDiscardedPackets++;
        return 0;
    }

    // Push the incoming payload (parsed and ready for decoding) into the ACM
    if (_audioCodingModule.IncomingPacket(payloadData,
                                          payloadSize,
                                          *rtpHeader) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
            "Channel::OnReceivedPayloadData() unable to push data to the ACM");
        return -1;
    }

    // Update the packet delay.
    UpdatePacketDelay(rtpHeader->header.timestamp,
                      rtpHeader->header.sequenceNumber);

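    // Ask the RTP/RTCP module for the current round-trip time so the ACM can
    // decide which missing sequence numbers are still worth re-requesting.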
    uint16_t round_trip_time = 0;
    _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time,
                        NULL, NULL, NULL);

    std::vector<uint16_t> nack_list = _audioCodingModule.GetNackList(
        round_trip_time);
    if (!nack_list.empty()) {
      // Can't use nack_list.data() since it's not supported by all
      // compilers.
      ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
    }
    return 0;
}

int32_t Channel::GetAudioFrame(int32_t id, AudioFrame& audioFrame)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetAudioFrame(id=%d)", id);

    // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
    if (_audioCodingModule.PlayoutData10Ms(audioFrame.sample_rate_hz_,
                                           &audioFrame) == -1)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
        // In all likelihood, the audio in this frame is garbage. We return an
        // error so that the audio mixer module doesn't add it to the mix. As
        // a result, it won't be played out and the actions skipped here are
        // irrelevant.
        return -1;
    }

    if (_RxVadDetection)
    {
        UpdateRxVadDetection(audioFrame);
    }

    // Convert module ID to internal VoE channel ID
    audioFrame.id_ = VoEChannelId(audioFrame.id_);
    // Store speech type for dead-or-alive detection
    _outputSpeechType = audioFrame.speech_type_;

    // Perform far-end AudioProcessing module processing on the received signal
    if (_rxApmIsEnabled)
    {
        ApmProcessRx(audioFrame);
    }

    // Output volume scaling
    if (_outputGain < 0.99f || _outputGain > 1.01f)
    {
        AudioFrameOperations::ScaleWithSat(_outputGain, audioFrame);
    }

    // Scale left and/or right channel(s) if stereo and master balance is
    // active

    if (_panLeft != 1.0f || _panRight != 1.0f)
    {
        if (audioFrame.num_channels_ == 1)
        {
            // Emulate stereo mode since panning is active.
            // The mono signal is copied to both left and right channels here.
            AudioFrameOperations::MonoToStereo(&audioFrame);
        }
        // For true stereo mode (when we are receiving a stereo signal), no
        // action is needed.

        // Do the panning operation (the audio frame contains stereo at this
        // stage)
        AudioFrameOperations::Scale(_panLeft, _panRight, audioFrame);
    }

    // Mix decoded PCM output with file if file mixing is enabled
    if (_outputFilePlaying)
    {
        MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
    }

    // Place channel in on-hold state (~muted) if on-hold is activated
    if (_outputIsOnHold)
    {
        AudioFrameOperations::Mute(audioFrame);
    }

    // External media
    if (_outputExternalMedia)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        const bool isStereo = (audioFrame.num_channels_ == 2);
        if (_outputExternalMediaCallbackPtr)
        {
            _outputExternalMediaCallbackPtr->Process(
                _channelId,
                kPlaybackPerChannel,
                (int16_t*)audioFrame.data_,
                audioFrame.samples_per_channel_,
                audioFrame.sample_rate_hz_,
                isStereo);
        }
    }

    // Record playout if enabled
    {
        CriticalSectionScoped cs(&_fileCritSect);

        if (_outputFileRecording && _outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
        }
    }

    // Measure audio level (0-9)
    _outputAudioLevel.ComputeLevel(audioFrame);

    return 0;
}

int32_t
Channel::NeededFrequency(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::NeededFrequency(id=%d)", id);

    int highestNeeded = 0;

    // Determine highest needed receive frequency
    int32_t receiveFrequency = _audioCodingModule.ReceiveFrequency();

    // Return the bigger of playout and receive frequency in the ACM.
    if (_audioCodingModule.PlayoutFrequency() > receiveFrequency)
    {
        highestNeeded = _audioCodingModule.PlayoutFrequency();
    }
    else
    {
        highestNeeded = receiveFrequency;
    }

    // Special case, if we're playing a file on the playout side
    // we take that frequency into consideration as well
    // This is not needed on sending side, since the codec will
    // limit the spectrum anyway.
    if (_outputFilePlaying)
    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_outputFilePlayerPtr && _outputFilePlaying)
        {
            if (_outputFilePlayerPtr->Frequency() > highestNeeded)
            {
                highestNeeded = _outputFilePlayerPtr->Frequency();
            }
        }
    }

    return (highestNeeded);
}

int32_t
Channel::CreateChannel(Channel*& channel,
                       int32_t channelId,
                       uint32_t instanceId)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
                 "Channel::CreateChannel(channelId=%d, instanceId=%d)",
                 channelId, instanceId);

    channel = new Channel(channelId, instanceId);
    if (channel == NULL)
    {
        WEBRTC_TRACE(kTraceMemory, kTraceVoice,
                     VoEId(instanceId,channelId),
                     "Channel::CreateChannel() unable to allocate memory for"
                     " channel");
        return -1;
    }
    return 0;
}

void
Channel::PlayNotification(int32_t id, uint32_t durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet
}

void
Channel::RecordNotification(int32_t id, uint32_t durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet
}

void
Channel::PlayFileEnded(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayFileEnded(id=%d)", id);

    if (id == _inputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _inputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => input file player module is"
                     " shutdown");
    }
    else if (id == _outputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _outputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => output file player module is"
                     " shutdown");
    }
}

void
Channel::RecordFileEnded(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded(id=%d)", id);

    assert(id == _outputFileRecorderId);

    CriticalSectionScoped cs(&_fileCritSect);

    _outputFileRecording = false;
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                 VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded() => output file recorder module is"
                 " shutdown");
}

Channel::Channel(int32_t channelId,
                 uint32_t instanceId) :
    _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _instanceId(instanceId),
    _channelId(channelId),
    rtp_header_parser_(RtpHeaderParser::Create()),
    rtp_payload_registry_(
        new RTPPayloadRegistry(channelId,
                               RTPPayloadStrategy::CreateStrategy(true))),
    rtp_receive_statistics_(ReceiveStatistics::Create(
        Clock::GetRealTimeClock())),
    rtp_receiver_(RtpReceiver::CreateAudioReceiver(
        VoEModuleId(instanceId, channelId), Clock::GetRealTimeClock(), this,
        this, this, rtp_payload_registry_.get())),
    telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
    _audioCodingModule(*AudioCodingModule::Create(
        VoEModuleId(instanceId, channelId))),
    _rtpDumpIn(*RtpDump::CreateRtpDump()),
    _rtpDumpOut(*RtpDump::CreateRtpDump()),
    _outputAudioLevel(),
    _externalTransport(false),
    _inputFilePlayerPtr(NULL),
    _outputFilePlayerPtr(NULL),
    _outputFileRecorderPtr(NULL),
    // Avoid conflict with other channels by adding 1024 - 1026,
    // won't use as much as 1024 channels.
    _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
    _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
    _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
    _inputFilePlaying(false),
    _outputFilePlaying(false),
    _outputFileRecording(false),
    _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
    _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
    _inputExternalMedia(false),
    _outputExternalMedia(false),
    _inputExternalMediaCallbackPtr(NULL),
    _outputExternalMediaCallbackPtr(NULL),
    _encryptionRTPBufferPtr(NULL),
    _decryptionRTPBufferPtr(NULL),
    _encryptionRTCPBufferPtr(NULL),
    _decryptionRTCPBufferPtr(NULL),
    _timeStamp(0),  // This is just an offset; the RTP module will add its own
                    // random offset.
    _sendTelephoneEventPayloadType(106),
    playout_timestamp_rtp_(0),
    playout_timestamp_rtcp_(0),
    _numberOfDiscardedPackets(0),
    _engineStatisticsPtr(NULL),
    _outputMixerPtr(NULL),
    _transmitMixerPtr(NULL),
    _moduleProcessThreadPtr(NULL),
    _audioDeviceModulePtr(NULL),
    _voiceEngineObserverPtr(NULL),
    _callbackCritSectPtr(NULL),
    _transportPtr(NULL),
    _encryptionPtr(NULL),
    _rtpAudioProc(NULL),
    _rxAudioProcessingModulePtr(NULL),
    _rxVadObserverPtr(NULL),
    _oldVadDecision(-1),
    _sendFrameType(0),
    _rtpObserverPtr(NULL),
    _rtcpObserverPtr(NULL),
    _outputIsOnHold(false),
    _externalPlayout(false),
    _externalMixing(false),
    _inputIsOnHold(false),
    _playing(false),
    _sending(false),
    _receiving(false),
    _mixFileWithMicrophone(false),
    _rtpObserver(false),
    _rtcpObserver(false),
    _mute(false),
    _panLeft(1.0f),
    _panRight(1.0f),
    _outputGain(1.0f),
    _encrypting(false),
    _decrypting(false),
    _playOutbandDtmfEvent(false),
    _playInbandDtmfEvent(false),
    _extraPayloadType(0),
    _insertExtraRTPPacket(false),
    _extraMarkerBit(false),
    _lastLocalTimeStamp(0),
    _lastRemoteTimeStamp(0),
    _lastPayloadType(0),
    _includeAudioLevelIndication(false),
    _rtpPacketTimedOut(false),
    _rtpPacketTimeOutIsEnabled(false),
    _rtpTimeOutSeconds(0),
    _connectionObserver(false),
    _connectionObserverPtr(NULL),
    _countAliveDetections(0),
    _countDeadDetections(0),
    _outputSpeechType(AudioFrame::kNormalSpeech),
    _average_jitter_buffer_delay_us(0),
    least_required_delay_ms_(0),
    _previousTimestamp(0),
    _recPacketDelayMs(20),
    _RxVadDetection(false),
    _rxApmIsEnabled(false),
    _rxAgcIsEnabled(false),
    _rxNsIsEnabled(false)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Channel() - ctor");
    _inbandDtmfQueue.ResetDtmf();
    _inbandDtmfGenerator.Init();
    _outputAudioLevel.Clear();

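    // Create the RTP/RTCP module with this channel acting as outgoing
    // transport, RTCP feedback receiver and audio-message callback.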
    RtpRtcp::Configuration configuration;
    configuration.id = VoEModuleId(instanceId, channelId);
    configuration.audio = true;
    configuration.outgoing_transport = this;
    configuration.rtcp_feedback = this;
    configuration.audio_messages = this;
    configuration.receive_statistics = rtp_receive_statistics_.get();

    _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));

    // Create far end AudioProcessing Module
    _rxAudioProcessingModulePtr = AudioProcessing::Create(
        VoEModuleId(instanceId, channelId));
}

Channel::~Channel()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::~Channel() - dtor");

    if (_outputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
    }
    if (_inputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kRecordingPerChannel);
    }
    StopSend();
    StopPlayout();

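    // Stop and destroy any file players/recorders owned by this channel.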
    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_inputFilePlayerPtr)
        {
            _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _inputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
            _inputFilePlayerPtr = NULL;
        }
        if (_outputFilePlayerPtr)
        {
            _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _outputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
            _outputFilePlayerPtr = NULL;
        }
        if (_outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
            _outputFileRecorderPtr->StopRecording();
            FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
            _outputFileRecorderPtr = NULL;
        }
    }

    // The order to safely shutdown modules in a channel is:
    // 1. De-register callbacks in modules
    // 2. De-register modules in process thread
    // 3. Destroy modules
    if (_audioCodingModule.RegisterTransportCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register transport callback"
                     " (Audio coding module)");
    }
    if (_audioCodingModule.RegisterVADCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register VAD callback"
                     " (Audio coding module)");
    }
    // De-register modules in process thread
    if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to deregister RTP/RTCP module");
    }

    // Destroy modules
    AudioCodingModule::Destroy(&_audioCodingModule);
    if (_rxAudioProcessingModulePtr != NULL)
    {
        AudioProcessing::Destroy(_rxAudioProcessingModulePtr); // far end APM
        _rxAudioProcessingModulePtr = NULL;
    }

    // End of modules shutdown

    // Delete other objects
    RtpDump::DestroyRtpDump(&_rtpDumpIn);
    RtpDump::DestroyRtpDump(&_rtpDumpOut);
    delete [] _encryptionRTPBufferPtr;
    delete [] _decryptionRTPBufferPtr;
    delete [] _encryptionRTCPBufferPtr;
    delete [] _decryptionRTCPBufferPtr;
    delete &_callbackCritSect;
    delete &_fileCritSect;
}

int32_t
Channel::Init()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Init()");

    // --- Initial sanity

    if ((_engineStatisticsPtr == NULL) ||
        (_moduleProcessThreadPtr == NULL))
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::Init() must call SetEngineInformation() first");
        return -1;
    }

    // --- Add modules to process thread (for periodic scheduling)

    const bool processThreadFail =
        ((_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0) ||
        false);
    if (processThreadFail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() modules not registered");
        return -1;
    }
    // --- ACM initialization

    if ((_audioCodingModule.InitializeReceiver() == -1) ||
#ifdef WEBRTC_CODEC_AVT
        // out-of-band Dtmf tones are played out by default
        (_audioCodingModule.SetDtmfPlayoutStatus(true) == -1) ||
#endif
        (_audioCodingModule.InitializeSender() == -1))
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "Channel::Init() unable to initialize the ACM - 1");
        return -1;
    }

    // --- RTP/RTCP module initialization

    // Ensure that RTCP is enabled by default for the created channel.
    // Note that the module will keep generating RTCP until it is explicitly
    // disabled by the user.
    // After StopListen (when no sockets exist), RTCP packets will no longer
    // be transmitted since the Transport object will then be invalid.
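    // Forward received telephone-events (DTMF) to the audio decoder by default.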
    telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
    // RTCP is enabled by default.
    if (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
            "Channel::Init() RTP/RTCP module not initialized");
        return -1;
    }

    // --- Register all permanent callbacks
    const bool fail =
        (_audioCodingModule.RegisterTransportCallback(this) == -1) ||
        (_audioCodingModule.RegisterVADCallback(this) == -1);

    if (fail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() callbacks not registered");
        return -1;
    }

    // --- Register all supported codecs to the receiving side of the
    //     RTP/RTCP module

    CodecInst codec;
    const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

    for (int idx = 0; idx < nSupportedCodecs; idx++)
    {
        // Open up the RTP/RTCP receiver for all supported codecs
        if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
            (rtp_receiver_->RegisterReceivePayload(
                codec.plname,
                codec.pltype,
                codec.plfreq,
                codec.channels,
                (codec.rate < 0) ? 0 : codec.rate) == -1))
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() unable to register %s (%d/%d/%d/%d) "
                         "to RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }
        else
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() %s (%d/%d/%d/%d) has been added to "
                         "the RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }

        // Ensure that PCMU is used as default codec on the sending side
        if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
        {
            SetSendCodec(codec);
        }

        // Register default PT for outband 'telephone-event'
        if (!STR_CASE_CMP(codec.plname, "telephone-event"))
        {
            if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register outband "
                             "'telephone-event' (%d/%d) correctly",
                             codec.pltype, codec.plfreq);
            }
        }

        if (!STR_CASE_CMP(codec.plname, "CN"))
        {
            if ((_audioCodingModule.RegisterSendCodec(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1) ||
                (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register CN (%d/%d) "
                             "correctly - 1",
                             codec.pltype, codec.plfreq);
            }
        }
#ifdef WEBRTC_CODEC_RED
        // Register RED to the receiving side of the ACM.
        // We will not receive an OnInitializeDecoder() callback for RED.
        if (!STR_CASE_CMP(codec.plname, "RED"))
        {
            if (_audioCodingModule.RegisterReceiveCodec(codec) == -1)
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register RED (%d/%d) "
                             "correctly",
                             codec.pltype, codec.plfreq);
            }
        }
#endif
    }

    // Initialize the far-end AP module.
    // Using 8 kHz as initial Fs, the same as in transmission. Might be
    // changed when the first audio is received.
    if (_rxAudioProcessingModulePtr == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_NO_MEMORY, kTraceCritical,
            "Channel::Init() failed to create the far-end AudioProcessing"
            " module");
        return -1;
    }

    if (_rxAudioProcessingModulePtr->set_sample_rate_hz(8000))
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the sample rate to 8K for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->set_num_channels(1, 1) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_SOUNDCARD_ERROR, kTraceWarning,
            "Init() failed to set channels for the primary audio stream");
    }

    if (_rxAudioProcessingModulePtr->high_pass_filter()->Enable(
        WEBRTC_VOICE_ENGINE_RX_HP_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the high-pass filter for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(
        (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction level for far-end"
            " AP module");
    }
    if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(
        WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction state for far-end"
            " AP module");
    }

    if (_rxAudioProcessingModulePtr->gain_control()->set_mode(
        (GainControl::Mode)WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC mode for far-end AP module");
    }
    if (_rxAudioProcessingModulePtr->gain_control()->Enable(
        WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC state for far-end AP module");
    }

    return 0;
}

int32_t
Channel::SetEngineInformation(Statistics& engineStatistics,
                              OutputMixer& outputMixer,
                              voe::TransmitMixer& transmitMixer,
                              ProcessThread& moduleProcessThread,
                              AudioDeviceModule& audioDeviceModule,
                              VoiceEngineObserver* voiceEngineObserver,
                              CriticalSectionWrapper* callbackCritSect)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetEngineInformation()");
    _engineStatisticsPtr = &engineStatistics;
    _outputMixerPtr = &outputMixer;
    _transmitMixerPtr = &transmitMixer;
    _moduleProcessThreadPtr = &moduleProcessThread;
    _audioDeviceModulePtr = &audioDeviceModule;
    _voiceEngineObserverPtr = voiceEngineObserver;
    _callbackCritSectPtr = callbackCritSect;
    return 0;
}

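// Advances the local RTP timestamp offset by one frame's worth of samples; the
// RTP module adds its own random offset on top of this value.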
int32_t
Channel::UpdateLocalTimeStamp()
{
    _timeStamp += _audioFrame.samples_per_channel_;
    return 0;
}

int32_t
Channel::StartPlayout()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::StartPlayout()");
    if (_playing)
    {
        return 0;
    }

    if (!_externalMixing) {
        // Add the participant as a candidate for mixing.
        if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
        {
            _engineStatisticsPtr->SetLastError(
                VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
                "StartPlayout() failed to add participant to mixer");
            return -1;
        }
    }

    _playing = true;

    if (RegisterFilePlayingToMixer() != 0)
        return -1;

    return 0;
}

pbos@webrtc.org6141e132013-04-09 10:09:10 +00001387int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001388Channel::StopPlayout()
1389{
1390 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1391 "Channel::StopPlayout()");
1392 if (!_playing)
1393 {
1394 return 0;
1395 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00001396
1397 if (!_externalMixing) {
1398        // Remove the participant as a candidate for mixing.
1399 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
1400 {
1401 _engineStatisticsPtr->SetLastError(
1402 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1403 "StopPlayout() failed to remove participant from mixer");
1404 return -1;
1405 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001406 }
1407
1408 _playing = false;
1409 _outputAudioLevel.Clear();
1410
1411 return 0;
1412}
1413
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001414int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001415Channel::StartSend()
1416{
1417 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1418 "Channel::StartSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001419 {
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001420 // A lock is needed because |_sending| can be accessed or modified by
1421 // another thread at the same time.
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001422 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001423
1424 if (_sending)
1425 {
1426 return 0;
1427 }
1428 _sending = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00001429 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001430
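    // Ask the RTP/RTCP module to start sending; on failure, roll back the
    // |_sending| flag under the callback lock so the state stays consistent.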
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001431 if (_rtpRtcpModule->SetSendingStatus(true) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001432 {
1433 _engineStatisticsPtr->SetLastError(
1434 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1435 "StartSend() RTP/RTCP failed to start sending");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001436 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001437 _sending = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00001438 return -1;
1439 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001440
niklase@google.com470e71d2011-07-07 08:21:25 +00001441 return 0;
1442}
1443
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001444int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001445Channel::StopSend()
1446{
1447 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1448 "Channel::StopSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001449 {
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001450 // A lock is needed because |_sending| can be accessed or modified by
1451 // another thread at the same time.
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001452 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001453
1454 if (!_sending)
1455 {
1456 return 0;
1457 }
1458 _sending = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00001459 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001460
niklase@google.com470e71d2011-07-07 08:21:25 +00001461    // Reset the sending SSRC and sequence number and trigger direct
1462    // transmission of an RTCP BYE.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001463 if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
1464 _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001465 {
1466 _engineStatisticsPtr->SetLastError(
1467 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1468            "StopSend() RTP/RTCP failed to stop sending");
1469 }
1470
niklase@google.com470e71d2011-07-07 08:21:25 +00001471 return 0;
1472}
1473
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001474int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001475Channel::StartReceiving()
1476{
1477 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1478 "Channel::StartReceiving()");
1479 if (_receiving)
1480 {
1481 return 0;
1482 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001483 _receiving = true;
1484 _numberOfDiscardedPackets = 0;
1485 return 0;
1486}
1487
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001488int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001489Channel::StopReceiving()
1490{
1491 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1492 "Channel::StopReceiving()");
1493 if (!_receiving)
1494 {
1495 return 0;
1496 }
pwestin@webrtc.org684f0572013-03-13 23:20:57 +00001497
henrika@webrtc.orgaf71f0e2011-12-05 07:02:22 +00001498 // Recover DTMF detection status.
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00001499 telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
niklase@google.com470e71d2011-07-07 08:21:25 +00001500 RegisterReceiveCodecsToRTPModule();
1501 _receiving = false;
1502 return 0;
1503}
1504
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001505int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001506Channel::SetNetEQPlayoutMode(NetEqModes mode)
1507{
1508 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1509 "Channel::SetNetEQPlayoutMode()");
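    // Map the VoE NetEQ mode onto the corresponding ACM playout mode before
    // handing it to the audio coding module.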
1510 AudioPlayoutMode playoutMode(voice);
1511 switch (mode)
1512 {
1513 case kNetEqDefault:
1514 playoutMode = voice;
1515 break;
1516 case kNetEqStreaming:
1517 playoutMode = streaming;
1518 break;
1519 case kNetEqFax:
1520 playoutMode = fax;
1521 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001522 case kNetEqOff:
1523 playoutMode = off;
1524 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00001525 }
1526 if (_audioCodingModule.SetPlayoutMode(playoutMode) != 0)
1527 {
1528 _engineStatisticsPtr->SetLastError(
1529 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1530 "SetNetEQPlayoutMode() failed to set playout mode");
1531 return -1;
1532 }
1533 return 0;
1534}
1535
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001536int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001537Channel::GetNetEQPlayoutMode(NetEqModes& mode)
1538{
1539 const AudioPlayoutMode playoutMode = _audioCodingModule.PlayoutMode();
1540 switch (playoutMode)
1541 {
1542 case voice:
1543 mode = kNetEqDefault;
1544 break;
1545 case streaming:
1546 mode = kNetEqStreaming;
1547 break;
1548 case fax:
1549 mode = kNetEqFax;
1550 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001551 case off:
1552 mode = kNetEqOff;
niklase@google.com470e71d2011-07-07 08:21:25 +00001553 }
1554 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
1555 VoEId(_instanceId,_channelId),
1556 "Channel::GetNetEQPlayoutMode() => mode=%u", mode);
1557 return 0;
1558}
1559
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001560int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001561Channel::SetOnHoldStatus(bool enable, OnHoldModes mode)
1562{
1563 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1564 "Channel::SetOnHoldStatus()");
1565 if (mode == kHoldSendAndPlay)
1566 {
1567 _outputIsOnHold = enable;
1568 _inputIsOnHold = enable;
1569 }
1570 else if (mode == kHoldPlayOnly)
1571 {
1572 _outputIsOnHold = enable;
1573 }
1574    else if (mode == kHoldSendOnly)
1575 {
1576 _inputIsOnHold = enable;
1577 }
1578 return 0;
1579}
1580
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001581int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001582Channel::GetOnHoldStatus(bool& enabled, OnHoldModes& mode)
1583{
1584 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1585 "Channel::GetOnHoldStatus()");
1586 enabled = (_outputIsOnHold || _inputIsOnHold);
1587 if (_outputIsOnHold && _inputIsOnHold)
1588 {
1589 mode = kHoldSendAndPlay;
1590 }
1591 else if (_outputIsOnHold && !_inputIsOnHold)
1592 {
1593 mode = kHoldPlayOnly;
1594 }
1595 else if (!_outputIsOnHold && _inputIsOnHold)
1596 {
1597 mode = kHoldSendOnly;
1598 }
1599 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1600 "Channel::GetOnHoldStatus() => enabled=%d, mode=%d",
1601 enabled, mode);
1602 return 0;
1603}
1604
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001605int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001606Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
1607{
1608 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1609 "Channel::RegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001610 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001611
1612 if (_voiceEngineObserverPtr)
1613 {
1614 _engineStatisticsPtr->SetLastError(
1615 VE_INVALID_OPERATION, kTraceError,
1616 "RegisterVoiceEngineObserver() observer already enabled");
1617 return -1;
1618 }
1619 _voiceEngineObserverPtr = &observer;
1620 return 0;
1621}
1622
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001623int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001624Channel::DeRegisterVoiceEngineObserver()
1625{
1626 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1627 "Channel::DeRegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001628 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001629
1630 if (!_voiceEngineObserverPtr)
1631 {
1632 _engineStatisticsPtr->SetLastError(
1633 VE_INVALID_OPERATION, kTraceWarning,
1634 "DeRegisterVoiceEngineObserver() observer already disabled");
1635 return 0;
1636 }
1637 _voiceEngineObserverPtr = NULL;
1638 return 0;
1639}
1640
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001641int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001642Channel::GetSendCodec(CodecInst& codec)
1643{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001644 return (_audioCodingModule.SendCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001645}
1646
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001647int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001648Channel::GetRecCodec(CodecInst& codec)
1649{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001650 return (_audioCodingModule.ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001651}
1652
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001653int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001654Channel::SetSendCodec(const CodecInst& codec)
1655{
1656 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1657 "Channel::SetSendCodec()");
1658
1659 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1660 {
1661 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1662 "SetSendCodec() failed to register codec to ACM");
1663 return -1;
1664 }
1665
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001666 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001667 {
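        // The payload type may already be registered; de-register it and retry
        // the registration once.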
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001668 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1669 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001670 {
1671 WEBRTC_TRACE(
1672 kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1673 "SetSendCodec() failed to register codec to"
1674 " RTP/RTCP module");
1675 return -1;
1676 }
1677 }
1678
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001679 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001680 {
1681 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1682 "SetSendCodec() failed to set audio packet size");
1683 return -1;
1684 }
1685
1686 return 0;
1687}
1688
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001689int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001690Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
1691{
1692 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1693 "Channel::SetVADStatus(mode=%d)", mode);
1694 // To disable VAD, DTX must be disabled too
1695 disableDTX = ((enableVAD == false) ? true : disableDTX);
1696 if (_audioCodingModule.SetVAD(!disableDTX, enableVAD, mode) != 0)
1697 {
1698 _engineStatisticsPtr->SetLastError(
1699 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1700 "SetVADStatus() failed to set VAD");
1701 return -1;
1702 }
1703 return 0;
1704}
1705
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001706int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001707Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
1708{
1709 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1710 "Channel::GetVADStatus");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001711 if (_audioCodingModule.VAD(&disabledDTX, &enabledVAD, &mode) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001712 {
1713 _engineStatisticsPtr->SetLastError(
1714 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1715 "GetVADStatus() failed to get VAD status");
1716 return -1;
1717 }
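    // ACM reports whether DTX is enabled; invert the flag since this API
    // exposes a "DTX disabled" value.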
1718 disabledDTX = !disabledDTX;
1719 return 0;
1720}
1721
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001722int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001723Channel::SetRecPayloadType(const CodecInst& codec)
1724{
1725 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1726 "Channel::SetRecPayloadType()");
1727
1728 if (_playing)
1729 {
1730 _engineStatisticsPtr->SetLastError(
1731 VE_ALREADY_PLAYING, kTraceError,
1732 "SetRecPayloadType() unable to set PT while playing");
1733 return -1;
1734 }
1735 if (_receiving)
1736 {
1737 _engineStatisticsPtr->SetLastError(
1738 VE_ALREADY_LISTENING, kTraceError,
1739 "SetRecPayloadType() unable to set PT while listening");
1740 return -1;
1741 }
1742
1743 if (codec.pltype == -1)
1744 {
1745 // De-register the selected codec (RTP/RTCP module and ACM)
1746
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001747 int8_t pltype(-1);
niklase@google.com470e71d2011-07-07 08:21:25 +00001748 CodecInst rxCodec = codec;
1749
1750 // Get payload type for the given codec
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00001751 rtp_payload_registry_->ReceivePayloadType(
1752 rxCodec.plname,
1753 rxCodec.plfreq,
1754 rxCodec.channels,
1755 (rxCodec.rate < 0) ? 0 : rxCodec.rate,
1756 &pltype);
niklase@google.com470e71d2011-07-07 08:21:25 +00001757 rxCodec.pltype = pltype;
1758
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00001759 if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001760 {
1761 _engineStatisticsPtr->SetLastError(
1762 VE_RTP_RTCP_MODULE_ERROR,
1763 kTraceError,
1764 "SetRecPayloadType() RTP/RTCP-module deregistration "
1765 "failed");
1766 return -1;
1767 }
1768 if (_audioCodingModule.UnregisterReceiveCodec(rxCodec.pltype) != 0)
1769 {
1770 _engineStatisticsPtr->SetLastError(
1771 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1772 "SetRecPayloadType() ACM deregistration failed - 1");
1773 return -1;
1774 }
1775 return 0;
1776 }
1777
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00001778 if (rtp_receiver_->RegisterReceivePayload(
1779 codec.plname,
1780 codec.pltype,
1781 codec.plfreq,
1782 codec.channels,
1783 (codec.rate < 0) ? 0 : codec.rate) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001784 {
1785 // First attempt to register failed => de-register and try again
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00001786 rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
1787 if (rtp_receiver_->RegisterReceivePayload(
1788 codec.plname,
1789 codec.pltype,
1790 codec.plfreq,
1791 codec.channels,
1792 (codec.rate < 0) ? 0 : codec.rate) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001793 {
1794 _engineStatisticsPtr->SetLastError(
1795 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1796 "SetRecPayloadType() RTP/RTCP-module registration failed");
1797 return -1;
1798 }
1799 }
1800 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1801 {
1802 _audioCodingModule.UnregisterReceiveCodec(codec.pltype);
1803 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1804 {
1805 _engineStatisticsPtr->SetLastError(
1806 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1807 "SetRecPayloadType() ACM registration failed - 1");
1808 return -1;
1809 }
1810 }
1811 return 0;
1812}
1813
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001814int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001815Channel::GetRecPayloadType(CodecInst& codec)
1816{
1817 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1818 "Channel::GetRecPayloadType()");
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001819 int8_t payloadType(-1);
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00001820 if (rtp_payload_registry_->ReceivePayloadType(
1821 codec.plname,
1822 codec.plfreq,
1823 codec.channels,
1824 (codec.rate < 0) ? 0 : codec.rate,
1825 &payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001826 {
1827 _engineStatisticsPtr->SetLastError(
henrika@webrtc.org37198002012-06-18 11:00:12 +00001828 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
niklase@google.com470e71d2011-07-07 08:21:25 +00001829 "GetRecPayloadType() failed to retrieve RX payload type");
1830 return -1;
1831 }
1832 codec.pltype = payloadType;
1833 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1834 "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
1835 return 0;
1836}
1837
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001838int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001839Channel::SetAMREncFormat(AmrMode mode)
1840{
1841 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1842 "Channel::SetAMREncFormat()");
1843
1844 // ACM doesn't support AMR
1845 return -1;
1846}
1847
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001848int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001849Channel::SetAMRDecFormat(AmrMode mode)
1850{
1851 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1852 "Channel::SetAMRDecFormat()");
1853
1854 // ACM doesn't support AMR
1855 return -1;
1856}
1857
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001858int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001859Channel::SetAMRWbEncFormat(AmrMode mode)
1860{
1861 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1862 "Channel::SetAMRWbEncFormat()");
1863
1864 // ACM doesn't support AMR
1865 return -1;
1866
1867}
1868
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001869int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001870Channel::SetAMRWbDecFormat(AmrMode mode)
1871{
1872 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1873 "Channel::SetAMRWbDecFormat()");
1874
1875 // ACM doesn't support AMR
1876 return -1;
1877}
1878
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001879int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001880Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
1881{
1882 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1883 "Channel::SetSendCNPayloadType()");
1884
1885 CodecInst codec;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001886 int32_t samplingFreqHz(-1);
tina.legrand@webrtc.org45175852012-06-01 09:27:35 +00001887 const int kMono = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001888 if (frequency == kFreq32000Hz)
1889 samplingFreqHz = 32000;
1890 else if (frequency == kFreq16000Hz)
1891 samplingFreqHz = 16000;
1892
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001893 if (_audioCodingModule.Codec("CN", &codec, samplingFreqHz, kMono) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001894 {
1895 _engineStatisticsPtr->SetLastError(
1896 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1897 "SetSendCNPayloadType() failed to retrieve default CN codec "
1898 "settings");
1899 return -1;
1900 }
1901
1902 // Modify the payload type (must be set to dynamic range)
1903 codec.pltype = type;
1904
1905 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1906 {
1907 _engineStatisticsPtr->SetLastError(
1908 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1909 "SetSendCNPayloadType() failed to register CN to ACM");
1910 return -1;
1911 }
1912
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001913 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001914 {
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001915 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1916 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001917 {
1918 _engineStatisticsPtr->SetLastError(
1919 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1920 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1921 "module");
1922 return -1;
1923 }
1924 }
1925 return 0;
1926}
1927
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001928int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001929Channel::SetISACInitTargetRate(int rateBps, bool useFixedFrameSize)
1930{
1931 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1932 "Channel::SetISACInitTargetRate()");
1933
1934 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001935 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001936 {
1937 _engineStatisticsPtr->SetLastError(
1938 VE_CODEC_ERROR, kTraceError,
1939 "SetISACInitTargetRate() failed to retrieve send codec");
1940 return -1;
1941 }
1942 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1943 {
1944 // This API is only valid if iSAC is setup to run in channel-adaptive
1945 // mode.
1946 // We do not validate the adaptive mode here. It is done later in the
1947 // ConfigISACBandwidthEstimator() API.
1948 _engineStatisticsPtr->SetLastError(
1949 VE_CODEC_ERROR, kTraceError,
1950 "SetISACInitTargetRate() send codec is not iSAC");
1951 return -1;
1952 }
1953
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001954 uint8_t initFrameSizeMsec(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00001955 if (16000 == sendCodec.plfreq)
1956 {
1957        // Note that 0 is a valid value and corresponds to "use default".
1958 if ((rateBps != 0 &&
1959 rateBps < kVoiceEngineMinIsacInitTargetRateBpsWb) ||
1960 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsWb))
1961 {
1962 _engineStatisticsPtr->SetLastError(
1963 VE_INVALID_ARGUMENT, kTraceError,
1964 "SetISACInitTargetRate() invalid target rate - 1");
1965 return -1;
1966 }
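        // pacsize is given in samples at 16 kHz, so dividing by 16 converts it
        // to the frame duration in milliseconds.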
1967 // 30 or 60ms
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001968 initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 16);
niklase@google.com470e71d2011-07-07 08:21:25 +00001969 }
1970 else if (32000 == sendCodec.plfreq)
1971 {
1972 if ((rateBps != 0 &&
1973 rateBps < kVoiceEngineMinIsacInitTargetRateBpsSwb) ||
1974 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsSwb))
1975 {
1976 _engineStatisticsPtr->SetLastError(
1977 VE_INVALID_ARGUMENT, kTraceError,
1978 "SetISACInitTargetRate() invalid target rate - 2");
1979 return -1;
1980 }
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001981 initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 32); // 30ms
niklase@google.com470e71d2011-07-07 08:21:25 +00001982 }
1983
1984 if (_audioCodingModule.ConfigISACBandwidthEstimator(
1985 initFrameSizeMsec, rateBps, useFixedFrameSize) == -1)
1986 {
1987 _engineStatisticsPtr->SetLastError(
1988 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1989 "SetISACInitTargetRate() iSAC BWE config failed");
1990 return -1;
1991 }
1992
1993 return 0;
1994}
1995
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001996int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001997Channel::SetISACMaxRate(int rateBps)
1998{
1999 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2000 "Channel::SetISACMaxRate()");
2001
2002 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00002003 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00002004 {
2005 _engineStatisticsPtr->SetLastError(
2006 VE_CODEC_ERROR, kTraceError,
2007 "SetISACMaxRate() failed to retrieve send codec");
2008 return -1;
2009 }
2010 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
2011 {
2012 // This API is only valid if iSAC is selected as sending codec.
2013 _engineStatisticsPtr->SetLastError(
2014 VE_CODEC_ERROR, kTraceError,
2015 "SetISACMaxRate() send codec is not iSAC");
2016 return -1;
2017 }
2018 if (16000 == sendCodec.plfreq)
2019 {
2020 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsWb) ||
2021 (rateBps > kVoiceEngineMaxIsacMaxRateBpsWb))
2022 {
2023 _engineStatisticsPtr->SetLastError(
2024 VE_INVALID_ARGUMENT, kTraceError,
2025 "SetISACMaxRate() invalid max rate - 1");
2026 return -1;
2027 }
2028 }
2029 else if (32000 == sendCodec.plfreq)
2030 {
2031 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsSwb) ||
2032 (rateBps > kVoiceEngineMaxIsacMaxRateBpsSwb))
2033 {
2034 _engineStatisticsPtr->SetLastError(
2035 VE_INVALID_ARGUMENT, kTraceError,
2036 "SetISACMaxRate() invalid max rate - 2");
2037 return -1;
2038 }
2039 }
2040 if (_sending)
2041 {
2042 _engineStatisticsPtr->SetLastError(
2043 VE_SENDING, kTraceError,
2044 "SetISACMaxRate() unable to set max rate while sending");
2045 return -1;
2046 }
2047
2048 // Set the maximum instantaneous rate of iSAC (works for both adaptive
2049 // and non-adaptive mode)
2050 if (_audioCodingModule.SetISACMaxRate(rateBps) == -1)
2051 {
2052 _engineStatisticsPtr->SetLastError(
2053 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2054 "SetISACMaxRate() failed to set max rate");
2055 return -1;
2056 }
2057
2058 return 0;
2059}
2060
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002061int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002062Channel::SetISACMaxPayloadSize(int sizeBytes)
2063{
2064 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2065 "Channel::SetISACMaxPayloadSize()");
2066 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00002067 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00002068 {
2069 _engineStatisticsPtr->SetLastError(
2070 VE_CODEC_ERROR, kTraceError,
2071 "SetISACMaxPayloadSize() failed to retrieve send codec");
2072 return -1;
2073 }
2074 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
2075 {
2076 _engineStatisticsPtr->SetLastError(
2077 VE_CODEC_ERROR, kTraceError,
2078 "SetISACMaxPayloadSize() send codec is not iSAC");
2079 return -1;
2080 }
2081 if (16000 == sendCodec.plfreq)
2082 {
2083 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesWb) ||
2084 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesWb))
2085 {
2086 _engineStatisticsPtr->SetLastError(
2087 VE_INVALID_ARGUMENT, kTraceError,
2088 "SetISACMaxPayloadSize() invalid max payload - 1");
2089 return -1;
2090 }
2091 }
2092 else if (32000 == sendCodec.plfreq)
2093 {
2094 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesSwb) ||
2095 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb))
2096 {
2097 _engineStatisticsPtr->SetLastError(
2098 VE_INVALID_ARGUMENT, kTraceError,
2099 "SetISACMaxPayloadSize() invalid max payload - 2");
2100 return -1;
2101 }
2102 }
2103 if (_sending)
2104 {
2105 _engineStatisticsPtr->SetLastError(
2106 VE_SENDING, kTraceError,
2107            "SetISACMaxPayloadSize() unable to set max payload size while sending");
2108 return -1;
2109 }
2110
2111 if (_audioCodingModule.SetISACMaxPayloadSize(sizeBytes) == -1)
2112 {
2113 _engineStatisticsPtr->SetLastError(
2114 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2115 "SetISACMaxPayloadSize() failed to set max payload size");
2116 return -1;
2117 }
2118 return 0;
2119}
2120
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002121int32_t Channel::RegisterExternalTransport(Transport& transport)
niklase@google.com470e71d2011-07-07 08:21:25 +00002122{
2123 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2124 "Channel::RegisterExternalTransport()");
2125
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002126 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002127
niklase@google.com470e71d2011-07-07 08:21:25 +00002128 if (_externalTransport)
2129 {
2130 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
2131 kTraceError,
2132 "RegisterExternalTransport() external transport already enabled");
2133 return -1;
2134 }
2135 _externalTransport = true;
2136 _transportPtr = &transport;
2137 return 0;
2138}
2139
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002140int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002141Channel::DeRegisterExternalTransport()
2142{
2143 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2144 "Channel::DeRegisterExternalTransport()");
2145
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002146 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.org83661f52011-11-25 10:58:15 +00002147
niklase@google.com470e71d2011-07-07 08:21:25 +00002148 if (!_transportPtr)
2149 {
2150 _engineStatisticsPtr->SetLastError(
2151 VE_INVALID_OPERATION, kTraceWarning,
2152 "DeRegisterExternalTransport() external transport already "
2153 "disabled");
2154 return 0;
2155 }
2156 _externalTransport = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002157 _transportPtr = NULL;
2158 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2159                 "DeRegisterExternalTransport() external transport is now disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002160 return 0;
2161}
2162
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002163int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002164 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2165 "Channel::ReceivedRTPPacket()");
2166
2167 // Store playout timestamp for the received RTP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002168 UpdatePlayoutTimestamp(false);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002169
2170 // Dump the RTP packet to a file (if RTP dump is enabled).
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002171 if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2172 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002173 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2174 VoEId(_instanceId,_channelId),
2175                 "Channel::ReceivedRTPPacket() RTP dump to input file failed");
2176 }
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002177 RTPHeader header;
2178 if (!rtp_header_parser_->Parse(reinterpret_cast<const uint8_t*>(data),
2179 static_cast<uint16_t>(length), &header)) {
2180    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice,
2181 VoEId(_instanceId,_channelId),
2182 "IncomingPacket invalid RTP header");
2183 return -1;
2184 }
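  // Classify the packet (retransmitted, in order) before updating the receive
  // statistics so that jitter and loss are tracked correctly.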
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00002185 bool retransmitted = IsPacketRetransmitted(header);
2186 bool in_order = rtp_receiver_->InOrderPacket(header.sequenceNumber);
2187 rtp_receive_statistics_->IncomingPacket(header, static_cast<uint16_t>(length),
2188 retransmitted, in_order);
2189 PayloadUnion payload_specific;
2190 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
2191 &payload_specific)) {
2192 return -1;
2193 }
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002194 // Deliver RTP packet to RTP/RTCP module for parsing
2195 // The packet will be pushed back to the channel thru the
2196 // OnReceivedPayloadData callback so we don't push it to the ACM here
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00002197 if (!rtp_receiver_->IncomingRtpPacket(&header,
2198 reinterpret_cast<const uint8_t*>(data),
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002199 static_cast<uint16_t>(length),
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00002200 payload_specific, in_order)) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002201 _engineStatisticsPtr->SetLastError(
2202 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2203        "Channel::ReceivedRTPPacket() RTP packet is invalid");
2204 }
2205 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002206}
2207
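// Retransmission heuristic used when RTX is not configured: a packet is
// treated as a retransmission if it looks like an old packet given the current
// jitter estimate and the minimum observed RTT.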
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00002208bool Channel::IsPacketRetransmitted(const RTPHeader& header) const {
2209 bool rtx_enabled = false;
2210 uint32_t rtx_ssrc = 0;
2211 int rtx_payload_type = 0;
2212 rtp_receiver_->RTXStatus(&rtx_enabled, &rtx_ssrc, &rtx_payload_type);
2213 if (!rtx_enabled) {
2214 // Check if this is a retransmission.
2215 ReceiveStatistics::RtpReceiveStatistics stats;
2216 if (rtp_receive_statistics_->Statistics(&stats, false)) {
2217 uint16_t min_rtt = 0;
2218 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
2219 return rtp_receiver_->RetransmitOfOldPacket(header, stats.jitter,
2220 min_rtt);
2221 }
2222 }
2223 return false;
2224}
2225
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002226int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002227 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2228 "Channel::ReceivedRTCPPacket()");
2229 // Store playout timestamp for the received RTCP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002230 UpdatePlayoutTimestamp(true);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002231
2232 // Dump the RTCP packet to a file (if RTP dump is enabled).
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002233 if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2234 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002235 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2236 VoEId(_instanceId,_channelId),
2237                 "Channel::ReceivedRTCPPacket() RTCP dump to input file failed");
2238 }
2239
2240 // Deliver RTCP packet to RTP/RTCP module for parsing
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002241 if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data,
2242 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002243 _engineStatisticsPtr->SetLastError(
2244 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2245        "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
2246 }
2247 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002248}
2249
niklase@google.com470e71d2011-07-07 08:21:25 +00002250int Channel::StartPlayingFileLocally(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002251 bool loop,
2252 FileFormats format,
2253 int startPosition,
2254 float volumeScaling,
2255 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002256 const CodecInst* codecInst)
2257{
2258 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2259 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
2260 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
2261 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2262 startPosition, stopPosition);
2263
2264 if (_outputFilePlaying)
2265 {
2266 _engineStatisticsPtr->SetLastError(
2267 VE_ALREADY_PLAYING, kTraceError,
2268 "StartPlayingFileLocally() is already playing");
2269 return -1;
2270 }
2271
niklase@google.com470e71d2011-07-07 08:21:25 +00002272 {
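        // Scope the file lock; it must be released before the mixer
        // registration below (see RegisterFilePlayingToMixer()).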
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002273 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002274
2275 if (_outputFilePlayerPtr)
2276 {
2277 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2278 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2279 _outputFilePlayerPtr = NULL;
2280 }
2281
2282 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2283 _outputFilePlayerId, (const FileFormats)format);
2284
2285 if (_outputFilePlayerPtr == NULL)
2286 {
2287 _engineStatisticsPtr->SetLastError(
2288 VE_INVALID_ARGUMENT, kTraceError,
henrike@webrtc.org31d30702011-11-18 19:59:32 +00002289 "StartPlayingFileLocally() filePlayer format is not correct");
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002290 return -1;
2291 }
2292
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002293 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002294
2295 if (_outputFilePlayerPtr->StartPlayingFile(
2296 fileName,
2297 loop,
2298 startPosition,
2299 volumeScaling,
2300 notificationTime,
2301 stopPosition,
2302 (const CodecInst*)codecInst) != 0)
2303 {
2304 _engineStatisticsPtr->SetLastError(
2305 VE_BAD_FILE, kTraceError,
2306 "StartPlayingFile() failed to start file playout");
2307 _outputFilePlayerPtr->StopPlayingFile();
2308 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2309 _outputFilePlayerPtr = NULL;
2310 return -1;
2311 }
2312 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2313 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002314 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002315
2316 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002317 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002318
2319 return 0;
2320}
2321
2322int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002323 FileFormats format,
2324 int startPosition,
2325 float volumeScaling,
2326 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002327 const CodecInst* codecInst)
2328{
2329 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2330 "Channel::StartPlayingFileLocally(format=%d,"
2331 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2332 format, volumeScaling, startPosition, stopPosition);
2333
2334 if(stream == NULL)
2335 {
2336 _engineStatisticsPtr->SetLastError(
2337 VE_BAD_FILE, kTraceError,
2338 "StartPlayingFileLocally() NULL as input stream");
2339 return -1;
2340 }
2341
2342
2343 if (_outputFilePlaying)
2344 {
2345 _engineStatisticsPtr->SetLastError(
2346 VE_ALREADY_PLAYING, kTraceError,
2347 "StartPlayingFileLocally() is already playing");
2348 return -1;
2349 }
2350
niklase@google.com470e71d2011-07-07 08:21:25 +00002351 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002352 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002353
2354 // Destroy the old instance
2355 if (_outputFilePlayerPtr)
2356 {
2357 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2358 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2359 _outputFilePlayerPtr = NULL;
2360 }
2361
2362 // Create the instance
2363 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2364 _outputFilePlayerId,
2365 (const FileFormats)format);
2366
2367 if (_outputFilePlayerPtr == NULL)
2368 {
2369 _engineStatisticsPtr->SetLastError(
2370 VE_INVALID_ARGUMENT, kTraceError,
2371            "StartPlayingFileLocally() filePlayer format is not correct");
2372 return -1;
2373 }
2374
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002375 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002376
2377 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2378 volumeScaling,
2379 notificationTime,
2380 stopPosition, codecInst) != 0)
2381 {
2382 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2383 "StartPlayingFile() failed to "
2384 "start file playout");
2385 _outputFilePlayerPtr->StopPlayingFile();
2386 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2387 _outputFilePlayerPtr = NULL;
2388 return -1;
2389 }
2390 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2391 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002392 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002393
2394 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002395 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002396
niklase@google.com470e71d2011-07-07 08:21:25 +00002397 return 0;
2398}
2399
2400int Channel::StopPlayingFileLocally()
2401{
2402 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2403 "Channel::StopPlayingFileLocally()");
2404
2405 if (!_outputFilePlaying)
2406 {
2407 _engineStatisticsPtr->SetLastError(
2408 VE_INVALID_OPERATION, kTraceWarning,
2409            "StopPlayingFileLocally() is not playing");
2410 return 0;
2411 }
2412
niklase@google.com470e71d2011-07-07 08:21:25 +00002413 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002414 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002415
2416 if (_outputFilePlayerPtr->StopPlayingFile() != 0)
2417 {
2418 _engineStatisticsPtr->SetLastError(
2419 VE_STOP_RECORDING_FAILED, kTraceError,
2420 "StopPlayingFile() could not stop playing");
2421 return -1;
2422 }
2423 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2424 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2425 _outputFilePlayerPtr = NULL;
2426 _outputFilePlaying = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002427 }
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002428 // _fileCritSect cannot be taken while calling
2429    // SetAnonymousMixabilityStatus(). Refer to comments in
2430 // StartPlayingFileLocally(const char* ...) for more details.
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002431 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
2432 {
2433 _engineStatisticsPtr->SetLastError(
2434 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002435 "StopPlayingFile() failed to stop participant from playing as"
2436            " file in the mixer");
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002437 return -1;
2438 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002439
2440 return 0;
2441}
2442
2443int Channel::IsPlayingFileLocally() const
2444{
2445 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2446 "Channel::IsPlayingFileLocally()");
2447
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002448 return (int32_t)_outputFilePlaying;
niklase@google.com470e71d2011-07-07 08:21:25 +00002449}
2450
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002451int Channel::RegisterFilePlayingToMixer()
2452{
2453    // Return success without registering the file playing to the mixer if:
2454    // 1. the file starts playing before playout has started on this channel.
2455    // 2. playout starts without a file playing on this channel.
2456 if (!_playing || !_outputFilePlaying)
2457 {
2458 return 0;
2459 }
2460
2461 // |_fileCritSect| cannot be taken while calling
2462 // SetAnonymousMixabilityStatus() since as soon as the participant is added
2463 // frames can be pulled by the mixer. Since the frames are generated from
2464 // the file, _fileCritSect will be taken. This would result in a deadlock.
2465 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
2466 {
2467 CriticalSectionScoped cs(&_fileCritSect);
2468 _outputFilePlaying = false;
2469 _engineStatisticsPtr->SetLastError(
2470 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2471 "StartPlayingFile() failed to add participant as file to mixer");
2472 _outputFilePlayerPtr->StopPlayingFile();
2473 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2474 _outputFilePlayerPtr = NULL;
2475 return -1;
2476 }
2477
2478 return 0;
2479}
2480
pbos@webrtc.org92135212013-05-14 08:31:39 +00002481int Channel::ScaleLocalFilePlayout(float scale)
niklase@google.com470e71d2011-07-07 08:21:25 +00002482{
2483 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2484 "Channel::ScaleLocalFilePlayout(scale=%5.3f)", scale);
2485
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002486 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002487
2488 if (!_outputFilePlaying)
2489 {
2490 _engineStatisticsPtr->SetLastError(
2491 VE_INVALID_OPERATION, kTraceError,
2492            "ScaleLocalFilePlayout() is not playing");
2493 return -1;
2494 }
2495 if ((_outputFilePlayerPtr == NULL) ||
2496 (_outputFilePlayerPtr->SetAudioScaling(scale) != 0))
2497 {
2498 _engineStatisticsPtr->SetLastError(
2499 VE_BAD_ARGUMENT, kTraceError,
2500 "SetAudioScaling() failed to scale the playout");
2501 return -1;
2502 }
2503
2504 return 0;
2505}
2506
2507int Channel::GetLocalPlayoutPosition(int& positionMs)
2508{
2509 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2510 "Channel::GetLocalPlayoutPosition(position=?)");
2511
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002512 uint32_t position;
niklase@google.com470e71d2011-07-07 08:21:25 +00002513
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002514 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002515
2516 if (_outputFilePlayerPtr == NULL)
2517 {
2518 _engineStatisticsPtr->SetLastError(
2519 VE_INVALID_OPERATION, kTraceError,
2520            "GetLocalPlayoutPosition() filePlayer instance does not exist");
2521 return -1;
2522 }
2523
2524 if (_outputFilePlayerPtr->GetPlayoutPosition(position) != 0)
2525 {
2526 _engineStatisticsPtr->SetLastError(
2527 VE_BAD_FILE, kTraceError,
2528 "GetLocalPlayoutPosition() failed");
2529 return -1;
2530 }
2531 positionMs = position;
2532
2533 return 0;
2534}
2535
2536int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002537 bool loop,
2538 FileFormats format,
2539 int startPosition,
2540 float volumeScaling,
2541 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002542 const CodecInst* codecInst)
2543{
2544 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2545 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
2546 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
2547 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2548 startPosition, stopPosition);
2549
2550 if (_inputFilePlaying)
2551 {
2552 _engineStatisticsPtr->SetLastError(
2553 VE_ALREADY_PLAYING, kTraceWarning,
2554 "StartPlayingFileAsMicrophone() filePlayer is playing");
2555 return 0;
2556 }
2557
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002558 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002559
2560 // Destroy the old instance
2561 if (_inputFilePlayerPtr)
2562 {
2563 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2564 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2565 _inputFilePlayerPtr = NULL;
2566 }
2567
2568 // Create the instance
2569 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2570 _inputFilePlayerId, (const FileFormats)format);
2571
2572 if (_inputFilePlayerPtr == NULL)
2573 {
2574 _engineStatisticsPtr->SetLastError(
2575 VE_INVALID_ARGUMENT, kTraceError,
2576            "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2577 return -1;
2578 }
2579
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002580 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002581
2582 if (_inputFilePlayerPtr->StartPlayingFile(
2583 fileName,
2584 loop,
2585 startPosition,
2586 volumeScaling,
2587 notificationTime,
2588 stopPosition,
2589 (const CodecInst*)codecInst) != 0)
2590 {
2591 _engineStatisticsPtr->SetLastError(
2592 VE_BAD_FILE, kTraceError,
2593 "StartPlayingFile() failed to start file playout");
2594 _inputFilePlayerPtr->StopPlayingFile();
2595 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2596 _inputFilePlayerPtr = NULL;
2597 return -1;
2598 }
2599 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2600 _inputFilePlaying = true;
2601
2602 return 0;
2603}
2604
2605int Channel::StartPlayingFileAsMicrophone(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002606 FileFormats format,
2607 int startPosition,
2608 float volumeScaling,
2609 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002610 const CodecInst* codecInst)
2611{
2612 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2613 "Channel::StartPlayingFileAsMicrophone(format=%d, "
2614 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2615 format, volumeScaling, startPosition, stopPosition);
2616
2617 if(stream == NULL)
2618 {
2619 _engineStatisticsPtr->SetLastError(
2620 VE_BAD_FILE, kTraceError,
2621 "StartPlayingFileAsMicrophone NULL as input stream");
2622 return -1;
2623 }
2624
2625 if (_inputFilePlaying)
2626 {
2627 _engineStatisticsPtr->SetLastError(
2628 VE_ALREADY_PLAYING, kTraceWarning,
2629 "StartPlayingFileAsMicrophone() is playing");
2630 return 0;
2631 }
2632
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002633 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002634
2635 // Destroy the old instance
2636 if (_inputFilePlayerPtr)
2637 {
2638 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2639 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2640 _inputFilePlayerPtr = NULL;
2641 }
2642
2643 // Create the instance
2644 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2645 _inputFilePlayerId, (const FileFormats)format);
2646
2647 if (_inputFilePlayerPtr == NULL)
2648 {
2649 _engineStatisticsPtr->SetLastError(
2650            "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2651 "StartPlayingInputFile() filePlayer format isnot correct");
2652 return -1;
2653 }
2654
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002655 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002656
2657 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2658 volumeScaling, notificationTime,
2659 stopPosition, codecInst) != 0)
2660 {
2661 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2662 "StartPlayingFile() failed to start "
2663 "file playout");
2664 _inputFilePlayerPtr->StopPlayingFile();
2665 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2666 _inputFilePlayerPtr = NULL;
2667 return -1;
2668 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002669
niklase@google.com470e71d2011-07-07 08:21:25 +00002670 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2671 _inputFilePlaying = true;
2672
2673 return 0;
2674}
2675
2676int Channel::StopPlayingFileAsMicrophone()
2677{
2678 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2679 "Channel::StopPlayingFileAsMicrophone()");
2680
2681 if (!_inputFilePlaying)
2682 {
2683 _engineStatisticsPtr->SetLastError(
2684 VE_INVALID_OPERATION, kTraceWarning,
2685            "StopPlayingFileAsMicrophone() is not playing");
2686 return 0;
2687 }
2688
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002689 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002690 if (_inputFilePlayerPtr->StopPlayingFile() != 0)
2691 {
2692 _engineStatisticsPtr->SetLastError(
2693 VE_STOP_RECORDING_FAILED, kTraceError,
2694 "StopPlayingFile() could not stop playing");
2695 return -1;
2696 }
2697 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2698 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2699 _inputFilePlayerPtr = NULL;
2700 _inputFilePlaying = false;
2701
2702 return 0;
2703}
2704
2705int Channel::IsPlayingFileAsMicrophone() const
2706{
2707 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2708 "Channel::IsPlayingFileAsMicrophone()");
2709
2710 return _inputFilePlaying;
2711}
2712
pbos@webrtc.org92135212013-05-14 08:31:39 +00002713int Channel::ScaleFileAsMicrophonePlayout(float scale)
niklase@google.com470e71d2011-07-07 08:21:25 +00002714{
2715 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2716 "Channel::ScaleFileAsMicrophonePlayout(scale=%5.3f)", scale);
2717
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002718 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002719
2720 if (!_inputFilePlaying)
2721 {
2722 _engineStatisticsPtr->SetLastError(
2723 VE_INVALID_OPERATION, kTraceError,
2724            "ScaleFileAsMicrophonePlayout() is not playing");
2725 return -1;
2726 }
2727
2728 if ((_inputFilePlayerPtr == NULL) ||
2729 (_inputFilePlayerPtr->SetAudioScaling(scale) != 0))
2730 {
2731 _engineStatisticsPtr->SetLastError(
2732 VE_BAD_ARGUMENT, kTraceError,
2733 "SetAudioScaling() failed to scale playout");
2734 return -1;
2735 }
2736
2737 return 0;
2738}
2739
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00002740int Channel::StartRecordingPlayout(const char* fileName,
niklase@google.com470e71d2011-07-07 08:21:25 +00002741 const CodecInst* codecInst)
2742{
2743 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2744 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
2745
2746 if (_outputFileRecording)
2747 {
2748 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2749 "StartRecordingPlayout() is already recording");
2750 return 0;
2751 }
2752
2753 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002754 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +00002755 CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2756
niklas.enbom@webrtc.org40197d72012-03-26 08:45:47 +00002757 if ((codecInst != NULL) &&
2758 ((codecInst->channels < 1) || (codecInst->channels > 2)))
niklase@google.com470e71d2011-07-07 08:21:25 +00002759 {
2760 _engineStatisticsPtr->SetLastError(
2761 VE_BAD_ARGUMENT, kTraceError,
2762 "StartRecordingPlayout() invalid compression");
2763 return(-1);
2764 }
2765 if(codecInst == NULL)
2766 {
2767 format = kFileFormatPcm16kHzFile;
2768 codecInst=&dummyCodec;
2769 }
2770 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2771 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2772 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2773 {
2774 format = kFileFormatWavFile;
2775 }
2776 else
2777 {
2778 format = kFileFormatCompressedFile;
2779 }
2780
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002781 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002782
2783 // Destroy the old instance
2784 if (_outputFileRecorderPtr)
2785 {
2786 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2787 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2788 _outputFileRecorderPtr = NULL;
2789 }
2790
2791 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2792 _outputFileRecorderId, (const FileFormats)format);
2793 if (_outputFileRecorderPtr == NULL)
2794 {
2795 _engineStatisticsPtr->SetLastError(
2796 VE_INVALID_ARGUMENT, kTraceError,
2797            "StartRecordingPlayout() fileRecorder format is not correct");
2798 return -1;
2799 }
2800
2801 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2802 fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
2803 {
2804 _engineStatisticsPtr->SetLastError(
2805 VE_BAD_FILE, kTraceError,
2806 "StartRecordingAudioFile() failed to start file recording");
2807 _outputFileRecorderPtr->StopRecording();
2808 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2809 _outputFileRecorderPtr = NULL;
2810 return -1;
2811 }
2812 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2813 _outputFileRecording = true;
2814
2815 return 0;
2816}
2817
2818int Channel::StartRecordingPlayout(OutStream* stream,
2819 const CodecInst* codecInst)
2820{
2821 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2822 "Channel::StartRecordingPlayout()");
2823
2824 if (_outputFileRecording)
2825 {
2826 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2827 "StartRecordingPlayout() is already recording");
2828 return 0;
2829 }
2830
2831 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002832 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +00002833 CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2834
2835 if (codecInst != NULL && codecInst->channels != 1)
2836 {
2837 _engineStatisticsPtr->SetLastError(
2838 VE_BAD_ARGUMENT, kTraceError,
2839 "StartRecordingPlayout() invalid compression");
2840 return(-1);
2841 }
2842 if(codecInst == NULL)
2843 {
2844 format = kFileFormatPcm16kHzFile;
2845 codecInst=&dummyCodec;
2846 }
2847 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2848 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2849 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2850 {
2851 format = kFileFormatWavFile;
2852 }
2853 else
2854 {
2855 format = kFileFormatCompressedFile;
2856 }
2857
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002858 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002859
2860 // Destroy the old instance
2861 if (_outputFileRecorderPtr)
2862 {
2863 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2864 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2865 _outputFileRecorderPtr = NULL;
2866 }
2867
2868 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2869 _outputFileRecorderId, (const FileFormats)format);
2870 if (_outputFileRecorderPtr == NULL)
2871 {
2872 _engineStatisticsPtr->SetLastError(
2873 VE_INVALID_ARGUMENT, kTraceError,
2874            "StartRecordingPlayout() fileRecorder format is not correct");
2875 return -1;
2876 }
2877
2878 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2879 notificationTime) != 0)
2880 {
2881 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2882 "StartRecordingPlayout() failed to "
2883 "start file recording");
2884 _outputFileRecorderPtr->StopRecording();
2885 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2886 _outputFileRecorderPtr = NULL;
2887 return -1;
2888 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002889
niklase@google.com470e71d2011-07-07 08:21:25 +00002890 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2891 _outputFileRecording = true;
2892
2893 return 0;
2894}
2895
2896int Channel::StopRecordingPlayout()
2897{
2898 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
2899 "Channel::StopRecordingPlayout()");
2900
2901 if (!_outputFileRecording)
2902 {
2903 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
2904                     "StopRecordingPlayout() is not recording");
2905 return -1;
2906 }
2907
2908
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002909 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002910
2911 if (_outputFileRecorderPtr->StopRecording() != 0)
2912 {
2913 _engineStatisticsPtr->SetLastError(
2914 VE_STOP_RECORDING_FAILED, kTraceError,
2915            "StopRecordingPlayout() could not stop recording");
2916        return -1;
2917 }
2918 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2919 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2920 _outputFileRecorderPtr = NULL;
2921 _outputFileRecording = false;
2922
2923 return 0;
2924}
2925
2926void
2927Channel::SetMixWithMicStatus(bool mix)
2928{
2929    _mixFileWithMicrophone = mix;
2930}
2931
2932int
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002933Channel::GetSpeechOutputLevel(uint32_t& level) const
niklase@google.com470e71d2011-07-07 08:21:25 +00002934{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002935 int8_t currentLevel = _outputAudioLevel.Level();
2936 level = static_cast<int32_t> (currentLevel);
niklase@google.com470e71d2011-07-07 08:21:25 +00002937 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2938 VoEId(_instanceId,_channelId),
2939 "GetSpeechOutputLevel() => level=%u", level);
2940 return 0;
2941}
2942
2943int
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002944Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const
niklase@google.com470e71d2011-07-07 08:21:25 +00002945{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002946 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2947 level = static_cast<int32_t> (currentLevel);
niklase@google.com470e71d2011-07-07 08:21:25 +00002948 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2949 VoEId(_instanceId,_channelId),
2950 "GetSpeechOutputLevelFullRange() => level=%u", level);
2951 return 0;
2952}
2953
2954int
2955Channel::SetMute(bool enable)
2956{
2957 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2958 "Channel::SetMute(enable=%d)", enable);
2959 _mute = enable;
2960 return 0;
2961}
2962
2963bool
2964Channel::Mute() const
2965{
2966 return _mute;
2967}
2968
2969int
2970Channel::SetOutputVolumePan(float left, float right)
2971{
2972 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2973 "Channel::SetOutputVolumePan()");
2974 _panLeft = left;
2975 _panRight = right;
2976 return 0;
2977}
2978
2979int
2980Channel::GetOutputVolumePan(float& left, float& right) const
2981{
2982 left = _panLeft;
2983 right = _panRight;
2984 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2985 VoEId(_instanceId,_channelId),
2986 "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
2987 return 0;
2988}
2989
2990int
2991Channel::SetChannelOutputVolumeScaling(float scaling)
2992{
2993 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2994 "Channel::SetChannelOutputVolumeScaling()");
2995 _outputGain = scaling;
2996 return 0;
2997}
2998
2999int
3000Channel::GetChannelOutputVolumeScaling(float& scaling) const
3001{
3002 scaling = _outputGain;
3003 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3004 VoEId(_instanceId,_channelId),
3005 "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
3006 return 0;
3007}
3008
niklase@google.com470e71d2011-07-07 08:21:25 +00003009int
3010Channel::RegisterExternalEncryption(Encryption& encryption)
3011{
3012 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3013 "Channel::RegisterExternalEncryption()");
3014
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003015 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003016
3017 if (_encryptionPtr)
3018 {
3019 _engineStatisticsPtr->SetLastError(
3020 VE_INVALID_OPERATION, kTraceError,
3021 "RegisterExternalEncryption() encryption already enabled");
3022 return -1;
3023 }
3024
3025 _encryptionPtr = &encryption;
3026
3027 _decrypting = true;
3028 _encrypting = true;
3029
3030 return 0;
3031}
3032
3033int
3034Channel::DeRegisterExternalEncryption()
3035{
3036 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3037 "Channel::DeRegisterExternalEncryption()");
3038
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003039 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003040
3041 if (!_encryptionPtr)
3042 {
3043 _engineStatisticsPtr->SetLastError(
3044 VE_INVALID_OPERATION, kTraceWarning,
3045 "DeRegisterExternalEncryption() encryption already disabled");
3046 return 0;
3047 }
3048
3049 _decrypting = false;
3050 _encrypting = false;
3051
3052 _encryptionPtr = NULL;
3053
3054 return 0;
3055}
3056
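// Out-of-band DTMF: the event is forwarded to the RTP/RTCP module for
// transmission as an RTP telephone-event, and playDtmfEvent is stored in
// _playOutbandDtmfEvent (it appears to control whether the tone is also
// played out locally). The in-band variant below instead queues the event in
// _inbandDtmfQueue; the tone is inserted into the outgoing audio by
// InsertInbandDtmfTone() during PrepareEncodeAndSend().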
3057int Channel::SendTelephoneEventOutband(unsigned char eventCode,
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00003058 int lengthMs, int attenuationDb,
3059 bool playDtmfEvent)
niklase@google.com470e71d2011-07-07 08:21:25 +00003060{
3061 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3062 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
3063 playDtmfEvent);
3064
3065 _playOutbandDtmfEvent = playDtmfEvent;
3066
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003067 if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
niklase@google.com470e71d2011-07-07 08:21:25 +00003068 attenuationDb) != 0)
3069 {
3070 _engineStatisticsPtr->SetLastError(
3071 VE_SEND_DTMF_FAILED,
3072 kTraceWarning,
3073 "SendTelephoneEventOutband() failed to send event");
3074 return -1;
3075 }
3076 return 0;
3077}
3078
3079int Channel::SendTelephoneEventInband(unsigned char eventCode,
3080 int lengthMs,
3081 int attenuationDb,
3082 bool playDtmfEvent)
3083{
3084 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3085 "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
3086 playDtmfEvent);
3087
3088 _playInbandDtmfEvent = playDtmfEvent;
3089 _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
3090
3091 return 0;
3092}
3093
3094int
3095Channel::SetDtmfPlayoutStatus(bool enable)
3096{
3097 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3098 "Channel::SetDtmfPlayoutStatus()");
3099 if (_audioCodingModule.SetDtmfPlayoutStatus(enable) != 0)
3100 {
3101 _engineStatisticsPtr->SetLastError(
3102 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
3103 "SetDtmfPlayoutStatus() failed to set Dtmf playout");
3104 return -1;
3105 }
3106 return 0;
3107}
3108
3109bool
3110Channel::DtmfPlayoutStatus() const
3111{
3112 return _audioCodingModule.DtmfPlayoutStatus();
3113}
3114
3115int
3116Channel::SetSendTelephoneEventPayloadType(unsigned char type)
3117{
3118 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3119 "Channel::SetSendTelephoneEventPayloadType()");
andrew@webrtc.orgf81f9f82011-08-19 22:56:22 +00003120 if (type > 127)
niklase@google.com470e71d2011-07-07 08:21:25 +00003121 {
3122 _engineStatisticsPtr->SetLastError(
3123 VE_INVALID_ARGUMENT, kTraceError,
3124 "SetSendTelephoneEventPayloadType() invalid type");
3125 return -1;
3126 }
pwestin@webrtc.org1da1ce02011-10-13 15:19:55 +00003127 CodecInst codec;
3128 codec.plfreq = 8000;
3129 codec.pltype = type;
3130 memcpy(codec.plname, "telephone-event", 16);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003131 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003132 {
henrika@webrtc.org4392d5f2013-04-17 07:34:25 +00003133 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
3134 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
3135 _engineStatisticsPtr->SetLastError(
3136 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3137            "SetSendTelephoneEventPayloadType() failed to register send "
3138 "payload type");
3139 return -1;
3140 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003141 }
3142 _sendTelephoneEventPayloadType = type;
3143 return 0;
3144}
3145
3146int
3147Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
3148{
3149 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3150 "Channel::GetSendTelephoneEventPayloadType()");
3151 type = _sendTelephoneEventPayloadType;
3152 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3153 VoEId(_instanceId,_channelId),
3154 "GetSendTelephoneEventPayloadType() => type=%u", type);
3155 return 0;
3156}
3157
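// Derives a binary voice-activity decision from the decoded frame's
// vad_activity_ and notifies the registered VoERxVadCallback observer, but
// only when the decision changes.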
niklase@google.com470e71d2011-07-07 08:21:25 +00003158int
3159Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
3160{
3161 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3162 "Channel::UpdateRxVadDetection()");
3163
3164 int vadDecision = 1;
3165
andrew@webrtc.org63a50982012-05-02 23:56:37 +00003166 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive)? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003167
3168 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
3169 {
3170 OnRxVadDetected(vadDecision);
3171 _oldVadDecision = vadDecision;
3172 }
3173
3174 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3175 "Channel::UpdateRxVadDetection() => vadDecision=%d",
3176 vadDecision);
3177 return 0;
3178}
3179
3180int
3181Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
3182{
3183 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3184 "Channel::RegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003185 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003186
3187 if (_rxVadObserverPtr)
3188 {
3189 _engineStatisticsPtr->SetLastError(
3190 VE_INVALID_OPERATION, kTraceError,
3191 "RegisterRxVadObserver() observer already enabled");
3192 return -1;
3193 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003194 _rxVadObserverPtr = &observer;
3195 _RxVadDetection = true;
3196 return 0;
3197}
3198
3199int
3200Channel::DeRegisterRxVadObserver()
3201{
3202 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3203 "Channel::DeRegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003204 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003205
3206 if (!_rxVadObserverPtr)
3207 {
3208 _engineStatisticsPtr->SetLastError(
3209 VE_INVALID_OPERATION, kTraceWarning,
3210 "DeRegisterRxVadObserver() observer already disabled");
3211 return 0;
3212 }
3213 _rxVadObserverPtr = NULL;
3214 _RxVadDetection = false;
3215 return 0;
3216}
3217
3218int
3219Channel::VoiceActivityIndicator(int &activity)
3220{
3221 activity = _sendFrameType;
3222
3223 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3224 "Channel::VoiceActivityIndicator(indicator=%d)", activity);
3225 return 0;
3226}
3227
3228#ifdef WEBRTC_VOICE_ENGINE_AGC
3229
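// The Rx (receive-side) AGC and NS functions below configure the channel's
// dedicated AudioProcessing instance (_rxAudioProcessingModulePtr);
// _rxApmIsEnabled tracks whether at least one of the two components is
// enabled for the received signal.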
3230int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003231Channel::SetRxAgcStatus(bool enable, AgcModes mode)
niklase@google.com470e71d2011-07-07 08:21:25 +00003232{
3233 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3234 "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
3235 (int)enable, (int)mode);
3236
3237 GainControl::Mode agcMode(GainControl::kFixedDigital);
3238 switch (mode)
3239 {
3240 case kAgcDefault:
3241 agcMode = GainControl::kAdaptiveDigital;
3242 break;
3243 case kAgcUnchanged:
3244 agcMode = _rxAudioProcessingModulePtr->gain_control()->mode();
3245 break;
3246 case kAgcFixedDigital:
3247 agcMode = GainControl::kFixedDigital;
3248 break;
3249 case kAgcAdaptiveDigital:
3250            agcMode = GainControl::kAdaptiveDigital;
3251 break;
3252 default:
3253 _engineStatisticsPtr->SetLastError(
3254 VE_INVALID_ARGUMENT, kTraceError,
3255 "SetRxAgcStatus() invalid Agc mode");
3256 return -1;
3257 }
3258
3259 if (_rxAudioProcessingModulePtr->gain_control()->set_mode(agcMode) != 0)
3260 {
3261 _engineStatisticsPtr->SetLastError(
3262 VE_APM_ERROR, kTraceError,
3263 "SetRxAgcStatus() failed to set Agc mode");
3264 return -1;
3265 }
3266 if (_rxAudioProcessingModulePtr->gain_control()->Enable(enable) != 0)
3267 {
3268 _engineStatisticsPtr->SetLastError(
3269 VE_APM_ERROR, kTraceError,
3270 "SetRxAgcStatus() failed to set Agc state");
3271 return -1;
3272 }
3273
3274 _rxAgcIsEnabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00003275 _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
3276
3277 return 0;
3278}
3279
3280int
3281Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
3282{
3283 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3284 "Channel::GetRxAgcStatus(enable=?, mode=?)");
3285
3286 bool enable = _rxAudioProcessingModulePtr->gain_control()->is_enabled();
3287 GainControl::Mode agcMode =
3288 _rxAudioProcessingModulePtr->gain_control()->mode();
3289
3290 enabled = enable;
3291
3292 switch (agcMode)
3293 {
3294 case GainControl::kFixedDigital:
3295 mode = kAgcFixedDigital;
3296 break;
3297 case GainControl::kAdaptiveDigital:
3298 mode = kAgcAdaptiveDigital;
3299 break;
3300 default:
3301 _engineStatisticsPtr->SetLastError(
3302 VE_APM_ERROR, kTraceError,
3303 "GetRxAgcStatus() invalid Agc mode");
3304 return -1;
3305 }
3306
3307 return 0;
3308}
3309
3310int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003311Channel::SetRxAgcConfig(AgcConfig config)
niklase@google.com470e71d2011-07-07 08:21:25 +00003312{
3313 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3314 "Channel::SetRxAgcConfig()");
3315
3316 if (_rxAudioProcessingModulePtr->gain_control()->set_target_level_dbfs(
3317 config.targetLeveldBOv) != 0)
3318 {
3319 _engineStatisticsPtr->SetLastError(
3320 VE_APM_ERROR, kTraceError,
3321            "SetRxAgcConfig() failed to set target peak |level| "
3322 "(or envelope) of the Agc");
3323 return -1;
3324 }
3325 if (_rxAudioProcessingModulePtr->gain_control()->set_compression_gain_db(
3326 config.digitalCompressionGaindB) != 0)
3327 {
3328 _engineStatisticsPtr->SetLastError(
3329 VE_APM_ERROR, kTraceError,
3330 "SetRxAgcConfig() failed to set the range in |gain| the"
3331 " digital compression stage may apply");
3332 return -1;
3333 }
3334 if (_rxAudioProcessingModulePtr->gain_control()->enable_limiter(
3335 config.limiterEnable) != 0)
3336 {
3337 _engineStatisticsPtr->SetLastError(
3338 VE_APM_ERROR, kTraceError,
3339            "SetRxAgcConfig() failed to set the hard limiter on the signal");
3340 return -1;
3341 }
3342
3343 return 0;
3344}
3345
3346int
3347Channel::GetRxAgcConfig(AgcConfig& config)
3348{
3349 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3350                 "Channel::GetRxAgcConfig(config=?)");
3351
3352 config.targetLeveldBOv =
3353 _rxAudioProcessingModulePtr->gain_control()->target_level_dbfs();
3354 config.digitalCompressionGaindB =
3355 _rxAudioProcessingModulePtr->gain_control()->compression_gain_db();
3356 config.limiterEnable =
3357 _rxAudioProcessingModulePtr->gain_control()->is_limiter_enabled();
3358
3359 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3360 VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
3361 "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
3362 " limiterEnable=%d",
3363 config.targetLeveldBOv,
3364 config.digitalCompressionGaindB,
3365 config.limiterEnable);
3366
3367 return 0;
3368}
3369
3370#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
3371
3372#ifdef WEBRTC_VOICE_ENGINE_NR
3373
3374int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003375Channel::SetRxNsStatus(bool enable, NsModes mode)
niklase@google.com470e71d2011-07-07 08:21:25 +00003376{
3377 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3378 "Channel::SetRxNsStatus(enable=%d, mode=%d)",
3379 (int)enable, (int)mode);
3380
3381 NoiseSuppression::Level nsLevel(
3382 (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE);
3383 switch (mode)
3384 {
3385
3386 case kNsDefault:
3387 nsLevel = (NoiseSuppression::Level)
3388 WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE;
3389 break;
3390 case kNsUnchanged:
3391 nsLevel = _rxAudioProcessingModulePtr->noise_suppression()->level();
3392 break;
3393 case kNsConference:
3394 nsLevel = NoiseSuppression::kHigh;
3395 break;
3396 case kNsLowSuppression:
3397 nsLevel = NoiseSuppression::kLow;
3398 break;
3399 case kNsModerateSuppression:
3400 nsLevel = NoiseSuppression::kModerate;
3401 break;
3402 case kNsHighSuppression:
3403 nsLevel = NoiseSuppression::kHigh;
3404 break;
3405 case kNsVeryHighSuppression:
3406 nsLevel = NoiseSuppression::kVeryHigh;
3407 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003408 }
3409
3410 if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(nsLevel)
3411 != 0)
3412 {
3413 _engineStatisticsPtr->SetLastError(
3414 VE_APM_ERROR, kTraceError,
3415            "SetRxNsStatus() failed to set NS level");
3416 return -1;
3417 }
3418 if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(enable) != 0)
3419 {
3420 _engineStatisticsPtr->SetLastError(
3421 VE_APM_ERROR, kTraceError,
3422            "SetRxNsStatus() failed to set NS state");
3423 return -1;
3424 }
3425
3426 _rxNsIsEnabled = enable;
3427 _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
3428
3429 return 0;
3430}
3431
3432int
3433Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
3434{
3435 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3436 "Channel::GetRxNsStatus(enable=?, mode=?)");
3437
3438 bool enable =
3439 _rxAudioProcessingModulePtr->noise_suppression()->is_enabled();
3440 NoiseSuppression::Level ncLevel =
3441 _rxAudioProcessingModulePtr->noise_suppression()->level();
3442
3443 enabled = enable;
3444
3445 switch (ncLevel)
3446 {
3447 case NoiseSuppression::kLow:
3448 mode = kNsLowSuppression;
3449 break;
3450 case NoiseSuppression::kModerate:
3451 mode = kNsModerateSuppression;
3452 break;
3453 case NoiseSuppression::kHigh:
3454 mode = kNsHighSuppression;
3455 break;
3456 case NoiseSuppression::kVeryHigh:
3457 mode = kNsVeryHighSuppression;
3458 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003459 }
3460
3461 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3462 VoEId(_instanceId,_channelId),
3463 "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
3464 return 0;
3465}
3466
3467#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
3468
3469int
3470Channel::RegisterRTPObserver(VoERTPObserver& observer)
3471{
3472 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3473 "Channel::RegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003474 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003475
3476 if (_rtpObserverPtr)
3477 {
3478 _engineStatisticsPtr->SetLastError(
3479 VE_INVALID_OPERATION, kTraceError,
3480 "RegisterRTPObserver() observer already enabled");
3481 return -1;
3482 }
3483
3484 _rtpObserverPtr = &observer;
3485 _rtpObserver = true;
3486
3487 return 0;
3488}
3489
3490int
3491Channel::DeRegisterRTPObserver()
3492{
3493 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3494 "Channel::DeRegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003495 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003496
3497 if (!_rtpObserverPtr)
3498 {
3499 _engineStatisticsPtr->SetLastError(
3500 VE_INVALID_OPERATION, kTraceWarning,
3501 "DeRegisterRTPObserver() observer already disabled");
3502 return 0;
3503 }
3504
3505 _rtpObserver = false;
3506 _rtpObserverPtr = NULL;
3507
3508 return 0;
3509}
3510
3511int
3512Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
3513{
3514 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3515 "Channel::RegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003516 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003517
3518 if (_rtcpObserverPtr)
3519 {
3520 _engineStatisticsPtr->SetLastError(
3521 VE_INVALID_OPERATION, kTraceError,
3522 "RegisterRTCPObserver() observer already enabled");
3523 return -1;
3524 }
3525
3526 _rtcpObserverPtr = &observer;
3527 _rtcpObserver = true;
3528
3529 return 0;
3530}
3531
3532int
3533Channel::DeRegisterRTCPObserver()
3534{
3535 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3536 "Channel::DeRegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003537 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003538
3539 if (!_rtcpObserverPtr)
3540 {
3541 _engineStatisticsPtr->SetLastError(
3542 VE_INVALID_OPERATION, kTraceWarning,
3543 "DeRegisterRTCPObserver() observer already disabled");
3544 return 0;
3545 }
3546
3547 _rtcpObserver = false;
3548 _rtcpObserverPtr = NULL;
3549
3550 return 0;
3551}
3552
3553int
3554Channel::SetLocalSSRC(unsigned int ssrc)
3555{
3556 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3557 "Channel::SetLocalSSRC()");
3558 if (_sending)
3559 {
3560 _engineStatisticsPtr->SetLastError(
3561 VE_ALREADY_SENDING, kTraceError,
3562 "SetLocalSSRC() already sending");
3563 return -1;
3564 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003565 if (_rtpRtcpModule->SetSSRC(ssrc) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003566 {
3567 _engineStatisticsPtr->SetLastError(
3568 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3569 "SetLocalSSRC() failed to set SSRC");
3570 return -1;
3571 }
3572 return 0;
3573}
3574
3575int
3576Channel::GetLocalSSRC(unsigned int& ssrc)
3577{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003578 ssrc = _rtpRtcpModule->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003579 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3580 VoEId(_instanceId,_channelId),
3581 "GetLocalSSRC() => ssrc=%lu", ssrc);
3582 return 0;
3583}
3584
3585int
3586Channel::GetRemoteSSRC(unsigned int& ssrc)
3587{
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00003588 ssrc = rtp_receiver_->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003589 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3590 VoEId(_instanceId,_channelId),
3591 "GetRemoteSSRC() => ssrc=%lu", ssrc);
3592 return 0;
3593}
3594
3595int
3596Channel::GetRemoteCSRCs(unsigned int arrCSRC[15])
3597{
3598 if (arrCSRC == NULL)
3599 {
3600 _engineStatisticsPtr->SetLastError(
3601 VE_INVALID_ARGUMENT, kTraceError,
3602 "GetRemoteCSRCs() invalid array argument");
3603 return -1;
3604 }
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003605 uint32_t arrOfCSRC[kRtpCsrcSize];
3606 int32_t CSRCs(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003607 CSRCs = _rtpRtcpModule->CSRCs(arrOfCSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00003608 if (CSRCs > 0)
3609 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003610 memcpy(arrCSRC, arrOfCSRC, CSRCs * sizeof(uint32_t));
niklase@google.com470e71d2011-07-07 08:21:25 +00003611 for (int i = 0; i < (int) CSRCs; i++)
3612 {
3613 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3614 VoEId(_instanceId, _channelId),
3615 "GetRemoteCSRCs() => arrCSRC[%d]=%lu", i, arrCSRC[i]);
3616 }
3617 } else
3618 {
3619 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3620 VoEId(_instanceId, _channelId),
3621 "GetRemoteCSRCs() => list is empty!");
3622 }
3623 return CSRCs;
3624}
3625
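// Enables or disables the audio-level RTP header extension: lazily creates a
// separate AudioProcessing instance used only for level estimation, switches
// its level estimator on or off, (de)registers the extension with the local
// RTP header parser, and forwards the setting to the RTP/RTCP module.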
3626int
3627Channel::SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID)
3628{
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00003629 if (_rtpAudioProc.get() == NULL)
3630 {
3631 _rtpAudioProc.reset(AudioProcessing::Create(VoEModuleId(_instanceId,
3632 _channelId)));
3633 if (_rtpAudioProc.get() == NULL)
3634 {
3635 _engineStatisticsPtr->SetLastError(VE_NO_MEMORY, kTraceCritical,
3636 "Failed to create AudioProcessing");
3637 return -1;
3638 }
3639 }
3640
3641 if (_rtpAudioProc->level_estimator()->Enable(enable) !=
3642 AudioProcessing::kNoError)
3643 {
3644 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceWarning,
3645 "Failed to enable AudioProcessing::level_estimator()");
3646 }
3647
niklase@google.com470e71d2011-07-07 08:21:25 +00003648 _includeAudioLevelIndication = enable;
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00003649 if (enable) {
3650 rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
3651 ID);
3652 } else {
3653 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
3654 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003655 return _rtpRtcpModule->SetRTPAudioLevelIndicationStatus(enable, ID);
niklase@google.com470e71d2011-07-07 08:21:25 +00003656}
3657int
3658Channel::GetRTPAudioLevelIndicationStatus(bool& enabled, unsigned char& ID)
3659{
3660    int ret = _rtpRtcpModule->GetRTPAudioLevelIndicationStatus(enabled, ID);
3661    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3662                 VoEId(_instanceId,_channelId),
3663                 "GetRTPAudioLevelIndicationStatus() => enabled=%d, ID=%u",
3664                 enabled, ID);
    return ret;
niklase@google.com470e71d2011-07-07 08:21:25 +00003665}
3666
3667int
3668Channel::SetRTCPStatus(bool enable)
3669{
3670 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3671 "Channel::SetRTCPStatus()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003672 if (_rtpRtcpModule->SetRTCPStatus(enable ?
niklase@google.com470e71d2011-07-07 08:21:25 +00003673 kRtcpCompound : kRtcpOff) != 0)
3674 {
3675 _engineStatisticsPtr->SetLastError(
3676 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3677 "SetRTCPStatus() failed to set RTCP status");
3678 return -1;
3679 }
3680 return 0;
3681}
3682
3683int
3684Channel::GetRTCPStatus(bool& enabled)
3685{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003686 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003687 enabled = (method != kRtcpOff);
3688 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3689 VoEId(_instanceId,_channelId),
3690 "GetRTCPStatus() => enabled=%d", enabled);
3691 return 0;
3692}
3693
3694int
3695Channel::SetRTCP_CNAME(const char cName[256])
3696{
3697 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3698 "Channel::SetRTCP_CNAME()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003699 if (_rtpRtcpModule->SetCNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003700 {
3701 _engineStatisticsPtr->SetLastError(
3702 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3703 "SetRTCP_CNAME() failed to set RTCP CNAME");
3704 return -1;
3705 }
3706 return 0;
3707}
3708
3709int
3710Channel::GetRTCP_CNAME(char cName[256])
3711{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003712 if (_rtpRtcpModule->CNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003713 {
3714 _engineStatisticsPtr->SetLastError(
3715 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3716 "GetRTCP_CNAME() failed to retrieve RTCP CNAME");
3717 return -1;
3718 }
3719 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3720 VoEId(_instanceId, _channelId),
3721 "GetRTCP_CNAME() => cName=%s", cName);
3722 return 0;
3723}
3724
3725int
3726Channel::GetRemoteRTCP_CNAME(char cName[256])
3727{
3728 if (cName == NULL)
3729 {
3730 _engineStatisticsPtr->SetLastError(
3731 VE_INVALID_ARGUMENT, kTraceError,
3732 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
3733 return -1;
3734 }
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00003735 char cname[RTCP_CNAME_SIZE];
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00003736 const uint32_t remoteSSRC = rtp_receiver_->SSRC();
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003737 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003738 {
3739 _engineStatisticsPtr->SetLastError(
3740 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
3741 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
3742 return -1;
3743 }
3744 strcpy(cName, cname);
3745 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3746 VoEId(_instanceId, _channelId),
3747 "GetRemoteRTCP_CNAME() => cName=%s", cName);
3748 return 0;
3749}
3750
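// Combines data from the most recently received RTCP Sender Report (NTP time
// and RTP timestamp) with locally derived values: the RTCP playout timestamp
// and, when requested, jitter and fraction lost taken from the report block
// that matches the remote SSRC (falling back to the first block otherwise).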
3751int
3752Channel::GetRemoteRTCPData(
3753 unsigned int& NTPHigh,
3754 unsigned int& NTPLow,
3755 unsigned int& timestamp,
3756 unsigned int& playoutTimestamp,
3757 unsigned int* jitter,
3758 unsigned short* fractionLost)
3759{
3760 // --- Information from sender info in received Sender Reports
3761
3762 RTCPSenderInfo senderInfo;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003763 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003764 {
3765 _engineStatisticsPtr->SetLastError(
3766 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003767 "GetRemoteRTCPData() failed to retrieve sender info for remote "
niklase@google.com470e71d2011-07-07 08:21:25 +00003768 "side");
3769 return -1;
3770 }
3771
3772 // We only utilize 12 out of 20 bytes in the sender info (ignores packet
3773 // and octet count)
3774 NTPHigh = senderInfo.NTPseconds;
3775 NTPLow = senderInfo.NTPfraction;
3776 timestamp = senderInfo.RTPtimeStamp;
3777
3778 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3779 VoEId(_instanceId, _channelId),
3780 "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
3781 "timestamp=%lu",
3782 NTPHigh, NTPLow, timestamp);
3783
3784 // --- Locally derived information
3785
3786 // This value is updated on each incoming RTCP packet (0 when no packet
3787 // has been received)
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003788 playoutTimestamp = playout_timestamp_rtcp_;
niklase@google.com470e71d2011-07-07 08:21:25 +00003789
3790 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3791 VoEId(_instanceId, _channelId),
3792 "GetRemoteRTCPData() => playoutTimestamp=%lu",
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003793 playout_timestamp_rtcp_);
niklase@google.com470e71d2011-07-07 08:21:25 +00003794
3795 if (NULL != jitter || NULL != fractionLost)
3796 {
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003797 // Get all RTCP receiver report blocks that have been received on this
3798 // channel. If we receive RTP packets from a remote source we know the
3799        // remote SSRC and use the report block from that source.
3800 // Otherwise use the first report block.
3801 std::vector<RTCPReportBlock> remote_stats;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003802 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003803 remote_stats.empty()) {
3804 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3805 VoEId(_instanceId, _channelId),
3806 "GetRemoteRTCPData() failed to measure statistics due"
3807 " to lack of received RTP and/or RTCP packets");
3808 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003809 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003810
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00003811 uint32_t remoteSSRC = rtp_receiver_->SSRC();
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003812 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
3813 for (; it != remote_stats.end(); ++it) {
3814 if (it->remoteSSRC == remoteSSRC)
3815 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003816 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003817
3818 if (it == remote_stats.end()) {
3819 // If we have not received any RTCP packets from this SSRC it probably
3820 // means that we have not received any RTP packets.
3821 // Use the first received report block instead.
3822 it = remote_stats.begin();
3823 remoteSSRC = it->remoteSSRC;
niklase@google.com470e71d2011-07-07 08:21:25 +00003824 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003825
xians@webrtc.org79af7342012-01-31 12:22:14 +00003826 if (jitter) {
3827 *jitter = it->jitter;
3828 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3829 VoEId(_instanceId, _channelId),
3830 "GetRemoteRTCPData() => jitter = %lu", *jitter);
3831 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003832
xians@webrtc.org79af7342012-01-31 12:22:14 +00003833 if (fractionLost) {
3834 *fractionLost = it->fractionLost;
3835 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3836 VoEId(_instanceId, _channelId),
3837 "GetRemoteRTCPData() => fractionLost = %lu",
3838 *fractionLost);
3839 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003840 }
3841 return 0;
3842}
3843
3844int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003845Channel::SendApplicationDefinedRTCPPacket(unsigned char subType,
niklase@google.com470e71d2011-07-07 08:21:25 +00003846 unsigned int name,
3847 const char* data,
3848 unsigned short dataLengthInBytes)
3849{
3850 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3851 "Channel::SendApplicationDefinedRTCPPacket()");
3852 if (!_sending)
3853 {
3854 _engineStatisticsPtr->SetLastError(
3855 VE_NOT_SENDING, kTraceError,
3856 "SendApplicationDefinedRTCPPacket() not sending");
3857 return -1;
3858 }
3859 if (NULL == data)
3860 {
3861 _engineStatisticsPtr->SetLastError(
3862 VE_INVALID_ARGUMENT, kTraceError,
3863 "SendApplicationDefinedRTCPPacket() invalid data value");
3864 return -1;
3865 }
3866 if (dataLengthInBytes % 4 != 0)
3867 {
3868 _engineStatisticsPtr->SetLastError(
3869 VE_INVALID_ARGUMENT, kTraceError,
3870 "SendApplicationDefinedRTCPPacket() invalid length value");
3871 return -1;
3872 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003873 RTCPMethod status = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003874 if (status == kRtcpOff)
3875 {
3876 _engineStatisticsPtr->SetLastError(
3877 VE_RTCP_ERROR, kTraceError,
3878 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
3879 return -1;
3880 }
3881
3882 // Create and schedule the RTCP APP packet for transmission
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003883 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
niklase@google.com470e71d2011-07-07 08:21:25 +00003884 subType,
3885 name,
3886 (const unsigned char*) data,
3887 dataLengthInBytes) != 0)
3888 {
3889 _engineStatisticsPtr->SetLastError(
3890 VE_SEND_ERROR, kTraceError,
3891 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
3892 return -1;
3893 }
3894 return 0;
3895}
3896
3897int
3898Channel::GetRTPStatistics(
3899 unsigned int& averageJitterMs,
3900 unsigned int& maxJitterMs,
3901 unsigned int& discardedPackets)
3902{
niklase@google.com470e71d2011-07-07 08:21:25 +00003903    // The jitter statistics are updated for each received RTP packet,
3904    // i.e. they are based solely on received packets.
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00003905 ReceiveStatistics::RtpReceiveStatistics statistics;
3906 if (!rtp_receive_statistics_->Statistics(
3907 &statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
3908 _engineStatisticsPtr->SetLastError(
3909 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
3910 "GetRTPStatistics() failed to read RTP statistics from the "
3911 "RTP/RTCP module");
niklase@google.com470e71d2011-07-07 08:21:25 +00003912 }
3913
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003914 const int32_t playoutFrequency =
niklase@google.com470e71d2011-07-07 08:21:25 +00003915 _audioCodingModule.PlayoutFrequency();
3916 if (playoutFrequency > 0)
3917 {
3918 // Scale RTP statistics given the current playout frequency
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00003919 maxJitterMs = statistics.max_jitter / (playoutFrequency / 1000);
3920 averageJitterMs = statistics.jitter / (playoutFrequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00003921 }
3922
3923 discardedPackets = _numberOfDiscardedPackets;
3924
3925 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3926 VoEId(_instanceId, _channelId),
3927 "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003928 " discardedPackets = %lu)",
niklase@google.com470e71d2011-07-07 08:21:25 +00003929 averageJitterMs, maxJitterMs, discardedPackets);
3930 return 0;
3931}
3932
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00003933int Channel::GetRemoteRTCPSenderInfo(SenderInfo* sender_info) {
3934 if (sender_info == NULL) {
3935 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
3936 "GetRemoteRTCPSenderInfo() invalid sender_info.");
3937 return -1;
3938 }
3939
3940 // Get the sender info from the latest received RTCP Sender Report.
3941 RTCPSenderInfo rtcp_sender_info;
3942 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_sender_info) != 0) {
3943 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3944 "GetRemoteRTCPSenderInfo() failed to read RTCP SR sender info.");
3945 return -1;
3946 }
3947
3948 sender_info->NTP_timestamp_high = rtcp_sender_info.NTPseconds;
3949 sender_info->NTP_timestamp_low = rtcp_sender_info.NTPfraction;
3950 sender_info->RTP_timestamp = rtcp_sender_info.RTPtimeStamp;
3951 sender_info->sender_packet_count = rtcp_sender_info.sendPacketCount;
3952 sender_info->sender_octet_count = rtcp_sender_info.sendOctetCount;
3953 return 0;
3954}
3955
3956int Channel::GetRemoteRTCPReportBlocks(
3957 std::vector<ReportBlock>* report_blocks) {
3958 if (report_blocks == NULL) {
3959 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
3960 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
3961 return -1;
3962 }
3963
3964 // Get the report blocks from the latest received RTCP Sender or Receiver
3965 // Report. Each element in the vector contains the sender's SSRC and a
3966 // report block according to RFC 3550.
3967 std::vector<RTCPReportBlock> rtcp_report_blocks;
3968 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
3969 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3970 "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
3971 return -1;
3972 }
3973
3974 if (rtcp_report_blocks.empty())
3975 return 0;
3976
3977 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
3978 for (; it != rtcp_report_blocks.end(); ++it) {
3979 ReportBlock report_block;
3980 report_block.sender_SSRC = it->remoteSSRC;
3981 report_block.source_SSRC = it->sourceSSRC;
3982 report_block.fraction_lost = it->fractionLost;
3983 report_block.cumulative_num_packets_lost = it->cumulativeLost;
3984 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
3985 report_block.interarrival_jitter = it->jitter;
3986 report_block.last_SR_timestamp = it->lastSR;
3987 report_block.delay_since_last_SR = it->delaySinceLastSR;
3988 report_blocks->push_back(report_block);
3989 }
3990 return 0;
3991}
3992
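// Fills the CallStatistics struct in three parts: receive statistics
// (fraction lost, cumulative lost, extended max sequence number, jitter),
// the round-trip time derived from RTCP (zero when RTCP is off or no RTP has
// been received yet), and the RTP byte/packet counters for both directions.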
niklase@google.com470e71d2011-07-07 08:21:25 +00003993int
3994Channel::GetRTPStatistics(CallStatistics& stats)
3995{
niklase@google.com470e71d2011-07-07 08:21:25 +00003996 // --- Part one of the final structure (four values)
3997
3998    // The jitter statistics are updated for each received RTP packet,
3999    // i.e. they are based solely on received packets.
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004000 ReceiveStatistics::RtpReceiveStatistics statistics;
4001 if (!rtp_receive_statistics_->Statistics(
4002 &statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
4003 _engineStatisticsPtr->SetLastError(
4004 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
4005 "GetRTPStatistics() failed to read RTP statistics from the "
4006 "RTP/RTCP module");
niklase@google.com470e71d2011-07-07 08:21:25 +00004007 }
4008
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004009 stats.fractionLost = statistics.fraction_lost;
4010 stats.cumulativeLost = statistics.cumulative_lost;
4011 stats.extendedMax = statistics.extended_max_sequence_number;
4012 stats.jitterSamples = statistics.jitter;
niklase@google.com470e71d2011-07-07 08:21:25 +00004013
4014 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4015 VoEId(_instanceId, _channelId),
4016 "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004017 " extendedMax=%lu, jitterSamples=%li)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004018 stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
4019 stats.jitterSamples);
4020
4021 // --- Part two of the final structure (one value)
4022
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004023 uint16_t RTT(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004024 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00004025 if (method == kRtcpOff)
4026 {
4027 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4028 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004029 "GetRTPStatistics() RTCP is disabled => valid RTT "
niklase@google.com470e71d2011-07-07 08:21:25 +00004030 "measurements cannot be retrieved");
4031 } else
4032 {
4033 // The remote SSRC will be zero if no RTP packet has been received.
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004034 uint32_t remoteSSRC = rtp_receiver_->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004035 if (remoteSSRC > 0)
4036 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004037 uint16_t avgRTT(0);
4038 uint16_t maxRTT(0);
4039 uint16_t minRTT(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004040
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004041 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT)
niklase@google.com470e71d2011-07-07 08:21:25 +00004042 != 0)
4043 {
4044 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4045 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004046 "GetRTPStatistics() failed to retrieve RTT from "
niklase@google.com470e71d2011-07-07 08:21:25 +00004047 "the RTP/RTCP module");
4048 }
4049 } else
4050 {
4051 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4052 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004053 "GetRTPStatistics() failed to measure RTT since no "
niklase@google.com470e71d2011-07-07 08:21:25 +00004054 "RTP packets have been received yet");
4055 }
4056 }
4057
4058 stats.rttMs = static_cast<int> (RTT);
4059
4060 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4061 VoEId(_instanceId, _channelId),
4062 "GetRTPStatistics() => rttMs=%d", stats.rttMs);
4063
4064 // --- Part three of the final structure (four values)
4065
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004066 uint32_t bytesSent(0);
4067 uint32_t packetsSent(0);
4068 uint32_t bytesReceived(0);
4069 uint32_t packetsReceived(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004070
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004071 rtp_receive_statistics_->GetDataCounters(&bytesReceived, &packetsReceived);
4072
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004073 if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004074 &packetsSent) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004075 {
4076 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4077 VoEId(_instanceId, _channelId),
4078 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004079 " output will not be complete");
niklase@google.com470e71d2011-07-07 08:21:25 +00004080 }
4081
4082 stats.bytesSent = bytesSent;
4083 stats.packetsSent = packetsSent;
4084 stats.bytesReceived = bytesReceived;
4085 stats.packetsReceived = packetsReceived;
4086
4087 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4088 VoEId(_instanceId, _channelId),
4089 "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004090 " bytesReceived=%d, packetsReceived=%d)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004091 stats.bytesSent, stats.packetsSent, stats.bytesReceived,
4092 stats.packetsReceived);
4093
4094 return 0;
4095}
4096
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004097int Channel::SetFECStatus(bool enable, int redPayloadtype) {
4098 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4099 "Channel::SetFECStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00004100
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00004101 if (enable) {
4102 if (redPayloadtype < 0 || redPayloadtype > 127) {
4103 _engineStatisticsPtr->SetLastError(
4104 VE_PLTYPE_ERROR, kTraceError,
4105 "SetFECStatus() invalid RED payload type");
4106 return -1;
4107 }
4108
4109 if (SetRedPayloadType(redPayloadtype) < 0) {
4110 _engineStatisticsPtr->SetLastError(
4111 VE_CODEC_ERROR, kTraceError,
4112          "SetFECStatus() failed to register the RED payload type in the ACM");
4113 return -1;
4114 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004115 }
niklase@google.com470e71d2011-07-07 08:21:25 +00004116
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004117 if (_audioCodingModule.SetFECStatus(enable) != 0) {
4118 _engineStatisticsPtr->SetLastError(
4119 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4120 "SetFECStatus() failed to set FEC state in the ACM");
4121 return -1;
4122 }
4123 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004124}
4125
4126int
4127Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
4128{
4129 enabled = _audioCodingModule.FECStatus();
4130 if (enabled)
4131 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004132 int8_t payloadType(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004133 if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004134 {
4135 _engineStatisticsPtr->SetLastError(
4136 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4137 "GetFECStatus() failed to retrieve RED PT from RTP/RTCP "
4138 "module");
4139 return -1;
4140        }
        redPayloadtype = payloadType;
4141 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4142 VoEId(_instanceId, _channelId),
4143 "GetFECStatus() => enabled=%d, redPayloadtype=%d",
4144 enabled, redPayloadtype);
4145 return 0;
4146 }
4147 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4148 VoEId(_instanceId, _channelId),
4149 "GetFECStatus() => enabled=%d", enabled);
4150 return 0;
4151}
4152
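// Enabling NACK turns on packet storage in the RTP/RTCP module (so previously
// sent packets can be retransmitted on request), makes the receiver issue
// retransmission requests via RTCP, and enables NACK handling in the ACM;
// disabling reverses all three.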
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004153void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
4154 // None of these functions can fail.
4155 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004156 rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff,
4157 maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00004158 if (enable)
4159 _audioCodingModule.EnableNack(maxNumberOfPackets);
4160 else
4161 _audioCodingModule.DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004162}
4163
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00004164// Called when we are missing one or more packets.
4165int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004166 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
4167}
4168
niklase@google.com470e71d2011-07-07 08:21:25 +00004169int
niklase@google.com470e71d2011-07-07 08:21:25 +00004170Channel::StartRTPDump(const char fileNameUTF8[1024],
4171 RTPDirections direction)
4172{
4173 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4174 "Channel::StartRTPDump()");
4175 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4176 {
4177 _engineStatisticsPtr->SetLastError(
4178 VE_INVALID_ARGUMENT, kTraceError,
4179 "StartRTPDump() invalid RTP direction");
4180 return -1;
4181 }
4182 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4183 &_rtpDumpIn : &_rtpDumpOut;
4184 if (rtpDumpPtr == NULL)
4185 {
4186 assert(false);
4187 return -1;
4188 }
4189 if (rtpDumpPtr->IsActive())
4190 {
4191 rtpDumpPtr->Stop();
4192 }
4193 if (rtpDumpPtr->Start(fileNameUTF8) != 0)
4194 {
4195 _engineStatisticsPtr->SetLastError(
4196 VE_BAD_FILE, kTraceError,
4197 "StartRTPDump() failed to create file");
4198 return -1;
4199 }
4200 return 0;
4201}
4202
4203int
4204Channel::StopRTPDump(RTPDirections direction)
4205{
4206 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4207 "Channel::StopRTPDump()");
4208 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4209 {
4210 _engineStatisticsPtr->SetLastError(
4211 VE_INVALID_ARGUMENT, kTraceError,
4212 "StopRTPDump() invalid RTP direction");
4213 return -1;
4214 }
4215 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4216 &_rtpDumpIn : &_rtpDumpOut;
4217 if (rtpDumpPtr == NULL)
4218 {
4219 assert(false);
4220 return -1;
4221 }
4222 if (!rtpDumpPtr->IsActive())
4223 {
4224 return 0;
4225 }
4226 return rtpDumpPtr->Stop();
4227}
4228
4229bool
4230Channel::RTPDumpIsActive(RTPDirections direction)
4231{
4232 if ((direction != kRtpIncoming) &&
4233 (direction != kRtpOutgoing))
4234 {
4235 _engineStatisticsPtr->SetLastError(
4236 VE_INVALID_ARGUMENT, kTraceError,
4237 "RTPDumpIsActive() invalid RTP direction");
4238 return false;
4239 }
4240 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4241 &_rtpDumpIn : &_rtpDumpOut;
4242 return rtpDumpPtr->IsActive();
4243}
4244
4245int
4246Channel::InsertExtraRTPPacket(unsigned char payloadType,
4247 bool markerBit,
4248 const char* payloadData,
4249 unsigned short payloadSize)
4250{
4251 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4252 "Channel::InsertExtraRTPPacket()");
4253 if (payloadType > 127)
4254 {
4255 _engineStatisticsPtr->SetLastError(
4256 VE_INVALID_PLTYPE, kTraceError,
4257 "InsertExtraRTPPacket() invalid payload type");
4258 return -1;
4259 }
4260 if (payloadData == NULL)
4261 {
4262 _engineStatisticsPtr->SetLastError(
4263 VE_INVALID_ARGUMENT, kTraceError,
4264 "InsertExtraRTPPacket() invalid payload data");
4265 return -1;
4266 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004267 if (payloadSize > _rtpRtcpModule->MaxDataPayloadLength())
niklase@google.com470e71d2011-07-07 08:21:25 +00004268 {
4269 _engineStatisticsPtr->SetLastError(
4270 VE_INVALID_ARGUMENT, kTraceError,
4271 "InsertExtraRTPPacket() invalid payload size");
4272 return -1;
4273 }
4274 if (!_sending)
4275 {
4276 _engineStatisticsPtr->SetLastError(
4277 VE_NOT_SENDING, kTraceError,
4278 "InsertExtraRTPPacket() not sending");
4279 return -1;
4280 }
4281
4282 // Create extra RTP packet by calling RtpRtcp::SendOutgoingData().
4283 // Transport::SendPacket() will be called by the module when the RTP packet
4284 // is created.
4285 // The call to SendOutgoingData() does *not* modify the timestamp and
4286 // payloadtype to ensure that the RTP module generates a valid RTP packet
4287 // (user might utilize a non-registered payload type).
4288 // The marker bit and payload type will be replaced just before the actual
4289 // transmission, i.e., the actual modification is done *after* the RTP
4290 // module has delivered its RTP packet back to the VoE.
4291 // We will use the stored values above when the packet is modified
4292 // (see Channel::SendPacket()).
4293
4294 _extraPayloadType = payloadType;
4295 _extraMarkerBit = markerBit;
4296 _insertExtraRTPPacket = true;
4297
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004298 if (_rtpRtcpModule->SendOutgoingData(kAudioFrameSpeech,
niklase@google.com470e71d2011-07-07 08:21:25 +00004299 _lastPayloadType,
4300 _lastLocalTimeStamp,
stefan@webrtc.orgddfdfed2012-07-03 13:21:22 +00004301 // Leaving the time when this frame was
4302 // received from the capture device as
4303 // undefined for voice for now.
4304 -1,
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004305 (const uint8_t*) payloadData,
niklase@google.com470e71d2011-07-07 08:21:25 +00004306 payloadSize) != 0)
4307 {
4308 _engineStatisticsPtr->SetLastError(
4309 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4310 "InsertExtraRTPPacket() failed to send extra RTP packet");
4311 return -1;
4312 }
4313
4314 return 0;
4315}
4316
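// Copies the recorded/mixed frame into the channel's local _audioFrame and
// tags it with the channel id; the stored frame is then processed and encoded
// by PrepareEncodeAndSend() and EncodeAndSend().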
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004317uint32_t
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004318Channel::Demultiplex(const AudioFrame& audioFrame)
niklase@google.com470e71d2011-07-07 08:21:25 +00004319{
4320 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004321 "Channel::Demultiplex()");
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004322 _audioFrame.CopyFrom(audioFrame);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004323 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004324 return 0;
4325}
4326
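// Pre-encode processing of the stored frame: optionally mixes or replaces it
// with file audio, applies mute, runs the registered per-channel recording
// media callback, inserts any queued in-band DTMF tone, and, when the
// audio-level header extension is in use, runs the level estimator on the
// frame (analysis only, the signal is not modified).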
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004327uint32_t
xians@google.com0b0665a2011-08-08 08:18:44 +00004328Channel::PrepareEncodeAndSend(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004329{
4330 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4331 "Channel::PrepareEncodeAndSend()");
4332
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004333 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004334 {
4335 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4336 "Channel::PrepareEncodeAndSend() invalid audio frame");
4337 return -1;
4338 }
4339
4340 if (_inputFilePlaying)
4341 {
4342 MixOrReplaceAudioWithFile(mixingFrequency);
4343 }
4344
4345 if (_mute)
4346 {
4347 AudioFrameOperations::Mute(_audioFrame);
4348 }
4349
4350 if (_inputExternalMedia)
4351 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004352 CriticalSectionScoped cs(&_callbackCritSect);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004353 const bool isStereo = (_audioFrame.num_channels_ == 2);
niklase@google.com470e71d2011-07-07 08:21:25 +00004354 if (_inputExternalMediaCallbackPtr)
4355 {
4356 _inputExternalMediaCallbackPtr->Process(
4357 _channelId,
4358 kRecordingPerChannel,
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004359 (int16_t*)_audioFrame.data_,
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004360 _audioFrame.samples_per_channel_,
4361 _audioFrame.sample_rate_hz_,
niklase@google.com470e71d2011-07-07 08:21:25 +00004362 isStereo);
4363 }
4364 }
4365
4366 InsertInbandDtmfTone();
4367
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004368 if (_includeAudioLevelIndication)
4369 {
4370 assert(_rtpAudioProc.get() != NULL);
4371
4372 // Check if settings need to be updated.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004373 if (_rtpAudioProc->sample_rate_hz() != _audioFrame.sample_rate_hz_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004374 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004375 if (_rtpAudioProc->set_sample_rate_hz(_audioFrame.sample_rate_hz_) !=
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004376 AudioProcessing::kNoError)
4377 {
4378 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4379 VoEId(_instanceId, _channelId),
4380 "Error setting AudioProcessing sample rate");
4381 return -1;
4382 }
4383 }
4384
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004385 if (_rtpAudioProc->num_input_channels() != _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004386 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004387 if (_rtpAudioProc->set_num_channels(_audioFrame.num_channels_,
4388 _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004389 != AudioProcessing::kNoError)
4390 {
4391 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4392 VoEId(_instanceId, _channelId),
4393 "Error setting AudioProcessing channels");
4394 return -1;
4395 }
4396 }
4397
4398 // Performs level analysis only; does not affect the signal.
4399 _rtpAudioProc->ProcessStream(&_audioFrame);
4400 }
4401
niklase@google.com470e71d2011-07-07 08:21:25 +00004402 return 0;
4403}
4404
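// Feeds 10 ms of PCM into the ACM and advances the RTP timestamp; Process()
// then triggers the registered packetization callback once a complete
// encoded frame is ready for transmission.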
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004405uint32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00004406Channel::EncodeAndSend()
4407{
4408 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4409 "Channel::EncodeAndSend()");
4410
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004411 assert(_audioFrame.num_channels_ <= 2);
4412 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004413 {
4414 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4415 "Channel::EncodeAndSend() invalid audio frame");
4416 return -1;
4417 }
4418
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004419 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004420
4421 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
4422
4423 // The ACM resamples internally.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004424 _audioFrame.timestamp_ = _timeStamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00004425 if (_audioCodingModule.Add10MsData((AudioFrame&)_audioFrame) != 0)
4426 {
4427 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
4428 "Channel::EncodeAndSend() ACM encoding failed");
4429 return -1;
4430 }
4431
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004432 _timeStamp += _audioFrame.samples_per_channel_;
niklase@google.com470e71d2011-07-07 08:21:25 +00004433
4434 // --- Encode if complete frame is ready
4435
4436 // This call will trigger AudioPacketizationCallback::SendData if encoding
4437 // is done and payload is ready for packetization and transmission.
4438 return _audioCodingModule.Process();
4439}
4440
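// Registers an external VoEMediaProcess callback that is invoked for every
// 10 ms frame on either the playback (kPlaybackPerChannel) or recording
// (kRecordingPerChannel) path of this channel.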
4441int Channel::RegisterExternalMediaProcessing(
4442 ProcessingTypes type,
4443 VoEMediaProcess& processObject)
4444{
4445 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4446 "Channel::RegisterExternalMediaProcessing()");
4447
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004448 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004449
4450 if (kPlaybackPerChannel == type)
4451 {
4452 if (_outputExternalMediaCallbackPtr)
4453 {
4454 _engineStatisticsPtr->SetLastError(
4455 VE_INVALID_OPERATION, kTraceError,
4456 "Channel::RegisterExternalMediaProcessing() "
4457 "output external media already enabled");
4458 return -1;
4459 }
4460 _outputExternalMediaCallbackPtr = &processObject;
4461 _outputExternalMedia = true;
4462 }
4463 else if (kRecordingPerChannel == type)
4464 {
4465 if (_inputExternalMediaCallbackPtr)
4466 {
4467 _engineStatisticsPtr->SetLastError(
4468 VE_INVALID_OPERATION, kTraceError,
4469 "Channel::RegisterExternalMediaProcessing() "
                "input external media already enabled");
4471 return -1;
4472 }
4473 _inputExternalMediaCallbackPtr = &processObject;
4474 _inputExternalMedia = true;
4475 }
4476 return 0;
4477}
4478
4479int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
4480{
4481 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4482 "Channel::DeRegisterExternalMediaProcessing()");
4483
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004484 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004485
4486 if (kPlaybackPerChannel == type)
4487 {
4488 if (!_outputExternalMediaCallbackPtr)
4489 {
4490 _engineStatisticsPtr->SetLastError(
4491 VE_INVALID_OPERATION, kTraceWarning,
4492 "Channel::DeRegisterExternalMediaProcessing() "
4493 "output external media already disabled");
4494 return 0;
4495 }
4496 _outputExternalMedia = false;
4497 _outputExternalMediaCallbackPtr = NULL;
4498 }
4499 else if (kRecordingPerChannel == type)
4500 {
4501 if (!_inputExternalMediaCallbackPtr)
4502 {
4503 _engineStatisticsPtr->SetLastError(
4504 VE_INVALID_OPERATION, kTraceWarning,
4505 "Channel::DeRegisterExternalMediaProcessing() "
4506 "input external media already disabled");
4507 return 0;
4508 }
4509 _inputExternalMedia = false;
4510 _inputExternalMediaCallbackPtr = NULL;
4511 }
4512
4513 return 0;
4514}
4515
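// Enables or disables external mixing for this channel. The setting cannot be
// changed while the channel is playing.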
roosa@google.com1b60ceb2012-12-12 23:00:29 +00004516int Channel::SetExternalMixing(bool enabled) {
4517 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4518 "Channel::SetExternalMixing(enabled=%d)", enabled);
4519
4520 if (_playing)
4521 {
4522 _engineStatisticsPtr->SetLastError(
4523 VE_INVALID_OPERATION, kTraceError,
4524 "Channel::SetExternalMixing() "
4525 "external mixing cannot be changed while playing.");
4526 return -1;
4527 }
4528
4529 _externalMixing = enabled;
4530
4531 return 0;
4532}
4533
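// Resets the RTT statistics kept by the RTP/RTCP module for the current
// remote SSRC.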
niklase@google.com470e71d2011-07-07 08:21:25 +00004534int
4535Channel::ResetRTCPStatistics()
4536{
4537 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4538 "Channel::ResetRTCPStatistics()");
    uint32_t remoteSSRC = rtp_receiver_->SSRC();
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004541 return _rtpRtcpModule->ResetRTT(remoteSSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00004542}
4543
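// Returns the min/avg/max round-trip time reported by the RTP/RTCP module, or
// -1 values when RTCP is disabled or no valid measurement is available.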
4544int
4545Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
4546{
4547 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4548 "Channel::GetRoundTripTimeSummary()");
    // Override the default module outputs when RTCP is disabled. This keeps
    // the behavior backward compatible with older VoiceEngine versions that
    // did not use the RTP/RTCP module.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004552 if (!_rtpRtcpModule->RTCP())
niklase@google.com470e71d2011-07-07 08:21:25 +00004553 {
4554 delaysMs.min = -1;
4555 delaysMs.max = -1;
4556 delaysMs.average = -1;
4557 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4558 "Channel::GetRoundTripTimeSummary() RTCP is disabled =>"
4559 " valid RTT measurements cannot be retrieved");
4560 return 0;
4561 }
4562
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004563 uint32_t remoteSSRC;
4564 uint16_t RTT;
4565 uint16_t avgRTT;
4566 uint16_t maxRTT;
4567 uint16_t minRTT;
niklase@google.com470e71d2011-07-07 08:21:25 +00004568 // The remote SSRC will be zero if no RTP packet has been received.
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004569 remoteSSRC = rtp_receiver_->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004570 if (remoteSSRC == 0)
4571 {
4572 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4573 "Channel::GetRoundTripTimeSummary() unable to measure RTT"
4574 " since no RTP packet has been received yet");
4575 }
4576
4577 // Retrieve RTT statistics from the RTP/RTCP module for the specified
4578 // channel and SSRC. The SSRC is required to parse out the correct source
4579 // in conference scenarios.
    if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004581 {
4582 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4583 "GetRoundTripTimeSummary unable to retrieve RTT values"
4584 " from the RTCP layer");
4585 delaysMs.min = -1; delaysMs.max = -1; delaysMs.average = -1;
4586 }
4587 else
4588 {
4589 delaysMs.min = minRTT;
4590 delaysMs.max = maxRTT;
4591 delaysMs.average = avgRTT;
4592 }
4593 return 0;
4594}
4595
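// Fetches jitter-buffer statistics from the ACM; the memcpy relies on
// NetworkStatistics and ACMNetworkStatistics sharing the same layout.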
4596int
4597Channel::GetNetworkStatistics(NetworkStatistics& stats)
4598{
4599 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4600 "Channel::GetNetworkStatistics()");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00004601 ACMNetworkStatistics acm_stats;
4602 int return_value = _audioCodingModule.NetworkStatistics(&acm_stats);
4603 if (return_value >= 0) {
4604 memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
4605 }
4606 return return_value;
niklase@google.com470e71d2011-07-07 08:21:25 +00004607}
4608
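// Reports the filtered jitter-buffer delay (plus the most recent inter-packet
// delay) and the audio device playout delay, both in milliseconds.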
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004609bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
4610 int* playout_buffer_delay_ms) const {
4611 if (_average_jitter_buffer_delay_us == 0) {
niklase@google.com470e71d2011-07-07 08:21:25 +00004612 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004613 "Channel::GetDelayEstimate() no valid estimate.");
4614 return false;
4615 }
4616 *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 +
4617 _recPacketDelayMs;
4618 *playout_buffer_delay_ms = playout_delay_ms_;
4619 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4620 "Channel::GetDelayEstimate()");
4621 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00004622}
4623
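// Sets an initial playout delay in the ACM, range-checked against the same
// bounds as the minimum playout delay.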
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +00004624int Channel::SetInitialPlayoutDelay(int delay_ms)
4625{
4626 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4627 "Channel::SetInitialPlayoutDelay()");
4628 if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
4629 (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
4630 {
4631 _engineStatisticsPtr->SetLastError(
4632 VE_INVALID_ARGUMENT, kTraceError,
            "SetInitialPlayoutDelay() invalid delay");
4634 return -1;
4635 }
4636 if (_audioCodingModule.SetInitialPlayoutDelay(delay_ms) != 0)
4637 {
4638 _engineStatisticsPtr->SetLastError(
4639 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "SetInitialPlayoutDelay() failed to set initial playout delay");
4641 return -1;
4642 }
4643 return 0;
4644}
4645
4646
niklase@google.com470e71d2011-07-07 08:21:25 +00004647int
4648Channel::SetMinimumPlayoutDelay(int delayMs)
4649{
4650 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4651 "Channel::SetMinimumPlayoutDelay()");
4652 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
4653 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
4654 {
4655 _engineStatisticsPtr->SetLastError(
4656 VE_INVALID_ARGUMENT, kTraceError,
4657 "SetMinimumPlayoutDelay() invalid min delay");
4658 return -1;
4659 }
4660 if (_audioCodingModule.SetMinimumPlayoutDelay(delayMs) != 0)
4661 {
4662 _engineStatisticsPtr->SetLastError(
4663 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4664 "SetMinimumPlayoutDelay() failed to set min playout delay");
4665 return -1;
4666 }
4667 return 0;
4668}
4669
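// Reads the current playout timestamp from the ACM, compensates for the audio
// device playout delay, and stores the result for RTP or RTCP use depending on
// the |rtcp| flag.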
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004670void Channel::UpdatePlayoutTimestamp(bool rtcp) {
4671 uint32_t playout_timestamp = 0;
4672
4673 if (_audioCodingModule.PlayoutTimestamp(&playout_timestamp) == -1) {
4674 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4675 "Channel::UpdatePlayoutTimestamp() failed to read playout"
4676 " timestamp from the ACM");
4677 _engineStatisticsPtr->SetLastError(
4678 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4679 "UpdatePlayoutTimestamp() failed to retrieve timestamp");
4680 return;
4681 }
4682
4683 uint16_t delay_ms = 0;
4684 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
4685 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4686 "Channel::UpdatePlayoutTimestamp() failed to read playout"
4687 " delay from the ADM");
4688 _engineStatisticsPtr->SetLastError(
4689 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4690 "UpdatePlayoutTimestamp() failed to retrieve playout delay");
4691 return;
4692 }
4693
4694 int32_t playout_frequency = _audioCodingModule.PlayoutFrequency();
  CodecInst current_receive_codec;
  if (_audioCodingModule.ReceiveCodec(&current_receive_codec) == 0) {
    if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
      playout_frequency = 8000;
    } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
4700 playout_frequency = 48000;
niklase@google.com470e71d2011-07-07 08:21:25 +00004701 }
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004702 }
4703
4704 // Remove the playout delay.
4705 playout_timestamp -= (delay_ms * (playout_frequency / 1000));
4706
4707 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4708 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
4709 playout_timestamp);
4710
4711 if (rtcp) {
4712 playout_timestamp_rtcp_ = playout_timestamp;
4713 } else {
4714 playout_timestamp_rtp_ = playout_timestamp;
4715 }
4716 playout_delay_ms_ = delay_ms;
4717}
4718
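// Returns the most recently computed RTP playout timestamp, or fails if none
// has been computed yet.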
4719int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
4720 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4721 "Channel::GetPlayoutTimestamp()");
4722 if (playout_timestamp_rtp_ == 0) {
4723 _engineStatisticsPtr->SetLastError(
4724 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4725 "GetPlayoutTimestamp() failed to retrieve timestamp");
4726 return -1;
4727 }
4728 timestamp = playout_timestamp_rtp_;
4729 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4730 VoEId(_instanceId,_channelId),
4731 "GetPlayoutTimestamp() => timestamp=%u", timestamp);
4732 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004733}
4734
4735int
4736Channel::SetInitTimestamp(unsigned int timestamp)
4737{
4738 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4739 "Channel::SetInitTimestamp()");
4740 if (_sending)
4741 {
4742 _engineStatisticsPtr->SetLastError(
4743 VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
4744 return -1;
4745 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004746 if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004747 {
4748 _engineStatisticsPtr->SetLastError(
4749 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4750 "SetInitTimestamp() failed to set timestamp");
4751 return -1;
4752 }
4753 return 0;
4754}
4755
4756int
4757Channel::SetInitSequenceNumber(short sequenceNumber)
4758{
4759 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4760 "Channel::SetInitSequenceNumber()");
4761 if (_sending)
4762 {
4763 _engineStatisticsPtr->SetLastError(
4764 VE_SENDING, kTraceError,
4765 "SetInitSequenceNumber() already sending");
4766 return -1;
4767 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004768 if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004769 {
4770 _engineStatisticsPtr->SetLastError(
4771 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4772 "SetInitSequenceNumber() failed to set sequence number");
4773 return -1;
4774 }
4775 return 0;
4776}
4777
4778int
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004779Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const
niklase@google.com470e71d2011-07-07 08:21:25 +00004780{
4781 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4782 "Channel::GetRtpRtcp()");
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00004783 *rtpRtcpModule = _rtpRtcpModule.get();
4784 *rtp_receiver = rtp_receiver_.get();
niklase@google.com470e71d2011-07-07 08:21:25 +00004785 return 0;
4786}
4787
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004788// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
4789// a shared helper.
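// Reads 10 ms of audio from the input file player and either mixes it with the
// microphone signal or replaces it entirely, depending on
// _mixFileWithMicrophone.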
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004790int32_t
pbos@webrtc.org92135212013-05-14 08:31:39 +00004791Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004792{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004793 scoped_array<int16_t> fileBuffer(new int16_t[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004794 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004795
4796 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004797 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004798
4799 if (_inputFilePlayerPtr == NULL)
4800 {
4801 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4802 VoEId(_instanceId, _channelId),
                         "Channel::MixOrReplaceAudioWithFile() file player"
                         " doesn't exist");
4805 return -1;
4806 }
4807
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004808 if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00004809 fileSamples,
4810 mixingFrequency) == -1)
4811 {
4812 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4813 VoEId(_instanceId, _channelId),
4814 "Channel::MixOrReplaceAudioWithFile() file mixing "
4815 "failed");
4816 return -1;
4817 }
4818 if (fileSamples == 0)
4819 {
4820 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4821 VoEId(_instanceId, _channelId),
                         "Channel::MixOrReplaceAudioWithFile() file has ended");
4823 return 0;
4824 }
4825 }
4826
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004827 assert(_audioFrame.samples_per_channel_ == fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004828
4829 if (_mixFileWithMicrophone)
4830 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004831 // Currently file stream is always mono.
4832 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004833 Utility::MixWithSat(_audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004834 _audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004835 fileBuffer.get(),
4836 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004837 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004838 }
4839 else
4840 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004841 // Replace ACM audio with file.
4842 // Currently file stream is always mono.
4843 // TODO(xians): Change the code when FilePlayer supports real stereo.
niklase@google.com470e71d2011-07-07 08:21:25 +00004844 _audioFrame.UpdateFrame(_channelId,
4845 -1,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004846 fileBuffer.get(),
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004847 fileSamples,
niklase@google.com470e71d2011-07-07 08:21:25 +00004848 mixingFrequency,
4849 AudioFrame::kNormalSpeech,
4850 AudioFrame::kVadUnknown,
4851 1);
4852
4853 }
4854 return 0;
4855}
4856
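// Mixes 10 ms of audio from the output file player into |audioFrame|. Fails if
// the file player delivers a different number of samples than the frame holds.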
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004857int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00004858Channel::MixAudioWithFile(AudioFrame& audioFrame,
pbos@webrtc.org92135212013-05-14 08:31:39 +00004859 int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004860{
4861 assert(mixingFrequency <= 32000);
4862
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004863 scoped_array<int16_t> fileBuffer(new int16_t[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004864 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004865
4866 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004867 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004868
4869 if (_outputFilePlayerPtr == NULL)
4870 {
4871 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4872 VoEId(_instanceId, _channelId),
4873 "Channel::MixAudioWithFile() file mixing failed");
4874 return -1;
4875 }
4876
4877 // We should get the frequency we ask for.
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004878 if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00004879 fileSamples,
4880 mixingFrequency) == -1)
4881 {
4882 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4883 VoEId(_instanceId, _channelId),
4884 "Channel::MixAudioWithFile() file mixing failed");
4885 return -1;
4886 }
4887 }
4888
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004889 if (audioFrame.samples_per_channel_ == fileSamples)
niklase@google.com470e71d2011-07-07 08:21:25 +00004890 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004891 // Currently file stream is always mono.
4892 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004893 Utility::MixWithSat(audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004894 audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004895 fileBuffer.get(),
4896 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004897 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004898 }
4899 else
4900 {
4901 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004902 "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
niklase@google.com470e71d2011-07-07 08:21:25 +00004903 "fileSamples(%d)",
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004904 audioFrame.samples_per_channel_, fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004905 return -1;
4906 }
4907
4908 return 0;
4909}
4910
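// Generates in-band DTMF samples and overwrites the outgoing audio frame while
// a tone is active. Queued events are started only after a minimum separation
// from the previous tone.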
4911int
4912Channel::InsertInbandDtmfTone()
4913{
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00004914 // Check if we should start a new tone.
niklase@google.com470e71d2011-07-07 08:21:25 +00004915 if (_inbandDtmfQueue.PendingDtmf() &&
4916 !_inbandDtmfGenerator.IsAddingTone() &&
4917 _inbandDtmfGenerator.DelaySinceLastTone() >
4918 kMinTelephoneEventSeparationMs)
4919 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004920 int8_t eventCode(0);
4921 uint16_t lengthMs(0);
4922 uint8_t attenuationDb(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004923
4924 eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
4925 _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
4926 if (_playInbandDtmfEvent)
4927 {
4928 // Add tone to output mixer using a reduced length to minimize
4929 // risk of echo.
4930 _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
4931 attenuationDb);
4932 }
4933 }
4934
4935 if (_inbandDtmfGenerator.IsAddingTone())
4936 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004937 uint16_t frequency(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004938 _inbandDtmfGenerator.GetSampleRate(frequency);
4939
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004940 if (frequency != _audioFrame.sample_rate_hz_)
niklase@google.com470e71d2011-07-07 08:21:25 +00004941 {
            // Update the sample rate of the DTMF tone since the mixing
            // frequency has changed.
4944 _inbandDtmfGenerator.SetSampleRate(
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004945 (uint16_t) (_audioFrame.sample_rate_hz_));
niklase@google.com470e71d2011-07-07 08:21:25 +00004946 // Reset the tone to be added taking the new sample rate into
4947 // account.
4948 _inbandDtmfGenerator.ResetTone();
4949 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004950
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004951 int16_t toneBuffer[320];
4952 uint16_t toneSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004953 // Get 10ms tone segment and set time since last tone to zero
4954 if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
4955 {
4956 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4957 VoEId(_instanceId, _channelId),
                         "Channel::InsertInbandDtmfTone() inserting DTMF tone failed");
4959 return -1;
4960 }
4961
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00004962 // Replace mixed audio with DTMF tone.
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004963 for (int sample = 0;
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004964 sample < _audioFrame.samples_per_channel_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00004965 sample++)
4966 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004967 for (int channel = 0;
4968 channel < _audioFrame.num_channels_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00004969 channel++)
4970 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004971 const int index = sample * _audioFrame.num_channels_ + channel;
4972 _audioFrame.data_[index] = toneBuffer[sample];
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00004973 }
4974 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004975
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004976 assert(_audioFrame.samples_per_channel_ == toneSamples);
    }
    else
4978 {
4979 // Add 10ms to "delay-since-last-tone" counter
4980 _inbandDtmfGenerator.UpdateDelaySinceLastTone();
4981 }
4982 return 0;
4983}
4984
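// Dead-or-alive monitoring: the counters below accumulate how many alive and
// dead detections have been reported since the last reset.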
niklase@google.com470e71d2011-07-07 08:21:25 +00004985void
4986Channel::ResetDeadOrAliveCounters()
4987{
4988 _countDeadDetections = 0;
4989 _countAliveDetections = 0;
4990}
4991
4992void
4993Channel::UpdateDeadOrAliveCounters(bool alive)
4994{
4995 if (alive)
4996 _countAliveDetections++;
4997 else
4998 _countDeadDetections++;
4999}
5000
5001int
5002Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
5003{
    // Report the counters accumulated since the last reset.
    countDead = _countDeadDetections;
    countAlive = _countAliveDetections;
    return 0;
5005}
5006
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005007int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00005008Channel::SendPacketRaw(const void *data, int len, bool RTCP)
5009{
5010 if (_transportPtr == NULL)
5011 {
5012 return -1;
5013 }
5014 if (!RTCP)
5015 {
5016 return _transportPtr->SendPacket(_channelId, data, len);
5017 }
5018 else
5019 {
5020 return _transportPtr->SendRTCPPacket(_channelId, data, len);
5021 }
5022}
5023
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005024// Called for incoming RTP packets after successful RTP header parsing.
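// Updates the inter-packet delay estimate and the exponentially filtered
// jitter-buffer delay (kept in microseconds) that GetDelayEstimate() reports.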
5025void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
5026 uint16_t sequence_number) {
5027 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
5028 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
5029 rtp_timestamp, sequence_number);
niklase@google.com470e71d2011-07-07 08:21:25 +00005030
  // Get the frequency of the last received payload.
5032 int rtp_receive_frequency = _audioCodingModule.ReceiveFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +00005033
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005034 CodecInst current_receive_codec;
5035 if (_audioCodingModule.ReceiveCodec(&current_receive_codec) != 0) {
5036 return;
5037 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005038
turaj@webrtc.orge46c8d32013-05-22 20:39:43 +00005039 // Update the least required delay.
5040 least_required_delay_ms_ = _audioCodingModule.LeastRequiredDelayMs();
5041
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005042 if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
5043 // Even though the actual sampling rate for G.722 audio is
5044 // 16,000 Hz, the RTP clock rate for the G722 payload format is
5045 // 8,000 Hz because that value was erroneously assigned in
5046 // RFC 1890 and must remain unchanged for backward compatibility.
5047 rtp_receive_frequency = 8000;
5048 } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
5049 // We are resampling Opus internally to 32,000 Hz until all our
5050 // DSP routines can operate at 48,000 Hz, but the RTP clock
5051 // rate for the Opus payload format is standardized to 48,000 Hz,
5052 // because that is the maximum supported decoding sampling rate.
5053 rtp_receive_frequency = 48000;
5054 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005055
  // playout_timestamp_rtp_ is updated in UpdatePlayoutTimestamp() for every
  // incoming packet.
5058 uint32_t timestamp_diff_ms = (rtp_timestamp - playout_timestamp_rtp_) /
5059 (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00005060
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005061 uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) /
5062 (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00005063
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005064 _previousTimestamp = rtp_timestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00005065
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005066 if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
5067 timestamp_diff_ms = 0;
5068 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005069
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005070 if (timestamp_diff_ms == 0) return;
niklase@google.com470e71d2011-07-07 08:21:25 +00005071
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005072 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
5073 _recPacketDelayMs = packet_delay_ms;
5074 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005075
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005076 if (_average_jitter_buffer_delay_us == 0) {
5077 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
5078 return;
5079 }
5080
  // Filter the average delay with an exponential filter (alpha = 7/8). The
  // value is kept scaled by 1000 (i.e. in microseconds) to reduce rounding
  // error; GetDelayEstimate() compensates for this scaling later.
5085 _average_jitter_buffer_delay_us = (_average_jitter_buffer_delay_us * 7 +
5086 1000 * timestamp_diff_ms + 500) / 8;
niklase@google.com470e71d2011-07-07 08:21:25 +00005087}
5088
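// Registers every codec supported by the ACM with the RTP receiver so that
// incoming payload types can be mapped to the correct decoder.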
5089void
5090Channel::RegisterReceiveCodecsToRTPModule()
5091{
5092 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
5093 "Channel::RegisterReceiveCodecsToRTPModule()");
5094
5095
5096 CodecInst codec;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005097 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00005098
5099 for (int idx = 0; idx < nSupportedCodecs; idx++)
5100 {
5101 // Open up the RTP/RTCP receiver for all supported codecs
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005102 if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
stefan@webrtc.org66b2e5c2013-07-05 14:30:48 +00005103 (rtp_receiver_->RegisterReceivePayload(
5104 codec.plname,
5105 codec.pltype,
5106 codec.plfreq,
5107 codec.channels,
5108 (codec.rate < 0) ? 0 : codec.rate) == -1))
niklase@google.com470e71d2011-07-07 08:21:25 +00005109 {
5110 WEBRTC_TRACE(
5111 kTraceWarning,
5112 kTraceVoice,
5113 VoEId(_instanceId, _channelId),
5114 "Channel::RegisterReceiveCodecsToRTPModule() unable"
5115 " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
5116 codec.plname, codec.pltype, codec.plfreq,
5117 codec.channels, codec.rate);
5118 }
5119 else
5120 {
5121 WEBRTC_TRACE(
5122 kTraceInfo,
5123 kTraceVoice,
5124 VoEId(_instanceId, _channelId),
5125 "Channel::RegisterReceiveCodecsToRTPModule() %s "
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00005126 "(%d/%d/%d/%d) has been added to the RTP/RTCP "
niklase@google.com470e71d2011-07-07 08:21:25 +00005127 "receiver",
5128 codec.plname, codec.pltype, codec.plfreq,
5129 codec.channels, codec.rate);
5130 }
5131 }
5132}
5133
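// Runs the receive-side AudioProcessing module on a decoded frame, first
// updating its sample rate and channel count if they have changed.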
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005134int Channel::ApmProcessRx(AudioFrame& frame) {
5135 AudioProcessing* audioproc = _rxAudioProcessingModulePtr;
5136 // Register the (possibly new) frame parameters.
5137 if (audioproc->set_sample_rate_hz(frame.sample_rate_hz_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005138 LOG_FERR1(LS_WARNING, set_sample_rate_hz, frame.sample_rate_hz_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005139 }
5140 if (audioproc->set_num_channels(frame.num_channels_,
5141 frame.num_channels_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005142 LOG_FERR1(LS_WARNING, set_num_channels, frame.num_channels_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005143 }
5144 if (audioproc->ProcessStream(&frame) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005145 LOG_FERR0(LS_WARNING, ProcessStream);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005146 }
5147 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00005148}
5149
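// Registers a secondary (redundant) send codec with the ACM, after configuring
// the given RED payload type in both the ACM and the RTP/RTCP module.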
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005150int Channel::SetSecondarySendCodec(const CodecInst& codec,
5151 int red_payload_type) {
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00005152 // Sanity check for payload type.
5153 if (red_payload_type < 0 || red_payload_type > 127) {
5154 _engineStatisticsPtr->SetLastError(
5155 VE_PLTYPE_ERROR, kTraceError,
        "SetSecondarySendCodec() invalid RED payload type");
5157 return -1;
5158 }
5159
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005160 if (SetRedPayloadType(red_payload_type) < 0) {
5161 _engineStatisticsPtr->SetLastError(
5162 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSecondarySendCodec() Failed to register RED in ACM");
5164 return -1;
5165 }
5166 if (_audioCodingModule.RegisterSecondarySendCodec(codec) < 0) {
5167 _engineStatisticsPtr->SetLastError(
5168 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5169 "SetSecondarySendCodec() Failed to register secondary send codec in "
5170 "ACM");
5171 return -1;
5172 }
5173
5174 return 0;
5175}
5176
5177void Channel::RemoveSecondarySendCodec() {
5178 _audioCodingModule.UnregisterSecondarySendCodec();
5179}
5180
5181int Channel::GetSecondarySendCodec(CodecInst* codec) {
5182 if (_audioCodingModule.SecondarySendCodec(codec) < 0) {
5183 _engineStatisticsPtr->SetLastError(
5184 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "GetSecondarySendCodec() Failed to get secondary send codec from ACM");
5186 return -1;
5187 }
5188 return 0;
5189}
5190
// This method assumes it is called with a valid payload type.
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005192int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005193 CodecInst codec;
5194 bool found_red = false;
5195
5196 // Get default RED settings from the ACM database
5197 const int num_codecs = AudioCodingModule::NumberOfCodecs();
5198 for (int idx = 0; idx < num_codecs; idx++) {
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005199 _audioCodingModule.Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005200 if (!STR_CASE_CMP(codec.plname, "RED")) {
5201 found_red = true;
5202 break;
5203 }
5204 }
5205
5206 if (!found_red) {
5207 _engineStatisticsPtr->SetLastError(
5208 VE_CODEC_ERROR, kTraceError,
5209 "SetRedPayloadType() RED is not supported");
5210 return -1;
5211 }
5212
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00005213 codec.pltype = red_payload_type;
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005214 if (_audioCodingModule.RegisterSendCodec(codec) < 0) {
5215 _engineStatisticsPtr->SetLastError(
5216 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5217 "SetRedPayloadType() RED registration in ACM module failed");
5218 return -1;
5219 }
5220
5221 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
5222 _engineStatisticsPtr->SetLastError(
5223 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
5224 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
5225 return -1;
5226 }
5227 return 0;
5228}
5229
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00005230} // namespace voe
5231} // namespace webrtc