/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/channel.h"

#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/utility/interface/audio_frame_operations.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/utility.h"

#if defined(_WIN32)
#include <Qos.h>
#endif
namespace webrtc {
namespace voe {

int32_t
Channel::SendData(FrameType frameType,
                  uint8_t payloadType,
                  uint32_t timeStamp,
                  const uint8_t* payloadData,
                  uint16_t payloadSize,
                  const RTPFragmentationHeader* fragmentation)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
                 " payloadSize=%u, fragmentation=0x%x)",
                 frameType, payloadType, timeStamp, payloadSize, fragmentation);

    if (_includeAudioLevelIndication)
    {
        assert(_rtpAudioProc.get() != NULL);
        // Store current audio level in the RTP/RTCP module.
        // The level will be used in combination with voice-activity state
        // (frameType) to add an RTP header extension.
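        // (The level indication corresponds to the ssrc-audio-level header
        // extension described in RFC 6464; whether the extension is actually
        // written to the packet depends on how the RTP module is configured.)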
        _rtpRtcpModule->SetAudioLevel(_rtpAudioProc->level_estimator()->RMS());
    }

    // Push data from ACM to RTP/RTCP-module to deliver audio frame for
    // packetization.
    // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
    if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
                                         payloadType,
                                         timeStamp,
                                         // Leaving the time when this frame was
                                         // received from the capture device as
                                         // undefined for voice for now.
                                         -1,
                                         payloadData,
                                         payloadSize,
                                         fragmentation) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
            "Channel::SendData() failed to send data to RTP/RTCP module");
        return -1;
    }

    _lastLocalTimeStamp = timeStamp;
    _lastPayloadType = payloadType;

    return 0;
}

int32_t
Channel::InFrameType(int16_t frameType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::InFrameType(frameType=%d)", frameType);

    CriticalSectionScoped cs(&_callbackCritSect);
    // 1 indicates speech
    _sendFrameType = (frameType == 1) ? 1 : 0;
    return 0;
}

int32_t
Channel::OnRxVadDetected(int vadDecision)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);

    CriticalSectionScoped cs(&_callbackCritSect);
    if (_rxVadObserverPtr)
    {
        _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
    }

    return 0;
}

int
Channel::SendPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendPacket(channel=%d, len=%d)", channel, len);

    if (_transportPtr == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() failed to send RTP packet due to"
                     " invalid transport object");
        return -1;
    }

    // Insert an extra RTP packet if the user has called the
    // InsertExtraRTPPacket API.
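    // (In the fixed RTP header, the second byte carries the marker bit in its
    // most significant bit and the 7-bit payload type in the remaining bits
    // (RFC 3550); that is the byte being patched below.)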
    if (_insertExtraRTPPacket)
    {
        uint8_t* rtpHdr = (uint8_t*)data;
        uint8_t M_PT(0);
        if (_extraMarkerBit)
        {
            M_PT = 0x80; // set the M-bit
        }
        M_PT += _extraPayloadType; // set the payload type
        *(++rtpHdr) = M_PT; // modify the M|PT-byte within the RTP header
        _insertExtraRTPPacket = false; // insert one packet only
    }

    uint8_t* bufferToSendPtr = (uint8_t*)data;
    int32_t bufferLength = len;

    // Dump the RTP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendPacket() RTP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTPBufferPtr =
                    new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
                memset(_encryptionRTPBufferPtr, 0,
                       kVoiceEngineMaxIpPacketSizeBytes);
            }

            // Perform encryption (SRTP or external)
            int32_t encryptedBufferLength = 0;
            _encryptionPtr->encrypt(_channelId,
                                    bufferToSendPtr,
                                    _encryptionRTPBufferPtr,
                                    bufferLength,
                                    (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED,
                    kTraceError, "Channel::SendPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        int n = _transportPtr->SendPacket(channel,
                                          bufferToSendPtr,
                                          bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendPacket() RTP transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }
}

int
Channel::SendRTCPPacket(int channel, const void *data, int len)
{
    channel = VoEChannelId(channel);
    assert(channel == _channelId);

    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            WEBRTC_TRACE(kTraceError, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() failed to send RTCP packet"
                         " due to invalid transport object");
            return -1;
        }
    }

    uint8_t* bufferToSendPtr = (uint8_t*)data;
    int32_t bufferLength = len;

    // Dump the RTCP packet to a file (if RTP dump is enabled).
    if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::SendRTCPPacket() RTCP dump to output file failed");
    }

    // SRTP or External encryption
    if (_encrypting)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_encryptionPtr)
        {
            if (!_encryptionRTCPBufferPtr)
            {
                // Allocate memory for encryption buffer one time only
                _encryptionRTCPBufferPtr =
                    new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
            }

            // Perform encryption (SRTP or external).
            int32_t encryptedBufferLength = 0;
            _encryptionPtr->encrypt_rtcp(_channelId,
                                         bufferToSendPtr,
                                         _encryptionRTCPBufferPtr,
                                         bufferLength,
                                         (int*)&encryptedBufferLength);
            if (encryptedBufferLength <= 0)
            {
                _engineStatisticsPtr->SetLastError(
                    VE_ENCRYPTION_FAILED, kTraceError,
                    "Channel::SendRTCPPacket() encryption failed");
                return -1;
            }

            // Replace default data buffer with encrypted buffer
            bufferToSendPtr = _encryptionRTCPBufferPtr;
            bufferLength = encryptedBufferLength;
        }
    }

    // Packet transmission using WebRtc socket transport
    if (!_externalTransport)
    {
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using WebRtc"
                         " sockets failed");
            return -1;
        }
        return n;
    }

    // Packet transmission using external transport
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_transportPtr == NULL)
        {
            return -1;
        }
        int n = _transportPtr->SendRTCPPacket(channel,
                                              bufferToSendPtr,
                                              bufferLength);
        if (n < 0)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::SendRTCPPacket() transmission using external"
                         " transport failed");
            return -1;
        }
        return n;
    }

    return len;
}

void
Channel::OnPlayTelephoneEvent(int32_t id,
                              uint8_t event,
                              uint16_t lengthMs,
                              uint8_t volume)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
                 " volume=%u)", id, event, lengthMs, volume);

    if (!_playOutbandDtmfEvent || (event > 15))
    {
        // Ignore callback since feedback is disabled or event is not a
        // Dtmf tone event.
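        // (Event codes 0-15 are the DTMF digits 0-9, *, # and A-D as defined
        // for telephone-events in RFC 4733; higher codes denote other tones
        // and are not played out here.)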
        return;
    }

    assert(_outputMixerPtr != NULL);

    // Start playing out the Dtmf tone (if playout is enabled).
    // Reduce the tone length by 80 ms to reduce the risk of echo.
    _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
}

void
Channel::OnIncomingSSRCChanged(int32_t id,
                               uint32_t SSRC)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
                 id, SSRC);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Reset RTP-module counters since a new incoming RTP stream is detected
    rtp_receive_statistics_->ResetDataCounters();
    rtp_receive_statistics_->ResetStatistics();

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            // Send new SSRC to registered observer using callback
            _rtpObserverPtr->OnIncomingSSRCChanged(channel, SSRC);
        }
    }
}

void Channel::OnIncomingCSRCChanged(int32_t id,
                                    uint32_t CSRC,
                                    bool added)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
                 id, CSRC, added);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtpObserverPtr)
        {
            _rtpObserverPtr->OnIncomingCSRCChanged(channel, CSRC, added);
        }
    }
}

void Channel::ResetStatistics() {
  rtp_receive_statistics_->ResetStatistics();
}

void
Channel::OnApplicationDataReceived(int32_t id,
                                   uint8_t subType,
                                   uint32_t name,
                                   uint16_t length,
                                   const uint8_t* data)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
                 " name=%u, length=%u)",
                 id, subType, name, length);

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    if (_rtcpObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);

        if (_rtcpObserverPtr)
        {
            _rtcpObserverPtr->OnApplicationDataReceived(channel,
                                                        subType,
                                                        name,
                                                        data,
                                                        length);
        }
    }
}

int32_t
Channel::OnInitializeDecoder(
    int32_t id,
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    uint8_t channels,
    uint32_t rate)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
                 "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
                 id, payloadType, payloadName, frequency, channels, rate);

    assert(VoEChannelId(id) == _channelId);

    CodecInst receiveCodec = {0};
    CodecInst dummyCodec = {0};

    receiveCodec.pltype = payloadType;
    receiveCodec.plfreq = frequency;
    receiveCodec.channels = channels;
    receiveCodec.rate = rate;
    strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

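    // The packet size is not part of this callback, so look up the matching
    // codec in the ACM's codec database and reuse its default packet size.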
    _audioCodingModule.Codec(payloadName, &dummyCodec, frequency, channels);
    receiveCodec.pacsize = dummyCodec.pacsize;

    // Register the new codec to the ACM
    if (_audioCodingModule.RegisterReceiveCodec(receiveCodec) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "Channel::OnInitializeDecoder() invalid codec ("
                     "pt=%d, name=%s) received - 1", payloadType, payloadName);
        _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
        return -1;
    }

    return 0;
}

void
Channel::OnPacketTimeout(int32_t id)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPacketTimeout(id=%d)", id);

    CriticalSectionScoped cs(_callbackCritSectPtr);
    if (_voiceEngineObserverPtr)
    {
        if (_receiving || _externalTransport)
        {
            int32_t channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Ensure that next OnReceivedPacket() callback will trigger
            // a VE_PACKET_RECEIPT_RESTARTED callback.
            _rtpPacketTimedOut = true;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnPacketTimeout() => "
                         "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
            _voiceEngineObserverPtr->CallbackOnError(channel,
                                                     VE_RECEIVE_PACKET_TIMEOUT);
        }
    }
}

void
Channel::OnReceivedPacket(int32_t id,
                          RtpRtcpPacketType packetType)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPacket(id=%d, packetType=%d)",
                 id, packetType);

    assert(VoEChannelId(id) == _channelId);

    // Notify only for the case when we have restarted an RTP session.
    if (_rtpPacketTimedOut && (kPacketRtp == packetType))
    {
        CriticalSectionScoped cs(_callbackCritSectPtr);
        if (_voiceEngineObserverPtr)
        {
            int32_t channel = VoEChannelId(id);
            assert(channel == _channelId);
            // Reset timeout mechanism
            _rtpPacketTimedOut = false;
            // Deliver callback to the observer
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::OnReceivedPacket() =>"
                         " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
            _voiceEngineObserverPtr->CallbackOnError(
                channel,
                VE_PACKET_RECEIPT_RESTARTED);
        }
    }
}

void
Channel::OnPeriodicDeadOrAlive(int32_t id,
                               RTPAliveType alive)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);

    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (!_connectionObserver)
            return;
    }

    int32_t channel = VoEChannelId(id);
    assert(channel == _channelId);

    // Use Alive as default to limit risk of false Dead detections
    bool isAlive(true);

    // Always mark the connection as Dead when the module reports kRtpDead
    if (kRtpDead == alive)
    {
        isAlive = false;
    }

    // It is possible that the connection is alive even if no RTP packet has
    // been received for a long time since the other side might use VAD/DTX
    // and a low SID-packet update rate.
    if ((kRtpNoRtp == alive) && _playing)
    {
        // Detect Alive for all NetEQ states except for the case when we are
        // in PLC_CNG state.
        // PLC_CNG <=> background noise only due to long expand or error.
        // Note that the case where the other side stops sending during CNG
        // state will be detected as Alive. Dead is not set until after
        // missing RTCP packets for at least twelve seconds (handled
        // internally by the RTP/RTCP module).
        isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
    }

    UpdateDeadOrAliveCounters(isAlive);

    // Send callback to the registered observer
    if (_connectionObserver)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_connectionObserverPtr)
        {
            _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
        }
    }
}

int32_t
Channel::OnReceivedPayloadData(const uint8_t* payloadData,
                               uint16_t payloadSize,
                               const WebRtcRTPHeader* rtpHeader)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::OnReceivedPayloadData(payloadSize=%d,"
                 " payloadType=%u, audioChannel=%u)",
                 payloadSize,
                 rtpHeader->header.payloadType,
                 rtpHeader->type.Audio.channel);

    _lastRemoteTimeStamp = rtpHeader->header.timestamp;

    if (!_playing)
    {
        // Avoid inserting into NetEQ when we are not playing. Count the
        // packet as discarded.
        WEBRTC_TRACE(kTraceStream, kTraceVoice,
                     VoEId(_instanceId, _channelId),
                     "received packet is discarded since playing is not"
                     " activated");
        _numberOfDiscardedPackets++;
        return 0;
    }

    // Push the incoming payload (parsed and ready for decoding) into the ACM
    if (_audioCodingModule.IncomingPacket(payloadData,
                                          payloadSize,
                                          *rtpHeader) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
            "Channel::OnReceivedPayloadData() unable to push data to the ACM");
        return -1;
    }

    // Update the packet delay.
    UpdatePacketDelay(rtpHeader->header.timestamp,
                      rtpHeader->header.sequenceNumber);

    uint16_t round_trip_time = 0;
    _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time,
                        NULL, NULL, NULL);

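    // Hand the latest round-trip time to the ACM so that its NACK list can
    // leave out packets that would most likely arrive too late to be decoded.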
    std::vector<uint16_t> nack_list = _audioCodingModule.GetNackList(
        round_trip_time);
    if (!nack_list.empty()) {
        // Can't use nack_list.data() since it's not supported by all
        // compilers.
        ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
    }
    return 0;
}

int32_t Channel::GetAudioFrame(int32_t id, AudioFrame& audioFrame)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetAudioFrame(id=%d)", id);

    // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
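    // (The ACM delivers the frame at the sample rate currently set in
    // audioFrame.sample_rate_hz_, resampling the decoder output if needed.)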
    if (_audioCodingModule.PlayoutData10Ms(audioFrame.sample_rate_hz_,
                                           &audioFrame) == -1)
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
        // In all likelihood, the audio in this frame is garbage. We return an
        // error so that the audio mixer module doesn't add it to the mix. As
        // a result, it won't be played out and the actions skipped here are
        // irrelevant.
        return -1;
    }

    if (_RxVadDetection)
    {
        UpdateRxVadDetection(audioFrame);
    }

    // Convert module ID to internal VoE channel ID
    audioFrame.id_ = VoEChannelId(audioFrame.id_);
    // Store speech type for dead-or-alive detection
    _outputSpeechType = audioFrame.speech_type_;

    // Perform far-end AudioProcessing module processing on the received signal
    if (_rxApmIsEnabled)
    {
        ApmProcessRx(audioFrame);
    }

    // Output volume scaling
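    // (Scaling is skipped when the gain is within roughly 1% of unity, where
    // the result would be essentially unchanged.)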
    if (_outputGain < 0.99f || _outputGain > 1.01f)
    {
        AudioFrameOperations::ScaleWithSat(_outputGain, audioFrame);
    }

    // Scale left and/or right channel(s) if stereo and master balance is
    // active

    if (_panLeft != 1.0f || _panRight != 1.0f)
    {
        if (audioFrame.num_channels_ == 1)
        {
            // Emulate stereo mode since panning is active.
            // The mono signal is copied to both left and right channels here.
            AudioFrameOperations::MonoToStereo(&audioFrame);
        }
        // For true stereo mode (when we are receiving a stereo signal), no
        // action is needed.

        // Do the panning operation (the audio frame contains stereo at this
        // stage)
        AudioFrameOperations::Scale(_panLeft, _panRight, audioFrame);
    }

    // Mix decoded PCM output with file if file mixing is enabled
    if (_outputFilePlaying)
    {
        MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
    }

    // Place channel in on-hold state (~muted) if on-hold is activated
    if (_outputIsOnHold)
    {
        AudioFrameOperations::Mute(audioFrame);
    }

    // External media
    if (_outputExternalMedia)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        const bool isStereo = (audioFrame.num_channels_ == 2);
        if (_outputExternalMediaCallbackPtr)
        {
            _outputExternalMediaCallbackPtr->Process(
                _channelId,
                kPlaybackPerChannel,
                (int16_t*)audioFrame.data_,
                audioFrame.samples_per_channel_,
                audioFrame.sample_rate_hz_,
                isStereo);
        }
    }

    // Record playout if enabled
    {
        CriticalSectionScoped cs(&_fileCritSect);

        if (_outputFileRecording && _outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
        }
    }

    // Measure audio level (0-9)
    _outputAudioLevel.ComputeLevel(audioFrame);

    return 0;
}

int32_t
Channel::NeededFrequency(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::NeededFrequency(id=%d)", id);

    int highestNeeded = 0;

    // Determine highest needed receive frequency
    int32_t receiveFrequency = _audioCodingModule.ReceiveFrequency();

    // Return the bigger of playout and receive frequency in the ACM.
    if (_audioCodingModule.PlayoutFrequency() > receiveFrequency)
    {
        highestNeeded = _audioCodingModule.PlayoutFrequency();
    }
    else
    {
        highestNeeded = receiveFrequency;
    }

    // Special case, if we're playing a file on the playout side
    // we take that frequency into consideration as well.
    // This is not needed on the sending side, since the codec will
    // limit the spectrum anyway.
    if (_outputFilePlaying)
    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_outputFilePlayerPtr && _outputFilePlaying)
        {
            if (_outputFilePlayerPtr->Frequency() > highestNeeded)
            {
                highestNeeded = _outputFilePlayerPtr->Frequency();
            }
        }
    }

    return highestNeeded;
}

int32_t
Channel::CreateChannel(Channel*& channel,
                       int32_t channelId,
                       uint32_t instanceId)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
                 "Channel::CreateChannel(channelId=%d, instanceId=%d)",
                 channelId, instanceId);

    channel = new Channel(channelId, instanceId);
    if (channel == NULL)
    {
        WEBRTC_TRACE(kTraceMemory, kTraceVoice,
                     VoEId(instanceId,channelId),
                     "Channel::CreateChannel() unable to allocate memory for"
                     " channel");
        return -1;
    }
    return 0;
}

void
Channel::PlayNotification(int32_t id, uint32_t durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet
}

void
Channel::RecordNotification(int32_t id, uint32_t durationMs)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordNotification(id=%d, durationMs=%d)",
                 id, durationMs);

    // Not implemented yet
}

void
Channel::PlayFileEnded(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::PlayFileEnded(id=%d)", id);

    if (id == _inputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _inputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => input file player module is"
                     " shutdown");
    }
    else if (id == _outputFilePlayerId)
    {
        CriticalSectionScoped cs(&_fileCritSect);

        _outputFilePlaying = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::PlayFileEnded() => output file player module is"
                     " shutdown");
    }
}

void
Channel::RecordFileEnded(int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded(id=%d)", id);

    assert(id == _outputFileRecorderId);

    CriticalSectionScoped cs(&_fileCritSect);

    _outputFileRecording = false;
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                 VoEId(_instanceId,_channelId),
                 "Channel::RecordFileEnded() => output file recorder module is"
                 " shutdown");
}

Channel::Channel(int32_t channelId,
                 uint32_t instanceId) :
    _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _instanceId(instanceId),
    _channelId(channelId),
    rtp_header_parser_(RtpHeaderParser::Create()),
    rtp_payload_registry_(
        new RTPPayloadRegistry(channelId,
                               RTPPayloadStrategy::CreateStrategy(true))),
    rtp_receive_statistics_(ReceiveStatistics::Create(
        Clock::GetRealTimeClock())),
    rtp_receiver_(RtpReceiver::CreateAudioReceiver(
        VoEModuleId(instanceId, channelId), Clock::GetRealTimeClock(), this,
        this, this, rtp_payload_registry_.get())),
    telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
    _audioCodingModule(*AudioCodingModule::Create(
        VoEModuleId(instanceId, channelId))),
    _rtpDumpIn(*RtpDump::CreateRtpDump()),
    _rtpDumpOut(*RtpDump::CreateRtpDump()),
    _outputAudioLevel(),
    _externalTransport(false),
    _inputFilePlayerPtr(NULL),
    _outputFilePlayerPtr(NULL),
    _outputFileRecorderPtr(NULL),
    // Avoid conflicts with other channels by adding 1024 - 1026;
    // we will not use as many as 1024 channels.
    _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
    _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
    _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
    _inputFilePlaying(false),
    _outputFilePlaying(false),
    _outputFileRecording(false),
    _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
    _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
    _inputExternalMedia(false),
    _outputExternalMedia(false),
    _inputExternalMediaCallbackPtr(NULL),
    _outputExternalMediaCallbackPtr(NULL),
    _encryptionRTPBufferPtr(NULL),
    _decryptionRTPBufferPtr(NULL),
    _encryptionRTCPBufferPtr(NULL),
    _decryptionRTCPBufferPtr(NULL),
    _timeStamp(0), // This is just an offset, the RTP module will add its own random offset
    _sendTelephoneEventPayloadType(106),
    playout_timestamp_rtp_(0),
    playout_timestamp_rtcp_(0),
    _numberOfDiscardedPackets(0),
    send_sequence_number_(0),
    _engineStatisticsPtr(NULL),
    _outputMixerPtr(NULL),
    _transmitMixerPtr(NULL),
    _moduleProcessThreadPtr(NULL),
    _audioDeviceModulePtr(NULL),
    _voiceEngineObserverPtr(NULL),
    _callbackCritSectPtr(NULL),
    _transportPtr(NULL),
    _encryptionPtr(NULL),
    _rtpAudioProc(NULL),
    _rxAudioProcessingModulePtr(NULL),
    _rxVadObserverPtr(NULL),
    _oldVadDecision(-1),
    _sendFrameType(0),
    _rtpObserverPtr(NULL),
    _rtcpObserverPtr(NULL),
    _outputIsOnHold(false),
    _externalPlayout(false),
    _externalMixing(false),
    _inputIsOnHold(false),
    _playing(false),
    _sending(false),
    _receiving(false),
    _mixFileWithMicrophone(false),
    _rtpObserver(false),
    _rtcpObserver(false),
    _mute(false),
    _panLeft(1.0f),
    _panRight(1.0f),
    _outputGain(1.0f),
    _encrypting(false),
    _decrypting(false),
    _playOutbandDtmfEvent(false),
    _playInbandDtmfEvent(false),
    _extraPayloadType(0),
    _insertExtraRTPPacket(false),
    _extraMarkerBit(false),
    _lastLocalTimeStamp(0),
    _lastRemoteTimeStamp(0),
    _lastPayloadType(0),
    _includeAudioLevelIndication(false),
    _rtpPacketTimedOut(false),
    _rtpPacketTimeOutIsEnabled(false),
    _rtpTimeOutSeconds(0),
    _connectionObserver(false),
    _connectionObserverPtr(NULL),
    _countAliveDetections(0),
    _countDeadDetections(0),
    _outputSpeechType(AudioFrame::kNormalSpeech),
    _average_jitter_buffer_delay_us(0),
    least_required_delay_ms_(0),
    _previousTimestamp(0),
    _recPacketDelayMs(20),
    _RxVadDetection(false),
    _rxApmIsEnabled(false),
    _rxAgcIsEnabled(false),
    _rxNsIsEnabled(false)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Channel() - ctor");
    _inbandDtmfQueue.ResetDtmf();
    _inbandDtmfGenerator.Init();
    _outputAudioLevel.Clear();

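    // The channel registers itself as the outgoing transport, the RTCP
    // feedback observer and the audio-message callback of its RTP/RTCP
    // module, so the module calls back into Channel::SendPacket() /
    // Channel::SendRTCPPacket() defined above.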
    RtpRtcp::Configuration configuration;
    configuration.id = VoEModuleId(instanceId, channelId);
    configuration.audio = true;
    configuration.outgoing_transport = this;
    configuration.rtcp_feedback = this;
    configuration.audio_messages = this;
    configuration.receive_statistics = rtp_receive_statistics_.get();

    _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));

    // Create far end AudioProcessing Module
    _rxAudioProcessingModulePtr = AudioProcessing::Create(
        VoEModuleId(instanceId, channelId));
}

Channel::~Channel()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::~Channel() - dtor");

    if (_outputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
    }
    if (_inputExternalMedia)
    {
        DeRegisterExternalMediaProcessing(kRecordingPerChannel);
    }
    StopSend();
    StopPlayout();

    {
        CriticalSectionScoped cs(&_fileCritSect);
        if (_inputFilePlayerPtr)
        {
            _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _inputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
            _inputFilePlayerPtr = NULL;
        }
        if (_outputFilePlayerPtr)
        {
            _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
            _outputFilePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
            _outputFilePlayerPtr = NULL;
        }
        if (_outputFileRecorderPtr)
        {
            _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
            _outputFileRecorderPtr->StopRecording();
            FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
            _outputFileRecorderPtr = NULL;
        }
    }

    // The order to safely shutdown modules in a channel is:
    // 1. De-register callbacks in modules
    // 2. De-register modules in process thread
    // 3. Destroy modules
    if (_audioCodingModule.RegisterTransportCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register transport callback"
                     " (Audio coding module)");
    }
    if (_audioCodingModule.RegisterVADCallback(NULL) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to de-register VAD callback"
                     " (Audio coding module)");
    }
    // De-register modules in process thread
    if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "~Channel() failed to deregister RTP/RTCP module");
    }

    // Destroy modules
    AudioCodingModule::Destroy(&_audioCodingModule);
    if (_rxAudioProcessingModulePtr != NULL)
    {
        AudioProcessing::Destroy(_rxAudioProcessingModulePtr); // far end APM
        _rxAudioProcessingModulePtr = NULL;
    }

    // End of modules shutdown

    // Delete other objects
    RtpDump::DestroyRtpDump(&_rtpDumpIn);
    RtpDump::DestroyRtpDump(&_rtpDumpOut);
    delete [] _encryptionRTPBufferPtr;
    delete [] _decryptionRTPBufferPtr;
    delete [] _encryptionRTCPBufferPtr;
    delete [] _decryptionRTCPBufferPtr;
    delete &_callbackCritSect;
    delete &_fileCritSect;
}

int32_t
Channel::Init()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::Init()");

    // --- Initial sanity

    if ((_engineStatisticsPtr == NULL) ||
        (_moduleProcessThreadPtr == NULL))
    {
        WEBRTC_TRACE(kTraceError, kTraceVoice,
                     VoEId(_instanceId,_channelId),
                     "Channel::Init() must call SetEngineInformation() first");
        return -1;
    }

    // --- Add modules to process thread (for periodic scheduling)

    const bool processThreadFail =
        ((_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0) ||
        false);
    if (processThreadFail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() modules not registered");
        return -1;
    }
    // --- ACM initialization

    if ((_audioCodingModule.InitializeReceiver() == -1) ||
#ifdef WEBRTC_CODEC_AVT
        // out-of-band Dtmf tones are played out by default
        (_audioCodingModule.SetDtmfPlayoutStatus(true) == -1) ||
#endif
        (_audioCodingModule.InitializeSender() == -1))
    {
        _engineStatisticsPtr->SetLastError(
            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
            "Channel::Init() unable to initialize the ACM - 1");
        return -1;
    }

    // --- RTP/RTCP module initialization

    // Ensure that RTCP is enabled by default for the created channel.
    // Note that the module will keep generating RTCP until it is explicitly
    // disabled by the user.
    // After StopListen (when no sockets exist), RTCP packets will no longer
    // be transmitted since the Transport object will then be invalid.
    telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
    // RTCP is enabled by default.
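    // (kRtcpCompound requests full compound RTCP packets, i.e. sender/receiver
    // reports bundled with SDES, rather than reduced-size RTCP.)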
    if (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1)
    {
        _engineStatisticsPtr->SetLastError(
            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
            "Channel::Init() RTP/RTCP module not initialized");
        return -1;
    }

    // --- Register all permanent callbacks
    const bool fail =
        (_audioCodingModule.RegisterTransportCallback(this) == -1) ||
        (_audioCodingModule.RegisterVADCallback(this) == -1);

    if (fail)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_INIT_CHANNEL, kTraceError,
            "Channel::Init() callbacks not registered");
        return -1;
    }

    // --- Register all supported codecs to the receiving side of the
    // RTP/RTCP module

    CodecInst codec;
    const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

    for (int idx = 0; idx < nSupportedCodecs; idx++)
    {
        // Open up the RTP/RTCP receiver for all supported codecs
        if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
            (rtp_receiver_->RegisterReceivePayload(
                codec.plname,
                codec.pltype,
                codec.plfreq,
                codec.channels,
                (codec.rate < 0) ? 0 : codec.rate) == -1))
        {
            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() unable to register %s (%d/%d/%d/%d) "
                         "to RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }
        else
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
                         VoEId(_instanceId,_channelId),
                         "Channel::Init() %s (%d/%d/%d/%d) has been added to "
                         "the RTP/RTCP receiver",
                         codec.plname, codec.pltype, codec.plfreq,
                         codec.channels, codec.rate);
        }

        // Ensure that PCMU is used as default codec on the sending side
        if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
        {
            SetSendCodec(codec);
        }

        // Register default PT for outband 'telephone-event'
        if (!STR_CASE_CMP(codec.plname, "telephone-event"))
        {
            if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register outband "
                             "'telephone-event' (%d/%d) correctly",
                             codec.pltype, codec.plfreq);
            }
        }

        if (!STR_CASE_CMP(codec.plname, "CN"))
        {
            if ((_audioCodingModule.RegisterSendCodec(codec) == -1) ||
                (_audioCodingModule.RegisterReceiveCodec(codec) == -1) ||
                (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register CN (%d/%d) "
                             "correctly - 1",
                             codec.pltype, codec.plfreq);
            }
        }
#ifdef WEBRTC_CODEC_RED
        // Register RED to the receiving side of the ACM.
        // We will not receive an OnInitializeDecoder() callback for RED.
        if (!STR_CASE_CMP(codec.plname, "RED"))
        {
            if (_audioCodingModule.RegisterReceiveCodec(codec) == -1)
            {
                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
                             VoEId(_instanceId,_channelId),
                             "Channel::Init() failed to register RED (%d/%d) "
                             "correctly",
                             codec.pltype, codec.plfreq);
            }
        }
#endif
    }

    // Initialize the far end AP module
    // Using 8 kHz as initial Fs, the same as in transmission. Might be
    // changed when the first audio is received.
    if (_rxAudioProcessingModulePtr == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_NO_MEMORY, kTraceCritical,
            "Channel::Init() failed to create the far-end AudioProcessing"
            " module");
        return -1;
    }

    if (_rxAudioProcessingModulePtr->set_sample_rate_hz(8000))
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the sample rate to 8K for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->set_num_channels(1, 1) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_SOUNDCARD_ERROR, kTraceWarning,
            "Init() failed to set channels for the primary audio stream");
    }

    if (_rxAudioProcessingModulePtr->high_pass_filter()->Enable(
        WEBRTC_VOICE_ENGINE_RX_HP_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Channel::Init() failed to set the high-pass filter for"
            " far-end AP module");
    }

    if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(
        (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction level for far-end"
            " AP module");
    }
    if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(
        WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set noise reduction state for far-end"
            " AP module");
    }

    if (_rxAudioProcessingModulePtr->gain_control()->set_mode(
        (GainControl::Mode)WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_MODE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC mode for far-end AP module");
    }
    if (_rxAudioProcessingModulePtr->gain_control()->Enable(
        WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_STATE) != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_APM_ERROR, kTraceWarning,
            "Init() failed to set AGC state for far-end AP module");
    }

    return 0;
}

int32_t
Channel::SetEngineInformation(Statistics& engineStatistics,
                              OutputMixer& outputMixer,
                              voe::TransmitMixer& transmitMixer,
                              ProcessThread& moduleProcessThread,
                              AudioDeviceModule& audioDeviceModule,
                              VoiceEngineObserver* voiceEngineObserver,
                              CriticalSectionWrapper* callbackCritSect)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::SetEngineInformation()");
    _engineStatisticsPtr = &engineStatistics;
    _outputMixerPtr = &outputMixer;
    _transmitMixerPtr = &transmitMixer;
    _moduleProcessThreadPtr = &moduleProcessThread;
    _audioDeviceModulePtr = &audioDeviceModule;
    _voiceEngineObserverPtr = voiceEngineObserver;
    _callbackCritSectPtr = callbackCritSect;
    return 0;
}

int32_t
Channel::UpdateLocalTimeStamp()
{
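    // Advance the local RTP timestamp by the number of samples in the most
    // recently processed audio frame.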
    _timeStamp += _audioFrame.samples_per_channel_;
    return 0;
}
1358
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001359int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001360Channel::StartPlayout()
1361{
1362 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1363 "Channel::StartPlayout()");
1364 if (_playing)
1365 {
1366 return 0;
1367 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00001368
1369 if (!_externalMixing) {
1370 // Add the participant as a candidate for mixing.
1371 if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
1372 {
1373 _engineStatisticsPtr->SetLastError(
1374 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1375 "StartPlayout() failed to add participant to mixer");
1376 return -1;
1377 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001378 }
1379
1380 _playing = true;
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001381
1382 if (RegisterFilePlayingToMixer() != 0)
1383 return -1;
1384
niklase@google.com470e71d2011-07-07 08:21:25 +00001385 return 0;
1386}
1387
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001388int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001389Channel::StopPlayout()
1390{
1391 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1392 "Channel::StopPlayout()");
1393 if (!_playing)
1394 {
1395 return 0;
1396 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00001397
1398 if (!_externalMixing) {
1399 // Remove the participant as a candidate for mixing.
1400 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
1401 {
1402 _engineStatisticsPtr->SetLastError(
1403 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1404 "StopPlayout() failed to remove participant from mixer");
1405 return -1;
1406 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001407 }
1408
1409 _playing = false;
1410 _outputAudioLevel.Clear();
1411
1412 return 0;
1413}
1414
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001415int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001416Channel::StartSend()
1417{
1418 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1419 "Channel::StartSend()");
xians@webrtc.org09e8c472013-07-31 16:30:19 +00001420 // Resume the previous sequence number which was reset by StopSend().
1421 // This needs to be done before |_sending| is set to true.
1422 if (send_sequence_number_)
1423 SetInitSequenceNumber(send_sequence_number_);
1424
niklase@google.com470e71d2011-07-07 08:21:25 +00001425 {
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001426 // A lock is needed because |_sending| can be accessed or modified by
1427 // another thread at the same time.
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001428 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001429
1430 if (_sending)
1431 {
1432 return 0;
1433 }
1434 _sending = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00001435 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001436
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001437 if (_rtpRtcpModule->SetSendingStatus(true) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001438 {
1439 _engineStatisticsPtr->SetLastError(
1440 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1441 "StartSend() RTP/RTCP failed to start sending");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001442 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001443 _sending = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00001444 return -1;
1445 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001446
niklase@google.com470e71d2011-07-07 08:21:25 +00001447 return 0;
1448}
1449
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001450int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001451Channel::StopSend()
1452{
1453 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1454 "Channel::StopSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001455 {
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001456 // A lock is needed because |_sending| can be accessed or modified by
1457 // another thread at the same time.
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001458 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001459
1460 if (!_sending)
1461 {
1462 return 0;
1463 }
1464 _sending = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00001465 }
xians@webrtc.orge07247a2011-11-28 16:31:28 +00001466
xians@webrtc.org09e8c472013-07-31 16:30:19 +00001467 // Store the sequence number to be able to pick up the same sequence for
1468 // the next StartSend(). This is needed when restarting the device; otherwise
1469 // it might cause libSRTP to complain about packets being replayed.
1470 // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1471 // CL is landed. See issue
1472 // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1473 send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1474
niklase@google.com470e71d2011-07-07 08:21:25 +00001475 // Reset the sending SSRC and sequence number and trigger direct
1476 // transmission of an RTCP BYE.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001477 if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
1478 _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001479 {
1480 _engineStatisticsPtr->SetLastError(
1481 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1482 "StartSend() RTP/RTCP failed to stop sending");
1483 }
1484
niklase@google.com470e71d2011-07-07 08:21:25 +00001485 return 0;
1486}
1487
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001488int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001489Channel::StartReceiving()
1490{
1491 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1492 "Channel::StartReceiving()");
1493 if (_receiving)
1494 {
1495 return 0;
1496 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001497 _receiving = true;
1498 _numberOfDiscardedPackets = 0;
1499 return 0;
1500}
1501
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001502int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001503Channel::StopReceiving()
1504{
1505 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1506 "Channel::StopReceiving()");
1507 if (!_receiving)
1508 {
1509 return 0;
1510 }
pwestin@webrtc.org684f0572013-03-13 23:20:57 +00001511
henrika@webrtc.orgaf71f0e2011-12-05 07:02:22 +00001512 // Recover DTMF detection status.
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001513 telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
niklase@google.com470e71d2011-07-07 08:21:25 +00001514 RegisterReceiveCodecsToRTPModule();
1515 _receiving = false;
1516 return 0;
1517}
1518
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001519int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001520Channel::SetNetEQPlayoutMode(NetEqModes mode)
1521{
1522 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1523 "Channel::SetNetEQPlayoutMode()");
1524 AudioPlayoutMode playoutMode(voice);
1525 switch (mode)
1526 {
1527 case kNetEqDefault:
1528 playoutMode = voice;
1529 break;
1530 case kNetEqStreaming:
1531 playoutMode = streaming;
1532 break;
1533 case kNetEqFax:
1534 playoutMode = fax;
1535 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001536 case kNetEqOff:
1537 playoutMode = off;
1538 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00001539 }
1540 if (_audioCodingModule.SetPlayoutMode(playoutMode) != 0)
1541 {
1542 _engineStatisticsPtr->SetLastError(
1543 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1544 "SetNetEQPlayoutMode() failed to set playout mode");
1545 return -1;
1546 }
1547 return 0;
1548}
1549
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001550int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001551Channel::GetNetEQPlayoutMode(NetEqModes& mode)
1552{
1553 const AudioPlayoutMode playoutMode = _audioCodingModule.PlayoutMode();
1554 switch (playoutMode)
1555 {
1556 case voice:
1557 mode = kNetEqDefault;
1558 break;
1559 case streaming:
1560 mode = kNetEqStreaming;
1561 break;
1562 case fax:
1563 mode = kNetEqFax;
1564 break;
roosa@google.comb7186192012-12-12 21:59:14 +00001565 case off:
1566 mode = kNetEqOff;
niklase@google.com470e71d2011-07-07 08:21:25 +00001567 }
1568 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
1569 VoEId(_instanceId,_channelId),
1570 "Channel::GetNetEQPlayoutMode() => mode=%u", mode);
1571 return 0;
1572}
1573
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001574int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001575Channel::SetOnHoldStatus(bool enable, OnHoldModes mode)
1576{
1577 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1578 "Channel::SetOnHoldStatus()");
1579 if (mode == kHoldSendAndPlay)
1580 {
1581 _outputIsOnHold = enable;
1582 _inputIsOnHold = enable;
1583 }
1584 else if (mode == kHoldPlayOnly)
1585 {
1586 _outputIsOnHold = enable;
1587 }
1588 if (mode == kHoldSendOnly)
1589 {
1590 _inputIsOnHold = enable;
1591 }
1592 return 0;
1593}
1594
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001595int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001596Channel::GetOnHoldStatus(bool& enabled, OnHoldModes& mode)
1597{
1598 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1599 "Channel::GetOnHoldStatus()");
1600 enabled = (_outputIsOnHold || _inputIsOnHold);
1601 if (_outputIsOnHold && _inputIsOnHold)
1602 {
1603 mode = kHoldSendAndPlay;
1604 }
1605 else if (_outputIsOnHold && !_inputIsOnHold)
1606 {
1607 mode = kHoldPlayOnly;
1608 }
1609 else if (!_outputIsOnHold && _inputIsOnHold)
1610 {
1611 mode = kHoldSendOnly;
1612 }
1613 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1614 "Channel::GetOnHoldStatus() => enabled=%d, mode=%d",
1615 enabled, mode);
1616 return 0;
1617}
1618
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001619int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001620Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
1621{
1622 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1623 "Channel::RegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001624 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001625
1626 if (_voiceEngineObserverPtr)
1627 {
1628 _engineStatisticsPtr->SetLastError(
1629 VE_INVALID_OPERATION, kTraceError,
1630 "RegisterVoiceEngineObserver() observer already enabled");
1631 return -1;
1632 }
1633 _voiceEngineObserverPtr = &observer;
1634 return 0;
1635}
1636
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001637int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001638Channel::DeRegisterVoiceEngineObserver()
1639{
1640 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1641 "Channel::DeRegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001642 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001643
1644 if (!_voiceEngineObserverPtr)
1645 {
1646 _engineStatisticsPtr->SetLastError(
1647 VE_INVALID_OPERATION, kTraceWarning,
1648 "DeRegisterVoiceEngineObserver() observer already disabled");
1649 return 0;
1650 }
1651 _voiceEngineObserverPtr = NULL;
1652 return 0;
1653}
1654
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001655int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001656Channel::GetSendCodec(CodecInst& codec)
1657{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001658 return (_audioCodingModule.SendCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001659}
1660
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001661int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001662Channel::GetRecCodec(CodecInst& codec)
1663{
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001664 return (_audioCodingModule.ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001665}
1666
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001667int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001668Channel::SetSendCodec(const CodecInst& codec)
1669{
1670 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1671 "Channel::SetSendCodec()");
1672
1673 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1674 {
1675 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1676 "SetSendCodec() failed to register codec to ACM");
1677 return -1;
1678 }
1679
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001680 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001681 {
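        // First attempt to register the payload failed => de-register the
        // payload type and try again.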
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001682 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1683 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001684 {
1685 WEBRTC_TRACE(
1686 kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1687 "SetSendCodec() failed to register codec to"
1688 " RTP/RTCP module");
1689 return -1;
1690 }
1691 }
1692
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001693 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001694 {
1695 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1696 "SetSendCodec() failed to set audio packet size");
1697 return -1;
1698 }
1699
1700 return 0;
1701}
1702
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001703int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001704Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
1705{
1706 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1707 "Channel::SetVADStatus(mode=%d)", mode);
1708 // To disable VAD, DTX must be disabled too
1709 disableDTX = ((enableVAD == false) ? true : disableDTX);
1710 if (_audioCodingModule.SetVAD(!disableDTX, enableVAD, mode) != 0)
1711 {
1712 _engineStatisticsPtr->SetLastError(
1713 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1714 "SetVADStatus() failed to set VAD");
1715 return -1;
1716 }
1717 return 0;
1718}
1719
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001720int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001721Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
1722{
1723 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1724 "Channel::GetVADStatus");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001725 if (_audioCodingModule.VAD(&disabledDTX, &enabledVAD, &mode) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001726 {
1727 _engineStatisticsPtr->SetLastError(
1728 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1729 "GetVADStatus() failed to get VAD status");
1730 return -1;
1731 }
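    // ACM reports whether DTX is enabled; VoE exposes the inverted
    // (disabled) flag, so flip it here.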
1732 disabledDTX = !disabledDTX;
1733 return 0;
1734}
1735
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001736int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001737Channel::SetRecPayloadType(const CodecInst& codec)
1738{
1739 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1740 "Channel::SetRecPayloadType()");
1741
1742 if (_playing)
1743 {
1744 _engineStatisticsPtr->SetLastError(
1745 VE_ALREADY_PLAYING, kTraceError,
1746 "SetRecPayloadType() unable to set PT while playing");
1747 return -1;
1748 }
1749 if (_receiving)
1750 {
1751 _engineStatisticsPtr->SetLastError(
1752 VE_ALREADY_LISTENING, kTraceError,
1753 "SetRecPayloadType() unable to set PT while listening");
1754 return -1;
1755 }
1756
1757 if (codec.pltype == -1)
1758 {
1759 // De-register the selected codec (RTP/RTCP module and ACM)
1760
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001761 int8_t pltype(-1);
niklase@google.com470e71d2011-07-07 08:21:25 +00001762 CodecInst rxCodec = codec;
1763
1764 // Get payload type for the given codec
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001765 rtp_payload_registry_->ReceivePayloadType(
1766 rxCodec.plname,
1767 rxCodec.plfreq,
1768 rxCodec.channels,
1769 (rxCodec.rate < 0) ? 0 : rxCodec.rate,
1770 &pltype);
niklase@google.com470e71d2011-07-07 08:21:25 +00001771 rxCodec.pltype = pltype;
1772
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001773 if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001774 {
1775 _engineStatisticsPtr->SetLastError(
1776 VE_RTP_RTCP_MODULE_ERROR,
1777 kTraceError,
1778 "SetRecPayloadType() RTP/RTCP-module deregistration "
1779 "failed");
1780 return -1;
1781 }
1782 if (_audioCodingModule.UnregisterReceiveCodec(rxCodec.pltype) != 0)
1783 {
1784 _engineStatisticsPtr->SetLastError(
1785 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1786 "SetRecPayloadType() ACM deregistration failed - 1");
1787 return -1;
1788 }
1789 return 0;
1790 }
1791
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001792 if (rtp_receiver_->RegisterReceivePayload(
1793 codec.plname,
1794 codec.pltype,
1795 codec.plfreq,
1796 codec.channels,
1797 (codec.rate < 0) ? 0 : codec.rate) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001798 {
1799 // First attempt to register failed => de-register and try again
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001800 rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
1801 if (rtp_receiver_->RegisterReceivePayload(
1802 codec.plname,
1803 codec.pltype,
1804 codec.plfreq,
1805 codec.channels,
1806 (codec.rate < 0) ? 0 : codec.rate) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001807 {
1808 _engineStatisticsPtr->SetLastError(
1809 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1810 "SetRecPayloadType() RTP/RTCP-module registration failed");
1811 return -1;
1812 }
1813 }
1814 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1815 {
1816 _audioCodingModule.UnregisterReceiveCodec(codec.pltype);
1817 if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
1818 {
1819 _engineStatisticsPtr->SetLastError(
1820 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1821 "SetRecPayloadType() ACM registration failed - 1");
1822 return -1;
1823 }
1824 }
1825 return 0;
1826}
1827
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001828int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001829Channel::GetRecPayloadType(CodecInst& codec)
1830{
1831 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1832 "Channel::GetRecPayloadType()");
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001833 int8_t payloadType(-1);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001834 if (rtp_payload_registry_->ReceivePayloadType(
1835 codec.plname,
1836 codec.plfreq,
1837 codec.channels,
1838 (codec.rate < 0) ? 0 : codec.rate,
1839 &payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001840 {
1841 _engineStatisticsPtr->SetLastError(
henrika@webrtc.org37198002012-06-18 11:00:12 +00001842 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
niklase@google.com470e71d2011-07-07 08:21:25 +00001843 "GetRecPayloadType() failed to retrieve RX payload type");
1844 return -1;
1845 }
1846 codec.pltype = payloadType;
1847 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1848 "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
1849 return 0;
1850}
1851
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001852int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001853Channel::SetAMREncFormat(AmrMode mode)
1854{
1855 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1856 "Channel::SetAMREncFormat()");
1857
1858 // ACM doesn't support AMR
1859 return -1;
1860}
1861
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001862int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001863Channel::SetAMRDecFormat(AmrMode mode)
1864{
1865 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1866 "Channel::SetAMRDecFormat()");
1867
1868 // ACM doesn't support AMR
1869 return -1;
1870}
1871
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001872int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001873Channel::SetAMRWbEncFormat(AmrMode mode)
1874{
1875 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1876 "Channel::SetAMRWbEncFormat()");
1877
1878 // ACM doesn't support AMR
1879 return -1;
1880
1881}
1882
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001883int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001884Channel::SetAMRWbDecFormat(AmrMode mode)
1885{
1886 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1887 "Channel::SetAMRWbDecFormat()");
1888
1889 // ACM doesn't support AMR
1890 return -1;
1891}
1892
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001893int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001894Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
1895{
1896 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1897 "Channel::SetSendCNPayloadType()");
1898
1899 CodecInst codec;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001900 int32_t samplingFreqHz(-1);
tina.legrand@webrtc.org45175852012-06-01 09:27:35 +00001901 const int kMono = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001902 if (frequency == kFreq32000Hz)
1903 samplingFreqHz = 32000;
1904 else if (frequency == kFreq16000Hz)
1905 samplingFreqHz = 16000;
1906
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001907 if (_audioCodingModule.Codec("CN", &codec, samplingFreqHz, kMono) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001908 {
1909 _engineStatisticsPtr->SetLastError(
1910 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1911 "SetSendCNPayloadType() failed to retrieve default CN codec "
1912 "settings");
1913 return -1;
1914 }
1915
1916 // Modify the payload type (must be set to dynamic range)
1917 codec.pltype = type;
1918
1919 if (_audioCodingModule.RegisterSendCodec(codec) != 0)
1920 {
1921 _engineStatisticsPtr->SetLastError(
1922 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1923 "SetSendCNPayloadType() failed to register CN to ACM");
1924 return -1;
1925 }
1926
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001927 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001928 {
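        // Registration failed, possibly because the payload type is already
        // in use => de-register it and retry once.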
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00001929 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1930 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001931 {
1932 _engineStatisticsPtr->SetLastError(
1933 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1934 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1935 "module");
1936 return -1;
1937 }
1938 }
1939 return 0;
1940}
1941
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001942int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00001943Channel::SetISACInitTargetRate(int rateBps, bool useFixedFrameSize)
1944{
1945 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1946 "Channel::SetISACInitTargetRate()");
1947
1948 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00001949 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001950 {
1951 _engineStatisticsPtr->SetLastError(
1952 VE_CODEC_ERROR, kTraceError,
1953 "SetISACInitTargetRate() failed to retrieve send codec");
1954 return -1;
1955 }
1956 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1957 {
1958 // This API is only valid if iSAC is setup to run in channel-adaptive
1959 // mode.
1960 // We do not validate the adaptive mode here. It is done later in the
1961 // ConfigISACBandwidthEstimator() API.
1962 _engineStatisticsPtr->SetLastError(
1963 VE_CODEC_ERROR, kTraceError,
1964 "SetISACInitTargetRate() send codec is not iSAC");
1965 return -1;
1966 }
1967
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001968 uint8_t initFrameSizeMsec(0);
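    // pacsize is given in samples at the codec sample rate, so dividing by
    // the samples-per-millisecond (16 for 16 kHz, 32 for 32 kHz) yields the
    // frame size in milliseconds.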
niklase@google.com470e71d2011-07-07 08:21:25 +00001969 if (16000 == sendCodec.plfreq)
1970 {
1971 // Note that 0 is a valid and corresponds to "use default
1972 if ((rateBps != 0 &&
1973 rateBps < kVoiceEngineMinIsacInitTargetRateBpsWb) ||
1974 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsWb))
1975 {
1976 _engineStatisticsPtr->SetLastError(
1977 VE_INVALID_ARGUMENT, kTraceError,
1978 "SetISACInitTargetRate() invalid target rate - 1");
1979 return -1;
1980 }
1981 // 30 or 60ms
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001982 initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 16);
niklase@google.com470e71d2011-07-07 08:21:25 +00001983 }
1984 else if (32000 == sendCodec.plfreq)
1985 {
1986 if ((rateBps != 0 &&
1987 rateBps < kVoiceEngineMinIsacInitTargetRateBpsSwb) ||
1988 (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsSwb))
1989 {
1990 _engineStatisticsPtr->SetLastError(
1991 VE_INVALID_ARGUMENT, kTraceError,
1992 "SetISACInitTargetRate() invalid target rate - 2");
1993 return -1;
1994 }
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001995 initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 32); // 30ms
niklase@google.com470e71d2011-07-07 08:21:25 +00001996 }
1997
1998 if (_audioCodingModule.ConfigISACBandwidthEstimator(
1999 initFrameSizeMsec, rateBps, useFixedFrameSize) == -1)
2000 {
2001 _engineStatisticsPtr->SetLastError(
2002 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2003 "SetISACInitTargetRate() iSAC BWE config failed");
2004 return -1;
2005 }
2006
2007 return 0;
2008}
2009
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002010int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002011Channel::SetISACMaxRate(int rateBps)
2012{
2013 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2014 "Channel::SetISACMaxRate()");
2015
2016 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00002017 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00002018 {
2019 _engineStatisticsPtr->SetLastError(
2020 VE_CODEC_ERROR, kTraceError,
2021 "SetISACMaxRate() failed to retrieve send codec");
2022 return -1;
2023 }
2024 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
2025 {
2026 // This API is only valid if iSAC is selected as sending codec.
2027 _engineStatisticsPtr->SetLastError(
2028 VE_CODEC_ERROR, kTraceError,
2029 "SetISACMaxRate() send codec is not iSAC");
2030 return -1;
2031 }
2032 if (16000 == sendCodec.plfreq)
2033 {
2034 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsWb) ||
2035 (rateBps > kVoiceEngineMaxIsacMaxRateBpsWb))
2036 {
2037 _engineStatisticsPtr->SetLastError(
2038 VE_INVALID_ARGUMENT, kTraceError,
2039 "SetISACMaxRate() invalid max rate - 1");
2040 return -1;
2041 }
2042 }
2043 else if (32000 == sendCodec.plfreq)
2044 {
2045 if ((rateBps < kVoiceEngineMinIsacMaxRateBpsSwb) ||
2046 (rateBps > kVoiceEngineMaxIsacMaxRateBpsSwb))
2047 {
2048 _engineStatisticsPtr->SetLastError(
2049 VE_INVALID_ARGUMENT, kTraceError,
2050 "SetISACMaxRate() invalid max rate - 2");
2051 return -1;
2052 }
2053 }
2054 if (_sending)
2055 {
2056 _engineStatisticsPtr->SetLastError(
2057 VE_SENDING, kTraceError,
2058 "SetISACMaxRate() unable to set max rate while sending");
2059 return -1;
2060 }
2061
2062 // Set the maximum instantaneous rate of iSAC (works for both adaptive
2063 // and non-adaptive mode)
2064 if (_audioCodingModule.SetISACMaxRate(rateBps) == -1)
2065 {
2066 _engineStatisticsPtr->SetLastError(
2067 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2068 "SetISACMaxRate() failed to set max rate");
2069 return -1;
2070 }
2071
2072 return 0;
2073}
2074
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002075int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002076Channel::SetISACMaxPayloadSize(int sizeBytes)
2077{
2078 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2079 "Channel::SetISACMaxPayloadSize()");
2080 CodecInst sendCodec;
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00002081 if (_audioCodingModule.SendCodec(&sendCodec) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00002082 {
2083 _engineStatisticsPtr->SetLastError(
2084 VE_CODEC_ERROR, kTraceError,
2085 "SetISACMaxPayloadSize() failed to retrieve send codec");
2086 return -1;
2087 }
2088 if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
2089 {
2090 _engineStatisticsPtr->SetLastError(
2091 VE_CODEC_ERROR, kTraceError,
2092 "SetISACMaxPayloadSize() send codec is not iSAC");
2093 return -1;
2094 }
2095 if (16000 == sendCodec.plfreq)
2096 {
2097 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesWb) ||
2098 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesWb))
2099 {
2100 _engineStatisticsPtr->SetLastError(
2101 VE_INVALID_ARGUMENT, kTraceError,
2102 "SetISACMaxPayloadSize() invalid max payload - 1");
2103 return -1;
2104 }
2105 }
2106 else if (32000 == sendCodec.plfreq)
2107 {
2108 if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesSwb) ||
2109 (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb))
2110 {
2111 _engineStatisticsPtr->SetLastError(
2112 VE_INVALID_ARGUMENT, kTraceError,
2113 "SetISACMaxPayloadSize() invalid max payload - 2");
2114 return -1;
2115 }
2116 }
2117 if (_sending)
2118 {
2119 _engineStatisticsPtr->SetLastError(
2120 VE_SENDING, kTraceError,
2121 "SetISACMaxPayloadSize() unable to set max rate while sending");
2122 return -1;
2123 }
2124
2125 if (_audioCodingModule.SetISACMaxPayloadSize(sizeBytes) == -1)
2126 {
2127 _engineStatisticsPtr->SetLastError(
2128 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2129 "SetISACMaxPayloadSize() failed to set max payload size");
2130 return -1;
2131 }
2132 return 0;
2133}
2134
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002135int32_t Channel::RegisterExternalTransport(Transport& transport)
niklase@google.com470e71d2011-07-07 08:21:25 +00002136{
2137 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2138 "Channel::RegisterExternalTransport()");
2139
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002140 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002141
niklase@google.com470e71d2011-07-07 08:21:25 +00002142 if (_externalTransport)
2143 {
2144 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
2145 kTraceError,
2146 "RegisterExternalTransport() external transport already enabled");
2147 return -1;
2148 }
2149 _externalTransport = true;
2150 _transportPtr = &transport;
2151 return 0;
2152}
2153
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002154int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00002155Channel::DeRegisterExternalTransport()
2156{
2157 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2158 "Channel::DeRegisterExternalTransport()");
2159
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002160 CriticalSectionScoped cs(&_callbackCritSect);
xians@webrtc.org83661f52011-11-25 10:58:15 +00002161
niklase@google.com470e71d2011-07-07 08:21:25 +00002162 if (!_transportPtr)
2163 {
2164 _engineStatisticsPtr->SetLastError(
2165 VE_INVALID_OPERATION, kTraceWarning,
2166 "DeRegisterExternalTransport() external transport already "
2167 "disabled");
2168 return 0;
2169 }
2170 _externalTransport = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002171 _transportPtr = NULL;
2172 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2173 "DeRegisterExternalTransport() all transport is disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002174 return 0;
2175}
2176
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002177int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002178 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2179 "Channel::ReceivedRTPPacket()");
2180
2181 // Store playout timestamp for the received RTP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002182 UpdatePlayoutTimestamp(false);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002183
2184 // Dump the RTP packet to a file (if RTP dump is enabled).
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002185 if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2186 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002187 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2188 VoEId(_instanceId,_channelId),
2189 "Channel::SendPacket() RTP dump to input file failed");
2190 }
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002191 RTPHeader header;
2192 if (!rtp_header_parser_->Parse(reinterpret_cast<const uint8_t*>(data),
2193 static_cast<uint16_t>(length), &header)) {
2194 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice,
2195 VoEId(_instanceId,_channelId),
2196 "IncomingPacket invalid RTP header");
2197 return -1;
2198 }
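  // Look up the sampling frequency registered for this payload type; an
  // unknown payload type causes the packet to be dropped.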
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002199 header.payload_type_frequency =
2200 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
2201 if (header.payload_type_frequency < 0) {
2202 return -1;
2203 }
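  // Update the receive statistics, noting whether the packet was
  // retransmitted and whether it arrived in order.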
2204 bool retransmitted = IsPacketRetransmitted(header);
2205 bool in_order = rtp_receiver_->InOrderPacket(header.sequenceNumber);
2206 rtp_receive_statistics_->IncomingPacket(header, static_cast<uint16_t>(length),
2207 retransmitted, in_order);
2208 PayloadUnion payload_specific;
2209 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
2210 &payload_specific)) {
2211 return -1;
2212 }
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002213 // Deliver the RTP packet to the RTP/RTCP module for parsing.
2214 // The packet will be pushed back to the channel through the
2215 // OnReceivedPayloadData callback, so we don't push it to the ACM here.
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002216 if (!rtp_receiver_->IncomingRtpPacket(&header,
2217 reinterpret_cast<const uint8_t*>(data),
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002218 static_cast<uint16_t>(length),
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002219 payload_specific, in_order)) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002220 _engineStatisticsPtr->SetLastError(
2221 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2222 "Channel::IncomingRTPPacket() RTP packet is invalid");
2223 }
2224 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002225}
2226
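// Heuristic used when RTX is not enabled: a packet counts as a retransmission
// if it is an old packet that falls within the window given by the current
// jitter estimate and the minimum RTT.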
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002227bool Channel::IsPacketRetransmitted(const RTPHeader& header) const {
2228 bool rtx_enabled = false;
2229 uint32_t rtx_ssrc = 0;
2230 int rtx_payload_type = 0;
2231 rtp_receiver_->RTXStatus(&rtx_enabled, &rtx_ssrc, &rtx_payload_type);
2232 if (!rtx_enabled) {
2233 // Check if this is a retransmission.
2234 ReceiveStatistics::RtpReceiveStatistics stats;
2235 if (rtp_receive_statistics_->Statistics(&stats, false)) {
2236 uint16_t min_rtt = 0;
2237 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
2238 return rtp_receiver_->RetransmitOfOldPacket(header, stats.jitter,
2239 min_rtt);
2240 }
2241 }
2242 return false;
2243}
2244
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002245int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002246 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2247 "Channel::ReceivedRTCPPacket()");
2248 // Store playout timestamp for the received RTCP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00002249 UpdatePlayoutTimestamp(true);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002250
2251 // Dump the RTCP packet to a file (if RTP dump is enabled).
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002252 if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2253 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002254 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2255 VoEId(_instanceId,_channelId),
2256 "Channel::SendPacket() RTCP dump to input file failed");
2257 }
2258
2259 // Deliver RTCP packet to RTP/RTCP module for parsing
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00002260 if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data,
2261 (uint16_t)length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00002262 _engineStatisticsPtr->SetLastError(
2263 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2264 "Channel::IncomingRTPPacket() RTCP packet is invalid");
2265 }
2266 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002267}
2268
niklase@google.com470e71d2011-07-07 08:21:25 +00002269int Channel::StartPlayingFileLocally(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002270 bool loop,
2271 FileFormats format,
2272 int startPosition,
2273 float volumeScaling,
2274 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002275 const CodecInst* codecInst)
2276{
2277 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2278 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
2279 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
2280 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2281 startPosition, stopPosition);
2282
2283 if (_outputFilePlaying)
2284 {
2285 _engineStatisticsPtr->SetLastError(
2286 VE_ALREADY_PLAYING, kTraceError,
2287 "StartPlayingFileLocally() is already playing");
2288 return -1;
2289 }
2290
niklase@google.com470e71d2011-07-07 08:21:25 +00002291 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002292 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002293
2294 if (_outputFilePlayerPtr)
2295 {
2296 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2297 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2298 _outputFilePlayerPtr = NULL;
2299 }
2300
2301 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2302 _outputFilePlayerId, (const FileFormats)format);
2303
2304 if (_outputFilePlayerPtr == NULL)
2305 {
2306 _engineStatisticsPtr->SetLastError(
2307 VE_INVALID_ARGUMENT, kTraceError,
henrike@webrtc.org31d30702011-11-18 19:59:32 +00002308 "StartPlayingFileLocally() filePlayer format is not correct");
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002309 return -1;
2310 }
2311
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002312 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002313
2314 if (_outputFilePlayerPtr->StartPlayingFile(
2315 fileName,
2316 loop,
2317 startPosition,
2318 volumeScaling,
2319 notificationTime,
2320 stopPosition,
2321 (const CodecInst*)codecInst) != 0)
2322 {
2323 _engineStatisticsPtr->SetLastError(
2324 VE_BAD_FILE, kTraceError,
2325 "StartPlayingFile() failed to start file playout");
2326 _outputFilePlayerPtr->StopPlayingFile();
2327 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2328 _outputFilePlayerPtr = NULL;
2329 return -1;
2330 }
2331 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2332 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002333 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002334
2335 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002336 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002337
2338 return 0;
2339}
2340
2341int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002342 FileFormats format,
2343 int startPosition,
2344 float volumeScaling,
2345 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002346 const CodecInst* codecInst)
2347{
2348 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2349 "Channel::StartPlayingFileLocally(format=%d,"
2350 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2351 format, volumeScaling, startPosition, stopPosition);
2352
2353 if(stream == NULL)
2354 {
2355 _engineStatisticsPtr->SetLastError(
2356 VE_BAD_FILE, kTraceError,
2357 "StartPlayingFileLocally() NULL as input stream");
2358 return -1;
2359 }
2360
2361
2362 if (_outputFilePlaying)
2363 {
2364 _engineStatisticsPtr->SetLastError(
2365 VE_ALREADY_PLAYING, kTraceError,
2366 "StartPlayingFileLocally() is already playing");
2367 return -1;
2368 }
2369
niklase@google.com470e71d2011-07-07 08:21:25 +00002370 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002371 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002372
2373 // Destroy the old instance
2374 if (_outputFilePlayerPtr)
2375 {
2376 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2377 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2378 _outputFilePlayerPtr = NULL;
2379 }
2380
2381 // Create the instance
2382 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2383 _outputFilePlayerId,
2384 (const FileFormats)format);
2385
2386 if (_outputFilePlayerPtr == NULL)
2387 {
2388 _engineStatisticsPtr->SetLastError(
2389 VE_INVALID_ARGUMENT, kTraceError,
2390 "StartPlayingFileLocally() filePlayer format isnot correct");
2391 return -1;
2392 }
2393
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002394 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002395
2396 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2397 volumeScaling,
2398 notificationTime,
2399 stopPosition, codecInst) != 0)
2400 {
2401 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2402 "StartPlayingFile() failed to "
2403 "start file playout");
2404 _outputFilePlayerPtr->StopPlayingFile();
2405 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2406 _outputFilePlayerPtr = NULL;
2407 return -1;
2408 }
2409 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2410 _outputFilePlaying = true;
niklase@google.com470e71d2011-07-07 08:21:25 +00002411 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002412
2413 if (RegisterFilePlayingToMixer() != 0)
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002414 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002415
niklase@google.com470e71d2011-07-07 08:21:25 +00002416 return 0;
2417}
2418
2419int Channel::StopPlayingFileLocally()
2420{
2421 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2422 "Channel::StopPlayingFileLocally()");
2423
2424 if (!_outputFilePlaying)
2425 {
2426 _engineStatisticsPtr->SetLastError(
2427 VE_INVALID_OPERATION, kTraceWarning,
2428 "StopPlayingFileLocally() isnot playing");
2429 return 0;
2430 }
2431
niklase@google.com470e71d2011-07-07 08:21:25 +00002432 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002433 CriticalSectionScoped cs(&_fileCritSect);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002434
2435 if (_outputFilePlayerPtr->StopPlayingFile() != 0)
2436 {
2437 _engineStatisticsPtr->SetLastError(
2438 VE_STOP_RECORDING_FAILED, kTraceError,
2439 "StopPlayingFile() could not stop playing");
2440 return -1;
2441 }
2442 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2443 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2444 _outputFilePlayerPtr = NULL;
2445 _outputFilePlaying = false;
niklase@google.com470e71d2011-07-07 08:21:25 +00002446 }
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002447 // _fileCritSect cannot be taken while calling
2448 // SetAnonymousMixabilityStatus. Refer to the comments in
2449 // StartPlayingFileLocally(const char* ...) for more details.
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002450 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
2451 {
2452 _engineStatisticsPtr->SetLastError(
2453 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00002454 "StopPlayingFile() failed to stop participant from playing as"
2455 "file in the mixer");
henrike@webrtc.org066f9e52011-10-28 23:15:47 +00002456 return -1;
2457 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002458
2459 return 0;
2460}
2461
2462int Channel::IsPlayingFileLocally() const
2463{
2464 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2465 "Channel::IsPlayingFileLocally()");
2466
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002467 return (int32_t)_outputFilePlaying;
niklase@google.com470e71d2011-07-07 08:21:25 +00002468}
2469
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00002470int Channel::RegisterFilePlayingToMixer()
2471{
2472 // Return success without registering the file playing to the mixer if:
2473 // 1. a file is played before playout has been started on the channel, or
2474 // 2. playout is started without a file playing on the channel.
2475 if (!_playing || !_outputFilePlaying)
2476 {
2477 return 0;
2478 }
2479
2480 // |_fileCritSect| cannot be taken while calling
2481 // SetAnonymousMixabilityStatus() since as soon as the participant is added
2482 // frames can be pulled by the mixer. Since the frames are generated from
2483 // the file, _fileCritSect will be taken. This would result in a deadlock.
2484 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
2485 {
2486 CriticalSectionScoped cs(&_fileCritSect);
2487 _outputFilePlaying = false;
2488 _engineStatisticsPtr->SetLastError(
2489 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2490 "StartPlayingFile() failed to add participant as file to mixer");
2491 _outputFilePlayerPtr->StopPlayingFile();
2492 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2493 _outputFilePlayerPtr = NULL;
2494 return -1;
2495 }
2496
2497 return 0;
2498}
2499
pbos@webrtc.org92135212013-05-14 08:31:39 +00002500int Channel::ScaleLocalFilePlayout(float scale)
niklase@google.com470e71d2011-07-07 08:21:25 +00002501{
2502 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2503 "Channel::ScaleLocalFilePlayout(scale=%5.3f)", scale);
2504
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002505 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002506
2507 if (!_outputFilePlaying)
2508 {
2509 _engineStatisticsPtr->SetLastError(
2510 VE_INVALID_OPERATION, kTraceError,
2511 "ScaleLocalFilePlayout() isnot playing");
2512 return -1;
2513 }
2514 if ((_outputFilePlayerPtr == NULL) ||
2515 (_outputFilePlayerPtr->SetAudioScaling(scale) != 0))
2516 {
2517 _engineStatisticsPtr->SetLastError(
2518 VE_BAD_ARGUMENT, kTraceError,
2519 "SetAudioScaling() failed to scale the playout");
2520 return -1;
2521 }
2522
2523 return 0;
2524}
2525
2526int Channel::GetLocalPlayoutPosition(int& positionMs)
2527{
2528 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2529 "Channel::GetLocalPlayoutPosition(position=?)");
2530
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002531 uint32_t position;
niklase@google.com470e71d2011-07-07 08:21:25 +00002532
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002533 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002534
2535 if (_outputFilePlayerPtr == NULL)
2536 {
2537 _engineStatisticsPtr->SetLastError(
2538 VE_INVALID_OPERATION, kTraceError,
2539 "GetLocalPlayoutPosition() filePlayer instance doesnot exist");
2540 return -1;
2541 }
2542
2543 if (_outputFilePlayerPtr->GetPlayoutPosition(position) != 0)
2544 {
2545 _engineStatisticsPtr->SetLastError(
2546 VE_BAD_FILE, kTraceError,
2547 "GetLocalPlayoutPosition() failed");
2548 return -1;
2549 }
2550 positionMs = position;
2551
2552 return 0;
2553}
2554
2555int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002556 bool loop,
2557 FileFormats format,
2558 int startPosition,
2559 float volumeScaling,
2560 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002561 const CodecInst* codecInst)
2562{
2563 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2564 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
2565 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
2566 "stopPosition=%d)", fileName, loop, format, volumeScaling,
2567 startPosition, stopPosition);
2568
2569 if (_inputFilePlaying)
2570 {
2571 _engineStatisticsPtr->SetLastError(
2572 VE_ALREADY_PLAYING, kTraceWarning,
2573 "StartPlayingFileAsMicrophone() filePlayer is playing");
2574 return 0;
2575 }
2576
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002577 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002578
2579 // Destroy the old instance
2580 if (_inputFilePlayerPtr)
2581 {
2582 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2583 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2584 _inputFilePlayerPtr = NULL;
2585 }
2586
2587 // Create the instance
2588 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2589 _inputFilePlayerId, (const FileFormats)format);
2590
2591 if (_inputFilePlayerPtr == NULL)
2592 {
2593 _engineStatisticsPtr->SetLastError(
2594 VE_INVALID_ARGUMENT, kTraceError,
2595 "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
2596 return -1;
2597 }
2598
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002599 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002600
2601 if (_inputFilePlayerPtr->StartPlayingFile(
2602 fileName,
2603 loop,
2604 startPosition,
2605 volumeScaling,
2606 notificationTime,
2607 stopPosition,
2608 (const CodecInst*)codecInst) != 0)
2609 {
2610 _engineStatisticsPtr->SetLastError(
2611 VE_BAD_FILE, kTraceError,
2612 "StartPlayingFile() failed to start file playout");
2613 _inputFilePlayerPtr->StopPlayingFile();
2614 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2615 _inputFilePlayerPtr = NULL;
2616 return -1;
2617 }
2618 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2619 _inputFilePlaying = true;
2620
2621 return 0;
2622}
2623
2624int Channel::StartPlayingFileAsMicrophone(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00002625 FileFormats format,
2626 int startPosition,
2627 float volumeScaling,
2628 int stopPosition,
niklase@google.com470e71d2011-07-07 08:21:25 +00002629 const CodecInst* codecInst)
2630{
2631 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2632 "Channel::StartPlayingFileAsMicrophone(format=%d, "
2633 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2634 format, volumeScaling, startPosition, stopPosition);
2635
2636 if(stream == NULL)
2637 {
2638 _engineStatisticsPtr->SetLastError(
2639 VE_BAD_FILE, kTraceError,
2640 "StartPlayingFileAsMicrophone NULL as input stream");
2641 return -1;
2642 }
2643
2644 if (_inputFilePlaying)
2645 {
2646 _engineStatisticsPtr->SetLastError(
2647 VE_ALREADY_PLAYING, kTraceWarning,
2648 "StartPlayingFileAsMicrophone() is playing");
2649 return 0;
2650 }
2651
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002652 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002653
2654 // Destroy the old instance
2655 if (_inputFilePlayerPtr)
2656 {
2657 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2658 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2659 _inputFilePlayerPtr = NULL;
2660 }
2661
2662 // Create the instance
2663 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2664 _inputFilePlayerId, (const FileFormats)format);
2665
2666 if (_inputFilePlayerPtr == NULL)
2667 {
2668 _engineStatisticsPtr->SetLastError(
2669 VE_INVALID_ARGUMENT, kTraceError,
2670 "StartPlayingInputFile() filePlayer format isnot correct");
2671 return -1;
2672 }
2673
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002674 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002675
2676 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2677 volumeScaling, notificationTime,
2678 stopPosition, codecInst) != 0)
2679 {
2680 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2681 "StartPlayingFile() failed to start "
2682 "file playout");
2683 _inputFilePlayerPtr->StopPlayingFile();
2684 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2685 _inputFilePlayerPtr = NULL;
2686 return -1;
2687 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002688
niklase@google.com470e71d2011-07-07 08:21:25 +00002689 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2690 _inputFilePlaying = true;
2691
2692 return 0;
2693}
2694
2695int Channel::StopPlayingFileAsMicrophone()
2696{
2697 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2698 "Channel::StopPlayingFileAsMicrophone()");
2699
2700 if (!_inputFilePlaying)
2701 {
2702 _engineStatisticsPtr->SetLastError(
2703 VE_INVALID_OPERATION, kTraceWarning,
2704 "StopPlayingFileAsMicrophone() isnot playing");
2705 return 0;
2706 }
2707
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002708 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002709 if (_inputFilePlayerPtr->StopPlayingFile() != 0)
2710 {
2711 _engineStatisticsPtr->SetLastError(
2712 VE_STOP_RECORDING_FAILED, kTraceError,
2713 "StopPlayingFile() could not stop playing");
2714 return -1;
2715 }
2716 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2717 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2718 _inputFilePlayerPtr = NULL;
2719 _inputFilePlaying = false;
2720
2721 return 0;
2722}
2723
2724int Channel::IsPlayingFileAsMicrophone() const
2725{
2726 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2727 "Channel::IsPlayingFileAsMicrophone()");
2728
2729 return _inputFilePlaying;
2730}
2731
pbos@webrtc.org92135212013-05-14 08:31:39 +00002732int Channel::ScaleFileAsMicrophonePlayout(float scale)
niklase@google.com470e71d2011-07-07 08:21:25 +00002733{
2734 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2735 "Channel::ScaleFileAsMicrophonePlayout(scale=%5.3f)", scale);
2736
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002737 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002738
2739 if (!_inputFilePlaying)
2740 {
2741 _engineStatisticsPtr->SetLastError(
2742 VE_INVALID_OPERATION, kTraceError,
2743 "ScaleFileAsMicrophonePlayout() isnot playing");
2744 return -1;
2745 }
2746
2747 if ((_inputFilePlayerPtr == NULL) ||
2748 (_inputFilePlayerPtr->SetAudioScaling(scale) != 0))
2749 {
2750 _engineStatisticsPtr->SetLastError(
2751 VE_BAD_ARGUMENT, kTraceError,
2752 "SetAudioScaling() failed to scale playout");
2753 return -1;
2754 }
2755
2756 return 0;
2757}
2758
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00002759int Channel::StartRecordingPlayout(const char* fileName,
niklase@google.com470e71d2011-07-07 08:21:25 +00002760 const CodecInst* codecInst)
2761{
2762 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2763 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
2764
2765 if (_outputFileRecording)
2766 {
2767 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2768 "StartRecordingPlayout() is already recording");
2769 return 0;
2770 }
2771
2772 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002773 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +00002774 CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2775
niklas.enbom@webrtc.org40197d72012-03-26 08:45:47 +00002776 if ((codecInst != NULL) &&
2777 ((codecInst->channels < 1) || (codecInst->channels > 2)))
niklase@google.com470e71d2011-07-07 08:21:25 +00002778 {
2779 _engineStatisticsPtr->SetLastError(
2780 VE_BAD_ARGUMENT, kTraceError,
2781 "StartRecordingPlayout() invalid compression");
2782 return(-1);
2783 }
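    // Choose the output file format: raw 16 kHz PCM when no codec is
    // specified, WAV for L16/PCMU/PCMA, and a compressed file otherwise.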
2784 if(codecInst == NULL)
2785 {
2786 format = kFileFormatPcm16kHzFile;
2787 codecInst=&dummyCodec;
2788 }
2789 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2790 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2791 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2792 {
2793 format = kFileFormatWavFile;
2794 }
2795 else
2796 {
2797 format = kFileFormatCompressedFile;
2798 }
2799
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002800 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002801
2802 // Destroy the old instance
2803 if (_outputFileRecorderPtr)
2804 {
2805 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2806 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2807 _outputFileRecorderPtr = NULL;
2808 }
2809
2810 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2811 _outputFileRecorderId, (const FileFormats)format);
2812 if (_outputFileRecorderPtr == NULL)
2813 {
2814 _engineStatisticsPtr->SetLastError(
2815 VE_INVALID_ARGUMENT, kTraceError,
2816 "StartRecordingPlayout() fileRecorder format isnot correct");
2817 return -1;
2818 }
2819
2820 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2821 fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
2822 {
2823 _engineStatisticsPtr->SetLastError(
2824 VE_BAD_FILE, kTraceError,
2825 "StartRecordingAudioFile() failed to start file recording");
2826 _outputFileRecorderPtr->StopRecording();
2827 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2828 _outputFileRecorderPtr = NULL;
2829 return -1;
2830 }
2831 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2832 _outputFileRecording = true;
2833
2834 return 0;
2835}
2836
2837int Channel::StartRecordingPlayout(OutStream* stream,
2838 const CodecInst* codecInst)
2839{
2840 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2841 "Channel::StartRecordingPlayout()");
2842
2843 if (_outputFileRecording)
2844 {
2845 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2846 "StartRecordingPlayout() is already recording");
2847 return 0;
2848 }
2849
2850 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002851 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +00002852 CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2853
2854 if (codecInst != NULL && codecInst->channels != 1)
2855 {
2856 _engineStatisticsPtr->SetLastError(
2857 VE_BAD_ARGUMENT, kTraceError,
2858 "StartRecordingPlayout() invalid compression");
2859 return(-1);
2860 }
2861 if(codecInst == NULL)
2862 {
2863 format = kFileFormatPcm16kHzFile;
2864 codecInst=&dummyCodec;
2865 }
2866 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2867 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2868 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2869 {
2870 format = kFileFormatWavFile;
2871 }
2872 else
2873 {
2874 format = kFileFormatCompressedFile;
2875 }
2876
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002877 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002878
2879 // Destroy the old instance
2880 if (_outputFileRecorderPtr)
2881 {
2882 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2883 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2884 _outputFileRecorderPtr = NULL;
2885 }
2886
2887 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2888 _outputFileRecorderId, (const FileFormats)format);
2889 if (_outputFileRecorderPtr == NULL)
2890 {
2891 _engineStatisticsPtr->SetLastError(
2892 VE_INVALID_ARGUMENT, kTraceError,
2893            "StartRecordingPlayout() fileRecorder format is not correct");
2894 return -1;
2895 }
2896
2897 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2898 notificationTime) != 0)
2899 {
2900 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2901 "StartRecordingPlayout() failed to "
2902 "start file recording");
2903 _outputFileRecorderPtr->StopRecording();
2904 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2905 _outputFileRecorderPtr = NULL;
2906 return -1;
2907 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00002908
niklase@google.com470e71d2011-07-07 08:21:25 +00002909 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2910 _outputFileRecording = true;
2911
2912 return 0;
2913}
2914
2915int Channel::StopRecordingPlayout()
2916{
2917 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
2918 "Channel::StopRecordingPlayout()");
2919
2920 if (!_outputFileRecording)
2921 {
2922 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
2923                     "StopRecordingPlayout() is not recording");
2924 return -1;
2925 }
2926
2927
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00002928 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002929
2930 if (_outputFileRecorderPtr->StopRecording() != 0)
2931 {
2932 _engineStatisticsPtr->SetLastError(
2933 VE_STOP_RECORDING_FAILED, kTraceError,
2934 "StopRecording() could not stop recording");
2935 return(-1);
2936 }
2937 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2938 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2939 _outputFileRecorderPtr = NULL;
2940 _outputFileRecording = false;
2941
2942 return 0;
2943}
2944
2945void
2946Channel::SetMixWithMicStatus(bool mix)
2947{
2948 _mixFileWithMicrophone=mix;
2949}
2950
2951int
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002952Channel::GetSpeechOutputLevel(uint32_t& level) const
niklase@google.com470e71d2011-07-07 08:21:25 +00002953{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002954 int8_t currentLevel = _outputAudioLevel.Level();
2955 level = static_cast<int32_t> (currentLevel);
niklase@google.com470e71d2011-07-07 08:21:25 +00002956 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2957 VoEId(_instanceId,_channelId),
2958 "GetSpeechOutputLevel() => level=%u", level);
2959 return 0;
2960}
2961
2962int
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002963Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const
niklase@google.com470e71d2011-07-07 08:21:25 +00002964{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00002965 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2966 level = static_cast<int32_t> (currentLevel);
niklase@google.com470e71d2011-07-07 08:21:25 +00002967 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2968 VoEId(_instanceId,_channelId),
2969 "GetSpeechOutputLevelFullRange() => level=%u", level);
2970 return 0;
2971}
2972
2973int
2974Channel::SetMute(bool enable)
2975{
2976 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2977 "Channel::SetMute(enable=%d)", enable);
2978 _mute = enable;
2979 return 0;
2980}
2981
2982bool
2983Channel::Mute() const
2984{
2985 return _mute;
2986}
2987
2988int
2989Channel::SetOutputVolumePan(float left, float right)
2990{
2991 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2992 "Channel::SetOutputVolumePan()");
2993 _panLeft = left;
2994 _panRight = right;
2995 return 0;
2996}
2997
2998int
2999Channel::GetOutputVolumePan(float& left, float& right) const
3000{
3001 left = _panLeft;
3002 right = _panRight;
3003 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3004 VoEId(_instanceId,_channelId),
3005 "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
3006 return 0;
3007}
3008
3009int
3010Channel::SetChannelOutputVolumeScaling(float scaling)
3011{
3012 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3013 "Channel::SetChannelOutputVolumeScaling()");
3014 _outputGain = scaling;
3015 return 0;
3016}
3017
3018int
3019Channel::GetChannelOutputVolumeScaling(float& scaling) const
3020{
3021 scaling = _outputGain;
3022 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3023 VoEId(_instanceId,_channelId),
3024 "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
3025 return 0;
3026}
3027
niklase@google.com470e71d2011-07-07 08:21:25 +00003028int
3029Channel::RegisterExternalEncryption(Encryption& encryption)
3030{
3031 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3032 "Channel::RegisterExternalEncryption()");
3033
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003034 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003035
3036 if (_encryptionPtr)
3037 {
3038 _engineStatisticsPtr->SetLastError(
3039 VE_INVALID_OPERATION, kTraceError,
3040 "RegisterExternalEncryption() encryption already enabled");
3041 return -1;
3042 }
3043
3044 _encryptionPtr = &encryption;
3045
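    // A single Encryption callback serves both directions: outgoing packets
    // are encrypted and incoming packets are decrypted by the same object.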
3046 _decrypting = true;
3047 _encrypting = true;
3048
3049 return 0;
3050}
3051
3052int
3053Channel::DeRegisterExternalEncryption()
3054{
3055 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3056 "Channel::DeRegisterExternalEncryption()");
3057
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003058 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003059
3060 if (!_encryptionPtr)
3061 {
3062 _engineStatisticsPtr->SetLastError(
3063 VE_INVALID_OPERATION, kTraceWarning,
3064 "DeRegisterExternalEncryption() encryption already disabled");
3065 return 0;
3066 }
3067
3068 _decrypting = false;
3069 _encrypting = false;
3070
3071 _encryptionPtr = NULL;
3072
3073 return 0;
3074}
3075
3076int Channel::SendTelephoneEventOutband(unsigned char eventCode,
wu@webrtc.org822fbd82013-08-15 23:38:54 +00003077 int lengthMs, int attenuationDb,
3078 bool playDtmfEvent)
niklase@google.com470e71d2011-07-07 08:21:25 +00003079{
3080 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3081 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
3082 playDtmfEvent);
3083
3084 _playOutbandDtmfEvent = playDtmfEvent;
3085
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003086 if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
niklase@google.com470e71d2011-07-07 08:21:25 +00003087 attenuationDb) != 0)
3088 {
3089 _engineStatisticsPtr->SetLastError(
3090 VE_SEND_DTMF_FAILED,
3091 kTraceWarning,
3092 "SendTelephoneEventOutband() failed to send event");
3093 return -1;
3094 }
3095 return 0;
3096}
3097
3098int Channel::SendTelephoneEventInband(unsigned char eventCode,
3099 int lengthMs,
3100 int attenuationDb,
3101 bool playDtmfEvent)
3102{
3103 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3104 "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
3105 playDtmfEvent);
3106
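    // Queue the tone here; it is mixed into the outgoing audio by
    // InsertInbandDtmfTone() during PrepareEncodeAndSend().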
3107 _playInbandDtmfEvent = playDtmfEvent;
3108 _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
3109
3110 return 0;
3111}
3112
3113int
3114Channel::SetDtmfPlayoutStatus(bool enable)
3115{
3116 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3117 "Channel::SetDtmfPlayoutStatus()");
3118 if (_audioCodingModule.SetDtmfPlayoutStatus(enable) != 0)
3119 {
3120 _engineStatisticsPtr->SetLastError(
3121 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
3122 "SetDtmfPlayoutStatus() failed to set Dtmf playout");
3123 return -1;
3124 }
3125 return 0;
3126}
3127
3128bool
3129Channel::DtmfPlayoutStatus() const
3130{
3131 return _audioCodingModule.DtmfPlayoutStatus();
3132}
3133
3134int
3135Channel::SetSendTelephoneEventPayloadType(unsigned char type)
3136{
3137 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3138 "Channel::SetSendTelephoneEventPayloadType()");
andrew@webrtc.orgf81f9f82011-08-19 22:56:22 +00003139 if (type > 127)
niklase@google.com470e71d2011-07-07 08:21:25 +00003140 {
3141 _engineStatisticsPtr->SetLastError(
3142 VE_INVALID_ARGUMENT, kTraceError,
3143 "SetSendTelephoneEventPayloadType() invalid type");
3144 return -1;
3145 }
pbos@webrtc.org5b10d8f2013-07-11 15:50:07 +00003146 CodecInst codec = {};
pwestin@webrtc.org1da1ce02011-10-13 15:19:55 +00003147 codec.plfreq = 8000;
3148 codec.pltype = type;
3149 memcpy(codec.plname, "telephone-event", 16);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003150 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003151 {
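        // The payload type may already be taken (e.g. after a renegotiation);
        // deregister it and retry the registration once before failing.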
henrika@webrtc.org4392d5f2013-04-17 07:34:25 +00003152 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
3153 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
3154 _engineStatisticsPtr->SetLastError(
3155 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3156 "SetSendTelephoneEventPayloadType() failed to register send"
3157            " payload type");
3158 return -1;
3159 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003160 }
3161 _sendTelephoneEventPayloadType = type;
3162 return 0;
3163}
3164
3165int
3166Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
3167{
3168 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3169 "Channel::GetSendTelephoneEventPayloadType()");
3170 type = _sendTelephoneEventPayloadType;
3171 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3172 VoEId(_instanceId,_channelId),
3173 "GetSendTelephoneEventPayloadType() => type=%u", type);
3174 return 0;
3175}
3176
niklase@google.com470e71d2011-07-07 08:21:25 +00003177int
3178Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
3179{
3180 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3181 "Channel::UpdateRxVadDetection()");
3182
3183    int vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003186
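    // Notify the registered observer only when the VAD decision changes state.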
3187 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
3188 {
3189 OnRxVadDetected(vadDecision);
3190 _oldVadDecision = vadDecision;
3191 }
3192
3193 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3194 "Channel::UpdateRxVadDetection() => vadDecision=%d",
3195 vadDecision);
3196 return 0;
3197}
3198
3199int
3200Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
3201{
3202 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3203 "Channel::RegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003204 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003205
3206 if (_rxVadObserverPtr)
3207 {
3208 _engineStatisticsPtr->SetLastError(
3209 VE_INVALID_OPERATION, kTraceError,
3210 "RegisterRxVadObserver() observer already enabled");
3211 return -1;
3212 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003213 _rxVadObserverPtr = &observer;
3214 _RxVadDetection = true;
3215 return 0;
3216}
3217
3218int
3219Channel::DeRegisterRxVadObserver()
3220{
3221 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3222 "Channel::DeRegisterRxVadObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003223 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003224
3225 if (!_rxVadObserverPtr)
3226 {
3227 _engineStatisticsPtr->SetLastError(
3228 VE_INVALID_OPERATION, kTraceWarning,
3229 "DeRegisterRxVadObserver() observer already disabled");
3230 return 0;
3231 }
3232 _rxVadObserverPtr = NULL;
3233 _RxVadDetection = false;
3234 return 0;
3235}
3236
3237int
3238Channel::VoiceActivityIndicator(int &activity)
3239{
3240 activity = _sendFrameType;
3241
3242 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3243 "Channel::VoiceActivityIndicator(indicator=%d)", activity);
3244 return 0;
3245}
3246
3247#ifdef WEBRTC_VOICE_ENGINE_AGC
3248
3249int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003250Channel::SetRxAgcStatus(bool enable, AgcModes mode)
niklase@google.com470e71d2011-07-07 08:21:25 +00003251{
3252 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3253 "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
3254 (int)enable, (int)mode);
3255
3256 GainControl::Mode agcMode(GainControl::kFixedDigital);
3257 switch (mode)
3258 {
3259 case kAgcDefault:
3260 agcMode = GainControl::kAdaptiveDigital;
3261 break;
3262 case kAgcUnchanged:
3263 agcMode = _rxAudioProcessingModulePtr->gain_control()->mode();
3264 break;
3265 case kAgcFixedDigital:
3266 agcMode = GainControl::kFixedDigital;
3267 break;
3268 case kAgcAdaptiveDigital:
3269            agcMode = GainControl::kAdaptiveDigital;
3270 break;
3271 default:
3272 _engineStatisticsPtr->SetLastError(
3273 VE_INVALID_ARGUMENT, kTraceError,
3274 "SetRxAgcStatus() invalid Agc mode");
3275 return -1;
3276 }
3277
3278 if (_rxAudioProcessingModulePtr->gain_control()->set_mode(agcMode) != 0)
3279 {
3280 _engineStatisticsPtr->SetLastError(
3281 VE_APM_ERROR, kTraceError,
3282 "SetRxAgcStatus() failed to set Agc mode");
3283 return -1;
3284 }
3285 if (_rxAudioProcessingModulePtr->gain_control()->Enable(enable) != 0)
3286 {
3287 _engineStatisticsPtr->SetLastError(
3288 VE_APM_ERROR, kTraceError,
3289 "SetRxAgcStatus() failed to set Agc state");
3290 return -1;
3291 }
3292
3293 _rxAgcIsEnabled = enable;
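    // The receive-side APM is engaged whenever at least one of AGC or NS is
    // active on this channel.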
niklase@google.com470e71d2011-07-07 08:21:25 +00003294    _rxApmIsEnabled = _rxAgcIsEnabled || _rxNsIsEnabled;
3295
3296 return 0;
3297}
3298
3299int
3300Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
3301{
3302 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3303 "Channel::GetRxAgcStatus(enable=?, mode=?)");
3304
3305 bool enable = _rxAudioProcessingModulePtr->gain_control()->is_enabled();
3306 GainControl::Mode agcMode =
3307 _rxAudioProcessingModulePtr->gain_control()->mode();
3308
3309 enabled = enable;
3310
3311 switch (agcMode)
3312 {
3313 case GainControl::kFixedDigital:
3314 mode = kAgcFixedDigital;
3315 break;
3316 case GainControl::kAdaptiveDigital:
3317 mode = kAgcAdaptiveDigital;
3318 break;
3319 default:
3320 _engineStatisticsPtr->SetLastError(
3321 VE_APM_ERROR, kTraceError,
3322 "GetRxAgcStatus() invalid Agc mode");
3323 return -1;
3324 }
3325
3326 return 0;
3327}
3328
3329int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003330Channel::SetRxAgcConfig(AgcConfig config)
niklase@google.com470e71d2011-07-07 08:21:25 +00003331{
3332 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3333 "Channel::SetRxAgcConfig()");
3334
3335 if (_rxAudioProcessingModulePtr->gain_control()->set_target_level_dbfs(
3336 config.targetLeveldBOv) != 0)
3337 {
3338 _engineStatisticsPtr->SetLastError(
3339 VE_APM_ERROR, kTraceError,
3340 "SetRxAgcConfig() failed to set target peak |level|"
3341            " (or envelope) of the Agc");
3342 return -1;
3343 }
3344 if (_rxAudioProcessingModulePtr->gain_control()->set_compression_gain_db(
3345 config.digitalCompressionGaindB) != 0)
3346 {
3347 _engineStatisticsPtr->SetLastError(
3348 VE_APM_ERROR, kTraceError,
3349 "SetRxAgcConfig() failed to set the range in |gain| the"
3350 " digital compression stage may apply");
3351 return -1;
3352 }
3353 if (_rxAudioProcessingModulePtr->gain_control()->enable_limiter(
3354 config.limiterEnable) != 0)
3355 {
3356 _engineStatisticsPtr->SetLastError(
3357 VE_APM_ERROR, kTraceError,
3358 "SetRxAgcConfig() failed to set hard limiter to the signal");
3359 return -1;
3360 }
3361
3362 return 0;
3363}
3364
3365int
3366Channel::GetRxAgcConfig(AgcConfig& config)
3367{
3368 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3369                 "Channel::GetRxAgcConfig(config=?)");
3370
3371 config.targetLeveldBOv =
3372 _rxAudioProcessingModulePtr->gain_control()->target_level_dbfs();
3373 config.digitalCompressionGaindB =
3374 _rxAudioProcessingModulePtr->gain_control()->compression_gain_db();
3375 config.limiterEnable =
3376 _rxAudioProcessingModulePtr->gain_control()->is_limiter_enabled();
3377
3378 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3379 VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
3380 "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
3381 " limiterEnable=%d",
3382 config.targetLeveldBOv,
3383 config.digitalCompressionGaindB,
3384 config.limiterEnable);
3385
3386 return 0;
3387}
3388
3389#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
3390
3391#ifdef WEBRTC_VOICE_ENGINE_NR
3392
3393int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003394Channel::SetRxNsStatus(bool enable, NsModes mode)
niklase@google.com470e71d2011-07-07 08:21:25 +00003395{
3396 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3397 "Channel::SetRxNsStatus(enable=%d, mode=%d)",
3398 (int)enable, (int)mode);
3399
3400 NoiseSuppression::Level nsLevel(
3401 (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE);
3402 switch (mode)
3403 {
3404
3405 case kNsDefault:
3406 nsLevel = (NoiseSuppression::Level)
3407 WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE;
3408 break;
3409 case kNsUnchanged:
3410 nsLevel = _rxAudioProcessingModulePtr->noise_suppression()->level();
3411 break;
3412 case kNsConference:
3413 nsLevel = NoiseSuppression::kHigh;
3414 break;
3415 case kNsLowSuppression:
3416 nsLevel = NoiseSuppression::kLow;
3417 break;
3418 case kNsModerateSuppression:
3419 nsLevel = NoiseSuppression::kModerate;
3420 break;
3421 case kNsHighSuppression:
3422 nsLevel = NoiseSuppression::kHigh;
3423 break;
3424 case kNsVeryHighSuppression:
3425 nsLevel = NoiseSuppression::kVeryHigh;
3426 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003427 }
3428
3429 if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(nsLevel)
3430 != 0)
3431 {
3432 _engineStatisticsPtr->SetLastError(
3433 VE_APM_ERROR, kTraceError,
3434            "SetRxNsStatus() failed to set Ns level");
3435 return -1;
3436 }
3437 if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(enable) != 0)
3438 {
3439 _engineStatisticsPtr->SetLastError(
3440 VE_APM_ERROR, kTraceError,
3441            "SetRxNsStatus() failed to set Ns state");
3442 return -1;
3443 }
3444
3445 _rxNsIsEnabled = enable;
3446    _rxApmIsEnabled = _rxAgcIsEnabled || _rxNsIsEnabled;
3447
3448 return 0;
3449}
3450
3451int
3452Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
3453{
3454 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3455 "Channel::GetRxNsStatus(enable=?, mode=?)");
3456
3457 bool enable =
3458 _rxAudioProcessingModulePtr->noise_suppression()->is_enabled();
3459 NoiseSuppression::Level ncLevel =
3460 _rxAudioProcessingModulePtr->noise_suppression()->level();
3461
3462 enabled = enable;
3463
3464 switch (ncLevel)
3465 {
3466 case NoiseSuppression::kLow:
3467 mode = kNsLowSuppression;
3468 break;
3469 case NoiseSuppression::kModerate:
3470 mode = kNsModerateSuppression;
3471 break;
3472 case NoiseSuppression::kHigh:
3473 mode = kNsHighSuppression;
3474 break;
3475 case NoiseSuppression::kVeryHigh:
3476 mode = kNsVeryHighSuppression;
3477 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003478 }
3479
3480 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3481 VoEId(_instanceId,_channelId),
3482 "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
3483 return 0;
3484}
3485
3486#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
3487
3488int
3489Channel::RegisterRTPObserver(VoERTPObserver& observer)
3490{
3491 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3492 "Channel::RegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003493 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003494
3495 if (_rtpObserverPtr)
3496 {
3497 _engineStatisticsPtr->SetLastError(
3498 VE_INVALID_OPERATION, kTraceError,
3499 "RegisterRTPObserver() observer already enabled");
3500 return -1;
3501 }
3502
3503 _rtpObserverPtr = &observer;
3504 _rtpObserver = true;
3505
3506 return 0;
3507}
3508
3509int
3510Channel::DeRegisterRTPObserver()
3511{
3512 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3513 "Channel::DeRegisterRTPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003514 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003515
3516 if (!_rtpObserverPtr)
3517 {
3518 _engineStatisticsPtr->SetLastError(
3519 VE_INVALID_OPERATION, kTraceWarning,
3520 "DeRegisterRTPObserver() observer already disabled");
3521 return 0;
3522 }
3523
3524 _rtpObserver = false;
3525 _rtpObserverPtr = NULL;
3526
3527 return 0;
3528}
3529
3530int
3531Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
3532{
3533 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3534 "Channel::RegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003535 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003536
3537 if (_rtcpObserverPtr)
3538 {
3539 _engineStatisticsPtr->SetLastError(
3540 VE_INVALID_OPERATION, kTraceError,
3541 "RegisterRTCPObserver() observer already enabled");
3542 return -1;
3543 }
3544
3545 _rtcpObserverPtr = &observer;
3546 _rtcpObserver = true;
3547
3548 return 0;
3549}
3550
3551int
3552Channel::DeRegisterRTCPObserver()
3553{
3554 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3555 "Channel::DeRegisterRTCPObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00003556 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003557
3558 if (!_rtcpObserverPtr)
3559 {
3560 _engineStatisticsPtr->SetLastError(
3561 VE_INVALID_OPERATION, kTraceWarning,
3562 "DeRegisterRTCPObserver() observer already disabled");
3563 return 0;
3564 }
3565
3566 _rtcpObserver = false;
3567 _rtcpObserverPtr = NULL;
3568
3569 return 0;
3570}
3571
3572int
3573Channel::SetLocalSSRC(unsigned int ssrc)
3574{
3575 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3576 "Channel::SetLocalSSRC()");
3577 if (_sending)
3578 {
3579 _engineStatisticsPtr->SetLastError(
3580 VE_ALREADY_SENDING, kTraceError,
3581 "SetLocalSSRC() already sending");
3582 return -1;
3583 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003584 if (_rtpRtcpModule->SetSSRC(ssrc) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003585 {
3586 _engineStatisticsPtr->SetLastError(
3587 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3588 "SetLocalSSRC() failed to set SSRC");
3589 return -1;
3590 }
3591 return 0;
3592}
3593
3594int
3595Channel::GetLocalSSRC(unsigned int& ssrc)
3596{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003597 ssrc = _rtpRtcpModule->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003598 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3599 VoEId(_instanceId,_channelId),
3600 "GetLocalSSRC() => ssrc=%lu", ssrc);
3601 return 0;
3602}
3603
3604int
3605Channel::GetRemoteSSRC(unsigned int& ssrc)
3606{
wu@webrtc.org822fbd82013-08-15 23:38:54 +00003607 ssrc = rtp_receiver_->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00003608 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3609 VoEId(_instanceId,_channelId),
3610 "GetRemoteSSRC() => ssrc=%lu", ssrc);
3611 return 0;
3612}
3613
3614int
3615Channel::GetRemoteCSRCs(unsigned int arrCSRC[15])
3616{
3617 if (arrCSRC == NULL)
3618 {
3619 _engineStatisticsPtr->SetLastError(
3620 VE_INVALID_ARGUMENT, kTraceError,
3621 "GetRemoteCSRCs() invalid array argument");
3622 return -1;
3623 }
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003624 uint32_t arrOfCSRC[kRtpCsrcSize];
3625 int32_t CSRCs(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003626 CSRCs = _rtpRtcpModule->CSRCs(arrOfCSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00003627 if (CSRCs > 0)
3628 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003629 memcpy(arrCSRC, arrOfCSRC, CSRCs * sizeof(uint32_t));
niklase@google.com470e71d2011-07-07 08:21:25 +00003630 for (int i = 0; i < (int) CSRCs; i++)
3631 {
3632 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3633 VoEId(_instanceId, _channelId),
3634 "GetRemoteCSRCs() => arrCSRC[%d]=%lu", i, arrCSRC[i]);
3635 }
3636 } else
3637 {
3638 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3639 VoEId(_instanceId, _channelId),
3640 "GetRemoteCSRCs() => list is empty!");
3641 }
3642 return CSRCs;
3643}
3644
3645int
3646Channel::SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID)
3647{
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00003648 if (_rtpAudioProc.get() == NULL)
3649 {
3650 _rtpAudioProc.reset(AudioProcessing::Create(VoEModuleId(_instanceId,
3651 _channelId)));
3652 if (_rtpAudioProc.get() == NULL)
3653 {
3654 _engineStatisticsPtr->SetLastError(VE_NO_MEMORY, kTraceCritical,
3655 "Failed to create AudioProcessing");
3656 return -1;
3657 }
3658 }
3659
3660 if (_rtpAudioProc->level_estimator()->Enable(enable) !=
3661 AudioProcessing::kNoError)
3662 {
3663 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceWarning,
3664 "Failed to enable AudioProcessing::level_estimator()");
3665 }
3666
niklase@google.com470e71d2011-07-07 08:21:25 +00003667 _includeAudioLevelIndication = enable;
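  // Keep the receive-side RTP header parser in sync so that incoming
  // audio-level extensions are parsed with the same extension ID.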
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00003668 if (enable) {
3669 rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
3670 ID);
3671 } else {
3672 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
3673 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003674 return _rtpRtcpModule->SetRTPAudioLevelIndicationStatus(enable, ID);
niklase@google.com470e71d2011-07-07 08:21:25 +00003675}
3676int
3677Channel::GetRTPAudioLevelIndicationStatus(bool& enabled, unsigned char& ID)
3678{
    // Query the module first so the trace below reports the retrieved values
    // rather than the caller's uninitialized output arguments.
    int ret = _rtpRtcpModule->GetRTPAudioLevelIndicationStatus(enabled, ID);
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
                 VoEId(_instanceId,_channelId),
                 "GetRTPAudioLevelIndicationStatus() => enabled=%d, ID=%u",
                 enabled, ID);
    return ret;
niklase@google.com470e71d2011-07-07 08:21:25 +00003684}
3685
3686int
3687Channel::SetRTCPStatus(bool enable)
3688{
3689 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3690 "Channel::SetRTCPStatus()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003691 if (_rtpRtcpModule->SetRTCPStatus(enable ?
niklase@google.com470e71d2011-07-07 08:21:25 +00003692 kRtcpCompound : kRtcpOff) != 0)
3693 {
3694 _engineStatisticsPtr->SetLastError(
3695 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3696 "SetRTCPStatus() failed to set RTCP status");
3697 return -1;
3698 }
3699 return 0;
3700}
3701
3702int
3703Channel::GetRTCPStatus(bool& enabled)
3704{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003705 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003706 enabled = (method != kRtcpOff);
3707 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3708 VoEId(_instanceId,_channelId),
3709 "GetRTCPStatus() => enabled=%d", enabled);
3710 return 0;
3711}
3712
3713int
3714Channel::SetRTCP_CNAME(const char cName[256])
3715{
3716 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3717 "Channel::SetRTCP_CNAME()");
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003718 if (_rtpRtcpModule->SetCNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003719 {
3720 _engineStatisticsPtr->SetLastError(
3721 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3722 "SetRTCP_CNAME() failed to set RTCP CNAME");
3723 return -1;
3724 }
3725 return 0;
3726}
3727
3728int
3729Channel::GetRTCP_CNAME(char cName[256])
3730{
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003731 if (_rtpRtcpModule->CNAME(cName) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003732 {
3733 _engineStatisticsPtr->SetLastError(
3734 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3735 "GetRTCP_CNAME() failed to retrieve RTCP CNAME");
3736 return -1;
3737 }
3738 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3739 VoEId(_instanceId, _channelId),
3740 "GetRTCP_CNAME() => cName=%s", cName);
3741 return 0;
3742}
3743
3744int
3745Channel::GetRemoteRTCP_CNAME(char cName[256])
3746{
3747 if (cName == NULL)
3748 {
3749 _engineStatisticsPtr->SetLastError(
3750 VE_INVALID_ARGUMENT, kTraceError,
3751 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
3752 return -1;
3753 }
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00003754 char cname[RTCP_CNAME_SIZE];
wu@webrtc.org822fbd82013-08-15 23:38:54 +00003755 const uint32_t remoteSSRC = rtp_receiver_->SSRC();
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003756 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003757 {
3758 _engineStatisticsPtr->SetLastError(
3759 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
3760 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
3761 return -1;
3762 }
3763 strcpy(cName, cname);
3764 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3765 VoEId(_instanceId, _channelId),
3766 "GetRemoteRTCP_CNAME() => cName=%s", cName);
3767 return 0;
3768}
3769
3770int
3771Channel::GetRemoteRTCPData(
3772 unsigned int& NTPHigh,
3773 unsigned int& NTPLow,
3774 unsigned int& timestamp,
3775 unsigned int& playoutTimestamp,
3776 unsigned int* jitter,
3777 unsigned short* fractionLost)
3778{
3779 // --- Information from sender info in received Sender Reports
3780
3781 RTCPSenderInfo senderInfo;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003782 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00003783 {
3784 _engineStatisticsPtr->SetLastError(
3785 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003786 "GetRemoteRTCPData() failed to retrieve sender info for remote "
niklase@google.com470e71d2011-07-07 08:21:25 +00003787 "side");
3788 return -1;
3789 }
3790
3791 // We only utilize 12 out of 20 bytes in the sender info (ignores packet
3792 // and octet count)
3793 NTPHigh = senderInfo.NTPseconds;
3794 NTPLow = senderInfo.NTPfraction;
3795 timestamp = senderInfo.RTPtimeStamp;
3796
3797 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3798 VoEId(_instanceId, _channelId),
3799 "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
3800 "timestamp=%lu",
3801 NTPHigh, NTPLow, timestamp);
3802
3803 // --- Locally derived information
3804
3805 // This value is updated on each incoming RTCP packet (0 when no packet
3806 // has been received)
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003807 playoutTimestamp = playout_timestamp_rtcp_;
niklase@google.com470e71d2011-07-07 08:21:25 +00003808
3809 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3810 VoEId(_instanceId, _channelId),
3811 "GetRemoteRTCPData() => playoutTimestamp=%lu",
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003812 playout_timestamp_rtcp_);
niklase@google.com470e71d2011-07-07 08:21:25 +00003813
3814 if (NULL != jitter || NULL != fractionLost)
3815 {
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003816 // Get all RTCP receiver report blocks that have been received on this
3817 // channel. If we receive RTP packets from a remote source we know the
3818 // remote SSRC and use the report block from him.
3819 // Otherwise use the first report block.
3820 std::vector<RTCPReportBlock> remote_stats;
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003821 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003822 remote_stats.empty()) {
3823 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3824 VoEId(_instanceId, _channelId),
3825 "GetRemoteRTCPData() failed to measure statistics due"
3826 " to lack of received RTP and/or RTCP packets");
3827 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003828 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003829
wu@webrtc.org822fbd82013-08-15 23:38:54 +00003830 uint32_t remoteSSRC = rtp_receiver_->SSRC();
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003831 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
3832 for (; it != remote_stats.end(); ++it) {
3833 if (it->remoteSSRC == remoteSSRC)
3834 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00003835 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003836
3837 if (it == remote_stats.end()) {
3838 // If we have not received any RTCP packets from this SSRC it probably
3839 // means that we have not received any RTP packets.
3840 // Use the first received report block instead.
3841 it = remote_stats.begin();
3842 remoteSSRC = it->remoteSSRC;
niklase@google.com470e71d2011-07-07 08:21:25 +00003843 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003844
xians@webrtc.org79af7342012-01-31 12:22:14 +00003845 if (jitter) {
3846 *jitter = it->jitter;
3847 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3848 VoEId(_instanceId, _channelId),
3849 "GetRemoteRTCPData() => jitter = %lu", *jitter);
3850 }
perkj@webrtc.orgce5990c2012-01-11 13:00:08 +00003851
xians@webrtc.org79af7342012-01-31 12:22:14 +00003852 if (fractionLost) {
3853 *fractionLost = it->fractionLost;
3854 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3855 VoEId(_instanceId, _channelId),
3856 "GetRemoteRTCPData() => fractionLost = %lu",
3857 *fractionLost);
3858 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003859 }
3860 return 0;
3861}
3862
3863int
pbos@webrtc.org92135212013-05-14 08:31:39 +00003864Channel::SendApplicationDefinedRTCPPacket(unsigned char subType,
niklase@google.com470e71d2011-07-07 08:21:25 +00003865 unsigned int name,
3866 const char* data,
3867 unsigned short dataLengthInBytes)
3868{
3869 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3870 "Channel::SendApplicationDefinedRTCPPacket()");
3871 if (!_sending)
3872 {
3873 _engineStatisticsPtr->SetLastError(
3874 VE_NOT_SENDING, kTraceError,
3875 "SendApplicationDefinedRTCPPacket() not sending");
3876 return -1;
3877 }
3878 if (NULL == data)
3879 {
3880 _engineStatisticsPtr->SetLastError(
3881 VE_INVALID_ARGUMENT, kTraceError,
3882 "SendApplicationDefinedRTCPPacket() invalid data value");
3883 return -1;
3884 }
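    // RFC 3550 requires the application-dependent data in an APP packet to be
    // a multiple of 32 bits long.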
3885 if (dataLengthInBytes % 4 != 0)
3886 {
3887 _engineStatisticsPtr->SetLastError(
3888 VE_INVALID_ARGUMENT, kTraceError,
3889 "SendApplicationDefinedRTCPPacket() invalid length value");
3890 return -1;
3891 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003892 RTCPMethod status = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00003893 if (status == kRtcpOff)
3894 {
3895 _engineStatisticsPtr->SetLastError(
3896 VE_RTCP_ERROR, kTraceError,
3897 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
3898 return -1;
3899 }
3900
3901 // Create and schedule the RTCP APP packet for transmission
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00003902 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
niklase@google.com470e71d2011-07-07 08:21:25 +00003903 subType,
3904 name,
3905 (const unsigned char*) data,
3906 dataLengthInBytes) != 0)
3907 {
3908 _engineStatisticsPtr->SetLastError(
3909 VE_SEND_ERROR, kTraceError,
3910 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
3911 return -1;
3912 }
3913 return 0;
3914}
3915
3916int
3917Channel::GetRTPStatistics(
3918 unsigned int& averageJitterMs,
3919 unsigned int& maxJitterMs,
3920 unsigned int& discardedPackets)
3921{
niklase@google.com470e71d2011-07-07 08:21:25 +00003922    // The jitter statistics are updated for each received RTP packet.
wu@webrtc.org822fbd82013-08-15 23:38:54 +00003924 ReceiveStatistics::RtpReceiveStatistics statistics;
3925 if (!rtp_receive_statistics_->Statistics(
3926 &statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
3927 _engineStatisticsPtr->SetLastError(
3928 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
3929 "GetRTPStatistics() failed to read RTP statistics from the "
3930 "RTP/RTCP module");
niklase@google.com470e71d2011-07-07 08:21:25 +00003931 }
3932
pbos@webrtc.org6141e132013-04-09 10:09:10 +00003933 const int32_t playoutFrequency =
niklase@google.com470e71d2011-07-07 08:21:25 +00003934 _audioCodingModule.PlayoutFrequency();
3935 if (playoutFrequency > 0)
3936 {
3937 // Scale RTP statistics given the current playout frequency
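        // (interarrival jitter is expressed in RTP timestamp units, so dividing
        // by samples-per-millisecond converts it to milliseconds).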
wu@webrtc.org822fbd82013-08-15 23:38:54 +00003938 maxJitterMs = statistics.max_jitter / (playoutFrequency / 1000);
3939 averageJitterMs = statistics.jitter / (playoutFrequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00003940 }
3941
3942 discardedPackets = _numberOfDiscardedPackets;
3943
3944 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3945 VoEId(_instanceId, _channelId),
3946 "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00003947 " discardedPackets = %lu)",
niklase@google.com470e71d2011-07-07 08:21:25 +00003948 averageJitterMs, maxJitterMs, discardedPackets);
3949 return 0;
3950}
3951
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00003952int Channel::GetRemoteRTCPSenderInfo(SenderInfo* sender_info) {
3953 if (sender_info == NULL) {
3954 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
3955 "GetRemoteRTCPSenderInfo() invalid sender_info.");
3956 return -1;
3957 }
3958
3959 // Get the sender info from the latest received RTCP Sender Report.
3960 RTCPSenderInfo rtcp_sender_info;
3961 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_sender_info) != 0) {
3962 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3963 "GetRemoteRTCPSenderInfo() failed to read RTCP SR sender info.");
3964 return -1;
3965 }
3966
3967 sender_info->NTP_timestamp_high = rtcp_sender_info.NTPseconds;
3968 sender_info->NTP_timestamp_low = rtcp_sender_info.NTPfraction;
3969 sender_info->RTP_timestamp = rtcp_sender_info.RTPtimeStamp;
3970 sender_info->sender_packet_count = rtcp_sender_info.sendPacketCount;
3971 sender_info->sender_octet_count = rtcp_sender_info.sendOctetCount;
3972 return 0;
3973}
3974
3975int Channel::GetRemoteRTCPReportBlocks(
3976 std::vector<ReportBlock>* report_blocks) {
3977 if (report_blocks == NULL) {
3978 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
3979 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
3980 return -1;
3981 }
3982
3983 // Get the report blocks from the latest received RTCP Sender or Receiver
3984 // Report. Each element in the vector contains the sender's SSRC and a
3985 // report block according to RFC 3550.
3986 std::vector<RTCPReportBlock> rtcp_report_blocks;
3987 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
3988 _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3989 "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
3990 return -1;
3991 }
3992
3993 if (rtcp_report_blocks.empty())
3994 return 0;
3995
3996 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
3997 for (; it != rtcp_report_blocks.end(); ++it) {
3998 ReportBlock report_block;
3999 report_block.sender_SSRC = it->remoteSSRC;
4000 report_block.source_SSRC = it->sourceSSRC;
4001 report_block.fraction_lost = it->fractionLost;
4002 report_block.cumulative_num_packets_lost = it->cumulativeLost;
4003 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
4004 report_block.interarrival_jitter = it->jitter;
4005 report_block.last_SR_timestamp = it->lastSR;
4006 report_block.delay_since_last_SR = it->delaySinceLastSR;
4007 report_blocks->push_back(report_block);
4008 }
4009 return 0;
4010}
4011
niklase@google.com470e71d2011-07-07 08:21:25 +00004012int
4013Channel::GetRTPStatistics(CallStatistics& stats)
4014{
niklase@google.com470e71d2011-07-07 08:21:25 +00004015 // --- Part one of the final structure (four values)
4016
4017    // The jitter statistics are updated for each received RTP packet.
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004019 ReceiveStatistics::RtpReceiveStatistics statistics;
4020 if (!rtp_receive_statistics_->Statistics(
4021 &statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
4022 _engineStatisticsPtr->SetLastError(
4023 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
4024 "GetRTPStatistics() failed to read RTP statistics from the "
4025 "RTP/RTCP module");
niklase@google.com470e71d2011-07-07 08:21:25 +00004026 }
4027
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004028 stats.fractionLost = statistics.fraction_lost;
4029 stats.cumulativeLost = statistics.cumulative_lost;
4030 stats.extendedMax = statistics.extended_max_sequence_number;
4031 stats.jitterSamples = statistics.jitter;
niklase@google.com470e71d2011-07-07 08:21:25 +00004032
4033 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4034 VoEId(_instanceId, _channelId),
4035 "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004036 " extendedMax=%lu, jitterSamples=%li)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004037 stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
4038 stats.jitterSamples);
4039
4040 // --- Part two of the final structure (one value)
4041
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004042 uint16_t RTT(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004043 RTCPMethod method = _rtpRtcpModule->RTCP();
niklase@google.com470e71d2011-07-07 08:21:25 +00004044 if (method == kRtcpOff)
4045 {
4046 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4047 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004048 "GetRTPStatistics() RTCP is disabled => valid RTT "
niklase@google.com470e71d2011-07-07 08:21:25 +00004049 "measurements cannot be retrieved");
4050 } else
4051 {
4052 // The remote SSRC will be zero if no RTP packet has been received.
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004053 uint32_t remoteSSRC = rtp_receiver_->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004054 if (remoteSSRC > 0)
4055 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004056 uint16_t avgRTT(0);
4057 uint16_t maxRTT(0);
4058 uint16_t minRTT(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004059
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004060 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT)
niklase@google.com470e71d2011-07-07 08:21:25 +00004061 != 0)
4062 {
4063 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4064 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004065 "GetRTPStatistics() failed to retrieve RTT from "
niklase@google.com470e71d2011-07-07 08:21:25 +00004066 "the RTP/RTCP module");
4067 }
4068 } else
4069 {
4070 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4071 VoEId(_instanceId, _channelId),
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004072 "GetRTPStatistics() failed to measure RTT since no "
niklase@google.com470e71d2011-07-07 08:21:25 +00004073 "RTP packets have been received yet");
4074 }
4075 }
4076
4077 stats.rttMs = static_cast<int> (RTT);
4078
4079 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4080 VoEId(_instanceId, _channelId),
4081 "GetRTPStatistics() => rttMs=%d", stats.rttMs);
4082
4083 // --- Part three of the final structure (four values)
4084
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004085 uint32_t bytesSent(0);
4086 uint32_t packetsSent(0);
4087 uint32_t bytesReceived(0);
4088 uint32_t packetsReceived(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004089
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004090 rtp_receive_statistics_->GetDataCounters(&bytesReceived, &packetsReceived);
4091
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004092 if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004093 &packetsSent) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004094 {
4095 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4096 VoEId(_instanceId, _channelId),
4097 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004098 " output will not be complete");
niklase@google.com470e71d2011-07-07 08:21:25 +00004099 }
4100
4101 stats.bytesSent = bytesSent;
4102 stats.packetsSent = packetsSent;
4103 stats.bytesReceived = bytesReceived;
4104 stats.packetsReceived = packetsReceived;
4105
4106 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4107 VoEId(_instanceId, _channelId),
4108 "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00004109 " bytesReceived=%d, packetsReceived=%d)",
niklase@google.com470e71d2011-07-07 08:21:25 +00004110 stats.bytesSent, stats.packetsSent, stats.bytesReceived,
4111 stats.packetsReceived);
4112
4113 return 0;
4114}
4115
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004116int Channel::SetFECStatus(bool enable, int redPayloadtype) {
4117 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4118 "Channel::SetFECStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00004119
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00004120 if (enable) {
4121 if (redPayloadtype < 0 || redPayloadtype > 127) {
4122 _engineStatisticsPtr->SetLastError(
4123 VE_PLTYPE_ERROR, kTraceError,
4124 "SetFECStatus() invalid RED payload type");
4125 return -1;
4126 }
4127
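  // Register RED as the send payload type carrying the redundant (FEC) data
  // before enabling FEC in the ACM.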
4128 if (SetRedPayloadType(redPayloadtype) < 0) {
4129 _engineStatisticsPtr->SetLastError(
4130 VE_CODEC_ERROR, kTraceError,
4131          "SetFECStatus() failed to register RED payload type in the ACM");
4132 return -1;
4133 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004134 }
niklase@google.com470e71d2011-07-07 08:21:25 +00004135
turaj@webrtc.org42259e72012-12-11 02:15:12 +00004136 if (_audioCodingModule.SetFECStatus(enable) != 0) {
4137 _engineStatisticsPtr->SetLastError(
4138 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4139 "SetFECStatus() failed to set FEC state in the ACM");
4140 return -1;
4141 }
4142 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004143}
4144
4145int
4146Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
4147{
4148 enabled = _audioCodingModule.FECStatus();
4149 if (enabled)
4150 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004151 int8_t payloadType(0);
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004152 if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004153 {
4154 _engineStatisticsPtr->SetLastError(
4155 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4156 "GetFECStatus() failed to retrieve RED PT from RTP/RTCP "
4157 "module");
4158 return -1;
4159        }
        redPayloadtype = payloadType;  // Return the value retrieved above.
4160 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4161 VoEId(_instanceId, _channelId),
4162 "GetFECStatus() => enabled=%d, redPayloadtype=%d",
4163 enabled, redPayloadtype);
4164 return 0;
4165 }
4166 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4167 VoEId(_instanceId, _channelId),
4168 "GetFECStatus() => enabled=%d", enabled);
4169 return 0;
4170}
4171
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004172void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
4173 // None of these functions can fail.
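  // Enable packet storage for retransmissions on the send side, RTCP NACK
  // handling on the receive side, and NACK support in the ACM jitter buffer.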
4174 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004175 rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff,
4176 maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00004177 if (enable)
4178 _audioCodingModule.EnableNack(maxNumberOfPackets);
4179 else
4180 _audioCodingModule.DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004181}
4182
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00004183// Called when we are missing one or more packets.
4184int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00004185 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
4186}
4187
niklase@google.com470e71d2011-07-07 08:21:25 +00004188int
niklase@google.com470e71d2011-07-07 08:21:25 +00004189Channel::StartRTPDump(const char fileNameUTF8[1024],
4190 RTPDirections direction)
4191{
4192 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4193 "Channel::StartRTPDump()");
4194 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4195 {
4196 _engineStatisticsPtr->SetLastError(
4197 VE_INVALID_ARGUMENT, kTraceError,
4198 "StartRTPDump() invalid RTP direction");
4199 return -1;
4200 }
4201 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4202 &_rtpDumpIn : &_rtpDumpOut;
4203 if (rtpDumpPtr == NULL)
4204 {
4205 assert(false);
4206 return -1;
4207 }
4208 if (rtpDumpPtr->IsActive())
4209 {
4210 rtpDumpPtr->Stop();
4211 }
4212 if (rtpDumpPtr->Start(fileNameUTF8) != 0)
4213 {
4214 _engineStatisticsPtr->SetLastError(
4215 VE_BAD_FILE, kTraceError,
4216 "StartRTPDump() failed to create file");
4217 return -1;
4218 }
4219 return 0;
4220}
4221
4222int
4223Channel::StopRTPDump(RTPDirections direction)
4224{
4225 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4226 "Channel::StopRTPDump()");
4227 if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4228 {
4229 _engineStatisticsPtr->SetLastError(
4230 VE_INVALID_ARGUMENT, kTraceError,
4231 "StopRTPDump() invalid RTP direction");
4232 return -1;
4233 }
4234 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4235 &_rtpDumpIn : &_rtpDumpOut;
4236 if (rtpDumpPtr == NULL)
4237 {
4238 assert(false);
4239 return -1;
4240 }
4241 if (!rtpDumpPtr->IsActive())
4242 {
4243 return 0;
4244 }
4245 return rtpDumpPtr->Stop();
4246}
4247
4248bool
4249Channel::RTPDumpIsActive(RTPDirections direction)
4250{
4251 if ((direction != kRtpIncoming) &&
4252 (direction != kRtpOutgoing))
4253 {
4254 _engineStatisticsPtr->SetLastError(
4255 VE_INVALID_ARGUMENT, kTraceError,
4256 "RTPDumpIsActive() invalid RTP direction");
4257 return false;
4258 }
4259 RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4260 &_rtpDumpIn : &_rtpDumpOut;
4261 return rtpDumpPtr->IsActive();
4262}
4263
4264int
4265Channel::InsertExtraRTPPacket(unsigned char payloadType,
4266 bool markerBit,
4267 const char* payloadData,
4268 unsigned short payloadSize)
4269{
4270 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4271 "Channel::InsertExtraRTPPacket()");
4272 if (payloadType > 127)
4273 {
4274 _engineStatisticsPtr->SetLastError(
4275 VE_INVALID_PLTYPE, kTraceError,
4276 "InsertExtraRTPPacket() invalid payload type");
4277 return -1;
4278 }
4279 if (payloadData == NULL)
4280 {
4281 _engineStatisticsPtr->SetLastError(
4282 VE_INVALID_ARGUMENT, kTraceError,
4283 "InsertExtraRTPPacket() invalid payload data");
4284 return -1;
4285 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004286 if (payloadSize > _rtpRtcpModule->MaxDataPayloadLength())
niklase@google.com470e71d2011-07-07 08:21:25 +00004287 {
4288 _engineStatisticsPtr->SetLastError(
4289 VE_INVALID_ARGUMENT, kTraceError,
4290 "InsertExtraRTPPacket() invalid payload size");
4291 return -1;
4292 }
4293 if (!_sending)
4294 {
4295 _engineStatisticsPtr->SetLastError(
4296 VE_NOT_SENDING, kTraceError,
4297 "InsertExtraRTPPacket() not sending");
4298 return -1;
4299 }
4300
4301 // Create extra RTP packet by calling RtpRtcp::SendOutgoingData().
4302 // Transport::SendPacket() will be called by the module when the RTP packet
4303 // is created.
4304 // The call to SendOutgoingData() does *not* modify the timestamp and
4305 // payloadtype to ensure that the RTP module generates a valid RTP packet
4306 // (user might utilize a non-registered payload type).
4307 // The marker bit and payload type will be replaced just before the actual
4308 // transmission, i.e., the actual modification is done *after* the RTP
4309 // module has delivered its RTP packet back to the VoE.
4310 // We will use the stored values above when the packet is modified
4311 // (see Channel::SendPacket()).
4312
4313 _extraPayloadType = payloadType;
4314 _extraMarkerBit = markerBit;
4315 _insertExtraRTPPacket = true;
4316
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004317 if (_rtpRtcpModule->SendOutgoingData(kAudioFrameSpeech,
niklase@google.com470e71d2011-07-07 08:21:25 +00004318 _lastPayloadType,
4319 _lastLocalTimeStamp,
stefan@webrtc.orgddfdfed2012-07-03 13:21:22 +00004320 // Leaving the time when this frame was
4321 // received from the capture device as
4322 // undefined for voice for now.
4323 -1,
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004324 (const uint8_t*) payloadData,
niklase@google.com470e71d2011-07-07 08:21:25 +00004325 payloadSize) != 0)
4326 {
4327 _engineStatisticsPtr->SetLastError(
4328 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4329 "InsertExtraRTPPacket() failed to send extra RTP packet");
4330 return -1;
4331 }
4332
4333 return 0;
4334}
4335
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004336uint32_t
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004337Channel::Demultiplex(const AudioFrame& audioFrame)
niklase@google.com470e71d2011-07-07 08:21:25 +00004338{
4339 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004340 "Channel::Demultiplex()");
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00004341 _audioFrame.CopyFrom(audioFrame);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004342 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004343 return 0;
4344}
4345
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00004346// TODO(xians): This method duplicates a fair amount of code from
4347// TransmitMixer::GenerateAudioFrame(); refactor the two methods to reduce
4348// code duplication.
4349void Channel::Demultiplex(const int16_t* audio_data,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00004350 int sample_rate,
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00004351 int number_of_frames,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00004352 int number_of_channels) {
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00004353 // The highest sample rate that WebRTC supports for mono audio is 96kHz.
4354 static const int kMaxNumberOfFrames = 960;
4355 assert(number_of_frames <= kMaxNumberOfFrames);
4356
4357 // Get the send codec information for doing resampling or downmixing later on.
4358 CodecInst codec;
4359 GetSendCodec(codec);
4360 assert(codec.channels == 1 || codec.channels == 2);
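  // Cap the working sample rate at 32 kHz and never exceed the capture rate
  // or the codec's native rate.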
4361 int support_sample_rate = std::min(32000,
4362 std::min(sample_rate, codec.plfreq));
4363
4364 // Downmix the data to mono if needed.
4365 const int16_t* audio_ptr = audio_data;
4366 if (number_of_channels == 2 && codec.channels == 1) {
4367 if (!mono_recording_audio_.get())
4368 mono_recording_audio_.reset(new int16_t[kMaxNumberOfFrames]);
4369
4370 AudioFrameOperations::StereoToMono(audio_data, number_of_frames,
4371 mono_recording_audio_.get());
4372 audio_ptr = mono_recording_audio_.get();
4373 }
4374
4375 // Resample the data to the sample rate that the codec is using.
4376 if (input_resampler_.InitializeIfNeeded(sample_rate,
4377 support_sample_rate,
4378 codec.channels)) {
4379 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
4380 "Channel::Demultiplex() unable to resample");
4381 return;
4382 }
4383
4384 int out_length = input_resampler_.Resample(audio_ptr,
4385 number_of_frames * codec.channels,
4386 _audioFrame.data_,
4387 AudioFrame::kMaxDataSizeSamples);
4388 if (out_length == -1) {
4389 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
4390 "Channel::Demultiplex() resampling failed");
4391 return;
4392 }
4393
4394 _audioFrame.samples_per_channel_ = out_length / codec.channels;
4395 _audioFrame.timestamp_ = -1;
4396 _audioFrame.sample_rate_hz_ = support_sample_rate;
4397 _audioFrame.speech_type_ = AudioFrame::kNormalSpeech;
4398 _audioFrame.vad_activity_ = AudioFrame::kVadUnknown;
4399 _audioFrame.num_channels_ = codec.channels;
4400 _audioFrame.id_ = _channelId;
4401}
4402
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004403uint32_t
xians@google.com0b0665a2011-08-08 08:18:44 +00004404Channel::PrepareEncodeAndSend(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004405{
4406 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4407 "Channel::PrepareEncodeAndSend()");
4408
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004409 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004410 {
4411 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4412 "Channel::PrepareEncodeAndSend() invalid audio frame");
4413 return -1;
4414 }
4415
4416 if (_inputFilePlaying)
4417 {
4418 MixOrReplaceAudioWithFile(mixingFrequency);
4419 }
4420
4421 if (_mute)
4422 {
4423 AudioFrameOperations::Mute(_audioFrame);
4424 }
4425
4426 if (_inputExternalMedia)
4427 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004428 CriticalSectionScoped cs(&_callbackCritSect);
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004429 const bool isStereo = (_audioFrame.num_channels_ == 2);
niklase@google.com470e71d2011-07-07 08:21:25 +00004430 if (_inputExternalMediaCallbackPtr)
4431 {
4432 _inputExternalMediaCallbackPtr->Process(
4433 _channelId,
4434 kRecordingPerChannel,
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004435 (int16_t*)_audioFrame.data_,
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004436 _audioFrame.samples_per_channel_,
4437 _audioFrame.sample_rate_hz_,
niklase@google.com470e71d2011-07-07 08:21:25 +00004438 isStereo);
4439 }
4440 }
4441
4442 InsertInbandDtmfTone();
4443
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004444 if (_includeAudioLevelIndication)
4445 {
4446 assert(_rtpAudioProc.get() != NULL);
4447
4448 // Check if settings need to be updated.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004449 if (_rtpAudioProc->sample_rate_hz() != _audioFrame.sample_rate_hz_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004450 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004451 if (_rtpAudioProc->set_sample_rate_hz(_audioFrame.sample_rate_hz_) !=
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004452 AudioProcessing::kNoError)
4453 {
4454 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4455 VoEId(_instanceId, _channelId),
4456 "Error setting AudioProcessing sample rate");
4457 return -1;
4458 }
4459 }
4460
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004461 if (_rtpAudioProc->num_input_channels() != _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004462 {
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004463 if (_rtpAudioProc->set_num_channels(_audioFrame.num_channels_,
4464 _audioFrame.num_channels_)
andrew@webrtc.org755b04a2011-11-15 16:57:56 +00004465 != AudioProcessing::kNoError)
4466 {
4467 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4468 VoEId(_instanceId, _channelId),
4469 "Error setting AudioProcessing channels");
4470 return -1;
4471 }
4472 }
4473
4474 // Performs level analysis only; does not affect the signal.
4475 _rtpAudioProc->ProcessStream(&_audioFrame);
4476 }
4477
niklase@google.com470e71d2011-07-07 08:21:25 +00004478 return 0;
4479}
4480
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004481uint32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00004482Channel::EncodeAndSend()
4483{
4484 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4485 "Channel::EncodeAndSend()");
4486
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004487 assert(_audioFrame.num_channels_ <= 2);
4488 if (_audioFrame.samples_per_channel_ == 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004489 {
4490 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4491 "Channel::EncodeAndSend() invalid audio frame");
4492 return -1;
4493 }
4494
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004495 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00004496
4497 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
4498
4499 // The ACM resamples internally.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004500 _audioFrame.timestamp_ = _timeStamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00004501 if (_audioCodingModule.Add10MsData((AudioFrame&)_audioFrame) != 0)
4502 {
4503 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
4504 "Channel::EncodeAndSend() ACM encoding failed");
4505 return -1;
4506 }
4507
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004508 _timeStamp += _audioFrame.samples_per_channel_;
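    // The RTP timestamp advances by one tick per sample, e.g. 160 ticks for a
    // 10 ms frame at 16 kHz or 320 ticks at 32 kHz.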
niklase@google.com470e71d2011-07-07 08:21:25 +00004509
4510 // --- Encode if complete frame is ready
4511
4512 // This call will trigger AudioPacketizationCallback::SendData if encoding
4513 // is done and payload is ready for packetization and transmission.
4514 return _audioCodingModule.Process();
4515}
4516
4517int Channel::RegisterExternalMediaProcessing(
4518 ProcessingTypes type,
4519 VoEMediaProcess& processObject)
4520{
4521 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4522 "Channel::RegisterExternalMediaProcessing()");
4523
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004524 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004525
4526 if (kPlaybackPerChannel == type)
4527 {
4528 if (_outputExternalMediaCallbackPtr)
4529 {
4530 _engineStatisticsPtr->SetLastError(
4531 VE_INVALID_OPERATION, kTraceError,
4532 "Channel::RegisterExternalMediaProcessing() "
4533 "output external media already enabled");
4534 return -1;
4535 }
4536 _outputExternalMediaCallbackPtr = &processObject;
4537 _outputExternalMedia = true;
4538 }
4539 else if (kRecordingPerChannel == type)
4540 {
4541 if (_inputExternalMediaCallbackPtr)
4542 {
4543 _engineStatisticsPtr->SetLastError(
4544 VE_INVALID_OPERATION, kTraceError,
4545 "Channel::RegisterExternalMediaProcessing() "
4546                "input external media already enabled");
4547 return -1;
4548 }
4549 _inputExternalMediaCallbackPtr = &processObject;
4550 _inputExternalMedia = true;
4551 }
4552 return 0;
4553}
4554
4555int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
4556{
4557 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4558 "Channel::DeRegisterExternalMediaProcessing()");
4559
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004560 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004561
4562 if (kPlaybackPerChannel == type)
4563 {
4564 if (!_outputExternalMediaCallbackPtr)
4565 {
4566 _engineStatisticsPtr->SetLastError(
4567 VE_INVALID_OPERATION, kTraceWarning,
4568 "Channel::DeRegisterExternalMediaProcessing() "
4569 "output external media already disabled");
4570 return 0;
4571 }
4572 _outputExternalMedia = false;
4573 _outputExternalMediaCallbackPtr = NULL;
4574 }
4575 else if (kRecordingPerChannel == type)
4576 {
4577 if (!_inputExternalMediaCallbackPtr)
4578 {
4579 _engineStatisticsPtr->SetLastError(
4580 VE_INVALID_OPERATION, kTraceWarning,
4581 "Channel::DeRegisterExternalMediaProcessing() "
4582 "input external media already disabled");
4583 return 0;
4584 }
4585 _inputExternalMedia = false;
4586 _inputExternalMediaCallbackPtr = NULL;
4587 }
4588
4589 return 0;
4590}
4591
roosa@google.com1b60ceb2012-12-12 23:00:29 +00004592int Channel::SetExternalMixing(bool enabled) {
4593 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4594 "Channel::SetExternalMixing(enabled=%d)", enabled);
4595
4596 if (_playing)
4597 {
4598 _engineStatisticsPtr->SetLastError(
4599 VE_INVALID_OPERATION, kTraceError,
4600 "Channel::SetExternalMixing() "
4601 "external mixing cannot be changed while playing.");
4602 return -1;
4603 }
4604
4605 _externalMixing = enabled;
4606
4607 return 0;
4608}
4609
niklase@google.com470e71d2011-07-07 08:21:25 +00004610int
4611Channel::ResetRTCPStatistics()
4612{
4613 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4614 "Channel::ResetRTCPStatistics()");
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004615 uint32_t remoteSSRC(0);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004616 remoteSSRC = rtp_receiver_->SSRC();
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004617 return _rtpRtcpModule->ResetRTT(remoteSSRC);
niklase@google.com470e71d2011-07-07 08:21:25 +00004618}
4619
4620int
4621Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
4622{
4623 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4624 "Channel::GetRoundTripTimeSummary()");
4625 // Override default module outputs for the case when RTCP is disabled.
4626    // Override the default module outputs when RTCP is disabled. This
4627    // keeps us backward compatible with the old VoiceEngine, which did
4628    // not use the RTP/RTCP module.
niklase@google.com470e71d2011-07-07 08:21:25 +00004629 {
4630 delaysMs.min = -1;
4631 delaysMs.max = -1;
4632 delaysMs.average = -1;
4633 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4634 "Channel::GetRoundTripTimeSummary() RTCP is disabled =>"
4635 " valid RTT measurements cannot be retrieved");
4636 return 0;
4637 }
4638
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004639 uint32_t remoteSSRC;
4640 uint16_t RTT;
4641 uint16_t avgRTT;
4642 uint16_t maxRTT;
4643 uint16_t minRTT;
niklase@google.com470e71d2011-07-07 08:21:25 +00004644 // The remote SSRC will be zero if no RTP packet has been received.
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004645 remoteSSRC = rtp_receiver_->SSRC();
niklase@google.com470e71d2011-07-07 08:21:25 +00004646 if (remoteSSRC == 0)
4647 {
4648 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4649 "Channel::GetRoundTripTimeSummary() unable to measure RTT"
4650 " since no RTP packet has been received yet");
4651 }
4652
4653 // Retrieve RTT statistics from the RTP/RTCP module for the specified
4654 // channel and SSRC. The SSRC is required to parse out the correct source
4655 // in conference scenarios.
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004656 if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT,&maxRTT) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004657 {
4658 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4659 "GetRoundTripTimeSummary unable to retrieve RTT values"
4660 " from the RTCP layer");
4661 delaysMs.min = -1; delaysMs.max = -1; delaysMs.average = -1;
4662 }
4663 else
4664 {
4665 delaysMs.min = minRTT;
4666 delaysMs.max = maxRTT;
4667 delaysMs.average = avgRTT;
4668 }
4669 return 0;
4670}
4671
4672int
4673Channel::GetNetworkStatistics(NetworkStatistics& stats)
4674{
4675 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4676 "Channel::GetNetworkStatistics()");
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00004677 ACMNetworkStatistics acm_stats;
4678 int return_value = _audioCodingModule.NetworkStatistics(&acm_stats);
4679 if (return_value >= 0) {
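    // Note: this memcpy relies on NetworkStatistics and ACMNetworkStatistics
    // having identical layouts; the two structs must be kept in sync.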
4680 memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
4681 }
4682 return return_value;
niklase@google.com470e71d2011-07-07 08:21:25 +00004683}
4684
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004685bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
4686 int* playout_buffer_delay_ms) const {
4687 if (_average_jitter_buffer_delay_us == 0) {
niklase@google.com470e71d2011-07-07 08:21:25 +00004688 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004689 "Channel::GetDelayEstimate() no valid estimate.");
4690 return false;
4691 }
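  // Example: an average jitter buffer delay of 22562 us and a 20 ms packet
  // delay yield (22562 + 500) / 1000 + 20 = 43 ms.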
4692 *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 +
4693 _recPacketDelayMs;
4694 *playout_buffer_delay_ms = playout_delay_ms_;
4695 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4696 "Channel::GetDelayEstimate()");
4697 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00004698}
4699
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +00004700int Channel::SetInitialPlayoutDelay(int delay_ms)
4701{
4702 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4703 "Channel::SetInitialPlayoutDelay()");
4704 if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
4705 (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
4706 {
4707 _engineStatisticsPtr->SetLastError(
4708 VE_INVALID_ARGUMENT, kTraceError,
4709 "SetInitialPlayoutDelay() invalid min delay");
4710 return -1;
4711 }
4712 if (_audioCodingModule.SetInitialPlayoutDelay(delay_ms) != 0)
4713 {
4714 _engineStatisticsPtr->SetLastError(
4715 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4716 "SetInitialPlayoutDelay() failed to set min playout delay");
4717 return -1;
4718 }
4719 return 0;
4720}
4721
4722
niklase@google.com470e71d2011-07-07 08:21:25 +00004723int
4724Channel::SetMinimumPlayoutDelay(int delayMs)
4725{
4726 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4727 "Channel::SetMinimumPlayoutDelay()");
4728 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
4729 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
4730 {
4731 _engineStatisticsPtr->SetLastError(
4732 VE_INVALID_ARGUMENT, kTraceError,
4733 "SetMinimumPlayoutDelay() invalid min delay");
4734 return -1;
4735 }
4736 if (_audioCodingModule.SetMinimumPlayoutDelay(delayMs) != 0)
4737 {
4738 _engineStatisticsPtr->SetLastError(
4739 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4740 "SetMinimumPlayoutDelay() failed to set min playout delay");
4741 return -1;
4742 }
4743 return 0;
4744}
4745
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004746void Channel::UpdatePlayoutTimestamp(bool rtcp) {
4747 uint32_t playout_timestamp = 0;
4748
4749 if (_audioCodingModule.PlayoutTimestamp(&playout_timestamp) == -1) {
4750 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4751 "Channel::UpdatePlayoutTimestamp() failed to read playout"
4752 " timestamp from the ACM");
4753 _engineStatisticsPtr->SetLastError(
4754 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4755 "UpdatePlayoutTimestamp() failed to retrieve timestamp");
4756 return;
4757 }
4758
4759 uint16_t delay_ms = 0;
4760 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
4761 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4762 "Channel::UpdatePlayoutTimestamp() failed to read playout"
4763 " delay from the ADM");
4764 _engineStatisticsPtr->SetLastError(
4765 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4766 "UpdatePlayoutTimestamp() failed to retrieve playout delay");
4767 return;
4768 }
4769
4770 int32_t playout_frequency = _audioCodingModule.PlayoutFrequency();
 4771  CodecInst current_receive_codec;
 4772  if (_audioCodingModule.ReceiveCodec(&current_receive_codec) == 0) {
 4773    if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
 4774      playout_frequency = 8000;
 4775    } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
4776 playout_frequency = 48000;
niklase@google.com470e71d2011-07-07 08:21:25 +00004777 }
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00004778 }
4779
4780 // Remove the playout delay.
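  // Example: a 60 ms playout delay at a 48 kHz RTP clock removes
  // 60 * (48000 / 1000) = 2880 timestamp ticks.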
4781 playout_timestamp -= (delay_ms * (playout_frequency / 1000));
4782
4783 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4784               "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %u",
4785 playout_timestamp);
4786
4787 if (rtcp) {
4788 playout_timestamp_rtcp_ = playout_timestamp;
4789 } else {
4790 playout_timestamp_rtp_ = playout_timestamp;
4791 }
4792 playout_delay_ms_ = delay_ms;
4793}
4794
4795int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
4796 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4797 "Channel::GetPlayoutTimestamp()");
4798 if (playout_timestamp_rtp_ == 0) {
4799 _engineStatisticsPtr->SetLastError(
4800 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4801 "GetPlayoutTimestamp() failed to retrieve timestamp");
4802 return -1;
4803 }
4804 timestamp = playout_timestamp_rtp_;
4805 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4806 VoEId(_instanceId,_channelId),
4807 "GetPlayoutTimestamp() => timestamp=%u", timestamp);
4808 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00004809}
4810
4811int
4812Channel::SetInitTimestamp(unsigned int timestamp)
4813{
4814 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4815 "Channel::SetInitTimestamp()");
4816 if (_sending)
4817 {
4818 _engineStatisticsPtr->SetLastError(
4819 VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
4820 return -1;
4821 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004822 if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004823 {
4824 _engineStatisticsPtr->SetLastError(
4825 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4826 "SetInitTimestamp() failed to set timestamp");
4827 return -1;
4828 }
4829 return 0;
4830}
4831
4832int
4833Channel::SetInitSequenceNumber(short sequenceNumber)
4834{
4835 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4836 "Channel::SetInitSequenceNumber()");
4837 if (_sending)
4838 {
4839 _engineStatisticsPtr->SetLastError(
4840 VE_SENDING, kTraceError,
4841 "SetInitSequenceNumber() already sending");
4842 return -1;
4843 }
pwestin@webrtc.org2853dde2012-05-11 11:08:54 +00004844 if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00004845 {
4846 _engineStatisticsPtr->SetLastError(
4847 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4848 "SetInitSequenceNumber() failed to set sequence number");
4849 return -1;
4850 }
4851 return 0;
4852}
4853
4854int
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004855Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const
niklase@google.com470e71d2011-07-07 08:21:25 +00004856{
4857 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4858 "Channel::GetRtpRtcp()");
wu@webrtc.org822fbd82013-08-15 23:38:54 +00004859 *rtpRtcpModule = _rtpRtcpModule.get();
4860 *rtp_receiver = rtp_receiver_.get();
niklase@google.com470e71d2011-07-07 08:21:25 +00004861 return 0;
4862}
4863
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004864// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
4865// a shared helper.
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004866int32_t
pbos@webrtc.org92135212013-05-14 08:31:39 +00004867Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004868{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004869 scoped_array<int16_t> fileBuffer(new int16_t[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004870 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004871
4872 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004873 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004874
4875 if (_inputFilePlayerPtr == NULL)
4876 {
4877 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4878 VoEId(_instanceId, _channelId),
4879 "Channel::MixOrReplaceAudioWithFile() fileplayer"
4880 " doesnt exist");
4881                         " doesn't exist");
4882 }
4883
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004884 if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00004885 fileSamples,
4886 mixingFrequency) == -1)
4887 {
4888 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4889 VoEId(_instanceId, _channelId),
4890 "Channel::MixOrReplaceAudioWithFile() file mixing "
4891 "failed");
4892 return -1;
4893 }
4894 if (fileSamples == 0)
4895 {
4896 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4897 VoEId(_instanceId, _channelId),
4898                         "Channel::MixOrReplaceAudioWithFile() file has ended");
4899 return 0;
4900 }
4901 }
4902
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004903 assert(_audioFrame.samples_per_channel_ == fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004904
4905 if (_mixFileWithMicrophone)
4906 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004907 // Currently file stream is always mono.
4908 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004909 Utility::MixWithSat(_audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004910 _audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004911 fileBuffer.get(),
4912 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004913 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004914 }
4915 else
4916 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004917 // Replace ACM audio with file.
4918 // Currently file stream is always mono.
4919 // TODO(xians): Change the code when FilePlayer supports real stereo.
niklase@google.com470e71d2011-07-07 08:21:25 +00004920 _audioFrame.UpdateFrame(_channelId,
4921 -1,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004922 fileBuffer.get(),
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004923 fileSamples,
niklase@google.com470e71d2011-07-07 08:21:25 +00004924 mixingFrequency,
4925 AudioFrame::kNormalSpeech,
4926 AudioFrame::kVadUnknown,
4927 1);
4928
4929 }
4930 return 0;
4931}
4932
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004933int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00004934Channel::MixAudioWithFile(AudioFrame& audioFrame,
pbos@webrtc.org92135212013-05-14 08:31:39 +00004935 int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00004936{
4937 assert(mixingFrequency <= 32000);
4938
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004939 scoped_array<int16_t> fileBuffer(new int16_t[640]);
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004940 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004941
4942 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00004943 CriticalSectionScoped cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00004944
4945 if (_outputFilePlayerPtr == NULL)
4946 {
4947 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4948 VoEId(_instanceId, _channelId),
4949 "Channel::MixAudioWithFile() file mixing failed");
4950 return -1;
4951 }
4952
4953 // We should get the frequency we ask for.
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004954 if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00004955 fileSamples,
4956 mixingFrequency) == -1)
4957 {
4958 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4959 VoEId(_instanceId, _channelId),
4960 "Channel::MixAudioWithFile() file mixing failed");
4961 return -1;
4962 }
4963 }
4964
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004965 if (audioFrame.samples_per_channel_ == fileSamples)
niklase@google.com470e71d2011-07-07 08:21:25 +00004966 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004967 // Currently file stream is always mono.
4968 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004969 Utility::MixWithSat(audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004970 audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00004971 fileBuffer.get(),
4972 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00004973 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004974 }
4975 else
4976 {
4977 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004978 "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
niklase@google.com470e71d2011-07-07 08:21:25 +00004979 "fileSamples(%d)",
andrew@webrtc.org63a50982012-05-02 23:56:37 +00004980 audioFrame.samples_per_channel_, fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00004981 return -1;
4982 }
4983
4984 return 0;
4985}
4986
4987int
4988Channel::InsertInbandDtmfTone()
4989{
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00004990 // Check if we should start a new tone.
niklase@google.com470e71d2011-07-07 08:21:25 +00004991 if (_inbandDtmfQueue.PendingDtmf() &&
4992 !_inbandDtmfGenerator.IsAddingTone() &&
4993 _inbandDtmfGenerator.DelaySinceLastTone() >
4994 kMinTelephoneEventSeparationMs)
4995 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00004996 int8_t eventCode(0);
4997 uint16_t lengthMs(0);
4998 uint8_t attenuationDb(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00004999
5000 eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
5001 _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
5002 if (_playInbandDtmfEvent)
5003 {
5004 // Add tone to output mixer using a reduced length to minimize
5005 // risk of echo.
5006 _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
5007 attenuationDb);
5008 }
5009 }
5010
5011 if (_inbandDtmfGenerator.IsAddingTone())
5012 {
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005013 uint16_t frequency(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005014 _inbandDtmfGenerator.GetSampleRate(frequency);
5015
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005016 if (frequency != _audioFrame.sample_rate_hz_)
niklase@google.com470e71d2011-07-07 08:21:25 +00005017 {
5018 // Update sample rate of Dtmf tone since the mixing frequency
5019 // has changed.
5020 _inbandDtmfGenerator.SetSampleRate(
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005021 (uint16_t) (_audioFrame.sample_rate_hz_));
niklase@google.com470e71d2011-07-07 08:21:25 +00005022 // Reset the tone to be added taking the new sample rate into
5023 // account.
5024 _inbandDtmfGenerator.ResetTone();
5025 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005026
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005027 int16_t toneBuffer[320];
5028 uint16_t toneSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00005029 // Get 10ms tone segment and set time since last tone to zero
5030 if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
5031 {
5032 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
5033 VoEId(_instanceId, _channelId),
5034 "Channel::EncodeAndSend() inserting Dtmf failed");
5035 return -1;
5036 }
5037
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005038 // Replace mixed audio with DTMF tone.
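        // data_ is interleaved, so the mono tone sample is copied into every
        // channel slot of the current sample index.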
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005039 for (int sample = 0;
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005040 sample < _audioFrame.samples_per_channel_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005041 sample++)
5042 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005043 for (int channel = 0;
5044 channel < _audioFrame.num_channels_;
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005045 channel++)
5046 {
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005047 const int index = sample * _audioFrame.num_channels_ + channel;
5048 _audioFrame.data_[index] = toneBuffer[sample];
niklas.enbom@webrtc.orgaf26f642011-11-16 12:41:36 +00005049 }
5050 }
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +00005051
andrew@webrtc.org63a50982012-05-02 23:56:37 +00005052 assert(_audioFrame.samples_per_channel_ == toneSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00005053 } else
5054 {
5055 // Add 10ms to "delay-since-last-tone" counter
5056 _inbandDtmfGenerator.UpdateDelaySinceLastTone();
5057 }
5058 return 0;
5059}
5060
niklase@google.com470e71d2011-07-07 08:21:25 +00005061void
5062Channel::ResetDeadOrAliveCounters()
5063{
5064 _countDeadDetections = 0;
5065 _countAliveDetections = 0;
5066}
5067
5068void
5069Channel::UpdateDeadOrAliveCounters(bool alive)
5070{
5071 if (alive)
5072 _countAliveDetections++;
5073 else
5074 _countDeadDetections++;
5075}
5076
5077int
5078Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
5079{
niklase@google.com470e71d2011-07-07 08:21:25 +00005080    countDead = _countDeadDetections;
    countAlive = _countAliveDetections;
    return 0;
5081}
5082
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005083int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +00005084Channel::SendPacketRaw(const void *data, int len, bool RTCP)
5085{
5086 if (_transportPtr == NULL)
5087 {
5088 return -1;
5089 }
5090 if (!RTCP)
5091 {
5092 return _transportPtr->SendPacket(_channelId, data, len);
5093 }
5094 else
5095 {
5096 return _transportPtr->SendRTCPPacket(_channelId, data, len);
5097 }
5098}
5099
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005100// Called for incoming RTP packets after successful RTP header parsing.
5101void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
5102 uint16_t sequence_number) {
5103 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
5104               "Channel::UpdatePacketDelay(timestamp=%u, sequenceNumber=%u)",
5105 rtp_timestamp, sequence_number);
niklase@google.com470e71d2011-07-07 08:21:25 +00005106
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005107 // Get frequency of last received payload
5108 int rtp_receive_frequency = _audioCodingModule.ReceiveFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +00005109
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005110 CodecInst current_receive_codec;
5111 if (_audioCodingModule.ReceiveCodec(&current_receive_codec) != 0) {
5112 return;
5113 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005114
turaj@webrtc.orge46c8d32013-05-22 20:39:43 +00005115 // Update the least required delay.
5116 least_required_delay_ms_ = _audioCodingModule.LeastRequiredDelayMs();
5117
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005118 if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
5119 // Even though the actual sampling rate for G.722 audio is
5120 // 16,000 Hz, the RTP clock rate for the G722 payload format is
5121 // 8,000 Hz because that value was erroneously assigned in
5122 // RFC 1890 and must remain unchanged for backward compatibility.
5123 rtp_receive_frequency = 8000;
5124 } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
5125 // We are resampling Opus internally to 32,000 Hz until all our
5126 // DSP routines can operate at 48,000 Hz, but the RTP clock
5127 // rate for the Opus payload format is standardized to 48,000 Hz,
5128 // because that is the maximum supported decoding sampling rate.
5129 rtp_receive_frequency = 48000;
5130 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005131
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005132  // playout_timestamp_rtp_ is updated in UpdatePlayoutTimestamp() for every
 5133  // incoming packet.
5134 uint32_t timestamp_diff_ms = (rtp_timestamp - playout_timestamp_rtp_) /
5135 (rtp_receive_frequency / 1000);
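  // Example: with the 8000 Hz G.722 RTP clock, a timestamp gap of 800 ticks
  // corresponds to 800 / (8000 / 1000) = 100 ms.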
niklase@google.com470e71d2011-07-07 08:21:25 +00005136
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005137 uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) /
5138 (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00005139
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005140 _previousTimestamp = rtp_timestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00005141
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005142 if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
5143 timestamp_diff_ms = 0;
5144 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005145
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005146 if (timestamp_diff_ms == 0) return;
niklase@google.com470e71d2011-07-07 08:21:25 +00005147
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005148 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
5149 _recPacketDelayMs = packet_delay_ms;
5150 }
niklase@google.com470e71d2011-07-07 08:21:25 +00005151
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00005152 if (_average_jitter_buffer_delay_us == 0) {
5153 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
5154 return;
5155 }
5156
 5157  // Filter the average delay with an exponential filter (alpha = 7/8).
 5158  // The value is stored in microseconds (1000 * the delay in ms) to reduce
 5159  // rounding error; GetDelayEstimate() converts it back to milliseconds
 5160  // later.
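  // Example: a stored average of 20000 us (20 ms) and a new timestamp_diff_ms
  // of 40 yield (20000 * 7 + 1000 * 40 + 500) / 8 = 22562 us, i.e. ~22.6 ms.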
5161 _average_jitter_buffer_delay_us = (_average_jitter_buffer_delay_us * 7 +
5162 1000 * timestamp_diff_ms + 500) / 8;
niklase@google.com470e71d2011-07-07 08:21:25 +00005163}
5164
5165void
5166Channel::RegisterReceiveCodecsToRTPModule()
5167{
5168 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
5169 "Channel::RegisterReceiveCodecsToRTPModule()");
5170
5171
5172 CodecInst codec;
pbos@webrtc.org6141e132013-04-09 10:09:10 +00005173 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00005174
5175 for (int idx = 0; idx < nSupportedCodecs; idx++)
5176 {
5177 // Open up the RTP/RTCP receiver for all supported codecs
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005178 if ((_audioCodingModule.Codec(idx, &codec) == -1) ||
wu@webrtc.org822fbd82013-08-15 23:38:54 +00005179 (rtp_receiver_->RegisterReceivePayload(
5180 codec.plname,
5181 codec.pltype,
5182 codec.plfreq,
5183 codec.channels,
5184 (codec.rate < 0) ? 0 : codec.rate) == -1))
niklase@google.com470e71d2011-07-07 08:21:25 +00005185 {
5186 WEBRTC_TRACE(
5187 kTraceWarning,
5188 kTraceVoice,
5189 VoEId(_instanceId, _channelId),
5190 "Channel::RegisterReceiveCodecsToRTPModule() unable"
5191 " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
5192 codec.plname, codec.pltype, codec.plfreq,
5193 codec.channels, codec.rate);
5194 }
5195 else
5196 {
5197 WEBRTC_TRACE(
5198 kTraceInfo,
5199 kTraceVoice,
5200 VoEId(_instanceId, _channelId),
5201 "Channel::RegisterReceiveCodecsToRTPModule() %s "
wu@webrtc.orgfcd12b32011-09-15 20:49:50 +00005202 "(%d/%d/%d/%d) has been added to the RTP/RTCP "
niklase@google.com470e71d2011-07-07 08:21:25 +00005203 "receiver",
5204 codec.plname, codec.pltype, codec.plfreq,
5205 codec.channels, codec.rate);
5206 }
5207 }
5208}
5209
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005210int Channel::ApmProcessRx(AudioFrame& frame) {
5211 AudioProcessing* audioproc = _rxAudioProcessingModulePtr;
5212 // Register the (possibly new) frame parameters.
5213 if (audioproc->set_sample_rate_hz(frame.sample_rate_hz_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005214 LOG_FERR1(LS_WARNING, set_sample_rate_hz, frame.sample_rate_hz_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005215 }
5216 if (audioproc->set_num_channels(frame.num_channels_,
5217 frame.num_channels_) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005218 LOG_FERR1(LS_WARNING, set_num_channels, frame.num_channels_);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005219 }
5220 if (audioproc->ProcessStream(&frame) != 0) {
andrew@webrtc.org655d8f52012-11-20 07:34:45 +00005221 LOG_FERR0(LS_WARNING, ProcessStream);
andrew@webrtc.org50419b02012-11-14 19:07:54 +00005222 }
5223 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00005224}
5225
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005226int Channel::SetSecondarySendCodec(const CodecInst& codec,
5227 int red_payload_type) {
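  // RED is registered as the ACM send codec first (via SetRedPayloadType());
  // the redundant secondary encoder is then attached to the ACM.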
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00005228 // Sanity check for payload type.
5229 if (red_payload_type < 0 || red_payload_type > 127) {
5230 _engineStatisticsPtr->SetLastError(
5231 VE_PLTYPE_ERROR, kTraceError,
5232        "SetSecondarySendCodec() invalid RED payload type");
5233 return -1;
5234 }
5235
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005236 if (SetRedPayloadType(red_payload_type) < 0) {
5237 _engineStatisticsPtr->SetLastError(
5238 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5239 "SetSecondarySendCodec() Failed to register RED ACM");
5240 return -1;
5241 }
5242 if (_audioCodingModule.RegisterSecondarySendCodec(codec) < 0) {
5243 _engineStatisticsPtr->SetLastError(
5244 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5245 "SetSecondarySendCodec() Failed to register secondary send codec in "
5246 "ACM");
5247 return -1;
5248 }
5249
5250 return 0;
5251}
5252
5253void Channel::RemoveSecondarySendCodec() {
5254 _audioCodingModule.UnregisterSecondarySendCodec();
5255}
5256
5257int Channel::GetSecondarySendCodec(CodecInst* codec) {
5258 if (_audioCodingModule.SecondarySendCodec(codec) < 0) {
5259 _engineStatisticsPtr->SetLastError(
5260 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5261        "GetSecondarySendCodec() Failed to get secondary send codec from ACM");
5262 return -1;
5263 }
5264 return 0;
5265}
5266
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00005267// Assuming this method is called with valid payload type.
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005268int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005269 CodecInst codec;
5270 bool found_red = false;
5271
5272 // Get default RED settings from the ACM database
5273 const int num_codecs = AudioCodingModule::NumberOfCodecs();
5274 for (int idx = 0; idx < num_codecs; idx++) {
tina.legrand@webrtc.org7a7a0082013-02-21 10:27:48 +00005275 _audioCodingModule.Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005276 if (!STR_CASE_CMP(codec.plname, "RED")) {
5277 found_red = true;
5278 break;
5279 }
5280 }
5281
5282 if (!found_red) {
5283 _engineStatisticsPtr->SetLastError(
5284 VE_CODEC_ERROR, kTraceError,
5285 "SetRedPayloadType() RED is not supported");
5286 return -1;
5287 }
5288
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00005289 codec.pltype = red_payload_type;
turaj@webrtc.org42259e72012-12-11 02:15:12 +00005290 if (_audioCodingModule.RegisterSendCodec(codec) < 0) {
5291 _engineStatisticsPtr->SetLastError(
5292 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5293 "SetRedPayloadType() RED registration in ACM module failed");
5294 return -1;
5295 }
5296
5297 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
5298 _engineStatisticsPtr->SetLastError(
5299 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
5300 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
5301 return -1;
5302 }
5303 return 0;
5304}
5305
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00005306} // namespace voe
5307} // namespace webrtc