/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
10
11#include "transmit_mixer.h"
12
13#include "audio_frame_operations.h"
14#include "channel.h"
15#include "channel_manager.h"
16#include "critical_section_wrapper.h"
17#include "event_wrapper.h"
18#include "statistics.h"
19#include "trace.h"
20#include "utility.h"
21#include "voe_base_impl.h"
22#include "voe_external_media.h"
andrew@webrtc.org6be1e932013-03-01 18:47:28 +000023#include "webrtc/system_wrappers/interface/logging.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000024
// Absolute-value helper used by level computations elsewhere in this file.
#define WEBRTC_ABS(a) (((a) < 0) ? -(a) : (a))

namespace webrtc {

namespace voe {

// Used for downmixing before resampling.
// TODO(ajm): audio_device should advertise the maximum sample rate it can
// provide.
static const int kMaxMonoDeviceDataSizeSamples = 960;  // 10 ms, 96 kHz, mono.
// Periodic callback from the monitor module. Forwards warning flags that
// were raised by the capture-processing path (typing detection and input
// saturation) to the registered VoiceEngineObserver, then clears them.
// TODO(ajm): The thread safety of this is dubious...
void
TransmitMixer::OnPeriodicProcess()
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
                 "TransmitMixer::OnPeriodicProcess()");

#if defined(WEBRTC_VOICE_ENGINE_TYPING_DETECTION)
    // NOTE(review): _typingNoiseWarning is read here without _critSect and
    // cleared under _callbackCritSect, unlike _saturationWarning below which
    // is snapshotted under _critSect — presumably a known inconsistency
    // (see the TODO above); confirm before changing.
    if (_typingNoiseWarning)
    {
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_voiceEngineObserverPtr)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
                         "TransmitMixer::OnPeriodicProcess() => "
                         "CallbackOnError(VE_TYPING_NOISE_WARNING)");
            _voiceEngineObserverPtr->CallbackOnError(-1,
                                                     VE_TYPING_NOISE_WARNING);
        }
        _typingNoiseWarning = false;
    }
#endif

    bool saturationWarning = false;
    {
        // Modify |_saturationWarning| under lock to avoid conflict with write op
        // in ProcessAudio and also ensure that we don't hold the lock during the
        // callback.
        CriticalSectionScoped cs(&_critSect);
        saturationWarning = _saturationWarning;
        if (_saturationWarning)
            _saturationWarning = false;
    }

    if (saturationWarning)
    {
        // The callback is made under _callbackCritSect only, so the observer
        // cannot deadlock against the audio path holding _critSect.
        CriticalSectionScoped cs(&_callbackCritSect);
        if (_voiceEngineObserverPtr)
        {
            WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
                         "TransmitMixer::OnPeriodicProcess() =>"
                         " CallbackOnError(VE_SATURATION_WARNING)");
            _voiceEngineObserverPtr->CallbackOnError(-1, VE_SATURATION_WARNING);
        }
    }
}
82
83
pbos@webrtc.org6141e132013-04-09 10:09:10 +000084void TransmitMixer::PlayNotification(const int32_t id,
85 const uint32_t durationMs)
niklase@google.com470e71d2011-07-07 08:21:25 +000086{
87 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
88 "TransmitMixer::PlayNotification(id=%d, durationMs=%d)",
89 id, durationMs);
90
91 // Not implement yet
92}
andrew@webrtc.org07ebdb92012-08-03 18:03:02 +000093
pbos@webrtc.org6141e132013-04-09 10:09:10 +000094void TransmitMixer::RecordNotification(const int32_t id,
95 const uint32_t durationMs)
niklase@google.com470e71d2011-07-07 08:21:25 +000096{
97 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
98 "TransmitMixer::RecordNotification(id=%d, durationMs=%d)",
99 id, durationMs);
100
101 // Not implement yet
102}
103
// FileCallback: the "file as microphone" player reached the end of its
// input. Clears _filePlaying under _critSect so PrepareDemux stops mixing
// file audio into the capture frame.
void TransmitMixer::PlayFileEnded(const int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
                 "TransmitMixer::PlayFileEnded(id=%d)", id);

    // Only the mixer's own player is expected to report here.
    assert(id == _filePlayerId);

    CriticalSectionScoped cs(&_critSect);

    _filePlaying = false;
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
                 "TransmitMixer::PlayFileEnded() =>"
                 "file player module is shutdown");
}
118
// FileCallback: a recorder finished. |id| selects which recording flag to
// clear — the microphone recorder or the full-call recorder; unknown ids
// are ignored.
void
TransmitMixer::RecordFileEnded(const int32_t id)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
                 "TransmitMixer::RecordFileEnded(id=%d)", id);

    if (id == _fileRecorderId)
    {
        CriticalSectionScoped cs(&_critSect);
        _fileRecording = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
                     "TransmitMixer::RecordFileEnded() => fileRecorder module"
                     "is shutdown");
    } else if (id == _fileCallRecorderId)
    {
        CriticalSectionScoped cs(&_critSect);
        _fileCallRecording = false;
        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
                     "TransmitMixer::RecordFileEnded() => fileCallRecorder"
                     "module is shutdown");
    }
}
141
pbos@webrtc.org6141e132013-04-09 10:09:10 +0000142int32_t
143TransmitMixer::Create(TransmitMixer*& mixer, const uint32_t instanceId)
niklase@google.com470e71d2011-07-07 08:21:25 +0000144{
145 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, -1),
146 "TransmitMixer::Create(instanceId=%d)", instanceId);
147 mixer = new TransmitMixer(instanceId);
148 if (mixer == NULL)
149 {
150 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, -1),
151 "TransmitMixer::Create() unable to allocate memory"
152 "for mixer");
153 return -1;
154 }
155 return 0;
156}
157
158void
159TransmitMixer::Destroy(TransmitMixer*& mixer)
160{
161 if (mixer)
162 {
163 delete mixer;
164 mixer = NULL;
165 }
166}
167
// Constructor. Note: the initializer list order must match the member
// declaration order in the header; the typing-detection members are only
// present when WEBRTC_VOICE_ENGINE_TYPING_DETECTION is defined.
TransmitMixer::TransmitMixer(const uint32_t instanceId) :
    _engineStatisticsPtr(NULL),
    _channelManagerPtr(NULL),
    audioproc_(NULL),
    _voiceEngineObserverPtr(NULL),
    _processThreadPtr(NULL),
    _filePlayerPtr(NULL),
    _fileRecorderPtr(NULL),
    _fileCallRecorderPtr(NULL),
    // Avoid conflict with other channels by adding 1024 - 1026,
    // won't use as much as 1024 channels.
    _filePlayerId(instanceId + 1024),
    _fileRecorderId(instanceId + 1025),
    _fileCallRecorderId(instanceId + 1026),
    _filePlaying(false),
    _fileRecording(false),
    _fileCallRecording(false),
    _audioLevel(),
    // Both critical sections are heap-allocated here and released with
    // "delete &..." in the destructor.
    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
    _timeActive(0),
    _timeSinceLastTyping(0),
    _penaltyCounter(0),
    _typingNoiseWarning(false),
    _timeWindow(10), // 10ms slots accepted to count as a hit
    _costPerTyping(100), // Penalty added for a typing + activity coincide
    _reportingThreshold(300), // Threshold for _penaltyCounter
    _penaltyDecay(1), // how much we reduce _penaltyCounter every 10 ms.
    _typeEventDelay(2), // how "old" event we check for
#endif
    _saturationWarning(false),
    _instanceId(instanceId),
    _mixFileWithMicrophone(false),
    _captureLevel(0),
    external_postproc_ptr_(NULL),
    external_preproc_ptr_(NULL),
    _mute(false),
    _remainingMuteMicTimeMs(0),
    stereo_codec_(false),
    swap_stereo_channels_(false)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
                 "TransmitMixer::TransmitMixer() - ctor");
}
andrew@webrtc.org02d71742012-04-24 19:47:00 +0000213
// Destructor. Teardown order matters: detach from the monitor/process
// thread first, then unhook external media callbacks, then stop and destroy
// the file modules under _critSect, and finally release the critical
// sections themselves (allocated in the constructor).
TransmitMixer::~TransmitMixer()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
                 "TransmitMixer::~TransmitMixer() - dtor");
    _monitorModule.DeRegisterObserver();
    if (_processThreadPtr)
    {
        _processThreadPtr->DeRegisterModule(&_monitorModule);
    }
    DeRegisterExternalMediaProcessing(kRecordingAllChannelsMixed);
    DeRegisterExternalMediaProcessing(kRecordingPreprocessing);
    {
        CriticalSectionScoped cs(&_critSect);
        // Each module is first detached from its callback (this object),
        // then stopped, then destroyed.
        if (_fileRecorderPtr)
        {
            _fileRecorderPtr->RegisterModuleFileCallback(NULL);
            _fileRecorderPtr->StopRecording();
            FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
            _fileRecorderPtr = NULL;
        }
        if (_fileCallRecorderPtr)
        {
            _fileCallRecorderPtr->RegisterModuleFileCallback(NULL);
            _fileCallRecorderPtr->StopRecording();
            FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
            _fileCallRecorderPtr = NULL;
        }
        if (_filePlayerPtr)
        {
            _filePlayerPtr->RegisterModuleFileCallback(NULL);
            _filePlayerPtr->StopPlayingFile();
            FilePlayer::DestroyFilePlayer(_filePlayerPtr);
            _filePlayerPtr = NULL;
        }
    }
    // The critical sections are references to heap objects created in the
    // constructor; release them last.
    delete &_critSect;
    delete &_callbackCritSect;
}
252
pbos@webrtc.org6141e132013-04-09 10:09:10 +0000253int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +0000254TransmitMixer::SetEngineInformation(ProcessThread& processThread,
255 Statistics& engineStatistics,
256 ChannelManager& channelManager)
257{
258 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
259 "TransmitMixer::SetEngineInformation()");
260
261 _processThreadPtr = &processThread;
262 _engineStatisticsPtr = &engineStatistics;
263 _channelManagerPtr = &channelManager;
264
265 if (_processThreadPtr->RegisterModule(&_monitorModule) == -1)
266 {
267 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
268 "TransmitMixer::SetEngineInformation() failed to"
269 "register the monitor module");
270 } else
271 {
272 _monitorModule.RegisterObserver(*this);
273 }
274
275 return 0;
276}
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +0000277
pbos@webrtc.org6141e132013-04-09 10:09:10 +0000278int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +0000279TransmitMixer::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
280{
281 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
282 "TransmitMixer::RegisterVoiceEngineObserver()");
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000283 CriticalSectionScoped cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000284
285 if (_voiceEngineObserverPtr)
286 {
287 _engineStatisticsPtr->SetLastError(
288 VE_INVALID_OPERATION, kTraceError,
289 "RegisterVoiceEngineObserver() observer already enabled");
290 return -1;
291 }
292 _voiceEngineObserverPtr = &observer;
293 return 0;
294}
295
pbos@webrtc.org6141e132013-04-09 10:09:10 +0000296int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +0000297TransmitMixer::SetAudioProcessingModule(AudioProcessing* audioProcessingModule)
298{
299 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
300 "TransmitMixer::SetAudioProcessingModule("
301 "audioProcessingModule=0x%x)",
302 audioProcessingModule);
andrew@webrtc.org6be1e932013-03-01 18:47:28 +0000303 audioproc_ = audioProcessingModule;
niklase@google.com470e71d2011-07-07 08:21:25 +0000304 return 0;
305}
306
andrew@webrtc.org24120852013-03-02 00:14:46 +0000307void TransmitMixer::GetSendCodecInfo(int* max_sample_rate, int* max_channels) {
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +0000308 ScopedChannel sc(*_channelManagerPtr);
309 void* iterator = NULL;
310 Channel* channel = sc.GetFirstChannel(iterator);
andrew@webrtc.org24120852013-03-02 00:14:46 +0000311
312 *max_sample_rate = 8000;
313 *max_channels = 1;
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +0000314 while (channel != NULL) {
315 if (channel->Sending()) {
316 CodecInst codec;
317 channel->GetSendCodec(codec);
andrew@webrtc.org24120852013-03-02 00:14:46 +0000318 // TODO(tlegrand): Remove the 32 kHz restriction once we have full 48 kHz
319 // support in Audio Coding Module.
320 *max_sample_rate = std::min(32000,
321 std::max(*max_sample_rate, codec.plfreq));
322 *max_channels = std::max(*max_channels, codec.channels);
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +0000323 }
324 channel = sc.GetNextChannel(iterator);
325 }
326}
327
// Runs the full near-end capture pipeline on one 10 ms block of recorded
// audio and leaves the result in _audioFrame, ready for DemuxAndMix().
// Pipeline order is significant: resample -> external preprocess ->
// APM processing -> channel swap -> typing detection -> mutes -> file
// mix/record -> external postprocess -> level measurement.
// Returns 0 on success, -1 if the initial frame generation fails.
int32_t
TransmitMixer::PrepareDemux(const void* audioSamples,
                            const uint32_t nSamples,
                            const uint8_t nChannels,
                            const uint32_t samplesPerSec,
                            const uint16_t totalDelayMS,
                            const int32_t clockDrift,
                            const uint16_t currentMicLevel)
{
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
                 "TransmitMixer::PrepareDemux(nSamples=%u, nChannels=%u,"
                 "samplesPerSec=%u, totalDelayMS=%u, clockDrift=%d,"
                 "currentMicLevel=%u)", nSamples, nChannels, samplesPerSec,
                 totalDelayMS, clockDrift, currentMicLevel);

    // --- Resample input audio and create/store the initial audio frame
    if (GenerateAudioFrame(static_cast<const int16_t*>(audioSamples),
                           nSamples,
                           nChannels,
                           samplesPerSec) == -1)
    {
        return -1;
    }

    {
        // External "preprocessing" hook sees the frame before APM runs.
        CriticalSectionScoped cs(&_callbackCritSect);
        if (external_preproc_ptr_) {
            external_preproc_ptr_->Process(-1, kRecordingPreprocessing,
                                           _audioFrame.data_,
                                           _audioFrame.samples_per_channel_,
                                           _audioFrame.sample_rate_hz_,
                                           _audioFrame.num_channels_ == 2);
        }
    }

    // --- Near-end audio processing.
    ProcessAudio(totalDelayMS, clockDrift, currentMicLevel);

    if (swap_stereo_channels_ && stereo_codec_)
        // Only bother swapping if we're using a stereo codec.
        AudioFrameOperations::SwapStereoChannels(&_audioFrame);

    // --- Annoying typing detection (utilizes the APM/VAD decision)
#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
    TypingDetection();
#endif

    // --- Mute during DTMF tone if direct feedback is enabled
    // _remainingMuteMicTimeMs counts down 10 ms per processed frame.
    if (_remainingMuteMicTimeMs > 0)
    {
        AudioFrameOperations::Mute(_audioFrame);
        _remainingMuteMicTimeMs -= 10;
        if (_remainingMuteMicTimeMs < 0)
        {
            _remainingMuteMicTimeMs = 0;
        }
    }

    // --- Mute signal
    if (_mute)
    {
        AudioFrameOperations::Mute(_audioFrame);
    }

    // --- Mix with file (does not affect the mixing frequency)
    if (_filePlaying)
    {
        MixOrReplaceAudioWithFile(_audioFrame.sample_rate_hz_);
    }

    // --- Record to file
    if (_fileRecording)
    {
        RecordAudioToFile(_audioFrame.sample_rate_hz_);
    }

    {
        // External "postprocessing" hook sees the fully processed frame.
        CriticalSectionScoped cs(&_callbackCritSect);
        if (external_postproc_ptr_) {
            external_postproc_ptr_->Process(-1, kRecordingAllChannelsMixed,
                                            _audioFrame.data_,
                                            _audioFrame.samples_per_channel_,
                                            _audioFrame.sample_rate_hz_,
                                            _audioFrame.num_channels_ == 2);
        }
    }

    // --- Measure audio level of speech after all processing.
    _audioLevel.ComputeLevel(_audioFrame);
    return 0;
}
419
pbos@webrtc.org6141e132013-04-09 10:09:10 +0000420int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +0000421TransmitMixer::DemuxAndMix()
422{
423 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
424 "TransmitMixer::DemuxAndMix()");
425
426 ScopedChannel sc(*_channelManagerPtr);
427 void* iterator(NULL);
428 Channel* channelPtr = sc.GetFirstChannel(iterator);
429 while (channelPtr != NULL)
430 {
431 if (channelPtr->InputIsOnHold())
432 {
433 channelPtr->UpdateLocalTimeStamp();
434 } else if (channelPtr->Sending())
435 {
andrew@webrtc.org24120852013-03-02 00:14:46 +0000436 // Demultiplex makes a copy of its input.
437 channelPtr->Demultiplex(_audioFrame);
438 channelPtr->PrepareEncodeAndSend(_audioFrame.sample_rate_hz_);
niklase@google.com470e71d2011-07-07 08:21:25 +0000439 }
440 channelPtr = sc.GetNextChannel(iterator);
441 }
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +0000442 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000443}
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +0000444
pbos@webrtc.org6141e132013-04-09 10:09:10 +0000445int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +0000446TransmitMixer::EncodeAndSend()
447{
448 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
449 "TransmitMixer::EncodeAndSend()");
450
451 ScopedChannel sc(*_channelManagerPtr);
452 void* iterator(NULL);
453 Channel* channelPtr = sc.GetFirstChannel(iterator);
454 while (channelPtr != NULL)
455 {
456 if (channelPtr->Sending() && !channelPtr->InputIsOnHold())
457 {
458 channelPtr->EncodeAndSend();
459 }
460 channelPtr = sc.GetNextChannel(iterator);
461 }
462 return 0;
463}
464
pbos@webrtc.org6141e132013-04-09 10:09:10 +0000465uint32_t TransmitMixer::CaptureLevel() const
niklase@google.com470e71d2011-07-07 08:21:25 +0000466{
henrika@webrtc.orgbb8ada62013-04-04 08:39:09 +0000467 CriticalSectionScoped cs(&_critSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000468 return _captureLevel;
469}
470
471void
pbos@webrtc.org6141e132013-04-09 10:09:10 +0000472TransmitMixer::UpdateMuteMicrophoneTime(const uint32_t lengthMs)
niklase@google.com470e71d2011-07-07 08:21:25 +0000473{
474 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
475 "TransmitMixer::UpdateMuteMicrophoneTime(lengthMs=%d)",
476 lengthMs);
477 _remainingMuteMicTimeMs = lengthMs;
478}
479
pbos@webrtc.org6141e132013-04-09 10:09:10 +0000480int32_t
niklase@google.com470e71d2011-07-07 08:21:25 +0000481TransmitMixer::StopSend()
482{
483 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
484 "TransmitMixer::StopSend()");
485 _audioLevel.Clear();
486 return 0;
487}
488
// Starts playing a file (by name) as the microphone signal. Replaces any
// previously created player instance. Returns 0 on success or if already
// playing, -1 on failure.
int TransmitMixer::StartPlayingFileAsMicrophone(const char* fileName,
                                                const bool loop,
                                                const FileFormats format,
                                                const int startPosition,
                                                const float volumeScaling,
                                                const int stopPosition,
                                                const CodecInst* codecInst)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
                 "TransmitMixer::StartPlayingFileAsMicrophone("
                 "fileNameUTF8[]=%s,loop=%d, format=%d, volumeScaling=%5.3f,"
                 " startPosition=%d, stopPosition=%d)", fileName, loop,
                 format, volumeScaling, startPosition, stopPosition);

    if (_filePlaying)
    {
        // Already playing is treated as a soft failure (warning, return 0).
        _engineStatisticsPtr->SetLastError(
            VE_ALREADY_PLAYING, kTraceWarning,
            "StartPlayingFileAsMicrophone() is already playing");
        return 0;
    }

    CriticalSectionScoped cs(&_critSect);

    // Destroy the old instance
    if (_filePlayerPtr)
    {
        _filePlayerPtr->RegisterModuleFileCallback(NULL);
        FilePlayer::DestroyFilePlayer(_filePlayerPtr);
        _filePlayerPtr = NULL;
    }

    // Dynamically create the instance
    _filePlayerPtr
        = FilePlayer::CreateFilePlayer(_filePlayerId,
                                       (const FileFormats) format);

    if (_filePlayerPtr == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_ARGUMENT, kTraceError,
            "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
        return -1;
    }

    // End-of-file notifications only; no periodic progress notifications.
    const uint32_t notificationTime(0);

    if (_filePlayerPtr->StartPlayingFile(
        fileName,
        loop,
        startPosition,
        volumeScaling,
        notificationTime,
        stopPosition,
        (const CodecInst*) codecInst) != 0)
    {
        // Roll back the half-constructed player on failure.
        _engineStatisticsPtr->SetLastError(
            VE_BAD_FILE, kTraceError,
            "StartPlayingFile() failed to start file playout");
        _filePlayerPtr->StopPlayingFile();
        FilePlayer::DestroyFilePlayer(_filePlayerPtr);
        _filePlayerPtr = NULL;
        return -1;
    }

    // Register for PlayFileEnded() callbacks and mark playback active.
    _filePlayerPtr->RegisterModuleFileCallback(this);
    _filePlaying = true;

    return 0;
}
559
// Starts playing from a caller-provided input stream as the microphone
// signal. Validates the stream, replaces any previous player instance.
// Returns 0 on success or if already playing, -1 on failure.
int TransmitMixer::StartPlayingFileAsMicrophone(InStream* stream,
                                                const FileFormats format,
                                                const int startPosition,
                                                const float volumeScaling,
                                                const int stopPosition,
                                                const CodecInst* codecInst)
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
                 "TransmitMixer::StartPlayingFileAsMicrophone(format=%d,"
                 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
                 format, volumeScaling, startPosition, stopPosition);

    if (stream == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_BAD_FILE, kTraceError,
            "StartPlayingFileAsMicrophone() NULL as input stream");
        return -1;
    }

    if (_filePlaying)
    {
        // Already playing is treated as a soft failure (warning, return 0).
        _engineStatisticsPtr->SetLastError(
            VE_ALREADY_PLAYING, kTraceWarning,
            "StartPlayingFileAsMicrophone() is already playing");
        return 0;
    }

    CriticalSectionScoped cs(&_critSect);

    // Destroy the old instance
    if (_filePlayerPtr)
    {
        _filePlayerPtr->RegisterModuleFileCallback(NULL);
        FilePlayer::DestroyFilePlayer(_filePlayerPtr);
        _filePlayerPtr = NULL;
    }

    // Dynamically create the instance
    _filePlayerPtr
        = FilePlayer::CreateFilePlayer(_filePlayerId,
                                       (const FileFormats) format);

    if (_filePlayerPtr == NULL)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_ARGUMENT, kTraceWarning,
            "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
        return -1;
    }

    // End-of-file notifications only; no periodic progress notifications.
    const uint32_t notificationTime(0);

    if (_filePlayerPtr->StartPlayingFile(
        (InStream&) *stream,
        startPosition,
        volumeScaling,
        notificationTime,
        stopPosition,
        (const CodecInst*) codecInst) != 0)
    {
        // Roll back the half-constructed player on failure.
        _engineStatisticsPtr->SetLastError(
            VE_BAD_FILE, kTraceError,
            "StartPlayingFile() failed to start file playout");
        _filePlayerPtr->StopPlayingFile();
        FilePlayer::DestroyFilePlayer(_filePlayerPtr);
        _filePlayerPtr = NULL;
        return -1;
    }

    // Register for PlayFileEnded() callbacks and mark playback active.
    _filePlayerPtr->RegisterModuleFileCallback(this);
    _filePlaying = true;

    return 0;
}
634
// Stops file-as-microphone playback and destroys the player instance.
// Returns 0 if nothing was playing or on success, -1 if the player failed
// to stop.
int TransmitMixer::StopPlayingFileAsMicrophone()
{
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
                 "TransmitMixer::StopPlayingFileAsMicrophone()");

    if (!_filePlaying)
    {
        _engineStatisticsPtr->SetLastError(
            VE_INVALID_OPERATION, kTraceWarning,
            "StopPlayingFileAsMicrophone() isnot playing");
        return 0;
    }

    CriticalSectionScoped cs(&_critSect);

    // _filePlaying implies _filePlayerPtr is valid (both are managed
    // together under _critSect).
    if (_filePlayerPtr->StopPlayingFile() != 0)
    {
        _engineStatisticsPtr->SetLastError(
            VE_CANNOT_STOP_PLAYOUT, kTraceError,
            "StopPlayingFile() couldnot stop playing file");
        return -1;
    }

    _filePlayerPtr->RegisterModuleFileCallback(NULL);
    FilePlayer::DestroyFilePlayer(_filePlayerPtr);
    _filePlayerPtr = NULL;
    _filePlaying = false;

    return 0;
}
665
666int TransmitMixer::IsPlayingFileAsMicrophone() const
667{
668 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
669 "TransmitMixer::IsPlayingFileAsMicrophone()");
670 return _filePlaying;
671}
672
673int TransmitMixer::ScaleFileAsMicrophonePlayout(const float scale)
674{
675 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
676 "TransmitMixer::ScaleFileAsMicrophonePlayout(scale=%5.3f)",
677 scale);
678
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000679 CriticalSectionScoped cs(&_critSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000680
681 if (!_filePlaying)
682 {
683 _engineStatisticsPtr->SetLastError(
684 VE_INVALID_OPERATION, kTraceError,
685 "ScaleFileAsMicrophonePlayout() isnot playing file");
686 return -1;
687 }
688
689 if ((_filePlayerPtr == NULL) ||
690 (_filePlayerPtr->SetAudioScaling(scale) != 0))
691 {
692 _engineStatisticsPtr->SetLastError(
693 VE_BAD_ARGUMENT, kTraceError,
694 "SetAudioScaling() failed to scale playout");
695 return -1;
696 }
697
698 return 0;
699}
700
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +0000701int TransmitMixer::StartRecordingMicrophone(const char* fileName,
niklase@google.com470e71d2011-07-07 08:21:25 +0000702 const CodecInst* codecInst)
703{
704 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
705 "TransmitMixer::StartRecordingMicrophone(fileName=%s)",
706 fileName);
707
708 if (_fileRecording)
709 {
710 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
711 "StartRecordingMicrophone() is already recording");
712 return 0;
713 }
714
715 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +0000716 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +0000717 CodecInst dummyCodec = { 100, "L16", 16000, 320, 1, 320000 };
718
niklas.enbom@webrtc.org87885e82012-02-07 14:48:59 +0000719 if (codecInst != NULL &&
720 (codecInst->channels < 0 || codecInst->channels > 2))
niklase@google.com470e71d2011-07-07 08:21:25 +0000721 {
722 _engineStatisticsPtr->SetLastError(
723 VE_BAD_ARGUMENT, kTraceError,
724 "StartRecordingMicrophone() invalid compression");
725 return (-1);
726 }
727 if (codecInst == NULL)
728 {
729 format = kFileFormatPcm16kHzFile;
730 codecInst = &dummyCodec;
731 } else if ((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
732 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
733 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
734 {
735 format = kFileFormatWavFile;
736 } else
737 {
738 format = kFileFormatCompressedFile;
739 }
740
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000741 CriticalSectionScoped cs(&_critSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000742
743 // Destroy the old instance
744 if (_fileRecorderPtr)
745 {
746 _fileRecorderPtr->RegisterModuleFileCallback(NULL);
747 FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
748 _fileRecorderPtr = NULL;
749 }
750
751 _fileRecorderPtr =
752 FileRecorder::CreateFileRecorder(_fileRecorderId,
753 (const FileFormats) format);
754 if (_fileRecorderPtr == NULL)
755 {
756 _engineStatisticsPtr->SetLastError(
757 VE_INVALID_ARGUMENT, kTraceError,
758 "StartRecordingMicrophone() fileRecorder format isnot correct");
759 return -1;
760 }
761
762 if (_fileRecorderPtr->StartRecordingAudioFile(
763 fileName,
764 (const CodecInst&) *codecInst,
765 notificationTime) != 0)
766 {
767 _engineStatisticsPtr->SetLastError(
768 VE_BAD_FILE, kTraceError,
769 "StartRecordingAudioFile() failed to start file recording");
770 _fileRecorderPtr->StopRecording();
771 FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
772 _fileRecorderPtr = NULL;
773 return -1;
774 }
775 _fileRecorderPtr->RegisterModuleFileCallback(this);
776 _fileRecording = true;
777
778 return 0;
779}
780
781int TransmitMixer::StartRecordingMicrophone(OutStream* stream,
782 const CodecInst* codecInst)
783{
784 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
785 "TransmitMixer::StartRecordingMicrophone()");
786
787 if (_fileRecording)
788 {
789 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
790 "StartRecordingMicrophone() is already recording");
791 return 0;
792 }
793
794 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +0000795 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +0000796 CodecInst dummyCodec = { 100, "L16", 16000, 320, 1, 320000 };
797
798 if (codecInst != NULL && codecInst->channels != 1)
799 {
800 _engineStatisticsPtr->SetLastError(
801 VE_BAD_ARGUMENT, kTraceError,
802 "StartRecordingMicrophone() invalid compression");
803 return (-1);
804 }
805 if (codecInst == NULL)
806 {
807 format = kFileFormatPcm16kHzFile;
808 codecInst = &dummyCodec;
809 } else if ((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
810 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
811 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
812 {
813 format = kFileFormatWavFile;
814 } else
815 {
816 format = kFileFormatCompressedFile;
817 }
818
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000819 CriticalSectionScoped cs(&_critSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000820
821 // Destroy the old instance
822 if (_fileRecorderPtr)
823 {
824 _fileRecorderPtr->RegisterModuleFileCallback(NULL);
825 FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
826 _fileRecorderPtr = NULL;
827 }
828
829 _fileRecorderPtr =
830 FileRecorder::CreateFileRecorder(_fileRecorderId,
831 (const FileFormats) format);
832 if (_fileRecorderPtr == NULL)
833 {
834 _engineStatisticsPtr->SetLastError(
835 VE_INVALID_ARGUMENT, kTraceError,
836 "StartRecordingMicrophone() fileRecorder format isnot correct");
837 return -1;
838 }
839
840 if (_fileRecorderPtr->StartRecordingAudioFile(*stream,
841 *codecInst,
842 notificationTime) != 0)
843 {
844 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
845 "StartRecordingAudioFile() failed to start file recording");
846 _fileRecorderPtr->StopRecording();
847 FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
848 _fileRecorderPtr = NULL;
849 return -1;
850 }
851
852 _fileRecorderPtr->RegisterModuleFileCallback(this);
853 _fileRecording = true;
854
855 return 0;
856}
857
858
859int TransmitMixer::StopRecordingMicrophone()
860{
861 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
862 "TransmitMixer::StopRecordingMicrophone()");
863
864 if (!_fileRecording)
865 {
braveyao@webrtc.org4de777b2012-06-15 02:37:53 +0000866 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
niklase@google.com470e71d2011-07-07 08:21:25 +0000867 "StopRecordingMicrophone() isnot recording");
braveyao@webrtc.org4de777b2012-06-15 02:37:53 +0000868 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000869 }
870
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000871 CriticalSectionScoped cs(&_critSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000872
873 if (_fileRecorderPtr->StopRecording() != 0)
874 {
875 _engineStatisticsPtr->SetLastError(
876 VE_STOP_RECORDING_FAILED, kTraceError,
877 "StopRecording(), could not stop recording");
878 return -1;
879 }
880 _fileRecorderPtr->RegisterModuleFileCallback(NULL);
881 FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
882 _fileRecorderPtr = NULL;
883 _fileRecording = false;
884
885 return 0;
886}
887
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +0000888int TransmitMixer::StartRecordingCall(const char* fileName,
niklase@google.com470e71d2011-07-07 08:21:25 +0000889 const CodecInst* codecInst)
890{
891 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
892 "TransmitMixer::StartRecordingCall(fileName=%s)", fileName);
893
894 if (_fileCallRecording)
895 {
896 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
897 "StartRecordingCall() is already recording");
898 return 0;
899 }
900
901 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +0000902 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +0000903 CodecInst dummyCodec = { 100, "L16", 16000, 320, 1, 320000 };
904
905 if (codecInst != NULL && codecInst->channels != 1)
906 {
907 _engineStatisticsPtr->SetLastError(
908 VE_BAD_ARGUMENT, kTraceError,
909 "StartRecordingCall() invalid compression");
910 return (-1);
911 }
912 if (codecInst == NULL)
913 {
914 format = kFileFormatPcm16kHzFile;
915 codecInst = &dummyCodec;
916 } else if ((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
917 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
918 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
919 {
920 format = kFileFormatWavFile;
921 } else
922 {
923 format = kFileFormatCompressedFile;
924 }
925
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +0000926 CriticalSectionScoped cs(&_critSect);
niklase@google.com470e71d2011-07-07 08:21:25 +0000927
928 // Destroy the old instance
929 if (_fileCallRecorderPtr)
930 {
931 _fileCallRecorderPtr->RegisterModuleFileCallback(NULL);
932 FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
933 _fileCallRecorderPtr = NULL;
934 }
935
936 _fileCallRecorderPtr
937 = FileRecorder::CreateFileRecorder(_fileCallRecorderId,
938 (const FileFormats) format);
939 if (_fileCallRecorderPtr == NULL)
940 {
941 _engineStatisticsPtr->SetLastError(
942 VE_INVALID_ARGUMENT, kTraceError,
943 "StartRecordingCall() fileRecorder format isnot correct");
944 return -1;
945 }
946
947 if (_fileCallRecorderPtr->StartRecordingAudioFile(
948 fileName,
949 (const CodecInst&) *codecInst,
950 notificationTime) != 0)
951 {
952 _engineStatisticsPtr->SetLastError(
953 VE_BAD_FILE, kTraceError,
954 "StartRecordingAudioFile() failed to start file recording");
955 _fileCallRecorderPtr->StopRecording();
956 FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
957 _fileCallRecorderPtr = NULL;
958 return -1;
959 }
960 _fileCallRecorderPtr->RegisterModuleFileCallback(this);
961 _fileCallRecording = true;
962
963 return 0;
964}
965
966int TransmitMixer::StartRecordingCall(OutStream* stream,
967 const CodecInst* codecInst)
968{
969 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
970 "TransmitMixer::StartRecordingCall()");
971
972 if (_fileCallRecording)
973 {
974 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
975 "StartRecordingCall() is already recording");
976 return 0;
977 }
978
979 FileFormats format;
pbos@webrtc.org6141e132013-04-09 10:09:10 +0000980 const uint32_t notificationTime(0); // Not supported in VoE
niklase@google.com470e71d2011-07-07 08:21:25 +0000981 CodecInst dummyCodec = { 100, "L16", 16000, 320, 1, 320000 };
982
983 if (codecInst != NULL && codecInst->channels != 1)
984 {
985 _engineStatisticsPtr->SetLastError(
986 VE_BAD_ARGUMENT, kTraceError,
987 "StartRecordingCall() invalid compression");
988 return (-1);
989 }
990 if (codecInst == NULL)
991 {
992 format = kFileFormatPcm16kHzFile;
993 codecInst = &dummyCodec;
994 } else if ((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
995 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
996 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
997 {
998 format = kFileFormatWavFile;
999 } else
1000 {
1001 format = kFileFormatCompressedFile;
1002 }
1003
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001004 CriticalSectionScoped cs(&_critSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001005
1006 // Destroy the old instance
1007 if (_fileCallRecorderPtr)
1008 {
1009 _fileCallRecorderPtr->RegisterModuleFileCallback(NULL);
1010 FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
1011 _fileCallRecorderPtr = NULL;
1012 }
1013
1014 _fileCallRecorderPtr =
1015 FileRecorder::CreateFileRecorder(_fileCallRecorderId,
1016 (const FileFormats) format);
1017 if (_fileCallRecorderPtr == NULL)
1018 {
1019 _engineStatisticsPtr->SetLastError(
1020 VE_INVALID_ARGUMENT, kTraceError,
1021 "StartRecordingCall() fileRecorder format isnot correct");
1022 return -1;
1023 }
1024
1025 if (_fileCallRecorderPtr->StartRecordingAudioFile(*stream,
1026 *codecInst,
1027 notificationTime) != 0)
1028 {
1029 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1030 "StartRecordingAudioFile() failed to start file recording");
1031 _fileCallRecorderPtr->StopRecording();
1032 FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
1033 _fileCallRecorderPtr = NULL;
1034 return -1;
1035 }
andrew@webrtc.org07ebdb92012-08-03 18:03:02 +00001036
niklase@google.com470e71d2011-07-07 08:21:25 +00001037 _fileCallRecorderPtr->RegisterModuleFileCallback(this);
1038 _fileCallRecording = true;
1039
1040 return 0;
1041}
1042
1043int TransmitMixer::StopRecordingCall()
1044{
1045 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
1046 "TransmitMixer::StopRecordingCall()");
1047
1048 if (!_fileCallRecording)
1049 {
1050 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
1051 "StopRecordingCall() file isnot recording");
1052 return -1;
1053 }
1054
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001055 CriticalSectionScoped cs(&_critSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001056
1057 if (_fileCallRecorderPtr->StopRecording() != 0)
1058 {
1059 _engineStatisticsPtr->SetLastError(
1060 VE_STOP_RECORDING_FAILED, kTraceError,
1061 "StopRecording(), could not stop recording");
1062 return -1;
1063 }
1064
1065 _fileCallRecorderPtr->RegisterModuleFileCallback(NULL);
1066 FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
1067 _fileCallRecorderPtr = NULL;
1068 _fileCallRecording = false;
1069
1070 return 0;
1071}
1072
andrew@webrtc.org07ebdb92012-08-03 18:03:02 +00001073void
niklase@google.com470e71d2011-07-07 08:21:25 +00001074TransmitMixer::SetMixWithMicStatus(bool mix)
1075{
1076 _mixFileWithMicrophone = mix;
1077}
1078
1079int TransmitMixer::RegisterExternalMediaProcessing(
andrew@webrtc.org21ab3ba2012-10-19 17:30:56 +00001080 VoEMediaProcess* object,
1081 ProcessingTypes type) {
1082 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
1083 "TransmitMixer::RegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001084
andrew@webrtc.org21ab3ba2012-10-19 17:30:56 +00001085 CriticalSectionScoped cs(&_callbackCritSect);
1086 if (!object) {
1087 return -1;
1088 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001089
andrew@webrtc.org21ab3ba2012-10-19 17:30:56 +00001090 // Store the callback object according to the processing type.
1091 if (type == kRecordingAllChannelsMixed) {
1092 external_postproc_ptr_ = object;
1093 } else if (type == kRecordingPreprocessing) {
1094 external_preproc_ptr_ = object;
1095 } else {
1096 return -1;
1097 }
1098 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001099}
1100
andrew@webrtc.org21ab3ba2012-10-19 17:30:56 +00001101int TransmitMixer::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
1102 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
1103 "TransmitMixer::DeRegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001104
andrew@webrtc.org21ab3ba2012-10-19 17:30:56 +00001105 CriticalSectionScoped cs(&_callbackCritSect);
1106 if (type == kRecordingAllChannelsMixed) {
1107 external_postproc_ptr_ = NULL;
1108 } else if (type == kRecordingPreprocessing) {
1109 external_preproc_ptr_ = NULL;
1110 } else {
1111 return -1;
1112 }
1113 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001114}
1115
1116int
1117TransmitMixer::SetMute(bool enable)
1118{
1119 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
1120 "TransmitMixer::SetMute(enable=%d)", enable);
1121 _mute = enable;
1122 return 0;
1123}
1124
1125bool
1126TransmitMixer::Mute() const
1127{
1128 return _mute;
1129}
1130
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001131int8_t TransmitMixer::AudioLevel() const
niklase@google.com470e71d2011-07-07 08:21:25 +00001132{
1133 // Speech + file level [0,9]
1134 return _audioLevel.Level();
1135}
1136
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001137int16_t TransmitMixer::AudioLevelFullRange() const
niklase@google.com470e71d2011-07-07 08:21:25 +00001138{
1139 // Speech + file level [0,32767]
1140 return _audioLevel.LevelFullRange();
1141}
1142
1143bool TransmitMixer::IsRecordingCall()
1144{
1145 return _fileCallRecording;
1146}
1147
1148bool TransmitMixer::IsRecordingMic()
1149{
1150
1151 return _fileRecording;
1152}
1153
andrew@webrtc.org07ebdb92012-08-03 18:03:02 +00001154// TODO(andrew): use RemixAndResample for this.
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +00001155int TransmitMixer::GenerateAudioFrame(const int16_t audio[],
1156 int samples_per_channel,
1157 int num_channels,
1158 int sample_rate_hz)
niklase@google.com470e71d2011-07-07 08:21:25 +00001159{
andrew@webrtc.org24120852013-03-02 00:14:46 +00001160 int destination_rate;
1161 int num_codec_channels;
1162 GetSendCodecInfo(&destination_rate, &num_codec_channels);
1163
1164 // Never upsample the capture signal here. This should be done at the
1165 // end of the send chain.
1166 destination_rate = std::min(destination_rate, sample_rate_hz);
1167 stereo_codec_ = num_codec_channels == 2;
1168
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +00001169 const int16_t* audio_ptr = audio;
1170 int16_t mono_audio[kMaxMonoDeviceDataSizeSamples];
andrew@webrtc.org07ebdb92012-08-03 18:03:02 +00001171 assert(samples_per_channel <= kMaxMonoDeviceDataSizeSamples);
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +00001172 // If no stereo codecs are in use, we downmix a stereo stream from the
1173 // device early in the chain, before resampling.
1174 if (num_channels == 2 && !stereo_codec_) {
1175 AudioFrameOperations::StereoToMono(audio, samples_per_channel,
1176 mono_audio);
1177 audio_ptr = mono_audio;
1178 num_channels = 1;
1179 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001180
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +00001181 ResamplerType resampler_type = (num_channels == 1) ?
niklas.enbom@webrtc.orge33a1022011-11-16 10:33:53 +00001182 kResamplerSynchronous : kResamplerSynchronousStereo;
niklas.enbom@webrtc.orge33a1022011-11-16 10:33:53 +00001183
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +00001184 if (_audioResampler.ResetIfNeeded(sample_rate_hz,
andrew@webrtc.org24120852013-03-02 00:14:46 +00001185 destination_rate,
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +00001186 resampler_type) != 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001187 {
1188 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
1189 "TransmitMixer::GenerateAudioFrame() unable to resample");
1190 return -1;
1191 }
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +00001192 if (_audioResampler.Push(audio_ptr,
1193 samples_per_channel * num_channels,
1194 _audioFrame.data_,
1195 AudioFrame::kMaxDataSizeSamples,
1196 _audioFrame.samples_per_channel_) == -1)
niklase@google.com470e71d2011-07-07 08:21:25 +00001197 {
1198 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
1199 "TransmitMixer::GenerateAudioFrame() resampling failed");
1200 return -1;
1201 }
1202
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +00001203 _audioFrame.samples_per_channel_ /= num_channels;
andrew@webrtc.org63a50982012-05-02 23:56:37 +00001204 _audioFrame.id_ = _instanceId;
1205 _audioFrame.timestamp_ = -1;
andrew@webrtc.org24120852013-03-02 00:14:46 +00001206 _audioFrame.sample_rate_hz_ = destination_rate;
andrew@webrtc.org63a50982012-05-02 23:56:37 +00001207 _audioFrame.speech_type_ = AudioFrame::kNormalSpeech;
1208 _audioFrame.vad_activity_ = AudioFrame::kVadUnknown;
andrew@webrtc.org4ecea3e2012-06-27 03:25:31 +00001209 _audioFrame.num_channels_ = num_channels;
niklase@google.com470e71d2011-07-07 08:21:25 +00001210
1211 return 0;
1212}
1213
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001214int32_t TransmitMixer::RecordAudioToFile(
1215 const uint32_t mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00001216{
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001217 CriticalSectionScoped cs(&_critSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001218 if (_fileRecorderPtr == NULL)
1219 {
1220 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
1221 "TransmitMixer::RecordAudioToFile() filerecorder doesnot"
1222 "exist");
1223 return -1;
1224 }
1225
1226 if (_fileRecorderPtr->RecordAudioToFile(_audioFrame) != 0)
1227 {
1228 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
1229 "TransmitMixer::RecordAudioToFile() file recording"
1230 "failed");
1231 return -1;
1232 }
1233
1234 return 0;
1235}
1236
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001237int32_t TransmitMixer::MixOrReplaceAudioWithFile(
xians@google.com0b0665a2011-08-08 08:18:44 +00001238 const int mixingFrequency)
niklase@google.com470e71d2011-07-07 08:21:25 +00001239{
pbos@webrtc.org6141e132013-04-09 10:09:10 +00001240 scoped_array<int16_t> fileBuffer(new int16_t[640]);
niklase@google.com470e71d2011-07-07 08:21:25 +00001241
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00001242 int fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00001243 {
mflodman@webrtc.org9a065d12012-03-07 08:12:21 +00001244 CriticalSectionScoped cs(&_critSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001245 if (_filePlayerPtr == NULL)
1246 {
1247 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1248 VoEId(_instanceId, -1),
1249 "TransmitMixer::MixOrReplaceAudioWithFile()"
1250 "fileplayer doesnot exist");
1251 return -1;
1252 }
1253
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00001254 if (_filePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
niklase@google.com470e71d2011-07-07 08:21:25 +00001255 fileSamples,
1256 mixingFrequency) == -1)
1257 {
1258 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
1259 "TransmitMixer::MixOrReplaceAudioWithFile() file"
1260 " mixing failed");
1261 return -1;
1262 }
1263 }
1264
andrew@webrtc.org63a50982012-05-02 23:56:37 +00001265 assert(_audioFrame.samples_per_channel_ == fileSamples);
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00001266
niklase@google.com470e71d2011-07-07 08:21:25 +00001267 if (_mixFileWithMicrophone)
1268 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00001269 // Currently file stream is always mono.
1270 // TODO(xians): Change the code when FilePlayer supports real stereo.
andrew@webrtc.org63a50982012-05-02 23:56:37 +00001271 Utility::MixWithSat(_audioFrame.data_,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00001272 _audioFrame.num_channels_,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00001273 fileBuffer.get(),
1274 1,
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00001275 fileSamples);
niklase@google.com470e71d2011-07-07 08:21:25 +00001276 } else
1277 {
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00001278 // Replace ACM audio with file.
1279 // Currently file stream is always mono.
1280 // TODO(xians): Change the code when FilePlayer supports real stereo.
niklase@google.com470e71d2011-07-07 08:21:25 +00001281 _audioFrame.UpdateFrame(-1,
1282 -1,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00001283 fileBuffer.get(),
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00001284 fileSamples,
braveyao@webrtc.orgd7131432012-03-29 10:39:44 +00001285 mixingFrequency,
niklase@google.com470e71d2011-07-07 08:21:25 +00001286 AudioFrame::kNormalSpeech,
1287 AudioFrame::kVadUnknown,
1288 1);
niklase@google.com470e71d2011-07-07 08:21:25 +00001289 }
1290 return 0;
1291}
1292
andrew@webrtc.org6be1e932013-03-01 18:47:28 +00001293void TransmitMixer::ProcessAudio(int delay_ms, int clock_drift,
1294 int current_mic_level) {
1295 if (audioproc_->set_num_channels(_audioFrame.num_channels_,
1296 _audioFrame.num_channels_) != 0) {
1297 LOG_FERR2(LS_ERROR, set_num_channels, _audioFrame.num_channels_,
1298 _audioFrame.num_channels_);
1299 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001300
andrew@webrtc.org6be1e932013-03-01 18:47:28 +00001301 if (audioproc_->set_sample_rate_hz(_audioFrame.sample_rate_hz_) != 0) {
1302 LOG_FERR1(LS_ERROR, set_sample_rate_hz, _audioFrame.sample_rate_hz_);
1303 }
andrew@webrtc.org3192d652011-12-21 18:00:59 +00001304
andrew@webrtc.org6be1e932013-03-01 18:47:28 +00001305 if (audioproc_->set_stream_delay_ms(delay_ms) != 0) {
1306 // Report as a warning; we can occasionally run into very large delays.
1307 LOG_FERR1(LS_WARNING, set_stream_delay_ms, delay_ms);
1308 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001309
andrew@webrtc.org6be1e932013-03-01 18:47:28 +00001310 GainControl* agc = audioproc_->gain_control();
1311 if (agc->set_stream_analog_level(current_mic_level) != 0) {
1312 LOG_FERR1(LS_ERROR, set_stream_analog_level, current_mic_level);
1313 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001314
andrew@webrtc.org6be1e932013-03-01 18:47:28 +00001315 EchoCancellation* aec = audioproc_->echo_cancellation();
1316 if (aec->is_drift_compensation_enabled()) {
1317 aec->set_stream_drift_samples(clock_drift);
1318 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001319
andrew@webrtc.org6be1e932013-03-01 18:47:28 +00001320 int err = audioproc_->ProcessStream(&_audioFrame);
1321 if (err != 0) {
1322 LOG(LS_ERROR) << "ProcessStream() error: " << err;
1323 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001324
henrika@webrtc.orgbb8ada62013-04-04 08:39:09 +00001325 CriticalSectionScoped cs(&_critSect);
1326
andrew@webrtc.org6be1e932013-03-01 18:47:28 +00001327 // Store new capture level. Only updated when analog AGC is enabled.
1328 _captureLevel = agc->stream_analog_level();
1329
1330 // Triggers a callback in OnPeriodicProcess().
1331 _saturationWarning |= agc->stream_is_saturated();
niklase@google.com470e71d2011-07-07 08:21:25 +00001332}
1333
#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
// Heuristic keyboard-typing detector: accumulates a penalty when key
// presses coincide with voice activity and raises _typingNoiseWarning
// once the penalty crosses _reportingThreshold. Counters appear to be in
// per-call (~10 ms) ticks — TODO confirm against the caller's cadence.
// Returns 0 normally, -1 if key-press detection fails.
int TransmitMixer::TypingDetection()
{

    // We let the VAD determine if we're using this feature or not.
    if (_audioFrame.vad_activity_ == AudioFrame::kVadUnknown)
    {
        return (0);
    }

    int keyPressed = EventWrapper::KeyPressed();

    // Negative means the platform could not report key state.
    if (keyPressed < 0)
    {
        return (-1);
    }

    // Track the length of the current continuous voice-active stretch.
    if (_audioFrame.vad_activity_ == AudioFrame::kVadActive)
        _timeActive++;
    else
        _timeActive = 0;

    // Keep track if time since last typing event
    if (keyPressed)
    {
      _timeSinceLastTyping = 0;
    }
    else
    {
      ++_timeSinceLastTyping;
    }

    // Penalize only while a recent key press overlaps voice activity and
    // the active stretch is still short (long speech is unlikely typing).
    if ((_timeSinceLastTyping < _typeEventDelay)
        && (_audioFrame.vad_activity_ == AudioFrame::kVadActive)
        && (_timeActive < _timeWindow))
    {
        _penaltyCounter += _costPerTyping;
        if (_penaltyCounter > _reportingThreshold)
        {
            // Triggers a callback in OnPeriodicProcess().
            _typingNoiseWarning = true;
        }
    }

    // Decay the penalty over time so isolated events are forgiven.
    // NOTE(review): this can push _penaltyCounter below zero when the
    // remaining penalty is smaller than _penaltyDecay — confirm intended.
    if (_penaltyCounter > 0)
        _penaltyCounter-=_penaltyDecay;

    return (0);
}
#endif
1384
xians@google.com0b0665a2011-08-08 08:18:44 +00001385int TransmitMixer::GetMixingFrequency()
niklase@google.com470e71d2011-07-07 08:21:25 +00001386{
andrew@webrtc.org24120852013-03-02 00:14:46 +00001387 assert(_audioFrame.sample_rate_hz_ != 0);
1388 return _audioFrame.sample_rate_hz_;
niklase@google.com470e71d2011-07-07 08:21:25 +00001389}
1390
niklas.enbom@webrtc.org3dc88652012-03-30 09:53:54 +00001391#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
1392int TransmitMixer::TimeSinceLastTyping(int &seconds)
1393{
1394 // We check in VoEAudioProcessingImpl that this is only called when
1395 // typing detection is active.
1396
1397 // Round to whole seconds
1398 seconds = (_timeSinceLastTyping + 50) / 100;
1399 return(0);
1400}
1401#endif
1402
niklas.enbom@webrtc.org06e722a2012-04-04 07:44:27 +00001403#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
1404int TransmitMixer::SetTypingDetectionParameters(int timeWindow,
1405 int costPerTyping,
1406 int reportingThreshold,
niklas.enbom@webrtc.orgf6edfef2012-05-09 13:16:12 +00001407 int penaltyDecay,
1408 int typeEventDelay)
niklas.enbom@webrtc.org06e722a2012-04-04 07:44:27 +00001409{
1410 if(timeWindow != 0)
1411 _timeWindow = timeWindow;
1412 if(costPerTyping != 0)
1413 _costPerTyping = costPerTyping;
1414 if(reportingThreshold != 0)
1415 _reportingThreshold = reportingThreshold;
1416 if(penaltyDecay != 0)
1417 _penaltyDecay = penaltyDecay;
vikasmarwaha@webrtc.orgbdb03d42012-08-21 17:46:09 +00001418 if(typeEventDelay != 0)
niklas.enbom@webrtc.orgee646c32012-05-24 11:41:02 +00001419 _typeEventDelay = typeEventDelay;
niklas.enbom@webrtc.orgf6edfef2012-05-09 13:16:12 +00001420
niklas.enbom@webrtc.org06e722a2012-04-04 07:44:27 +00001421
1422 return(0);
1423}
1424#endif
1425
andrew@webrtc.org02d71742012-04-24 19:47:00 +00001426void TransmitMixer::EnableStereoChannelSwapping(bool enable) {
1427 swap_stereo_channels_ = enable;
1428}
1429
1430bool TransmitMixer::IsStereoChannelSwappingEnabled() {
1431 return swap_stereo_channels_;
1432}
1433
niklase@google.com470e71d2011-07-07 08:21:25 +00001434} // namespace voe
1435
1436} // namespace webrtc