/*
 *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_device/android/opensles_input.h"

#include <assert.h>

#include "webrtc/modules/audio_device/android/audio_common.h"
#include "webrtc/modules/audio_device/android/opensles_common.h"
#include "webrtc/modules/audio_device/android/single_rw_fifo.h"
#include "webrtc/modules/audio_device/audio_device_buffer.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"

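// VOID_RETURN is passed as |ret_val| to OPENSL_RETURN_ON_FAILURE by functions
// returning void. The macro evaluates an OpenSL ES call, asserts in debug
// builds on failure, and returns |ret_val| from the enclosing function.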
#define VOID_RETURN
#define OPENSL_RETURN_ON_FAILURE(op, ret_val) \
  do { \
    SLresult err = (op); \
    if (err != SL_RESULT_SUCCESS) { \
      assert(false); \
      return ret_val; \
    } \
  } while (0)

static const SLEngineOption kOption[] = {
  { SL_ENGINEOPTION_THREADSAFE, static_cast<SLuint32>(SL_BOOLEAN_TRUE) },
};

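// Event ids signalled from the OpenSL ES recorder callback to |rec_thread_|
// via |event_|.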
enum {
  kNoOverrun,
  kOverrun,
};

namespace webrtc {

OpenSlesInput::OpenSlesInput(PlayoutDelayProvider* delay_provider)
    : delay_provider_(delay_provider),
      initialized_(false),
      mic_initialized_(false),
      rec_initialized_(false),
      crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
      recording_(false),
      num_fifo_buffers_needed_(0),
      number_overruns_(0),
      sles_engine_(NULL),
      sles_engine_itf_(NULL),
      sles_recorder_(NULL),
      sles_recorder_itf_(NULL),
      sles_recorder_sbq_itf_(NULL),
      audio_buffer_(NULL),
      active_queue_(0),
      rec_sampling_rate_(0),
      agc_enabled_(false),
      recording_delay_(0) {
}

OpenSlesInput::~OpenSlesInput() {
}

int32_t OpenSlesInput::SetAndroidAudioDeviceObjects(void* javaVM,
                                                    void* env,
                                                    void* context) {
  return 0;
}

void OpenSlesInput::ClearAndroidAudioDeviceObjects() {
}

int32_t OpenSlesInput::Init() {
  assert(!initialized_);

  // Set up OpenSL engine.
  OPENSL_RETURN_ON_FAILURE(slCreateEngine(&sles_engine_, 1, kOption, 0,
                                          NULL, NULL),
                           -1);
  OPENSL_RETURN_ON_FAILURE((*sles_engine_)->Realize(sles_engine_,
                                                    SL_BOOLEAN_FALSE),
                           -1);
  OPENSL_RETURN_ON_FAILURE((*sles_engine_)->GetInterface(sles_engine_,
                                                         SL_IID_ENGINE,
                                                         &sles_engine_itf_),
                           -1);

  if (InitSampleRate() != 0) {
    return -1;
  }
  AllocateBuffers();
  initialized_ = true;
  return 0;
}

int32_t OpenSlesInput::Terminate() {
  // It is assumed that the caller has stopped recording before terminating.
  assert(!recording_);
  (*sles_engine_)->Destroy(sles_engine_);
  initialized_ = false;
  mic_initialized_ = false;
  rec_initialized_ = false;
  return 0;
}

int32_t OpenSlesInput::RecordingDeviceName(uint16_t index,
                                           char name[kAdmMaxDeviceNameSize],
                                           char guid[kAdmMaxGuidSize]) {
  assert(index == 0);
  // Empty strings.
  name[0] = '\0';
  guid[0] = '\0';
  return 0;
}

int32_t OpenSlesInput::SetRecordingDevice(uint16_t index) {
  assert(index == 0);
  return 0;
}

int32_t OpenSlesInput::RecordingIsAvailable(bool& available) {  // NOLINT
  available = true;
  return 0;
}

int32_t OpenSlesInput::InitRecording() {
  assert(initialized_);
  rec_initialized_ = true;
  return 0;
}

int32_t OpenSlesInput::StartRecording() {
  assert(rec_initialized_);
  assert(!recording_);
  if (!CreateAudioRecorder()) {
    return -1;
  }
  // Set up to receive buffer queue event callbacks.
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_sbq_itf_)->RegisterCallback(
          sles_recorder_sbq_itf_,
          RecorderSimpleBufferQueueCallback,
          this),
      -1);

  if (!EnqueueAllBuffers()) {
    return -1;
  }

  {
    // Set |recording_| under the lock to prevent the compiler from e.g.
    // optimizing the code into recording_ = StartCbThreads(), which would not
    // have been thread safe.
    CriticalSectionScoped lock(crit_sect_.get());
    recording_ = true;
  }
  if (!StartCbThreads()) {
    recording_ = false;
    return -1;
  }
  return 0;
}

int32_t OpenSlesInput::StopRecording() {
  StopCbThreads();
  DestroyAudioRecorder();
  recording_ = false;
  return 0;
}

int32_t OpenSlesInput::SetAGC(bool enable) {
  agc_enabled_ = enable;
  return 0;
}

int32_t OpenSlesInput::InitMicrophone() {
  assert(initialized_);
  assert(!recording_);
  mic_initialized_ = true;
  return 0;
}

int32_t OpenSlesInput::MicrophoneVolumeIsAvailable(bool& available) {  // NOLINT
  available = false;
  return 0;
}

int32_t OpenSlesInput::MinMicrophoneVolume(
    uint32_t& minVolume) const {  // NOLINT
  minVolume = 0;
  return 0;
}

int32_t OpenSlesInput::MicrophoneVolumeStepSize(
    uint16_t& stepSize) const {
  stepSize = 1;
  return 0;
}

int32_t OpenSlesInput::MicrophoneMuteIsAvailable(bool& available) {  // NOLINT
  available = false;  // Mic mute not supported on Android
  return 0;
}

int32_t OpenSlesInput::MicrophoneBoostIsAvailable(bool& available) {  // NOLINT
  available = false;  // Mic boost not supported on Android.
  return 0;
}

int32_t OpenSlesInput::SetMicrophoneBoost(bool enable) {
  assert(false);
  return -1;  // Not supported
}

int32_t OpenSlesInput::MicrophoneBoost(bool& enabled) const {  // NOLINT
  assert(false);
  return -1;  // Not supported
}

int32_t OpenSlesInput::StereoRecordingIsAvailable(bool& available) {  // NOLINT
  available = false;  // Stereo recording not supported on Android.
  return 0;
}

int32_t OpenSlesInput::StereoRecording(bool& enabled) const {  // NOLINT
  enabled = false;
  return 0;
}

int32_t OpenSlesInput::RecordingDelay(uint16_t& delayMS) const {  // NOLINT
  delayMS = recording_delay_;
  return 0;
}

void OpenSlesInput::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
  audio_buffer_ = audioBuffer;
}

int OpenSlesInput::InitSampleRate() {
  UpdateSampleRate();
  audio_buffer_->SetRecordingSampleRate(rec_sampling_rate_);
  audio_buffer_->SetRecordingChannels(kNumChannels);
  UpdateRecordingDelay();
  return 0;
}

int OpenSlesInput::buffer_size_samples() const {
  // Since there is no low latency recording, use buffer size corresponding to
  // 10ms of data since that's the framesize WebRTC uses. Getting any other
  // size would require patching together buffers somewhere before passing them
  // to WebRTC.
  return rec_sampling_rate_ * 10 / 1000;
}

int OpenSlesInput::buffer_size_bytes() const {
  return buffer_size_samples() * kNumChannels * sizeof(int16_t);
}

void OpenSlesInput::UpdateRecordingDelay() {
  // TODO(hellner): Add accurate delay estimate.
  // On average half the current buffer will have been filled with audio.
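  // For example, if TotalBuffersUsed() were 6 with 160-sample buffers at
  // 16 kHz, this would give (6 - 0.5) * 160 / 16 = 55 ms.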
  int outstanding_samples =
      (TotalBuffersUsed() - 0.5) * buffer_size_samples();
  recording_delay_ = outstanding_samples / (rec_sampling_rate_ / 1000);
}

void OpenSlesInput::UpdateSampleRate() {
  rec_sampling_rate_ = audio_manager_.low_latency_supported() ?
      audio_manager_.native_output_sample_rate() : kDefaultSampleRate;
}

void OpenSlesInput::CalculateNumFifoBuffersNeeded() {
  // Buffer size is 10ms of data.
  num_fifo_buffers_needed_ = kNum10MsToBuffer;
}

void OpenSlesInput::AllocateBuffers() {
  // Allocate FIFO to handle passing buffers between processing and OpenSL
  // threads.
  CalculateNumFifoBuffersNeeded();
  assert(num_fifo_buffers_needed_ > 0);
  fifo_.reset(new SingleRwFifo(num_fifo_buffers_needed_));

  // Allocate the memory area to be used.
  rec_buf_.reset(new scoped_ptr<int8_t[]>[TotalBuffersUsed()]);
  for (int i = 0; i < TotalBuffersUsed(); ++i) {
    rec_buf_[i].reset(new int8_t[buffer_size_bytes()]);
  }
}

int OpenSlesInput::TotalBuffersUsed() const {
  return num_fifo_buffers_needed_ + kNumOpenSlBuffers;
}

bool OpenSlesInput::EnqueueAllBuffers() {
  active_queue_ = 0;
  number_overruns_ = 0;
  for (int i = 0; i < kNumOpenSlBuffers; ++i) {
    memset(rec_buf_[i].get(), 0, buffer_size_bytes());
    OPENSL_RETURN_ON_FAILURE(
        (*sles_recorder_sbq_itf_)->Enqueue(
            sles_recorder_sbq_itf_,
            reinterpret_cast<void*>(rec_buf_[i].get()),
            buffer_size_bytes()),
        false);
  }
  // In case of overrun the fifo will be at capacity. In case of the first
  // enqueue no audio can have been returned yet, meaning the fifo must be
  // empty. Any other values are unexpected.
  assert(fifo_->size() == fifo_->capacity() ||
         fifo_->size() == 0);
  // OpenSL recording has been stopped. I.e. only this thread is touching
  // |fifo_|.
  while (fifo_->size() != 0) {
    // Clear the fifo.
    fifo_->Pop();
  }
  return true;
}

bool OpenSlesInput::CreateAudioRecorder() {
  if (!event_.Start()) {
    assert(false);
    return false;
  }
  SLDataLocator_IODevice micLocator = {
    SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT,
    SL_DEFAULTDEVICEID_AUDIOINPUT, NULL };
  SLDataSource audio_source = { &micLocator, NULL };

  SLDataLocator_AndroidSimpleBufferQueue simple_buf_queue = {
    SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
    static_cast<SLuint32>(TotalBuffersUsed())
  };
  SLDataFormat_PCM configuration =
      webrtc_opensl::CreatePcmConfiguration(rec_sampling_rate_);
  SLDataSink audio_sink = { &simple_buf_queue, &configuration };

  // The Android simple buffer queue and Android configuration interfaces are
  // needed for recording audio data. Note that the interfaces still need to
  // be initialized; this only tells OpenSL that they will be needed at some
  // point.
  const SLInterfaceID id[kNumInterfaces] = {
    SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION };
  const SLboolean req[kNumInterfaces] = {
    SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
  OPENSL_RETURN_ON_FAILURE(
      (*sles_engine_itf_)->CreateAudioRecorder(sles_engine_itf_,
                                               &sles_recorder_,
                                               &audio_source,
                                               &audio_sink,
                                               kNumInterfaces,
                                               id,
                                               req),
      false);

  SLAndroidConfigurationItf recorder_config;
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_)->GetInterface(sles_recorder_,
                                      SL_IID_ANDROIDCONFIGURATION,
                                      &recorder_config),
      false);

  // Set the audio recorder configuration to
  // SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION, which ensures that we
  // use the main microphone tuned for audio communications.
  SLint32 stream_type = SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION;
  OPENSL_RETURN_ON_FAILURE(
      (*recorder_config)->SetConfiguration(recorder_config,
                                           SL_ANDROID_KEY_RECORDING_PRESET,
                                           &stream_type,
                                           sizeof(SLint32)),
      false);

  // Realize the recorder in synchronous mode.
  OPENSL_RETURN_ON_FAILURE((*sles_recorder_)->Realize(sles_recorder_,
                                                      SL_BOOLEAN_FALSE),
                           false);
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_RECORD,
                                      static_cast<void*>(&sles_recorder_itf_)),
      false);
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_)->GetInterface(
          sles_recorder_,
          SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
          static_cast<void*>(&sles_recorder_sbq_itf_)),
      false);
  return true;
}

void OpenSlesInput::DestroyAudioRecorder() {
  event_.Stop();
  if (sles_recorder_sbq_itf_) {
    // Release all buffers currently queued up.
    OPENSL_RETURN_ON_FAILURE(
        (*sles_recorder_sbq_itf_)->Clear(sles_recorder_sbq_itf_),
        VOID_RETURN);
    sles_recorder_sbq_itf_ = NULL;
  }
  sles_recorder_itf_ = NULL;

  if (sles_recorder_) {
    (*sles_recorder_)->Destroy(sles_recorder_);
    sles_recorder_ = NULL;
  }
}

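// Handles an overrun event signalled by the recorder callback. |event_msg|
// carries the running overrun count; once it reaches kNumOpenSlBuffers every
// buffer handed to OpenSL has been returned, so recording is stopped, all
// buffers are re-enqueued and recording is restarted from scratch.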
bool OpenSlesInput::HandleOverrun(int event_id, int event_msg) {
  if (!recording_) {
    return false;
  }
  if (event_id == kNoOverrun) {
    return false;
  }
  assert(event_id == kOverrun);
  assert(event_msg > 0);
  // Wait for all enqueued buffers to be flushed.
  if (event_msg != kNumOpenSlBuffers) {
    return true;
  }
  // All buffers passed to OpenSL have been flushed. Restart the audio from
  // scratch.
  // No need to check sles_recorder_itf_ as recording_ would be false before it
  // is set to NULL.
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                            SL_RECORDSTATE_STOPPED),
      true);
  EnqueueAllBuffers();
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                            SL_RECORDSTATE_RECORDING),
      true);
  return true;
}

void OpenSlesInput::RecorderSimpleBufferQueueCallback(
    SLAndroidSimpleBufferQueueItf queue_itf,
    void* context) {
  OpenSlesInput* audio_device = reinterpret_cast<OpenSlesInput*>(context);
  audio_device->RecorderSimpleBufferQueueCallbackHandler(queue_itf);
}

void OpenSlesInput::RecorderSimpleBufferQueueCallbackHandler(
    SLAndroidSimpleBufferQueueItf queue_itf) {
  if (fifo_->size() >= fifo_->capacity() || number_overruns_ > 0) {
    ++number_overruns_;
    event_.SignalEvent(kOverrun, number_overruns_);
    return;
  }
  int8_t* audio = rec_buf_[active_queue_].get();
  // There is at least one spot available in the fifo.
  fifo_->Push(audio);
  active_queue_ = (active_queue_ + 1) % TotalBuffersUsed();
  event_.SignalEvent(kNoOverrun, 0);
  // active_queue_ is indexing the next buffer to record to. Since the current
  // buffer has been recorded it means that the buffer index
  // kNumOpenSlBuffers - 1 past |active_queue_| contains the next free buffer.
  // Since |fifo_| wasn't at capacity, at least one buffer is free to be used.
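  // For example, with kNumOpenSlBuffers == 2 and TotalBuffersUsed() == 6, an
  // |active_queue_| of 3 here means buffer (3 + 2 - 1) % 6 == 4 is enqueued
  // next.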
  int next_free_buffer =
      (active_queue_ + kNumOpenSlBuffers - 1) % TotalBuffersUsed();
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_sbq_itf_)->Enqueue(
          sles_recorder_sbq_itf_,
          reinterpret_cast<void*>(rec_buf_[next_free_buffer].get()),
          buffer_size_bytes()),
      VOID_RETURN);
}

bool OpenSlesInput::StartCbThreads() {
  rec_thread_.reset(ThreadWrapper::CreateThread(CbThread,
                                                this,
                                                kRealtimePriority,
                                                "opensl_rec_thread"));
  assert(rec_thread_.get());
  unsigned int thread_id = 0;
  if (!rec_thread_->Start(thread_id)) {
    assert(false);
    return false;
  }
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                            SL_RECORDSTATE_RECORDING),
      false);
  return true;
}

void OpenSlesInput::StopCbThreads() {
  {
    CriticalSectionScoped lock(crit_sect_.get());
    recording_ = false;
  }
  if (sles_recorder_itf_) {
    OPENSL_RETURN_ON_FAILURE(
        (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                              SL_RECORDSTATE_STOPPED),
        VOID_RETURN);
  }
  if (rec_thread_.get() == NULL) {
    return;
  }
  event_.Stop();
  if (rec_thread_->Stop()) {
    rec_thread_.reset();
  } else {
    assert(false);
  }
}

bool OpenSlesInput::CbThread(void* context) {
  return reinterpret_cast<OpenSlesInput*>(context)->CbThreadImpl();
}

bool OpenSlesInput::CbThreadImpl() {
  int event_id;
  int event_msg;
  // event_ must not be waited on while a lock has been taken.
  event_.WaitOnEvent(&event_id, &event_msg);

  CriticalSectionScoped lock(crit_sect_.get());
  if (HandleOverrun(event_id, event_msg)) {
    return recording_;
  }
  // If the fifo_ has audio data process it.
  while (fifo_->size() > 0 && recording_) {
    int8_t* audio = fifo_->Pop();
    audio_buffer_->SetRecordedBuffer(audio, buffer_size_samples());
    audio_buffer_->SetVQEData(delay_provider_->PlayoutDelayMs(),
                              recording_delay_, 0);
    audio_buffer_->DeliverRecordedData();
  }
  return recording_;
}

}  // namespace webrtc