/*
 *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_device/android/opensles_input.h"

#include <assert.h>

#include "webrtc/modules/audio_device/android/audio_common.h"
#include "webrtc/modules/audio_device/android/opensles_common.h"
#include "webrtc/modules/audio_device/android/single_rw_fifo.h"
#include "webrtc/modules/audio_device/audio_device_buffer.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"

#define VOID_RETURN
#define OPENSL_RETURN_ON_FAILURE(op, ret_val)                    \
  do {                                                           \
    SLresult err = (op);                                         \
    if (err != SL_RESULT_SUCCESS) {                              \
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_,          \
                   "OpenSL error: %d", err);                     \
      assert(false);                                             \
      return ret_val;                                            \
    }                                                            \
  } while (0)

static const SLEngineOption kOption[] = {
  { SL_ENGINEOPTION_THREADSAFE, static_cast<SLuint32>(SL_BOOLEAN_TRUE) },
};

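// Event ids signaled from the OpenSL ES buffer queue callback to the worker
// thread (see OpenSlesInput::CbThreadImpl).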
enum {
  kNoOverrun,
  kOverrun,
};

namespace webrtc {

OpenSlesInput::OpenSlesInput(
    const int32_t id, PlayoutDelayProvider* delay_provider)
    : id_(id),
      delay_provider_(delay_provider),
      initialized_(false),
      mic_initialized_(false),
      rec_initialized_(false),
      crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
      recording_(false),
      num_fifo_buffers_needed_(0),
      number_overruns_(0),
      sles_engine_(NULL),
      sles_engine_itf_(NULL),
      sles_recorder_(NULL),
      sles_recorder_itf_(NULL),
      sles_recorder_sbq_itf_(NULL),
      audio_buffer_(NULL),
      active_queue_(0),
      rec_sampling_rate_(0),
      agc_enabled_(false),
      recording_delay_(0) {
}

OpenSlesInput::~OpenSlesInput() {
}

int32_t OpenSlesInput::SetAndroidAudioDeviceObjects(void* javaVM,
                                                    void* env,
                                                    void* context) {
  return 0;
}

void OpenSlesInput::ClearAndroidAudioDeviceObjects() {
}

int32_t OpenSlesInput::Init() {
  assert(!initialized_);

  // Set up OpenSL engine.
  OPENSL_RETURN_ON_FAILURE(slCreateEngine(&sles_engine_, 1, kOption, 0,
                                          NULL, NULL),
                           -1);
  OPENSL_RETURN_ON_FAILURE((*sles_engine_)->Realize(sles_engine_,
                                                    SL_BOOLEAN_FALSE),
                           -1);
  OPENSL_RETURN_ON_FAILURE((*sles_engine_)->GetInterface(sles_engine_,
                                                         SL_IID_ENGINE,
                                                         &sles_engine_itf_),
                           -1);

  if (InitSampleRate() != 0) {
    return -1;
  }
  AllocateBuffers();
  initialized_ = true;
  return 0;
}

int32_t OpenSlesInput::Terminate() {
  // It is assumed that the caller has stopped recording before terminating.
  assert(!recording_);
  (*sles_engine_)->Destroy(sles_engine_);
  initialized_ = false;
  mic_initialized_ = false;
  rec_initialized_ = false;
  return 0;
}

int32_t OpenSlesInput::RecordingDeviceName(uint16_t index,
                                           char name[kAdmMaxDeviceNameSize],
                                           char guid[kAdmMaxGuidSize]) {
  assert(index == 0);
  // Empty strings.
  name[0] = '\0';
  guid[0] = '\0';
  return 0;
}

int32_t OpenSlesInput::SetRecordingDevice(uint16_t index) {
  assert(index == 0);
  return 0;
}

int32_t OpenSlesInput::RecordingIsAvailable(bool& available) {  // NOLINT
  available = true;
  return 0;
}

int32_t OpenSlesInput::InitRecording() {
  assert(initialized_);
  rec_initialized_ = true;
  return 0;
}

int32_t OpenSlesInput::StartRecording() {
  assert(rec_initialized_);
  assert(!recording_);
  if (!CreateAudioRecorder()) {
    return -1;
  }
  // Setup to receive buffer queue event callbacks.
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_sbq_itf_)->RegisterCallback(
          sles_recorder_sbq_itf_,
          RecorderSimpleBufferQueueCallback,
          this),
      -1);

  if (!EnqueueAllBuffers()) {
    return -1;
  }

  {
    // Set |recording_| under the lock to prevent the compiler from e.g.
    // optimizing the code into recording_ = StartCbThreads(), which would not
    // have been thread safe.
    CriticalSectionScoped lock(crit_sect_.get());
    recording_ = true;
  }
  if (!StartCbThreads()) {
    recording_ = false;
    return -1;
  }
  return 0;
}

int32_t OpenSlesInput::StopRecording() {
  StopCbThreads();
  DestroyAudioRecorder();
  recording_ = false;
  return 0;
}

int32_t OpenSlesInput::SetAGC(bool enable) {
  agc_enabled_ = enable;
  return 0;
}

int32_t OpenSlesInput::InitMicrophone() {
  assert(initialized_);
  assert(!recording_);
  mic_initialized_ = true;
  return 0;
}

int32_t OpenSlesInput::MicrophoneVolumeIsAvailable(bool& available) {  // NOLINT
  available = false;
  return 0;
}

int32_t OpenSlesInput::MinMicrophoneVolume(
    uint32_t& minVolume) const {  // NOLINT
  minVolume = 0;
  return 0;
}

int32_t OpenSlesInput::MicrophoneVolumeStepSize(
    uint16_t& stepSize) const {
  stepSize = 1;
  return 0;
}

int32_t OpenSlesInput::MicrophoneMuteIsAvailable(bool& available) {  // NOLINT
  available = false;  // Mic mute not supported on Android
  return 0;
}

int32_t OpenSlesInput::MicrophoneBoostIsAvailable(bool& available) {  // NOLINT
  available = false;  // Mic boost not supported on Android.
  return 0;
}

int32_t OpenSlesInput::SetMicrophoneBoost(bool enable) {
  assert(false);
  return -1;  // Not supported
}

int32_t OpenSlesInput::MicrophoneBoost(bool& enabled) const {  // NOLINT
  assert(false);
  return -1;  // Not supported
}

int32_t OpenSlesInput::StereoRecordingIsAvailable(bool& available) {  // NOLINT
  available = false;  // Stereo recording not supported on Android.
  return 0;
}

int32_t OpenSlesInput::StereoRecording(bool& enabled) const {  // NOLINT
  enabled = false;
  return 0;
}

int32_t OpenSlesInput::RecordingDelay(uint16_t& delayMS) const {  // NOLINT
  delayMS = recording_delay_;
  return 0;
}

void OpenSlesInput::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
  audio_buffer_ = audioBuffer;
}

int OpenSlesInput::InitSampleRate() {
  UpdateSampleRate();
  audio_buffer_->SetRecordingSampleRate(rec_sampling_rate_);
  audio_buffer_->SetRecordingChannels(kNumChannels);
  UpdateRecordingDelay();
  return 0;
}

int OpenSlesInput::buffer_size_samples() const {
  // Since there is no low-latency recording, use a buffer size corresponding
  // to 10ms of data since that is the frame size WebRTC uses. Getting any
  // other size would require patching together buffers somewhere before
  // passing them to WebRTC.
  return rec_sampling_rate_ * 10 / 1000;
}

int OpenSlesInput::buffer_size_bytes() const {
  return buffer_size_samples() * kNumChannels * sizeof(int16_t);
}

void OpenSlesInput::UpdateRecordingDelay() {
  // TODO(hellner): Add accurate delay estimate.
  // On average half the current buffer will have been filled with audio.
  int outstanding_samples =
      (TotalBuffersUsed() - 0.5) * buffer_size_samples();
  recording_delay_ = outstanding_samples / (rec_sampling_rate_ / 1000);
}

void OpenSlesInput::UpdateSampleRate() {
  rec_sampling_rate_ = audio_manager_.low_latency_supported() ?
      audio_manager_.native_output_sample_rate() : kDefaultSampleRate;
}

void OpenSlesInput::CalculateNumFifoBuffersNeeded() {
  // Buffer size is 10ms of data.
  num_fifo_buffers_needed_ = kNum10MsToBuffer;
}

void OpenSlesInput::AllocateBuffers() {
  // Allocate FIFO to handle passing buffers between processing and OpenSL
  // threads.
  CalculateNumFifoBuffersNeeded();
  assert(num_fifo_buffers_needed_ > 0);
  fifo_.reset(new SingleRwFifo(num_fifo_buffers_needed_));

  // Allocate the memory area to be used.
  rec_buf_.reset(new scoped_ptr<int8_t[]>[TotalBuffersUsed()]);
  for (int i = 0; i < TotalBuffersUsed(); ++i) {
    rec_buf_[i].reset(new int8_t[buffer_size_bytes()]);
  }
}

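// Total number of 10ms buffers allocated: those that can be in flight in the
// OpenSL ES buffer queue plus those that can sit in |fifo_| waiting for the
// worker thread.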
int OpenSlesInput::TotalBuffersUsed() const {
  return num_fifo_buffers_needed_ + kNumOpenSlBuffers;
}

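// Zeroes the first kNumOpenSlBuffers recording buffers, hands them to the
// OpenSL ES buffer queue and drains any stale entries left in |fifo_|.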
bool OpenSlesInput::EnqueueAllBuffers() {
  active_queue_ = 0;
  number_overruns_ = 0;
  for (int i = 0; i < kNumOpenSlBuffers; ++i) {
    memset(rec_buf_[i].get(), 0, buffer_size_bytes());
    OPENSL_RETURN_ON_FAILURE(
        (*sles_recorder_sbq_itf_)->Enqueue(
            sles_recorder_sbq_itf_,
            reinterpret_cast<void*>(rec_buf_[i].get()),
            buffer_size_bytes()),
        false);
  }
  // In case of overrun the fifo will be at capacity. In case of first enqueue
  // no audio can have been returned yet, meaning the fifo must be empty. Any
  // other values are unexpected.
  assert(fifo_->size() == fifo_->capacity() ||
         fifo_->size() == 0);
  // OpenSL recording has been stopped. I.e. only this thread is touching
  // |fifo_|.
  while (fifo_->size() != 0) {
    // Clear the fifo.
    fifo_->Pop();
  }
  return true;
}

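// Creates the OpenSL ES recorder object with the default audio input as
// source and an Android simple buffer queue as sink, and requests the
// VOICE_COMMUNICATION recording preset.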
bool OpenSlesInput::CreateAudioRecorder() {
  if (!event_.Start()) {
    assert(false);
    return false;
  }
  SLDataLocator_IODevice micLocator = {
    SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT,
    SL_DEFAULTDEVICEID_AUDIOINPUT, NULL };
  SLDataSource audio_source = { &micLocator, NULL };

  SLDataLocator_AndroidSimpleBufferQueue simple_buf_queue = {
    SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
    static_cast<SLuint32>(TotalBuffersUsed())
  };
  SLDataFormat_PCM configuration =
      webrtc_opensl::CreatePcmConfiguration(rec_sampling_rate_);
  SLDataSink audio_sink = { &simple_buf_queue, &configuration };

  // The Android simple buffer queue and Android configuration interfaces are
  // needed for recording audio data. Note the interfaces still need to be
  // initialized. This only tells OpenSL ES that the interfaces will be needed
  // at some point.
  const SLInterfaceID id[kNumInterfaces] = {
    SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION };
  const SLboolean req[kNumInterfaces] = {
    SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
  OPENSL_RETURN_ON_FAILURE(
      (*sles_engine_itf_)->CreateAudioRecorder(sles_engine_itf_,
                                               &sles_recorder_,
                                               &audio_source,
                                               &audio_sink,
                                               kNumInterfaces,
                                               id,
                                               req),
      false);

  SLAndroidConfigurationItf recorder_config;
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_)->GetInterface(sles_recorder_,
                                      SL_IID_ANDROIDCONFIGURATION,
                                      &recorder_config),
      false);

  // Set the audio recorder configuration to
  // SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION, which ensures that we
  // use the main microphone tuned for audio communications.
  SLint32 stream_type = SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION;
  OPENSL_RETURN_ON_FAILURE(
      (*recorder_config)->SetConfiguration(recorder_config,
                                           SL_ANDROID_KEY_RECORDING_PRESET,
                                           &stream_type,
                                           sizeof(SLint32)),
      false);

  // Realize the recorder in synchronous mode.
  OPENSL_RETURN_ON_FAILURE((*sles_recorder_)->Realize(sles_recorder_,
                                                      SL_BOOLEAN_FALSE),
                           false);
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_RECORD,
                                      static_cast<void*>(&sles_recorder_itf_)),
      false);
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_)->GetInterface(
          sles_recorder_,
          SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
          static_cast<void*>(&sles_recorder_sbq_itf_)),
      false);
  return true;
}

void OpenSlesInput::DestroyAudioRecorder() {
  event_.Stop();
  if (sles_recorder_sbq_itf_) {
    // Release all buffers currently queued up.
    OPENSL_RETURN_ON_FAILURE(
        (*sles_recorder_sbq_itf_)->Clear(sles_recorder_sbq_itf_),
        VOID_RETURN);
    sles_recorder_sbq_itf_ = NULL;
  }
  sles_recorder_itf_ = NULL;

  if (sles_recorder_) {
    (*sles_recorder_)->Destroy(sles_recorder_);
    sles_recorder_ = NULL;
  }
}

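// Returns true if an overrun was signaled and is being handled; in that case
// the caller skips the normal FIFO processing for this event.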
bool OpenSlesInput::HandleOverrun(int event_id, int event_msg) {
  if (!recording_) {
    return false;
  }
  if (event_id == kNoOverrun) {
    return false;
  }
  WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, id_, "Audio overrun");
  assert(event_id == kOverrun);
  assert(event_msg > 0);
  // Wait for all enqueued buffers to be flushed.
  if (event_msg != kNumOpenSlBuffers) {
    return true;
  }
  // All buffers passed to OpenSL have been flushed. Restart the audio from
  // scratch.
  // No need to check sles_recorder_itf_ as recording_ would be false before it
  // is set to NULL.
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                            SL_RECORDSTATE_STOPPED),
      true);
  EnqueueAllBuffers();
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                            SL_RECORDSTATE_RECORDING),
      true);
  return true;
}

void OpenSlesInput::RecorderSimpleBufferQueueCallback(
    SLAndroidSimpleBufferQueueItf queue_itf,
    void* context) {
  OpenSlesInput* audio_device = reinterpret_cast<OpenSlesInput*>(context);
  audio_device->RecorderSimpleBufferQueueCallbackHandler(queue_itf);
}

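// Called on the OpenSL ES internal audio thread each time a buffer has been
// filled with audio. It does not block: it only pushes the filled buffer to
// the lock-free |fifo_| (or counts an overrun) and signals the worker thread.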
void OpenSlesInput::RecorderSimpleBufferQueueCallbackHandler(
    SLAndroidSimpleBufferQueueItf queue_itf) {
  if (fifo_->size() >= fifo_->capacity() || number_overruns_ > 0) {
    ++number_overruns_;
    event_.SignalEvent(kOverrun, number_overruns_);
    return;
  }
  int8_t* audio = rec_buf_[active_queue_].get();
  // There is at least one spot available in the fifo.
  fifo_->Push(audio);
  active_queue_ = (active_queue_ + 1) % TotalBuffersUsed();
  event_.SignalEvent(kNoOverrun, 0);
  // active_queue_ is indexing the next buffer to record to. Since the current
  // buffer has been recorded it means that the buffer index
  // kNumOpenSlBuffers - 1 past |active_queue_| contains the next free buffer.
  // Since |fifo_| wasn't at capacity, at least one buffer is free to be used.
  int next_free_buffer =
      (active_queue_ + kNumOpenSlBuffers - 1) % TotalBuffersUsed();
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_sbq_itf_)->Enqueue(
          sles_recorder_sbq_itf_,
          reinterpret_cast<void*>(rec_buf_[next_free_buffer].get()),
          buffer_size_bytes()),
      VOID_RETURN);
}

bool OpenSlesInput::StartCbThreads() {
  rec_thread_.reset(ThreadWrapper::CreateThread(CbThread,
                                                this,
                                                kRealtimePriority,
                                                "opensl_rec_thread"));
  assert(rec_thread_.get());
  unsigned int thread_id = 0;
  if (!rec_thread_->Start(thread_id)) {
    assert(false);
    return false;
  }
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                            SL_RECORDSTATE_RECORDING),
      false);
  return true;
}

void OpenSlesInput::StopCbThreads() {
  {
    CriticalSectionScoped lock(crit_sect_.get());
    recording_ = false;
  }
  if (sles_recorder_itf_) {
    OPENSL_RETURN_ON_FAILURE(
        (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                              SL_RECORDSTATE_STOPPED),
        VOID_RETURN);
  }
  if (rec_thread_.get() == NULL) {
    return;
  }
  event_.Stop();
  if (rec_thread_->Stop()) {
    rec_thread_.reset();
  } else {
    assert(false);
  }
}

bool OpenSlesInput::CbThread(void* context) {
  return reinterpret_cast<OpenSlesInput*>(context)->CbThreadImpl();
}

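// Worker thread loop body: waits for an event from the OpenSL ES callback,
// handles overruns if needed, and otherwise delivers the 10ms buffers queued
// in |fifo_| to |audio_buffer_|. Returns |recording_| so the loop exits once
// recording has been stopped.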
bool OpenSlesInput::CbThreadImpl() {
  int event_id;
  int event_msg;
  // event_ must not be waited on while a lock has been taken.
  event_.WaitOnEvent(&event_id, &event_msg);

  CriticalSectionScoped lock(crit_sect_.get());
  if (HandleOverrun(event_id, event_msg)) {
    return recording_;
  }
  // If |fifo_| has audio data, process it.
  while (fifo_->size() > 0 && recording_) {
    int8_t* audio = fifo_->Pop();
    audio_buffer_->SetRecordedBuffer(audio, buffer_size_samples());
    audio_buffer_->SetVQEData(delay_provider_->PlayoutDelayMs(),
                              recording_delay_, 0);
    audio_buffer_->DeliverRecordedData();
  }
  return recording_;
}

}  // namespace webrtc