blob: e276fcc5230b327c8d1de9f7ed5cfb62f580ba61 [file] [log] [blame]
henrike@webrtc.org82f014a2013-09-10 18:24:07 +00001/*
2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11#include "webrtc/modules/audio_device/android/opensles_input.h"
12
13#include <assert.h>
14
henrike@webrtc.org9ee75e92013-12-11 21:42:44 +000015#include "webrtc/modules/audio_device/android/audio_common.h"
16#include "webrtc/modules/audio_device/android/opensles_common.h"
henrike@webrtc.org82f014a2013-09-10 18:24:07 +000017#include "webrtc/modules/audio_device/android/single_rw_fifo.h"
18#include "webrtc/modules/audio_device/audio_device_buffer.h"
19#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
20#include "webrtc/system_wrappers/interface/thread_wrapper.h"
21#include "webrtc/system_wrappers/interface/trace.h"
22
// Used as |ret_val| in the macro below for functions that return void.
#define VOID_RETURN
// Evaluates |op| (an OpenSL ES call). On failure: traces the error code,
// asserts in debug builds, and returns |ret_val| from the enclosing function.
#define OPENSL_RETURN_ON_FAILURE(op, ret_val)                    \
  do {                                                           \
    SLresult err = (op);                                         \
    if (err != SL_RESULT_SUCCESS) {                              \
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_,          \
                   "OpenSL error: %d", err);                     \
      assert(false);                                             \
      return ret_val;                                            \
    }                                                            \
  } while (0)

// Request a thread-safe engine: it is touched from both the OpenSL callback
// thread and the processing thread.
static const SLEngineOption kOption[] = {
  { SL_ENGINEOPTION_THREADSAFE, static_cast<SLuint32>(SL_BOOLEAN_TRUE) },
};

// Event ids passed to the processing thread via |event_|.
enum {
  kNoOverrun,  // A buffer was delivered normally.
  kOverrun,    // The fifo was full; incoming audio was dropped.
};
43
44namespace webrtc {
45
// Constructs the input device in an uninitialized state. |id| is used for
// tracing only; |delay_provider| (not owned) supplies the playout delay that
// is forwarded with each recorded buffer. Init() must succeed before use.
OpenSlesInput::OpenSlesInput(
    const int32_t id, PlayoutDelayProvider* delay_provider)
    : id_(id),
      delay_provider_(delay_provider),
      initialized_(false),
      mic_initialized_(false),
      rec_initialized_(false),
      crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
      recording_(false),
      num_fifo_buffers_needed_(0),
      number_overruns_(0),
      sles_engine_(NULL),
      sles_engine_itf_(NULL),
      sles_recorder_(NULL),
      sles_recorder_itf_(NULL),
      sles_recorder_sbq_itf_(NULL),
      audio_buffer_(NULL),
      active_queue_(0),
      rec_sampling_rate_(0),
      agc_enabled_(false),
      recording_delay_(0) {
}
68
69OpenSlesInput::~OpenSlesInput() {
70}
71
henrike@webrtc.org9ee75e92013-12-11 21:42:44 +000072int32_t OpenSlesInput::SetAndroidAudioDeviceObjects(void* javaVM,
73 void* env,
74 void* context) {
75 return 0;
76}
77
// Creates and realizes the OpenSL engine, caches its SLEngineItf, determines
// the recording sample rate and allocates the recording buffers.
// Returns 0 on success, -1 on any OpenSL failure. Must be called exactly once
// before any other recording API (asserted).
int32_t OpenSlesInput::Init() {
  assert(!initialized_);

  // Set up OpenSL engine.
  OPENSL_RETURN_ON_FAILURE(slCreateEngine(&sles_engine_, 1, kOption, 0,
                                          NULL, NULL),
                           -1);
  // Realize in synchronous mode (SL_BOOLEAN_FALSE = not async).
  OPENSL_RETURN_ON_FAILURE((*sles_engine_)->Realize(sles_engine_,
                                                    SL_BOOLEAN_FALSE),
                           -1);
  OPENSL_RETURN_ON_FAILURE((*sles_engine_)->GetInterface(sles_engine_,
                                                         SL_IID_ENGINE,
                                                         &sles_engine_itf_),
                           -1);
  // Sample rate must be known before buffers are sized and allocated.
  if (InitSampleRate() != 0) {
    return -1;
  }
  AllocateBuffers();
  initialized_ = true;
  return 0;
}
100
101int32_t OpenSlesInput::Terminate() {
102 // It is assumed that the caller has stopped recording before terminating.
103 assert(!recording_);
104 (*sles_engine_)->Destroy(sles_engine_);
105 initialized_ = false;
106 mic_initialized_ = false;
107 rec_initialized_ = false;
108 return 0;
109}
110
111int32_t OpenSlesInput::RecordingDeviceName(uint16_t index,
112 char name[kAdmMaxDeviceNameSize],
113 char guid[kAdmMaxGuidSize]) {
114 assert(index == 0);
115 // Empty strings.
116 name[0] = '\0';
117 guid[0] = '\0';
118 return 0;
119}
120
121int32_t OpenSlesInput::SetRecordingDevice(uint16_t index) {
122 assert(index == 0);
123 return 0;
124}
125
126int32_t OpenSlesInput::RecordingIsAvailable(bool& available) { // NOLINT
127 available = true;
128 return 0;
129}
130
131int32_t OpenSlesInput::InitRecording() {
132 assert(initialized_);
henrike@webrtc.org82f014a2013-09-10 18:24:07 +0000133 rec_initialized_ = true;
134 return 0;
135}
136
// Creates the OpenSL recorder, registers the buffer-queue callback,
// pre-enqueues all recording buffers, then flips |recording_| under the lock
// and starts the processing thread. Returns 0 on success, -1 on failure.
// Requires InitRecording() first (asserted).
int32_t OpenSlesInput::StartRecording() {
  assert(rec_initialized_);
  assert(!recording_);
  if (!CreateAudioRecorder()) {
    return -1;
  }
  // Setup to receive buffer queue event callbacks.
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_sbq_itf_)->RegisterCallback(
          sles_recorder_sbq_itf_,
          RecorderSimpleBufferQueueCallback,
          this),
      -1);

  if (!EnqueueAllBuffers()) {
    return -1;
  }

  {
    // To prevent the compiler from e.g. optimizing the code to
    // recording_ = StartCbThreads() which wouldn't have been thread safe.
    CriticalSectionScoped lock(crit_sect_.get());
    recording_ = true;
  }
  if (!StartCbThreads()) {
    // Roll back: the processing thread never started, so no one else can be
    // reading |recording_| here.
    recording_ = false;
    return -1;
  }
  return 0;
}
167
// Stops the processing thread first (it owns the OpenSL record state), then
// tears down the recorder object. Always returns 0.
int32_t OpenSlesInput::StopRecording() {
  StopCbThreads();
  DestroyAudioRecorder();
  recording_ = false;
  return 0;
}
174
175int32_t OpenSlesInput::SetAGC(bool enable) {
176 agc_enabled_ = enable;
177 return 0;
178}
179
180int32_t OpenSlesInput::MicrophoneIsAvailable(bool& available) { // NOLINT
181 available = true;
182 return 0;
183}
184
185int32_t OpenSlesInput::InitMicrophone() {
186 assert(initialized_);
187 assert(!recording_);
188 mic_initialized_ = true;
189 return 0;
190}
191
192int32_t OpenSlesInput::MicrophoneVolumeIsAvailable(bool& available) { // NOLINT
193 available = false;
194 return 0;
195}
196
197int32_t OpenSlesInput::MinMicrophoneVolume(
198 uint32_t& minVolume) const { // NOLINT
199 minVolume = 0;
200 return 0;
201}
202
203int32_t OpenSlesInput::MicrophoneVolumeStepSize(
204 uint16_t& stepSize) const {
205 stepSize = 1;
206 return 0;
207}
208
209int32_t OpenSlesInput::MicrophoneMuteIsAvailable(bool& available) { // NOLINT
210 available = false; // Mic mute not supported on Android
211 return 0;
212}
213
214int32_t OpenSlesInput::MicrophoneBoostIsAvailable(bool& available) { // NOLINT
215 available = false; // Mic boost not supported on Android.
216 return 0;
217}
218
219int32_t OpenSlesInput::SetMicrophoneBoost(bool enable) {
220 assert(false);
221 return -1; // Not supported
222}
223
224int32_t OpenSlesInput::MicrophoneBoost(bool& enabled) const { // NOLINT
225 assert(false);
226 return -1; // Not supported
227}
228
229int32_t OpenSlesInput::StereoRecordingIsAvailable(bool& available) { // NOLINT
230 available = false; // Stereo recording not supported on Android.
231 return 0;
232}
233
234int32_t OpenSlesInput::StereoRecording(bool& enabled) const { // NOLINT
235 enabled = false;
236 return 0;
237}
238
239int32_t OpenSlesInput::RecordingDelay(uint16_t& delayMS) const { // NOLINT
240 delayMS = recording_delay_;
241 return 0;
242}
243
244void OpenSlesInput::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
245 audio_buffer_ = audioBuffer;
246}
247
// Chooses the recording sample rate and pushes the rate/channel count into
// the attached AudioDeviceBuffer, then seeds the initial delay estimate.
// Returns 0. |audio_buffer_| must already be attached.
int OpenSlesInput::InitSampleRate() {
  UpdateSampleRate();
  audio_buffer_->SetRecordingSampleRate(rec_sampling_rate_);
  audio_buffer_->SetRecordingChannels(kNumChannels);
  UpdateRecordingDelay();
  return 0;
}
255
henrike@webrtc.orgc8dea6a2013-09-17 18:44:51 +0000256int OpenSlesInput::buffer_size_samples() const {
257 // Since there is no low latency recording, use buffer size corresponding to
258 // 10ms of data since that's the framesize WebRTC uses. Getting any other
259 // size would require patching together buffers somewhere before passing them
260 // to WebRTC.
261 return rec_sampling_rate_ * 10 / 1000;
262}
263
264int OpenSlesInput::buffer_size_bytes() const {
265 return buffer_size_samples() * kNumChannels * sizeof(int16_t);
266}
267
// Recomputes |recording_delay_| (in ms) from the total amount of buffered
// audio.
void OpenSlesInput::UpdateRecordingDelay() {
  // TODO(hellner): Add accurate delay estimate.
  // On average half the current buffer will have been filled with audio.
  // The 0.5 keeps this a floating-point expression before truncation to int.
  int outstanding_samples =
      (TotalBuffersUsed() - 0.5) * buffer_size_samples();
  // Samples-per-millisecond converts the outstanding sample count to ms.
  recording_delay_ = outstanding_samples / (rec_sampling_rate_ / 1000);
}
275
276void OpenSlesInput::UpdateSampleRate() {
277 rec_sampling_rate_ = audio_manager_.low_latency_supported() ?
278 audio_manager_.native_output_sample_rate() : kDefaultSampleRate;
henrike@webrtc.org82f014a2013-09-10 18:24:07 +0000279}
280
281void OpenSlesInput::CalculateNumFifoBuffersNeeded() {
282 // Buffer size is 10ms of data.
283 num_fifo_buffers_needed_ = kNum10MsToBuffer;
284}
285
// Allocates the fifo used to pass filled buffers from the OpenSL callback
// thread to the processing thread, plus the backing memory for every
// recording buffer (fifo slots + buffers owned by OpenSL at any time).
void OpenSlesInput::AllocateBuffers() {
  // Allocate FIFO to handle passing buffers between processing and OpenSL
  // threads.
  CalculateNumFifoBuffersNeeded();
  assert(num_fifo_buffers_needed_ > 0);
  fifo_.reset(new SingleRwFifo(num_fifo_buffers_needed_));

  // Allocate the memory area to be used.
  rec_buf_.reset(new scoped_array<int8_t>[TotalBuffersUsed()]);
  for (int i = 0; i < TotalBuffersUsed(); ++i) {
    rec_buf_[i].reset(new int8_t[buffer_size_bytes()]);
  }
}
299
300int OpenSlesInput::TotalBuffersUsed() const {
301 return num_fifo_buffers_needed_ + kNumOpenSlBuffers;
302}
303
// Resets bookkeeping, zeroes and enqueues every OpenSL-owned buffer, and
// drains |fifo_|. Returns false on an OpenSL enqueue failure. Must only be
// called while OpenSL recording is stopped (no callback thread activity).
bool OpenSlesInput::EnqueueAllBuffers() {
  active_queue_ = 0;
  number_overruns_ = 0;
  for (int i = 0; i < kNumOpenSlBuffers; ++i) {
    // Zero the buffer so stale audio is never delivered.
    memset(rec_buf_[i].get(), 0, buffer_size_bytes());
    OPENSL_RETURN_ON_FAILURE(
        (*sles_recorder_sbq_itf_)->Enqueue(
            sles_recorder_sbq_itf_,
            reinterpret_cast<void*>(rec_buf_[i].get()),
            buffer_size_bytes()),
        false);
  }
  // In case of underrun the fifo will be at capacity. In case of first enqueue
  // no audio can have been returned yet meaning fifo must be empty. Any other
  // values are unexpected.
  assert(fifo_->size() == fifo_->capacity() ||
         fifo_->size() == 0);
  // OpenSL recording has been stopped. I.e. only this thread is touching
  // |fifo_|.
  while (fifo_->size() != 0) {
    // Clear the fifo.
    fifo_->Pop();
  }
  return true;
}
329
// Builds the OpenSL recorder: mic input source -> Android simple buffer
// queue sink, realizes it, and caches the record and buffer-queue
// interfaces. Also starts the event handler used for thread signaling.
// Returns false on any failure.
bool OpenSlesInput::CreateAudioRecorder() {
  if (!event_.Start()) {
    assert(false);
    return false;
  }
  // Audio source: the default audio input device (microphone).
  SLDataLocator_IODevice micLocator = {
    SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT,
    SL_DEFAULTDEVICEID_AUDIOINPUT, NULL };
  SLDataSource audio_source = { &micLocator, NULL };

  // Audio sink: a buffer queue large enough for every buffer we own.
  SLDataLocator_AndroidSimpleBufferQueue simple_buf_queue = {
    SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
    static_cast<SLuint32>(TotalBuffersUsed())
  };
  SLDataFormat_PCM configuration =
      webrtc_opensl::CreatePcmConfiguration(rec_sampling_rate_);
  SLDataSink audio_sink = { &simple_buf_queue, &configuration };

  // Interfaces for recording android audio data and Android are needed.
  // Note the interfaces still need to be initialized. This only tells OpenSl
  // that the interfaces will be needed at some point.
  const SLInterfaceID id[kNumInterfaces] = {
    SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION };
  const SLboolean req[kNumInterfaces] = {
    SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
  OPENSL_RETURN_ON_FAILURE(
      (*sles_engine_itf_)->CreateAudioRecorder(sles_engine_itf_,
                                               &sles_recorder_,
                                               &audio_source,
                                               &audio_sink,
                                               kNumInterfaces,
                                               id,
                                               req),
      false);

  // Realize the recorder in synchronous mode.
  OPENSL_RETURN_ON_FAILURE((*sles_recorder_)->Realize(sles_recorder_,
                                                      SL_BOOLEAN_FALSE),
                           false);
  // Cache the record-state and buffer-queue interfaces for later use.
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_RECORD,
                                      static_cast<void*>(&sles_recorder_itf_)),
      false);
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_)->GetInterface(
          sles_recorder_,
          SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
          static_cast<void*>(&sles_recorder_sbq_itf_)),
      false);
  return true;
}
381
// Stops event handling, clears any buffers still queued in OpenSL and
// destroys the recorder object. The cached interfaces are reset since they
// are owned by the destroyed recorder. Safe to call when no recorder exists.
void OpenSlesInput::DestroyAudioRecorder() {
  event_.Stop();
  if (sles_recorder_sbq_itf_) {
    // Release all buffers currently queued up.
    OPENSL_RETURN_ON_FAILURE(
        (*sles_recorder_sbq_itf_)->Clear(sles_recorder_sbq_itf_),
        VOID_RETURN);
    sles_recorder_sbq_itf_ = NULL;
  }
  sles_recorder_itf_ = NULL;

  if (sles_recorder_) {
    (*sles_recorder_)->Destroy(sles_recorder_);
    sles_recorder_ = NULL;
  }
}
398
// Handles an overrun event from the callback thread. Returns true if the
// event was an overrun (i.e. the caller should not process fifo data this
// iteration), false otherwise. Once all OpenSL buffers have drained
// (|event_msg| == kNumOpenSlBuffers) recording is stopped, all buffers are
// re-enqueued and recording is restarted from scratch.
bool OpenSlesInput::HandleOverrun(int event_id, int event_msg) {
  if (!recording_) {
    return false;
  }
  if (event_id == kNoOverrun) {
    return false;
  }
  WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, id_, "Audio overrun");
  assert(event_id == kOverrun);
  assert(event_msg > 0);
  // Wait for all enqueued buffers to be flushed.
  if (event_msg != kNumOpenSlBuffers) {
    return true;
  }
  // All buffers passed to OpenSL have been flushed. Restart the audio from
  // scratch.
  // No need to check sles_recorder_itf_ as recording_ would be false before it
  // is set to NULL.
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                            SL_RECORDSTATE_STOPPED),
      true);
  EnqueueAllBuffers();
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                            SL_RECORDSTATE_RECORDING),
      true);
  return true;
}
428
429void OpenSlesInput::RecorderSimpleBufferQueueCallback(
430 SLAndroidSimpleBufferQueueItf queue_itf,
431 void* context) {
432 OpenSlesInput* audio_device = reinterpret_cast<OpenSlesInput*>(context);
433 audio_device->RecorderSimpleBufferQueueCallbackHandler(queue_itf);
434}
435
// Runs on the OpenSL callback thread each time a buffer has been filled.
// Pushes the filled buffer into |fifo_| (or counts an overrun if full),
// re-enqueues the next free buffer and signals the processing thread.
void OpenSlesInput::RecorderSimpleBufferQueueCallbackHandler(
    SLAndroidSimpleBufferQueueItf queue_itf) {
  // Once an overrun has started, keep counting until recovery completes
  // (number_overruns_ is reset by EnqueueAllBuffers()).
  if (fifo_->size() >= fifo_->capacity() || number_overruns_ > 0) {
    ++number_overruns_;
    event_.SignalEvent(kOverrun, number_overruns_);
    return;
  }
  int8_t* audio = rec_buf_[active_queue_].get();
  // There is at least one spot available in the fifo.
  fifo_->Push(audio);
  active_queue_ = (active_queue_ + 1) % TotalBuffersUsed();
  event_.SignalEvent(kNoOverrun, 0);
  // active_queue_ is indexing the next buffer to record to. Since the current
  // buffer has been recorded it means that the buffer index
  // kNumOpenSlBuffers - 1 past |active_queue_| contains the next free buffer.
  // Since |fifo_| wasn't at capacity, at least one buffer is free to be used.
  int next_free_buffer =
      (active_queue_ + kNumOpenSlBuffers - 1) % TotalBuffersUsed();
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_sbq_itf_)->Enqueue(
          sles_recorder_sbq_itf_,
          reinterpret_cast<void*>(rec_buf_[next_free_buffer].get()),
          buffer_size_bytes()),
      VOID_RETURN);
}
461
// Starts the realtime-priority processing thread and then puts the OpenSL
// recorder into the recording state. Returns false on failure.
bool OpenSlesInput::StartCbThreads() {
  rec_thread_.reset(ThreadWrapper::CreateThread(CbThread,
                                                this,
                                                kRealtimePriority,
                                                "opensl_rec_thread"));
  assert(rec_thread_.get());
  unsigned int thread_id = 0;
  if (!rec_thread_->Start(thread_id)) {
    assert(false);
    return false;
  }
  // Only after the consumer thread runs do we let OpenSL produce data.
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                            SL_RECORDSTATE_RECORDING),
      false);
  return true;
}
479
// Clears |recording_| under the lock (so CbThreadImpl exits), stops OpenSL
// recording, then wakes and joins the processing thread.
void OpenSlesInput::StopCbThreads() {
  {
    CriticalSectionScoped lock(crit_sect_.get());
    recording_ = false;
  }
  if (sles_recorder_itf_) {
    OPENSL_RETURN_ON_FAILURE(
        (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                              SL_RECORDSTATE_STOPPED),
        VOID_RETURN);
  }
  if (rec_thread_.get() == NULL) {
    return;
  }
  // Stop the event so the thread is not stuck waiting in WaitOnEvent().
  event_.Stop();
  if (rec_thread_->Stop()) {
    rec_thread_.reset();
  } else {
    assert(false);
  }
}
501
502bool OpenSlesInput::CbThread(void* context) {
503 return reinterpret_cast<OpenSlesInput*>(context)->CbThreadImpl();
504}
505
// One iteration of the processing thread: wait for a signal from the OpenSL
// callback, handle a possible overrun, otherwise drain the fifo and deliver
// each 10ms buffer to WebRTC. Returns |recording_| so the thread loop exits
// once recording stops.
bool OpenSlesInput::CbThreadImpl() {
  int event_id;
  int event_msg;
  // event_ must not be waited on while a lock has been taken.
  event_.WaitOnEvent(&event_id, &event_msg);

  CriticalSectionScoped lock(crit_sect_.get());
  if (HandleOverrun(event_id, event_msg)) {
    return recording_;
  }
  // If the fifo_ has audio data process it.
  while (fifo_->size() > 0 && recording_) {
    int8_t* audio = fifo_->Pop();
    audio_buffer_->SetRecordedBuffer(audio, buffer_size_samples());
    // Attach the current playout delay and recording delay for AEC use.
    audio_buffer_->SetVQEData(delay_provider_->PlayoutDelayMs(),
                              recording_delay_, 0);
    audio_buffer_->DeliverRecordedData();
  }
  return recording_;
}
526
527} // namespace webrtc