/*
 *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_device/android/opensles_input.h"

#include <assert.h>

#include "webrtc/modules/audio_device/android/single_rw_fifo.h"
#include "webrtc/modules/audio_device/audio_device_buffer.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"

using webrtc_opensl::kDefaultSampleRate;
using webrtc_opensl::kNumChannels;

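// OPENSL_RETURN_ON_FAILURE evaluates |op| and, if the result is not
// SL_RESULT_SUCCESS, logs the OpenSL error, asserts in debug builds and makes
// the calling function return |ret_val|. VOID_RETURN is passed as |ret_val|
// from functions that return void.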
#define VOID_RETURN
#define OPENSL_RETURN_ON_FAILURE(op, ret_val)                    \
  do {                                                           \
    SLresult err = (op);                                         \
    if (err != SL_RESULT_SUCCESS) {                              \
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_,          \
                   "OpenSL error: %d", err);                     \
      assert(false);                                             \
      return ret_val;                                            \
    }                                                            \
  } while (0)

static const SLEngineOption kOption[] = {
  { SL_ENGINEOPTION_THREADSAFE, static_cast<SLuint32>(SL_BOOLEAN_TRUE) },
};

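// Event ids signalled through |event_| by the OpenSL buffer queue callback
// and consumed on the recording thread (see HandleOverrun()).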
enum {
  kNoOverrun,
  kOverrun,
};

namespace webrtc {

OpenSlesInput::OpenSlesInput(
    const int32_t id,
    webrtc_opensl::PlayoutDelayProvider* delay_provider)
    : id_(id),
      delay_provider_(delay_provider),
      initialized_(false),
      mic_initialized_(false),
      rec_initialized_(false),
      crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
      recording_(false),
      num_fifo_buffers_needed_(0),
      number_overruns_(0),
      sles_engine_(NULL),
      sles_engine_itf_(NULL),
      sles_recorder_(NULL),
      sles_recorder_itf_(NULL),
      sles_recorder_sbq_itf_(NULL),
      audio_buffer_(NULL),
      active_queue_(0),
      rec_sampling_rate_(0),
      agc_enabled_(false),
      recording_delay_(0) {
}

OpenSlesInput::~OpenSlesInput() {
}

int32_t OpenSlesInput::Init() {
  assert(!initialized_);

  // Set up OpenSL engine.
  OPENSL_RETURN_ON_FAILURE(slCreateEngine(&sles_engine_, 1, kOption, 0,
                                          NULL, NULL),
                           -1);
  OPENSL_RETURN_ON_FAILURE((*sles_engine_)->Realize(sles_engine_,
                                                    SL_BOOLEAN_FALSE),
                           -1);
  OPENSL_RETURN_ON_FAILURE((*sles_engine_)->GetInterface(sles_engine_,
                                                         SL_IID_ENGINE,
                                                         &sles_engine_itf_),
                           -1);

  if (InitSampleRate() != 0) {
    return -1;
  }
  AllocateBuffers();
  initialized_ = true;
  return 0;
}

int32_t OpenSlesInput::Terminate() {
  // It is assumed that the caller has stopped recording before terminating.
  assert(!recording_);
  (*sles_engine_)->Destroy(sles_engine_);
  initialized_ = false;
  mic_initialized_ = false;
  rec_initialized_ = false;
  return 0;
}

int32_t OpenSlesInput::RecordingDeviceName(uint16_t index,
                                           char name[kAdmMaxDeviceNameSize],
                                           char guid[kAdmMaxGuidSize]) {
  assert(index == 0);
  // Empty strings.
  name[0] = '\0';
  guid[0] = '\0';
  return 0;
}

int32_t OpenSlesInput::SetRecordingDevice(uint16_t index) {
  assert(index == 0);
  return 0;
}

int32_t OpenSlesInput::RecordingIsAvailable(bool& available) {  // NOLINT
  available = true;
  return 0;
}

int32_t OpenSlesInput::InitRecording() {
  assert(initialized_);
  rec_initialized_ = true;
  return 0;
}

int32_t OpenSlesInput::StartRecording() {
  assert(rec_initialized_);
  assert(!recording_);
  if (!CreateAudioRecorder()) {
    return -1;
  }
  // Register to receive buffer queue event callbacks.
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_sbq_itf_)->RegisterCallback(
          sles_recorder_sbq_itf_,
          RecorderSimpleBufferQueueCallback,
          this),
      -1);

  if (!EnqueueAllBuffers()) {
    return -1;
  }

  {
    // To prevent the compiler from e.g. optimizing the code to
    // recording_ = StartCbThreads(), which wouldn't have been thread safe.
    CriticalSectionScoped lock(crit_sect_.get());
    recording_ = true;
  }
  if (!StartCbThreads()) {
    recording_ = false;
    return -1;
  }
  return 0;
}

int32_t OpenSlesInput::StopRecording() {
  StopCbThreads();
  DestroyAudioRecorder();
  recording_ = false;
  return 0;
}

int32_t OpenSlesInput::SetAGC(bool enable) {
  agc_enabled_ = enable;
  return 0;
}

int32_t OpenSlesInput::MicrophoneIsAvailable(bool& available) {  // NOLINT
  available = true;
  return 0;
}

int32_t OpenSlesInput::InitMicrophone() {
  assert(initialized_);
  assert(!recording_);
  mic_initialized_ = true;
  return 0;
}

int32_t OpenSlesInput::MicrophoneVolumeIsAvailable(bool& available) {  // NOLINT
  available = false;
  return 0;
}

int32_t OpenSlesInput::MinMicrophoneVolume(
    uint32_t& minVolume) const {  // NOLINT
  minVolume = 0;
  return 0;
}

int32_t OpenSlesInput::MicrophoneVolumeStepSize(
    uint16_t& stepSize) const {
  stepSize = 1;
  return 0;
}

int32_t OpenSlesInput::MicrophoneMuteIsAvailable(bool& available) {  // NOLINT
  available = false;  // Mic mute not supported on Android.
  return 0;
}

int32_t OpenSlesInput::MicrophoneBoostIsAvailable(bool& available) {  // NOLINT
  available = false;  // Mic boost not supported on Android.
  return 0;
}

int32_t OpenSlesInput::SetMicrophoneBoost(bool enable) {
  assert(false);
  return -1;  // Not supported.
}

int32_t OpenSlesInput::MicrophoneBoost(bool& enabled) const {  // NOLINT
  assert(false);
  return -1;  // Not supported.
}

int32_t OpenSlesInput::StereoRecordingIsAvailable(bool& available) {  // NOLINT
  available = false;  // Stereo recording not supported on Android.
  return 0;
}

int32_t OpenSlesInput::StereoRecording(bool& enabled) const {  // NOLINT
  enabled = false;
  return 0;
}

int32_t OpenSlesInput::RecordingDelay(uint16_t& delayMS) const {  // NOLINT
  delayMS = recording_delay_;
  return 0;
}

void OpenSlesInput::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
  audio_buffer_ = audioBuffer;
}

int OpenSlesInput::InitSampleRate() {
  UpdateSampleRate();
  audio_buffer_->SetRecordingSampleRate(rec_sampling_rate_);
  audio_buffer_->SetRecordingChannels(kNumChannels);
  UpdateRecordingDelay();
  return 0;
}

int OpenSlesInput::buffer_size_samples() const {
  // Since there is no low latency recording, use a buffer size corresponding
  // to 10ms of data since that's the frame size WebRTC uses. Getting any other
  // size would require patching together buffers somewhere before passing them
  // to WebRTC.
  return rec_sampling_rate_ * 10 / 1000;
}

int OpenSlesInput::buffer_size_bytes() const {
  return buffer_size_samples() * kNumChannels * sizeof(int16_t);
}

void OpenSlesInput::UpdateRecordingDelay() {
  // TODO(hellner): Add accurate delay estimate.
  // On average half the current buffer will have been filled with audio.
  int outstanding_samples =
      (TotalBuffersUsed() - 0.5) * buffer_size_samples();
  recording_delay_ = outstanding_samples / (rec_sampling_rate_ / 1000);
}

void OpenSlesInput::UpdateSampleRate() {
  rec_sampling_rate_ = audio_manager_.low_latency_supported() ?
      audio_manager_.native_output_sample_rate() : kDefaultSampleRate;
}

void OpenSlesInput::CalculateNumFifoBuffersNeeded() {
  // Buffer size is 10ms of data.
  num_fifo_buffers_needed_ = kNum10MsToBuffer;
}

void OpenSlesInput::AllocateBuffers() {
  // Allocate FIFO to handle passing buffers between processing and OpenSL
  // threads.
  CalculateNumFifoBuffersNeeded();
  assert(num_fifo_buffers_needed_ > 0);
  fifo_.reset(new SingleRwFifo(num_fifo_buffers_needed_));

  // Allocate the memory area to be used.
  rec_buf_.reset(new scoped_array<int8_t>[TotalBuffersUsed()]);
  for (int i = 0; i < TotalBuffersUsed(); ++i) {
    rec_buf_[i].reset(new int8_t[buffer_size_bytes()]);
  }
}

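// Number of buffers allocated in |rec_buf_|: enough to cover both the buffers
// handed to OpenSL and the buffers queued up in |fifo_|.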
int OpenSlesInput::TotalBuffersUsed() const {
  return num_fifo_buffers_needed_ + kNumOpenSlBuffers;
}

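// Resets the recording position and hands all |kNumOpenSlBuffers| buffers
// (zero filled) back to OpenSL. Any stale entries in |fifo_| are dropped.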
bool OpenSlesInput::EnqueueAllBuffers() {
  active_queue_ = 0;
  number_overruns_ = 0;
  for (int i = 0; i < kNumOpenSlBuffers; ++i) {
    memset(rec_buf_[i].get(), 0, buffer_size_bytes());
    OPENSL_RETURN_ON_FAILURE(
        (*sles_recorder_sbq_itf_)->Enqueue(
            sles_recorder_sbq_itf_,
            reinterpret_cast<void*>(rec_buf_[i].get()),
            buffer_size_bytes()),
        false);
  }
  // In case of overrun the fifo will be at capacity. In case of first enqueue
  // no audio can have been returned yet, meaning the fifo must be empty. Any
  // other values are unexpected.
  assert(fifo_->size() == fifo_->capacity() ||
         fifo_->size() == 0);
  // OpenSL recording has been stopped, i.e. only this thread is touching
  // |fifo_|.
  while (fifo_->size() != 0) {
    // Clear the fifo.
    fifo_->Pop();
  }
  return true;
}

bool OpenSlesInput::CreateAudioRecorder() {
  if (!event_.Start()) {
    assert(false);
    return false;
  }
  SLDataLocator_IODevice micLocator = {
    SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT,
    SL_DEFAULTDEVICEID_AUDIOINPUT, NULL };
  SLDataSource audio_source = { &micLocator, NULL };

  SLDataLocator_AndroidSimpleBufferQueue simple_buf_queue = {
    SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
    static_cast<SLuint32>(TotalBuffersUsed())
  };
  SLDataFormat_PCM configuration =
      webrtc_opensl::CreatePcmConfiguration(rec_sampling_rate_);
  SLDataSink audio_sink = { &simple_buf_queue, &configuration };

  // The Android simple buffer queue and Android configuration interfaces are
  // needed. Note that the interfaces still need to be initialized. This only
  // tells OpenSL that the interfaces will be needed at some point.
  const SLInterfaceID id[kNumInterfaces] = {
    SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION };
  const SLboolean req[kNumInterfaces] = {
    SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
  OPENSL_RETURN_ON_FAILURE(
      (*sles_engine_itf_)->CreateAudioRecorder(sles_engine_itf_,
                                               &sles_recorder_,
                                               &audio_source,
                                               &audio_sink,
                                               kNumInterfaces,
                                               id,
                                               req),
      false);

  // Realize the recorder in synchronous mode.
  OPENSL_RETURN_ON_FAILURE((*sles_recorder_)->Realize(sles_recorder_,
                                                      SL_BOOLEAN_FALSE),
                           false);
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_RECORD,
                                      static_cast<void*>(&sles_recorder_itf_)),
      false);
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_)->GetInterface(
          sles_recorder_,
          SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
          static_cast<void*>(&sles_recorder_sbq_itf_)),
      false);
  return true;
}

void OpenSlesInput::DestroyAudioRecorder() {
  event_.Stop();
  if (sles_recorder_sbq_itf_) {
    // Release all buffers currently queued up.
    OPENSL_RETURN_ON_FAILURE(
        (*sles_recorder_sbq_itf_)->Clear(sles_recorder_sbq_itf_),
        VOID_RETURN);
    sles_recorder_sbq_itf_ = NULL;
  }
  sles_recorder_itf_ = NULL;

  if (sles_recorder_) {
    (*sles_recorder_)->Destroy(sles_recorder_);
    sles_recorder_ = NULL;
  }
}

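// Handles an overrun signalled by the buffer queue callback. Returns false if
// there was nothing to handle (not recording or no overrun). Once all
// |kNumOpenSlBuffers| buffers have been flushed back by OpenSL, recording is
// stopped, the buffers are re-enqueued and recording is restarted.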
bool OpenSlesInput::HandleOverrun(int event_id, int event_msg) {
  if (!recording_) {
    return false;
  }
  if (event_id == kNoOverrun) {
    return false;
  }
  WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, id_, "Audio overrun");
  assert(event_id == kOverrun);
  assert(event_msg > 0);
  // Wait for all enqueued buffers to be flushed.
  if (event_msg != kNumOpenSlBuffers) {
    return true;
  }
  // All buffers passed to OpenSL have been flushed. Restart the audio from
  // scratch.
  // No need to check sles_recorder_itf_ as recording_ would be false before it
  // is set to NULL.
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                            SL_RECORDSTATE_STOPPED),
      true);
  EnqueueAllBuffers();
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                            SL_RECORDSTATE_RECORDING),
      true);
  return true;
}

void OpenSlesInput::RecorderSimpleBufferQueueCallback(
    SLAndroidSimpleBufferQueueItf queue_itf,
    void* context) {
  OpenSlesInput* audio_device = reinterpret_cast<OpenSlesInput*>(context);
  audio_device->RecorderSimpleBufferQueueCallbackHandler(queue_itf);
}

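// Called on an internal OpenSL thread each time a recording buffer has been
// filled. The filled buffer is pushed onto |fifo_| and the next free buffer is
// enqueued; if |fifo_| is already at capacity an overrun is signalled instead.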
void OpenSlesInput::RecorderSimpleBufferQueueCallbackHandler(
    SLAndroidSimpleBufferQueueItf queue_itf) {
  if (fifo_->size() >= fifo_->capacity() || number_overruns_ > 0) {
    ++number_overruns_;
    event_.SignalEvent(kOverrun, number_overruns_);
    return;
  }
  int8_t* audio = rec_buf_[active_queue_].get();
  // There is at least one spot available in the fifo.
  fifo_->Push(audio);
  active_queue_ = (active_queue_ + 1) % TotalBuffersUsed();
  event_.SignalEvent(kNoOverrun, 0);
  // active_queue_ is indexing the next buffer to record to. Since the current
  // buffer has been recorded it means that the buffer index
  // kNumOpenSlBuffers - 1 past |active_queue_| contains the next free buffer.
  // Since |fifo_| wasn't at capacity, at least one buffer is free to be used.
  int next_free_buffer =
      (active_queue_ + kNumOpenSlBuffers - 1) % TotalBuffersUsed();
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_sbq_itf_)->Enqueue(
          sles_recorder_sbq_itf_,
          reinterpret_cast<void*>(rec_buf_[next_free_buffer].get()),
          buffer_size_bytes()),
      VOID_RETURN);
}

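// Starts the thread that delivers data from |fifo_| to WebRTC and puts the
// OpenSL recorder into the recording state.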
bool OpenSlesInput::StartCbThreads() {
  rec_thread_.reset(ThreadWrapper::CreateThread(CbThread,
                                                this,
                                                kRealtimePriority,
                                                "opensl_rec_thread"));
  assert(rec_thread_.get());
  unsigned int thread_id = 0;
  if (!rec_thread_->Start(thread_id)) {
    assert(false);
    return false;
  }
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                            SL_RECORDSTATE_RECORDING),
      false);
  return true;
}

void OpenSlesInput::StopCbThreads() {
  {
    CriticalSectionScoped lock(crit_sect_.get());
    recording_ = false;
  }
  if (sles_recorder_itf_) {
    OPENSL_RETURN_ON_FAILURE(
        (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
                                              SL_RECORDSTATE_STOPPED),
        VOID_RETURN);
  }
  if (rec_thread_.get() == NULL) {
    return;
  }
  event_.Stop();
  if (rec_thread_->Stop()) {
    rec_thread_.reset();
  } else {
    assert(false);
  }
}

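// ThreadWrapper entry point. The recording thread keeps running for as long
// as CbThreadImpl() returns true, i.e. while recording is active.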
bool OpenSlesInput::CbThread(void* context) {
  return reinterpret_cast<OpenSlesInput*>(context)->CbThreadImpl();
}

bool OpenSlesInput::CbThreadImpl() {
  int event_id;
  int event_msg;
  // event_ must not be waited on while a lock has been taken.
  event_.WaitOnEvent(&event_id, &event_msg);

  CriticalSectionScoped lock(crit_sect_.get());
  if (HandleOverrun(event_id, event_msg)) {
    return recording_;
  }
  // If |fifo_| has audio data, process it.
  while (fifo_->size() > 0 && recording_) {
    int8_t* audio = fifo_->Pop();
    audio_buffer_->SetRecordedBuffer(audio, buffer_size_samples());
    audio_buffer_->SetVQEData(delay_provider_->PlayoutDelayMs(),
                              recording_delay_, 0);
    audio_buffer_->DeliverRecordedData();
  }
  return recording_;
}

}  // namespace webrtc