/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_device/mac/audio_device_mac.h"
#include "webrtc/modules/audio_device/audio_device_config.h"
#include "webrtc/modules/audio_device/mac/portaudio/pa_ringbuffer.h"
#include "webrtc/rtc_base/arraysize.h"
#include "webrtc/rtc_base/checks.h"
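// Assumed include path for the LOG() macros and rtc::LoggingSeverity used
// throughout this file (may already be pulled in transitively).
#include "webrtc/rtc_base/logging.h"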
#include "webrtc/rtc_base/platform_thread.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"

#include <ApplicationServices/ApplicationServices.h>
#include <libkern/OSAtomic.h>  // OSAtomicCompareAndSwap()
#include <mach/mach.h>         // mach_task_self()
#include <sys/sysctl.h>        // sysctlbyname()

namespace webrtc {

#define WEBRTC_CA_RETURN_ON_ERR(expr)                                  \
  do {                                                                 \
    err = expr;                                                        \
    if (err != noErr) {                                                \
      logCAMsg(rtc::LS_ERROR, "Error in " #expr, (const char*)&err);   \
      return -1;                                                       \
    }                                                                  \
  } while (0)

#define WEBRTC_CA_LOG_ERR(expr)                                        \
  do {                                                                 \
    err = expr;                                                        \
    if (err != noErr) {                                                \
      logCAMsg(rtc::LS_ERROR, "Error in " #expr, (const char*)&err);   \
    }                                                                  \
  } while (0)

#define WEBRTC_CA_LOG_WARN(expr)                                       \
  do {                                                                 \
    err = expr;                                                        \
    if (err != noErr) {                                                \
      logCAMsg(rtc::LS_WARNING, "Error in " #expr, (const char*)&err); \
    }                                                                  \
  } while (0)

enum { MaxNumberDevices = 64 };

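// AtomicSet32() and AtomicGet32() emulate an atomic 32-bit store/load with a
// full memory barrier by spinning on OSAtomicCompareAndSwap32Barrier().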
void AudioDeviceMac::AtomicSet32(int32_t* theValue, int32_t newValue) {
  while (1) {
    int32_t oldValue = *theValue;
    if (OSAtomicCompareAndSwap32Barrier(oldValue, newValue, theValue) == true) {
      return;
    }
  }
}

int32_t AudioDeviceMac::AtomicGet32(int32_t* theValue) {
  while (1) {
    int32_t value = *theValue;
    if (OSAtomicCompareAndSwap32Barrier(value, value, theValue) == true) {
      return value;
    }
  }
}

// CoreAudio errors are best interpreted as four character strings.
void AudioDeviceMac::logCAMsg(const rtc::LoggingSeverity sev,
                              const char* msg,
                              const char* err) {
  RTC_DCHECK(msg != NULL);
  RTC_DCHECK(err != NULL);

#ifdef WEBRTC_ARCH_BIG_ENDIAN
  switch (sev) {
    case rtc::LS_ERROR:
      LOG(LS_ERROR) << msg << ": " << err[0] << err[1] << err[2] << err[3];
      break;
    case rtc::LS_WARNING:
      LOG(LS_WARNING) << msg << ": " << err[0] << err[1] << err[2] << err[3];
      break;
    case rtc::LS_VERBOSE:
      LOG(LS_VERBOSE) << msg << ": " << err[0] << err[1] << err[2] << err[3];
      break;
    default:
      break;
  }
#else
  // We need to flip the characters in this case.
  switch (sev) {
    case rtc::LS_ERROR:
      LOG(LS_ERROR) << msg << ": " << err[3] << err[2] << err[1] << err[0];
      break;
    case rtc::LS_WARNING:
      LOG(LS_WARNING) << msg << ": " << err[3] << err[2] << err[1] << err[0];
      break;
    case rtc::LS_VERBOSE:
      LOG(LS_VERBOSE) << msg << ": " << err[3] << err[2] << err[1] << err[0];
      break;
    default:
      break;
  }
#endif
}

AudioDeviceMac::AudioDeviceMac()
    : _ptrAudioBuffer(NULL),
      _stopEventRec(*EventWrapper::Create()),
      _stopEvent(*EventWrapper::Create()),
      _mixerManager(),
      _inputDeviceIndex(0),
      _outputDeviceIndex(0),
      _inputDeviceID(kAudioObjectUnknown),
      _outputDeviceID(kAudioObjectUnknown),
      _inputDeviceIsSpecified(false),
      _outputDeviceIsSpecified(false),
      _recChannels(N_REC_CHANNELS),
      _playChannels(N_PLAY_CHANNELS),
      _captureBufData(NULL),
      _renderBufData(NULL),
      _initialized(false),
      _isShutDown(false),
      _recording(false),
      _playing(false),
      _recIsInitialized(false),
      _playIsInitialized(false),
      _AGC(false),
      _renderDeviceIsAlive(1),
      _captureDeviceIsAlive(1),
      _twoDevices(true),
      _doStop(false),
      _doStopRec(false),
      _macBookPro(false),
      _macBookProPanRight(false),
      _captureLatencyUs(0),
      _renderLatencyUs(0),
      _captureDelayUs(0),
      _renderDelayUs(0),
      _renderDelayOffsetSamples(0),
      _playWarning(0),
      _playError(0),
      _recWarning(0),
      _recError(0),
      _paCaptureBuffer(NULL),
      _paRenderBuffer(NULL),
      _captureBufSizeSamples(0),
      _renderBufSizeSamples(0),
      prev_key_state_(),
      get_mic_volume_counter_ms_(0) {
  LOG(LS_INFO) << __FUNCTION__ << " created";

  RTC_DCHECK(&_stopEvent != NULL);
  RTC_DCHECK(&_stopEventRec != NULL);

  memset(_renderConvertData, 0, sizeof(_renderConvertData));
  memset(&_outStreamFormat, 0, sizeof(AudioStreamBasicDescription));
  memset(&_outDesiredFormat, 0, sizeof(AudioStreamBasicDescription));
  memset(&_inStreamFormat, 0, sizeof(AudioStreamBasicDescription));
  memset(&_inDesiredFormat, 0, sizeof(AudioStreamBasicDescription));
}

AudioDeviceMac::~AudioDeviceMac() {
  LOG(LS_INFO) << __FUNCTION__ << " destroyed";

  if (!_isShutDown) {
    Terminate();
  }

  RTC_DCHECK(!capture_worker_thread_.get());
  RTC_DCHECK(!render_worker_thread_.get());

  if (_paRenderBuffer) {
    delete _paRenderBuffer;
    _paRenderBuffer = NULL;
  }

  if (_paCaptureBuffer) {
    delete _paCaptureBuffer;
    _paCaptureBuffer = NULL;
  }

  if (_renderBufData) {
    delete[] _renderBufData;
    _renderBufData = NULL;
  }

  if (_captureBufData) {
    delete[] _captureBufData;
    _captureBufData = NULL;
  }

  kern_return_t kernErr = KERN_SUCCESS;
  kernErr = semaphore_destroy(mach_task_self(), _renderSemaphore);
  if (kernErr != KERN_SUCCESS) {
    LOG(LS_ERROR) << "semaphore_destroy() error: " << kernErr;
  }

  kernErr = semaphore_destroy(mach_task_self(), _captureSemaphore);
  if (kernErr != KERN_SUCCESS) {
    LOG(LS_ERROR) << "semaphore_destroy() error: " << kernErr;
  }

  delete &_stopEvent;
  delete &_stopEventRec;
}

// ============================================================================
//                                     API
// ============================================================================

void AudioDeviceMac::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
  rtc::CritScope lock(&_critSect);

  _ptrAudioBuffer = audioBuffer;

  // inform the AudioBuffer about default settings for this implementation
  _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
  _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
  _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS);
  _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS);
}

int32_t AudioDeviceMac::ActiveAudioLayer(
    AudioDeviceModule::AudioLayer& audioLayer) const {
  audioLayer = AudioDeviceModule::kPlatformDefaultAudio;
  return 0;
}

AudioDeviceGeneric::InitStatus AudioDeviceMac::Init() {
  rtc::CritScope lock(&_critSect);

  if (_initialized) {
    return InitStatus::OK;
  }

  OSStatus err = noErr;

  _isShutDown = false;

  // PortAudio ring buffers require an elementCount which is a power of two.
  if (_renderBufData == NULL) {
    UInt32 powerOfTwo = 1;
    while (powerOfTwo < PLAY_BUF_SIZE_IN_SAMPLES) {
      powerOfTwo <<= 1;
    }
    _renderBufSizeSamples = powerOfTwo;
    _renderBufData = new SInt16[_renderBufSizeSamples];
  }

  if (_paRenderBuffer == NULL) {
    _paRenderBuffer = new PaUtilRingBuffer;
    PaRingBufferSize bufSize = -1;
    bufSize = PaUtil_InitializeRingBuffer(
        _paRenderBuffer, sizeof(SInt16), _renderBufSizeSamples, _renderBufData);
    if (bufSize == -1) {
      LOG(LS_ERROR) << "PaUtil_InitializeRingBuffer() error";
      return InitStatus::PLAYOUT_ERROR;
    }
  }

  if (_captureBufData == NULL) {
    UInt32 powerOfTwo = 1;
    while (powerOfTwo < REC_BUF_SIZE_IN_SAMPLES) {
      powerOfTwo <<= 1;
    }
    _captureBufSizeSamples = powerOfTwo;
    _captureBufData = new Float32[_captureBufSizeSamples];
  }

  if (_paCaptureBuffer == NULL) {
    _paCaptureBuffer = new PaUtilRingBuffer;
    PaRingBufferSize bufSize = -1;
    bufSize =
        PaUtil_InitializeRingBuffer(_paCaptureBuffer, sizeof(Float32),
                                    _captureBufSizeSamples, _captureBufData);
    if (bufSize == -1) {
      LOG(LS_ERROR) << "PaUtil_InitializeRingBuffer() error";
      return InitStatus::RECORDING_ERROR;
    }
  }

  kern_return_t kernErr = KERN_SUCCESS;
  kernErr = semaphore_create(mach_task_self(), &_renderSemaphore,
                             SYNC_POLICY_FIFO, 0);
  if (kernErr != KERN_SUCCESS) {
    LOG(LS_ERROR) << "semaphore_create() error: " << kernErr;
    return InitStatus::OTHER_ERROR;
  }

  kernErr = semaphore_create(mach_task_self(), &_captureSemaphore,
                             SYNC_POLICY_FIFO, 0);
  if (kernErr != KERN_SUCCESS) {
    LOG(LS_ERROR) << "semaphore_create() error: " << kernErr;
    return InitStatus::OTHER_ERROR;
  }

  // Setting RunLoop to NULL here instructs HAL to manage its own thread for
  // notifications. This was the default behaviour on OS X 10.5 and earlier,
  // but now must be explicitly specified. HAL would otherwise try to use the
  // main thread to issue notifications.
  AudioObjectPropertyAddress propertyAddress = {
      kAudioHardwarePropertyRunLoop, kAudioObjectPropertyScopeGlobal,
      kAudioObjectPropertyElementMaster};
  CFRunLoopRef runLoop = NULL;
  UInt32 size = sizeof(CFRunLoopRef);
  int aoerr = AudioObjectSetPropertyData(
      kAudioObjectSystemObject, &propertyAddress, 0, NULL, size, &runLoop);
  if (aoerr != noErr) {
    LOG(LS_ERROR) << "Error in AudioObjectSetPropertyData: "
                  << (const char*)&aoerr;
    return InitStatus::OTHER_ERROR;
  }

  // Listen for any device changes.
  propertyAddress.mSelector = kAudioHardwarePropertyDevices;
  WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener(
      kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this));

  // Determine if this is a MacBook Pro
  _macBookPro = false;
  _macBookProPanRight = false;
  char buf[128];
  size_t length = sizeof(buf);
  memset(buf, 0, length);

  int intErr = sysctlbyname("hw.model", buf, &length, NULL, 0);
  if (intErr != 0) {
    LOG(LS_ERROR) << "Error in sysctlbyname(): " << intErr;
  } else {
    LOG(LS_VERBOSE) << "Hardware model: " << buf;
    if (strncmp(buf, "MacBookPro", 10) == 0) {
      _macBookPro = true;
    }
  }

  _playWarning = 0;
  _playError = 0;
  _recWarning = 0;
  _recError = 0;

  get_mic_volume_counter_ms_ = 0;

  _initialized = true;

  return InitStatus::OK;
}

int32_t AudioDeviceMac::Terminate() {
  if (!_initialized) {
    return 0;
  }

  if (_recording) {
    LOG(LS_ERROR) << "Recording must be stopped";
    return -1;
  }

  if (_playing) {
    LOG(LS_ERROR) << "Playback must be stopped";
    return -1;
  }

  _critSect.Enter();

  _mixerManager.Close();

  OSStatus err = noErr;
  int retVal = 0;

  AudioObjectPropertyAddress propertyAddress = {
      kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal,
      kAudioObjectPropertyElementMaster};
  WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
      kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this));

  err = AudioHardwareUnload();
  if (err != noErr) {
    logCAMsg(rtc::LS_ERROR,
             "Error in AudioHardwareUnload()", (const char*)&err);
    retVal = -1;
  }

  _isShutDown = true;
  _initialized = false;
  _outputDeviceIsSpecified = false;
  _inputDeviceIsSpecified = false;

  _critSect.Leave();

  return retVal;
}

bool AudioDeviceMac::Initialized() const {
  return (_initialized);
}

int32_t AudioDeviceMac::SpeakerIsAvailable(bool& available) {
  bool wasInitialized = _mixerManager.SpeakerIsInitialized();

  // Make an attempt to open up the
  // output mixer corresponding to the currently selected output device.
  //
  if (!wasInitialized && InitSpeaker() == -1) {
    available = false;
    return 0;
  }

  // Given that InitSpeaker was successful, we know that a valid speaker
  // exists.
  available = true;

  // Close the initialized output mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseSpeaker();
  }

  return 0;
}

int32_t AudioDeviceMac::InitSpeaker() {
  rtc::CritScope lock(&_critSect);

  if (_playing) {
    return -1;
  }

  if (InitDevice(_outputDeviceIndex, _outputDeviceID, false) == -1) {
    return -1;
  }

  if (_inputDeviceID == _outputDeviceID) {
    _twoDevices = false;
  } else {
    _twoDevices = true;
  }

  if (_mixerManager.OpenSpeaker(_outputDeviceID) == -1) {
    return -1;
  }

  return 0;
}

int32_t AudioDeviceMac::MicrophoneIsAvailable(bool& available) {
  bool wasInitialized = _mixerManager.MicrophoneIsInitialized();

  // Make an attempt to open up the
  // input mixer corresponding to the currently selected input device.
  //
  if (!wasInitialized && InitMicrophone() == -1) {
    available = false;
    return 0;
  }

  // Given that InitMicrophone was successful, we know that a valid microphone
  // exists.
  available = true;

  // Close the initialized input mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseMicrophone();
  }

  return 0;
}

int32_t AudioDeviceMac::InitMicrophone() {
  rtc::CritScope lock(&_critSect);

  if (_recording) {
    return -1;
  }

  if (InitDevice(_inputDeviceIndex, _inputDeviceID, true) == -1) {
    return -1;
  }

  if (_inputDeviceID == _outputDeviceID) {
    _twoDevices = false;
  } else {
    _twoDevices = true;
  }

  if (_mixerManager.OpenMicrophone(_inputDeviceID) == -1) {
    return -1;
  }

  return 0;
}

bool AudioDeviceMac::SpeakerIsInitialized() const {
  return (_mixerManager.SpeakerIsInitialized());
}

bool AudioDeviceMac::MicrophoneIsInitialized() const {
  return (_mixerManager.MicrophoneIsInitialized());
}

int32_t AudioDeviceMac::SpeakerVolumeIsAvailable(bool& available) {
  bool wasInitialized = _mixerManager.SpeakerIsInitialized();

  // Make an attempt to open up the
  // output mixer corresponding to the currently selected output device.
  //
  if (!wasInitialized && InitSpeaker() == -1) {
    // If we end up here it means that the selected speaker has no volume
    // control.
    available = false;
    return 0;
  }

  // Given that InitSpeaker was successful, we know that a volume control exists
  //
  available = true;

  // Close the initialized output mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseSpeaker();
  }

  return 0;
}

int32_t AudioDeviceMac::SetSpeakerVolume(uint32_t volume) {
  return (_mixerManager.SetSpeakerVolume(volume));
}

int32_t AudioDeviceMac::SpeakerVolume(uint32_t& volume) const {
  uint32_t level(0);

  if (_mixerManager.SpeakerVolume(level) == -1) {
    return -1;
  }

  volume = level;
  return 0;
}

int32_t AudioDeviceMac::MaxSpeakerVolume(uint32_t& maxVolume) const {
  uint32_t maxVol(0);

  if (_mixerManager.MaxSpeakerVolume(maxVol) == -1) {
    return -1;
  }

  maxVolume = maxVol;
  return 0;
}

int32_t AudioDeviceMac::MinSpeakerVolume(uint32_t& minVolume) const {
  uint32_t minVol(0);

  if (_mixerManager.MinSpeakerVolume(minVol) == -1) {
    return -1;
  }

  minVolume = minVol;
  return 0;
}

int32_t AudioDeviceMac::SpeakerMuteIsAvailable(bool& available) {
  bool isAvailable(false);
  bool wasInitialized = _mixerManager.SpeakerIsInitialized();

  // Make an attempt to open up the
  // output mixer corresponding to the currently selected output device.
  //
  if (!wasInitialized && InitSpeaker() == -1) {
    // If we end up here it means that the selected speaker has no volume
    // control, hence it is safe to state that there is no mute control
    // already at this stage.
    available = false;
    return 0;
  }

  // Check if the selected speaker has a mute control
  //
  _mixerManager.SpeakerMuteIsAvailable(isAvailable);

  available = isAvailable;

  // Close the initialized output mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseSpeaker();
  }

  return 0;
}

int32_t AudioDeviceMac::SetSpeakerMute(bool enable) {
  return (_mixerManager.SetSpeakerMute(enable));
}

int32_t AudioDeviceMac::SpeakerMute(bool& enabled) const {
  bool muted(0);

  if (_mixerManager.SpeakerMute(muted) == -1) {
    return -1;
  }

  enabled = muted;
  return 0;
}

int32_t AudioDeviceMac::MicrophoneMuteIsAvailable(bool& available) {
  bool isAvailable(false);
  bool wasInitialized = _mixerManager.MicrophoneIsInitialized();

  // Make an attempt to open up the
  // input mixer corresponding to the currently selected input device.
  //
  if (!wasInitialized && InitMicrophone() == -1) {
    // If we end up here it means that the selected microphone has no volume
    // control, hence it is safe to state that there is no mute control
    // already at this stage.
    available = false;
    return 0;
  }

  // Check if the selected microphone has a mute control
  //
  _mixerManager.MicrophoneMuteIsAvailable(isAvailable);
  available = isAvailable;

  // Close the initialized input mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseMicrophone();
  }

  return 0;
}

int32_t AudioDeviceMac::SetMicrophoneMute(bool enable) {
  return (_mixerManager.SetMicrophoneMute(enable));
}

int32_t AudioDeviceMac::MicrophoneMute(bool& enabled) const {
  bool muted(0);

  if (_mixerManager.MicrophoneMute(muted) == -1) {
    return -1;
  }

  enabled = muted;
  return 0;
}

int32_t AudioDeviceMac::StereoRecordingIsAvailable(bool& available) {
  bool isAvailable(false);
  bool wasInitialized = _mixerManager.MicrophoneIsInitialized();

  if (!wasInitialized && InitMicrophone() == -1) {
    // Cannot open the specified device
    available = false;
    return 0;
  }

  // Check if the selected microphone can record stereo
  //
  _mixerManager.StereoRecordingIsAvailable(isAvailable);
  available = isAvailable;

  // Close the initialized input mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseMicrophone();
  }

  return 0;
}

int32_t AudioDeviceMac::SetStereoRecording(bool enable) {
  if (enable)
    _recChannels = 2;
  else
    _recChannels = 1;

  return 0;
}

int32_t AudioDeviceMac::StereoRecording(bool& enabled) const {
  if (_recChannels == 2)
    enabled = true;
  else
    enabled = false;

  return 0;
}

int32_t AudioDeviceMac::StereoPlayoutIsAvailable(bool& available) {
  bool isAvailable(false);
  bool wasInitialized = _mixerManager.SpeakerIsInitialized();

  if (!wasInitialized && InitSpeaker() == -1) {
    // Cannot open the specified device
    available = false;
    return 0;
  }

  // Check if the selected speaker can play out in stereo
  //
  _mixerManager.StereoPlayoutIsAvailable(isAvailable);
  available = isAvailable;

  // Close the initialized output mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseSpeaker();
  }

  return 0;
}

int32_t AudioDeviceMac::SetStereoPlayout(bool enable) {
  if (enable)
    _playChannels = 2;
  else
    _playChannels = 1;

  return 0;
}

int32_t AudioDeviceMac::StereoPlayout(bool& enabled) const {
  if (_playChannels == 2)
    enabled = true;
  else
    enabled = false;

  return 0;
}

int32_t AudioDeviceMac::SetAGC(bool enable) {
  _AGC = enable;

  return 0;
}

bool AudioDeviceMac::AGC() const {
  return _AGC;
}

int32_t AudioDeviceMac::MicrophoneVolumeIsAvailable(bool& available) {
  bool wasInitialized = _mixerManager.MicrophoneIsInitialized();

  // Make an attempt to open up the
  // input mixer corresponding to the currently selected input device.
  //
  if (!wasInitialized && InitMicrophone() == -1) {
    // If we end up here it means that the selected microphone has no volume
    // control.
    available = false;
    return 0;
  }

  // Given that InitMicrophone was successful, we know that a volume control
  // exists
  //
  available = true;

  // Close the initialized input mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseMicrophone();
  }

  return 0;
}

int32_t AudioDeviceMac::SetMicrophoneVolume(uint32_t volume) {
  return (_mixerManager.SetMicrophoneVolume(volume));
}

int32_t AudioDeviceMac::MicrophoneVolume(uint32_t& volume) const {
  uint32_t level(0);

  if (_mixerManager.MicrophoneVolume(level) == -1) {
    LOG(LS_WARNING) << "failed to retrieve current microphone level";
    return -1;
  }

  volume = level;
  return 0;
}

int32_t AudioDeviceMac::MaxMicrophoneVolume(uint32_t& maxVolume) const {
  uint32_t maxVol(0);

  if (_mixerManager.MaxMicrophoneVolume(maxVol) == -1) {
    return -1;
  }

  maxVolume = maxVol;
  return 0;
}

int32_t AudioDeviceMac::MinMicrophoneVolume(uint32_t& minVolume) const {
  uint32_t minVol(0);

  if (_mixerManager.MinMicrophoneVolume(minVol) == -1) {
    return -1;
  }

  minVolume = minVol;
  return 0;
}

int16_t AudioDeviceMac::PlayoutDevices() {
  AudioDeviceID playDevices[MaxNumberDevices];
  return GetNumberDevices(kAudioDevicePropertyScopeOutput, playDevices,
                          MaxNumberDevices);
}

int32_t AudioDeviceMac::SetPlayoutDevice(uint16_t index) {
  rtc::CritScope lock(&_critSect);

  if (_playIsInitialized) {
    return -1;
  }

  AudioDeviceID playDevices[MaxNumberDevices];
  uint32_t nDevices = GetNumberDevices(kAudioDevicePropertyScopeOutput,
                                       playDevices, MaxNumberDevices);
  LOG(LS_VERBOSE) << "number of available waveform-audio output devices is "
                  << nDevices;

  if (index > (nDevices - 1)) {
    LOG(LS_ERROR) << "device index is out of range [0," << (nDevices - 1)
                  << "]";
    return -1;
  }

  _outputDeviceIndex = index;
  _outputDeviceIsSpecified = true;

  return 0;
}

int32_t AudioDeviceMac::SetPlayoutDevice(
    AudioDeviceModule::WindowsDeviceType /*device*/) {
  LOG(LS_ERROR) << "WindowsDeviceType not supported";
  return -1;
}

int32_t AudioDeviceMac::PlayoutDeviceName(uint16_t index,
                                          char name[kAdmMaxDeviceNameSize],
                                          char guid[kAdmMaxGuidSize]) {
  const uint16_t nDevices(PlayoutDevices());

  if ((index > (nDevices - 1)) || (name == NULL)) {
    return -1;
  }

  memset(name, 0, kAdmMaxDeviceNameSize);

  if (guid != NULL) {
    memset(guid, 0, kAdmMaxGuidSize);
  }

  return GetDeviceName(kAudioDevicePropertyScopeOutput, index, name);
}

int32_t AudioDeviceMac::RecordingDeviceName(uint16_t index,
                                            char name[kAdmMaxDeviceNameSize],
                                            char guid[kAdmMaxGuidSize]) {
  const uint16_t nDevices(RecordingDevices());

  if ((index > (nDevices - 1)) || (name == NULL)) {
    return -1;
  }

  memset(name, 0, kAdmMaxDeviceNameSize);

  if (guid != NULL) {
    memset(guid, 0, kAdmMaxGuidSize);
  }

  return GetDeviceName(kAudioDevicePropertyScopeInput, index, name);
}

int16_t AudioDeviceMac::RecordingDevices() {
  AudioDeviceID recDevices[MaxNumberDevices];
  return GetNumberDevices(kAudioDevicePropertyScopeInput, recDevices,
                          MaxNumberDevices);
}

int32_t AudioDeviceMac::SetRecordingDevice(uint16_t index) {
  if (_recIsInitialized) {
    return -1;
  }

  AudioDeviceID recDevices[MaxNumberDevices];
  uint32_t nDevices = GetNumberDevices(kAudioDevicePropertyScopeInput,
                                       recDevices, MaxNumberDevices);
  LOG(LS_VERBOSE) << "number of available waveform-audio input devices is "
                  << nDevices;

  if (index > (nDevices - 1)) {
    LOG(LS_ERROR) << "device index is out of range [0," << (nDevices - 1)
                  << "]";
    return -1;
  }

  _inputDeviceIndex = index;
  _inputDeviceIsSpecified = true;

  return 0;
}

int32_t AudioDeviceMac::SetRecordingDevice(
    AudioDeviceModule::WindowsDeviceType /*device*/) {
  LOG(LS_ERROR) << "WindowsDeviceType not supported";
  return -1;
}

int32_t AudioDeviceMac::PlayoutIsAvailable(bool& available) {
  available = true;

  // Try to initialize the playout side
  if (InitPlayout() == -1) {
    available = false;
  }

  // We destroy the IOProc created by InitPlayout() in implDeviceIOProc().
  // We must actually start playout here in order to have the IOProc
  // deleted by calling StopPlayout().
  if (StartPlayout() == -1) {
    available = false;
  }

  // Cancel effect of initialization
  if (StopPlayout() == -1) {
    available = false;
  }

  return 0;
}

int32_t AudioDeviceMac::RecordingIsAvailable(bool& available) {
  available = true;

  // Try to initialize the recording side
  if (InitRecording() == -1) {
    available = false;
  }

  // We destroy the IOProc created by InitRecording() in implInDeviceIOProc().
  // We must actually start recording here in order to have the IOProc
  // deleted by calling StopRecording().
  if (StartRecording() == -1) {
    available = false;
  }

  // Cancel effect of initialization
  if (StopRecording() == -1) {
    available = false;
  }

  return 0;
}

int32_t AudioDeviceMac::InitPlayout() {
  rtc::CritScope lock(&_critSect);

  if (_playing) {
    return -1;
  }

  if (!_outputDeviceIsSpecified) {
    return -1;
  }

  if (_playIsInitialized) {
    return 0;
  }

  // Initialize the speaker (devices might have been added or removed)
  if (InitSpeaker() == -1) {
    LOG(LS_WARNING) << "InitSpeaker() failed";
  }

  if (!MicrophoneIsInitialized()) {
    // Make this call to check if we are using
    // one or two devices (_twoDevices)
    bool available = false;
    if (MicrophoneIsAvailable(available) == -1) {
      LOG(LS_WARNING) << "MicrophoneIsAvailable() failed";
    }
  }

  PaUtil_FlushRingBuffer(_paRenderBuffer);

  OSStatus err = noErr;
  UInt32 size = 0;
  _renderDelayOffsetSamples = 0;
  _renderDelayUs = 0;
  _renderLatencyUs = 0;
  _renderDeviceIsAlive = 1;
  _doStop = false;

  // The internal microphone of a MacBook Pro is located under the left speaker
  // grille. When the internal speakers are in use, we want to fully stereo
  // pan to the right.
  AudioObjectPropertyAddress propertyAddress = {
      kAudioDevicePropertyDataSource, kAudioDevicePropertyScopeOutput, 0};
  if (_macBookPro) {
    _macBookProPanRight = false;
    Boolean hasProperty =
        AudioObjectHasProperty(_outputDeviceID, &propertyAddress);
    if (hasProperty) {
      UInt32 dataSource = 0;
      size = sizeof(dataSource);
      WEBRTC_CA_LOG_WARN(AudioObjectGetPropertyData(
          _outputDeviceID, &propertyAddress, 0, NULL, &size, &dataSource));

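      // 'ispk' is the four-character data-source code that identifies the
      // built-in speakers, so the panning workaround applies only then.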
      if (dataSource == 'ispk') {
        _macBookProPanRight = true;
        LOG(LS_VERBOSE)
            << "MacBook Pro using internal speakers; stereo panning right";
      } else {
        LOG(LS_VERBOSE) << "MacBook Pro not using internal speakers";
      }

      // Add a listener to determine if the status changes.
      WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(
          _outputDeviceID, &propertyAddress, &objectListenerProc, this));
    }
  }

  // Get current stream description
  propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
  memset(&_outStreamFormat, 0, sizeof(_outStreamFormat));
  size = sizeof(_outStreamFormat);
  WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
      _outputDeviceID, &propertyAddress, 0, NULL, &size, &_outStreamFormat));

  if (_outStreamFormat.mFormatID != kAudioFormatLinearPCM) {
    logCAMsg(rtc::LS_ERROR,
             "Unacceptable output stream format -> mFormatID",
             (const char*)&_outStreamFormat.mFormatID);
    return -1;
  }

  if (_outStreamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS) {
    LOG(LS_ERROR) << "Too many channels on output device (mChannelsPerFrame = "
                  << _outStreamFormat.mChannelsPerFrame << ")";
    return -1;
  }

  if (_outStreamFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved) {
    LOG(LS_ERROR) << "Non-interleaved audio data is not supported. "
                  << "AudioHardware streams should not have this format.";
    return -1;
  }

  LOG(LS_VERBOSE) << "Output stream format:";
  LOG(LS_VERBOSE) << "mSampleRate = " << _outStreamFormat.mSampleRate
                  << ", mChannelsPerFrame = "
                  << _outStreamFormat.mChannelsPerFrame;
  LOG(LS_VERBOSE) << "mBytesPerPacket = " << _outStreamFormat.mBytesPerPacket
                  << ", mFramesPerPacket = "
                  << _outStreamFormat.mFramesPerPacket;
  LOG(LS_VERBOSE) << "mBytesPerFrame = " << _outStreamFormat.mBytesPerFrame
                  << ", mBitsPerChannel = " << _outStreamFormat.mBitsPerChannel;
  LOG(LS_VERBOSE) << "mFormatFlags = " << _outStreamFormat.mFormatFlags;
  logCAMsg(rtc::LS_VERBOSE, "mFormatID",
           (const char*)&_outStreamFormat.mFormatID);

  // Our preferred format to work with.
  if (_outStreamFormat.mChannelsPerFrame < 2) {
    // Disable stereo playout when we only have one channel on the device.
    _playChannels = 1;
    LOG(LS_VERBOSE) << "Stereo playout unavailable on this device";
  }
  WEBRTC_CA_RETURN_ON_ERR(SetDesiredPlayoutFormat());

  // Listen for format changes.
  propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
  WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(
      _outputDeviceID, &propertyAddress, &objectListenerProc, this));

  // Listen for processor overloads.
  propertyAddress.mSelector = kAudioDeviceProcessorOverload;
  WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(
      _outputDeviceID, &propertyAddress, &objectListenerProc, this));

  if (_twoDevices || !_recIsInitialized) {
    WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(
        _outputDeviceID, deviceIOProc, this, &_deviceIOProcID));
  }

  _playIsInitialized = true;

  return 0;
}

int32_t AudioDeviceMac::InitRecording() {
  rtc::CritScope lock(&_critSect);

  if (_recording) {
    return -1;
  }

  if (!_inputDeviceIsSpecified) {
    return -1;
  }

  if (_recIsInitialized) {
    return 0;
  }

  // Initialize the microphone (devices might have been added or removed)
  if (InitMicrophone() == -1) {
    LOG(LS_WARNING) << "InitMicrophone() failed";
  }

  if (!SpeakerIsInitialized()) {
    // Make this call to check if we are using
    // one or two devices (_twoDevices)
    bool available = false;
    if (SpeakerIsAvailable(available) == -1) {
      LOG(LS_WARNING) << "SpeakerIsAvailable() failed";
    }
  }

  OSStatus err = noErr;
  UInt32 size = 0;

  PaUtil_FlushRingBuffer(_paCaptureBuffer);

  _captureDelayUs = 0;
  _captureLatencyUs = 0;
  _captureDeviceIsAlive = 1;
  _doStopRec = false;

  // Get current stream description
  AudioObjectPropertyAddress propertyAddress = {
      kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeInput, 0};
  memset(&_inStreamFormat, 0, sizeof(_inStreamFormat));
  size = sizeof(_inStreamFormat);
  WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
      _inputDeviceID, &propertyAddress, 0, NULL, &size, &_inStreamFormat));

  if (_inStreamFormat.mFormatID != kAudioFormatLinearPCM) {
    logCAMsg(rtc::LS_ERROR,
             "Unacceptable input stream format -> mFormatID",
             (const char*)&_inStreamFormat.mFormatID);
    return -1;
  }

  if (_inStreamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS) {
    LOG(LS_ERROR) << "Too many channels on input device (mChannelsPerFrame = "
                  << _inStreamFormat.mChannelsPerFrame << ")";
    return -1;
  }

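  // One IO block is N_BLOCKS_IO * 10 ms of audio at the device sample rate
  // (mSampleRate / 100 frames per 10 ms, times the channel count); it must
  // fit in the capture ring buffer allocated in Init().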
  const int io_block_size_samples = _inStreamFormat.mChannelsPerFrame *
                                    _inStreamFormat.mSampleRate / 100 *
                                    N_BLOCKS_IO;
  if (io_block_size_samples > _captureBufSizeSamples) {
    LOG(LS_ERROR) << "Input IO block size (" << io_block_size_samples
                  << ") is larger than ring buffer (" << _captureBufSizeSamples
                  << ")";
    return -1;
  }

  LOG(LS_VERBOSE) << "Input stream format:";
  LOG(LS_VERBOSE) << "mSampleRate = " << _inStreamFormat.mSampleRate
                  << ", mChannelsPerFrame = "
                  << _inStreamFormat.mChannelsPerFrame;
  LOG(LS_VERBOSE) << "mBytesPerPacket = " << _inStreamFormat.mBytesPerPacket
                  << ", mFramesPerPacket = "
                  << _inStreamFormat.mFramesPerPacket;
  LOG(LS_VERBOSE) << "mBytesPerFrame = " << _inStreamFormat.mBytesPerFrame
                  << ", mBitsPerChannel = " << _inStreamFormat.mBitsPerChannel;
  LOG(LS_VERBOSE) << "mFormatFlags = " << _inStreamFormat.mFormatFlags;
  logCAMsg(rtc::LS_VERBOSE, "mFormatID",
           (const char*)&_inStreamFormat.mFormatID);

  // Our preferred format to work with
  if (_inStreamFormat.mChannelsPerFrame >= 2 && (_recChannels == 2)) {
    _inDesiredFormat.mChannelsPerFrame = 2;
  } else {
    // Disable stereo recording when we only have one channel on the device.
    _inDesiredFormat.mChannelsPerFrame = 1;
    _recChannels = 1;
    LOG(LS_VERBOSE) << "Stereo recording unavailable on this device";
  }

  if (_ptrAudioBuffer) {
    // Update audio buffer with the selected parameters
    _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
    _ptrAudioBuffer->SetRecordingChannels((uint8_t)_recChannels);
  }

  _inDesiredFormat.mSampleRate = N_REC_SAMPLES_PER_SEC;
  _inDesiredFormat.mBytesPerPacket =
      _inDesiredFormat.mChannelsPerFrame * sizeof(SInt16);
  _inDesiredFormat.mFramesPerPacket = 1;
  _inDesiredFormat.mBytesPerFrame =
      _inDesiredFormat.mChannelsPerFrame * sizeof(SInt16);
  _inDesiredFormat.mBitsPerChannel = sizeof(SInt16) * 8;

  _inDesiredFormat.mFormatFlags =
      kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
#ifdef WEBRTC_ARCH_BIG_ENDIAN
  _inDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
#endif
  _inDesiredFormat.mFormatID = kAudioFormatLinearPCM;

  WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&_inStreamFormat, &_inDesiredFormat,
                                            &_captureConverter));

  // First try to set buffer size to desired value (10 ms * N_BLOCKS_IO)
  // TODO(xians): investigate this block.
  UInt32 bufByteCount =
      (UInt32)((_inStreamFormat.mSampleRate / 1000.0) * 10.0 * N_BLOCKS_IO *
               _inStreamFormat.mChannelsPerFrame * sizeof(Float32));
  if (_inStreamFormat.mFramesPerPacket != 0) {
    if (bufByteCount % _inStreamFormat.mFramesPerPacket != 0) {
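      // Round the requested byte count up to a whole number of packets.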
      bufByteCount =
          ((UInt32)(bufByteCount / _inStreamFormat.mFramesPerPacket) + 1) *
          _inStreamFormat.mFramesPerPacket;
    }
  }

  // Ensure the buffer size is within the acceptable range provided by the
  // device.
  propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;
  AudioValueRange range;
  size = sizeof(range);
  WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
      _inputDeviceID, &propertyAddress, 0, NULL, &size, &range));
  if (range.mMinimum > bufByteCount) {
    bufByteCount = range.mMinimum;
  } else if (range.mMaximum < bufByteCount) {
    bufByteCount = range.mMaximum;
  }

  propertyAddress.mSelector = kAudioDevicePropertyBufferSize;
  size = sizeof(bufByteCount);
  WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(
      _inputDeviceID, &propertyAddress, 0, NULL, size, &bufByteCount));

  // Get capture device latency
  propertyAddress.mSelector = kAudioDevicePropertyLatency;
  UInt32 latency = 0;
  size = sizeof(UInt32);
  WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
      _inputDeviceID, &propertyAddress, 0, NULL, &size, &latency));
  _captureLatencyUs = (UInt32)((1.0e6 * latency) / _inStreamFormat.mSampleRate);

  // Get capture stream latency
  propertyAddress.mSelector = kAudioDevicePropertyStreams;
  AudioStreamID stream = 0;
  size = sizeof(AudioStreamID);
  WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
      _inputDeviceID, &propertyAddress, 0, NULL, &size, &stream));
  propertyAddress.mSelector = kAudioStreamPropertyLatency;
  size = sizeof(UInt32);
  latency = 0;
  WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
      _inputDeviceID, &propertyAddress, 0, NULL, &size, &latency));
  _captureLatencyUs +=
      (UInt32)((1.0e6 * latency) / _inStreamFormat.mSampleRate);

  // Listen for format changes
  // TODO(xians): should we be using kAudioDevicePropertyDeviceHasChanged?
  propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
  WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(
      _inputDeviceID, &propertyAddress, &objectListenerProc, this));

  // Listen for processor overloads
  propertyAddress.mSelector = kAudioDeviceProcessorOverload;
  WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(
      _inputDeviceID, &propertyAddress, &objectListenerProc, this));

  if (_twoDevices) {
    WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(
        _inputDeviceID, inDeviceIOProc, this, &_inDeviceIOProcID));
  } else if (!_playIsInitialized) {
    WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(
        _inputDeviceID, deviceIOProc, this, &_deviceIOProcID));
  }

  // Mark recording side as initialized
  _recIsInitialized = true;

  return 0;
}

int32_t AudioDeviceMac::StartRecording() {
  rtc::CritScope lock(&_critSect);

  if (!_recIsInitialized) {
    return -1;
  }

  if (_recording) {
    return 0;
  }

  if (!_initialized) {
    LOG(LS_ERROR) << "Recording worker thread has not been started";
    return -1;
  }

  RTC_DCHECK(!capture_worker_thread_.get());
  capture_worker_thread_.reset(
      new rtc::PlatformThread(RunCapture, this, "CaptureWorkerThread"));
  RTC_DCHECK(capture_worker_thread_.get());
  capture_worker_thread_->Start();
  capture_worker_thread_->SetPriority(rtc::kRealtimePriority);

  OSStatus err = noErr;
  if (_twoDevices) {
    WEBRTC_CA_RETURN_ON_ERR(
        AudioDeviceStart(_inputDeviceID, _inDeviceIOProcID));
  } else if (!_playing) {
    WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_inputDeviceID, _deviceIOProcID));
  }

  _recording = true;

  return 0;
}

int32_t AudioDeviceMac::StopRecording() {
  rtc::CritScope lock(&_critSect);

  if (!_recIsInitialized) {
    return 0;
  }

  OSStatus err = noErr;

  // Stop device
  int32_t captureDeviceIsAlive = AtomicGet32(&_captureDeviceIsAlive);
  if (_twoDevices) {
    if (_recording && captureDeviceIsAlive == 1) {
      _recording = false;
      _doStopRec = true;  // Signal to io proc to stop audio device
      _critSect.Leave();  // Cannot be under lock, risk of deadlock
      if (kEventTimeout == _stopEventRec.Wait(2000)) {
        rtc::CritScope critScoped(&_critSect);
sazab4aa4eb2017-07-19 01:12:36 -07001357 LOG(LS_WARNING)
1358          << "Timed out stopping the capture IOProc. "
1359          << "We may have failed to detect a device removal.";
andrew2bc63a12016-01-11 15:59:17 -08001360
1361 WEBRTC_CA_LOG_WARN(AudioDeviceStop(_inputDeviceID, _inDeviceIOProcID));
1362 WEBRTC_CA_LOG_WARN(
1363 AudioDeviceDestroyIOProcID(_inputDeviceID, _inDeviceIOProcID));
1364 }
1365 _critSect.Enter();
1366 _doStopRec = false;
sazab4aa4eb2017-07-19 01:12:36 -07001367 LOG(LS_VERBOSE) << "Recording stopped";
xians@google.com68efa212011-08-11 12:41:56 +00001368 }
andrew2bc63a12016-01-11 15:59:17 -08001369 } else {
1370 // We signal a stop for a shared device even when rendering has
1371 // not yet ended. This is to ensure the IOProc will return early as
1372 // intended (by checking |_recording|) before accessing
1373 // resources we free below (e.g. the capture converter).
1374 //
1375    // In the case of a shared device, the IOProc will verify
1376 // rendering has ended before stopping itself.
1377 if (_recording && captureDeviceIsAlive == 1) {
1378 _recording = false;
1379 _doStop = true; // Signal to io proc to stop audio device
1380 _critSect.Leave(); // Cannot be under lock, risk of deadlock
1381 if (kEventTimeout == _stopEvent.Wait(2000)) {
kthelgasonff046c72017-03-31 02:03:55 -07001382 rtc::CritScope critScoped(&_critSect);
sazab4aa4eb2017-07-19 01:12:36 -07001383 LOG(LS_WARNING)
1384          << "Timed out stopping the shared IOProc. "
1385          << "We may have failed to detect a device removal.";
xians@google.com68efa212011-08-11 12:41:56 +00001386
andrew2bc63a12016-01-11 15:59:17 -08001387 // We assume rendering on a shared device has stopped as well if
1388 // the IOProc times out.
1389 WEBRTC_CA_LOG_WARN(AudioDeviceStop(_outputDeviceID, _deviceIOProcID));
1390 WEBRTC_CA_LOG_WARN(
1391 AudioDeviceDestroyIOProcID(_outputDeviceID, _deviceIOProcID));
1392 }
1393 _critSect.Enter();
1394 _doStop = false;
sazab4aa4eb2017-07-19 01:12:36 -07001395 LOG(LS_VERBOSE) << "Recording stopped (shared)";
xians@google.com68efa212011-08-11 12:41:56 +00001396 }
andrew2bc63a12016-01-11 15:59:17 -08001397 }
xians@google.com68efa212011-08-11 12:41:56 +00001398
andrew2bc63a12016-01-11 15:59:17 -08001399 // Setting this signal will allow the worker thread to be stopped.
1400 AtomicSet32(&_captureDeviceIsAlive, 0);
xians@google.com68efa212011-08-11 12:41:56 +00001401
andrew2bc63a12016-01-11 15:59:17 -08001402 if (capture_worker_thread_.get()) {
1403 _critSect.Leave();
1404 capture_worker_thread_->Stop();
1405 capture_worker_thread_.reset();
1406 _critSect.Enter();
1407 }
tommi@webrtc.orgd43bdf52015-02-03 16:29:57 +00001408
andrew2bc63a12016-01-11 15:59:17 -08001409 WEBRTC_CA_LOG_WARN(AudioConverterDispose(_captureConverter));
1410
1411 // Remove listeners.
1412 AudioObjectPropertyAddress propertyAddress = {
1413 kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeInput, 0};
1414 WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
1415 _inputDeviceID, &propertyAddress, &objectListenerProc, this));
1416
1417 propertyAddress.mSelector = kAudioDeviceProcessorOverload;
1418 WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
1419 _inputDeviceID, &propertyAddress, &objectListenerProc, this));
1420
1421 _recIsInitialized = false;
1422 _recording = false;
1423
1424 return 0;
1425}
1426
1427bool AudioDeviceMac::RecordingIsInitialized() const {
1428 return (_recIsInitialized);
1429}
1430
1431bool AudioDeviceMac::Recording() const {
1432 return (_recording);
1433}
1434
1435bool AudioDeviceMac::PlayoutIsInitialized() const {
1436 return (_playIsInitialized);
1437}
1438
1439int32_t AudioDeviceMac::StartPlayout() {
kthelgasonff046c72017-03-31 02:03:55 -07001440 rtc::CritScope lock(&_critSect);
andrew2bc63a12016-01-11 15:59:17 -08001441
1442 if (!_playIsInitialized) {
1443 return -1;
1444 }
1445
1446 if (_playing) {
1447 return 0;
1448 }
1449
1450 RTC_DCHECK(!render_worker_thread_.get());
1451 render_worker_thread_.reset(
1452 new rtc::PlatformThread(RunRender, this, "RenderWorkerThread"));
1453 render_worker_thread_->Start();
1454 render_worker_thread_->SetPriority(rtc::kRealtimePriority);
1455
1456 if (_twoDevices || !_recording) {
xians@google.com68efa212011-08-11 12:41:56 +00001457 OSStatus err = noErr;
andrew2bc63a12016-01-11 15:59:17 -08001458 WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_outputDeviceID, _deviceIOProcID));
1459 }
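  // If a shared device is already capturing, its IOProc is running and also
  // renders, so there is nothing to start here; only the flag below changes.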
1460 _playing = true;
xians@google.com68efa212011-08-11 12:41:56 +00001461
andrew2bc63a12016-01-11 15:59:17 -08001462 return 0;
1463}
xians@google.com68efa212011-08-11 12:41:56 +00001464
andrew2bc63a12016-01-11 15:59:17 -08001465int32_t AudioDeviceMac::StopPlayout() {
kthelgasonff046c72017-03-31 02:03:55 -07001466 rtc::CritScope lock(&_critSect);
andrew2bc63a12016-01-11 15:59:17 -08001467
1468 if (!_playIsInitialized) {
xians@google.com68efa212011-08-11 12:41:56 +00001469 return 0;
andrew2bc63a12016-01-11 15:59:17 -08001470 }
xians@google.com68efa212011-08-11 12:41:56 +00001471
andrew2bc63a12016-01-11 15:59:17 -08001472 OSStatus err = noErr;
xians@google.com68efa212011-08-11 12:41:56 +00001473
andrew2bc63a12016-01-11 15:59:17 -08001474 int32_t renderDeviceIsAlive = AtomicGet32(&_renderDeviceIsAlive);
1475 if (_playing && renderDeviceIsAlive == 1) {
1476 // We signal a stop for a shared device even when capturing has not
1477 // yet ended. This is to ensure the IOProc will return early as
1478 // intended (by checking |_playing|) before accessing resources we
1479 // free below (e.g. the render converter).
1480 //
1481 // In the case of a shared device, the IOProc will verify capturing
1482 // has ended before stopping itself.
xians@google.com68efa212011-08-11 12:41:56 +00001483 _playing = false;
andrew2bc63a12016-01-11 15:59:17 -08001484 _doStop = true; // Signal to io proc to stop audio device
1485 _critSect.Leave(); // Cannot be under lock, risk of deadlock
1486 if (kEventTimeout == _stopEvent.Wait(2000)) {
kthelgasonff046c72017-03-31 02:03:55 -07001487 rtc::CritScope critScoped(&_critSect);
sazab4aa4eb2017-07-19 01:12:36 -07001488 LOG(LS_WARNING)
1489          << "Timed out stopping the render IOProc. "
1490          << "We may have failed to detect a device removal.";
xians@google.com68efa212011-08-11 12:41:56 +00001491
andrew2bc63a12016-01-11 15:59:17 -08001492 // We assume capturing on a shared device has stopped as well if the
1493 // IOProc times out.
1494 WEBRTC_CA_LOG_WARN(AudioDeviceStop(_outputDeviceID, _deviceIOProcID));
1495 WEBRTC_CA_LOG_WARN(
1496 AudioDeviceDestroyIOProcID(_outputDeviceID, _deviceIOProcID));
1497 }
1498 _critSect.Enter();
1499 _doStop = false;
sazab4aa4eb2017-07-19 01:12:36 -07001500 LOG(LS_VERBOSE) << "Playout stopped";
andrew2bc63a12016-01-11 15:59:17 -08001501 }
1502
1503 // Setting this signal will allow the worker thread to be stopped.
1504 AtomicSet32(&_renderDeviceIsAlive, 0);
1505 if (render_worker_thread_.get()) {
1506 _critSect.Leave();
1507 render_worker_thread_->Stop();
1508 render_worker_thread_.reset();
1509 _critSect.Enter();
1510 }
1511
1512 WEBRTC_CA_LOG_WARN(AudioConverterDispose(_renderConverter));
1513
1514 // Remove listeners.
1515 AudioObjectPropertyAddress propertyAddress = {
1516 kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeOutput, 0};
1517 WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
1518 _outputDeviceID, &propertyAddress, &objectListenerProc, this));
1519
1520 propertyAddress.mSelector = kAudioDeviceProcessorOverload;
1521 WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
1522 _outputDeviceID, &propertyAddress, &objectListenerProc, this));
1523
1524 if (_macBookPro) {
1525 Boolean hasProperty =
1526 AudioObjectHasProperty(_outputDeviceID, &propertyAddress);
1527 if (hasProperty) {
1528 propertyAddress.mSelector = kAudioDevicePropertyDataSource;
1529 WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
1530 _outputDeviceID, &propertyAddress, &objectListenerProc, this));
1531 }
1532 }
1533
1534 _playIsInitialized = false;
1535 _playing = false;
1536
1537 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00001538}
1539
andrew2bc63a12016-01-11 15:59:17 -08001540int32_t AudioDeviceMac::PlayoutDelay(uint16_t& delayMS) const {
1541 int32_t renderDelayUs = AtomicGet32(&_renderDelayUs);
1542 delayMS =
1543 static_cast<uint16_t>(1e-3 * (renderDelayUs + _renderLatencyUs) + 0.5);
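  // Illustrative arithmetic (hypothetical values): renderDelayUs = 12000 and
  // _renderLatencyUs = 3000 give 1e-3 * 15000 + 0.5 = 15.5, truncated to
  // 15 ms by the cast.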
1544 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00001545}
1546
andrew2bc63a12016-01-11 15:59:17 -08001547int32_t AudioDeviceMac::RecordingDelay(uint16_t& delayMS) const {
1548 int32_t captureDelayUs = AtomicGet32(&_captureDelayUs);
1549 delayMS =
1550 static_cast<uint16_t>(1e-3 * (captureDelayUs + _captureLatencyUs) + 0.5);
1551 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00001552}
1553
andrew2bc63a12016-01-11 15:59:17 -08001554bool AudioDeviceMac::Playing() const {
1555 return (_playing);
xians@google.com68efa212011-08-11 12:41:56 +00001556}
1557
andrew2bc63a12016-01-11 15:59:17 -08001558bool AudioDeviceMac::PlayoutWarning() const {
1559 return (_playWarning > 0);
xians@google.com68efa212011-08-11 12:41:56 +00001560}
1561
andrew2bc63a12016-01-11 15:59:17 -08001562bool AudioDeviceMac::PlayoutError() const {
1563 return (_playError > 0);
xians@google.com68efa212011-08-11 12:41:56 +00001564}
1565
andrew2bc63a12016-01-11 15:59:17 -08001566bool AudioDeviceMac::RecordingWarning() const {
1567 return (_recWarning > 0);
xians@google.com68efa212011-08-11 12:41:56 +00001568}
1569
andrew2bc63a12016-01-11 15:59:17 -08001570bool AudioDeviceMac::RecordingError() const {
1571 return (_recError > 0);
xians@google.com68efa212011-08-11 12:41:56 +00001572}
1573
andrew2bc63a12016-01-11 15:59:17 -08001574void AudioDeviceMac::ClearPlayoutWarning() {
1575 _playWarning = 0;
xians@google.com68efa212011-08-11 12:41:56 +00001576}
1577
andrew2bc63a12016-01-11 15:59:17 -08001578void AudioDeviceMac::ClearPlayoutError() {
1579 _playError = 0;
xians@google.com68efa212011-08-11 12:41:56 +00001580}
1581
andrew2bc63a12016-01-11 15:59:17 -08001582void AudioDeviceMac::ClearRecordingWarning() {
1583 _recWarning = 0;
xians@google.com68efa212011-08-11 12:41:56 +00001584}
1585
andrew2bc63a12016-01-11 15:59:17 -08001586void AudioDeviceMac::ClearRecordingError() {
1587 _recError = 0;
xians@google.com68efa212011-08-11 12:41:56 +00001588}
1589
1590// ============================================================================
1591// Private Methods
1592// ============================================================================
1593
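// Enumeration strategy used by GetNumberDevices(): the default device for the
// requested scope is listed first (index 0), followed by every device that
// exposes at least one stream buffer in that scope. Returns the number of IDs
// written to scopedDeviceIds, or -1 on error.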
andrew2bc63a12016-01-11 15:59:17 -08001594int32_t AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope,
1595 AudioDeviceID scopedDeviceIds[],
1596 const uint32_t deviceListLength) {
1597 OSStatus err = noErr;
xians@google.com68efa212011-08-11 12:41:56 +00001598
andrew2bc63a12016-01-11 15:59:17 -08001599 AudioObjectPropertyAddress propertyAddress = {
1600 kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal,
1601 kAudioObjectPropertyElementMaster};
1602 UInt32 size = 0;
1603 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyDataSize(
1604 kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size));
1605 if (size == 0) {
sazab4aa4eb2017-07-19 01:12:36 -07001606 LOG(LS_WARNING) << "No devices";
andrew2bc63a12016-01-11 15:59:17 -08001607 return 0;
1608 }
xians@google.com68efa212011-08-11 12:41:56 +00001609
andrew2bc63a12016-01-11 15:59:17 -08001610 AudioDeviceID* deviceIds = (AudioDeviceID*)malloc(size);
1611 UInt32 numberDevices = size / sizeof(AudioDeviceID);
1612 AudioBufferList* bufferList = NULL;
1613 UInt32 numberScopedDevices = 0;
xians@google.com68efa212011-08-11 12:41:56 +00001614
andrew2bc63a12016-01-11 15:59:17 -08001615 // First check if there is a default device and list it
1616 UInt32 hardwareProperty = 0;
1617 if (scope == kAudioDevicePropertyScopeOutput) {
1618 hardwareProperty = kAudioHardwarePropertyDefaultOutputDevice;
1619 } else {
1620 hardwareProperty = kAudioHardwarePropertyDefaultInputDevice;
1621 }
xians@google.com68efa212011-08-11 12:41:56 +00001622
andrew2bc63a12016-01-11 15:59:17 -08001623 AudioObjectPropertyAddress propertyAddressDefault = {
1624 hardwareProperty, kAudioObjectPropertyScopeGlobal,
1625 kAudioObjectPropertyElementMaster};
xians@google.com68efa212011-08-11 12:41:56 +00001626
andrew2bc63a12016-01-11 15:59:17 -08001627 AudioDeviceID usedID;
1628 UInt32 uintSize = sizeof(UInt32);
1629 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,
1630 &propertyAddressDefault, 0,
1631 NULL, &uintSize, &usedID));
1632 if (usedID != kAudioDeviceUnknown) {
1633 scopedDeviceIds[numberScopedDevices] = usedID;
1634 numberScopedDevices++;
1635 } else {
sazab4aa4eb2017-07-19 01:12:36 -07001636 LOG(LS_WARNING) << "GetNumberDevices(): Default device unknown";
andrew2bc63a12016-01-11 15:59:17 -08001637 }
xians@google.com68efa212011-08-11 12:41:56 +00001638
andrew2bc63a12016-01-11 15:59:17 -08001639 // Then list the rest of the devices
1640 bool listOK = true;
xians@google.com68efa212011-08-11 12:41:56 +00001641
andrew2bc63a12016-01-11 15:59:17 -08001642 WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(
1643 kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size, deviceIds));
1644 if (err != noErr) {
1645 listOK = false;
1646 } else {
1647 propertyAddress.mSelector = kAudioDevicePropertyStreamConfiguration;
1648 propertyAddress.mScope = scope;
1649 propertyAddress.mElement = 0;
1650 for (UInt32 i = 0; i < numberDevices; i++) {
1651      // Check for channels in the requested scope
1652 WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyDataSize(
1653 deviceIds[i], &propertyAddress, 0, NULL, &size));
1654 if (err == kAudioHardwareBadDeviceError) {
1655 // This device doesn't actually exist; continue iterating.
1656 continue;
1657 } else if (err != noErr) {
xians@google.com68efa212011-08-11 12:41:56 +00001658 listOK = false;
andrew2bc63a12016-01-11 15:59:17 -08001659 break;
1660 }
xians@google.com68efa212011-08-11 12:41:56 +00001661
andrew2bc63a12016-01-11 15:59:17 -08001662 bufferList = (AudioBufferList*)malloc(size);
1663 WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(
1664 deviceIds[i], &propertyAddress, 0, NULL, &size, bufferList));
1665 if (err != noErr) {
1666 listOK = false;
1667 break;
1668 }
xians@google.com68efa212011-08-11 12:41:56 +00001669
andrew2bc63a12016-01-11 15:59:17 -08001670 if (bufferList->mNumberBuffers > 0) {
1671 if (numberScopedDevices >= deviceListLength) {
sazab4aa4eb2017-07-19 01:12:36 -07001672 LOG(LS_ERROR) << "Device list is not long enough";
andrew2bc63a12016-01-11 15:59:17 -08001673 listOK = false;
1674 break;
xians@google.com68efa212011-08-11 12:41:56 +00001675 }
1676
andrew2bc63a12016-01-11 15:59:17 -08001677 scopedDeviceIds[numberScopedDevices] = deviceIds[i];
1678 numberScopedDevices++;
1679 }
xians@google.com68efa212011-08-11 12:41:56 +00001680
andrew2bc63a12016-01-11 15:59:17 -08001681 free(bufferList);
1682 bufferList = NULL;
1683 } // for
1684 }
1685
1686 if (!listOK) {
1687 if (deviceIds) {
1688 free(deviceIds);
1689 deviceIds = NULL;
xians@google.com68efa212011-08-11 12:41:56 +00001690 }
1691
andrew2bc63a12016-01-11 15:59:17 -08001692 if (bufferList) {
1693 free(bufferList);
1694 bufferList = NULL;
xians@google.com68efa212011-08-11 12:41:56 +00001695 }
1696
andrew2bc63a12016-01-11 15:59:17 -08001697 return -1;
1698 }
1699
1700 // Happy ending
1701 if (deviceIds) {
1702 free(deviceIds);
1703 deviceIds = NULL;
1704 }
1705
1706 return numberScopedDevices;
xians@google.com68efa212011-08-11 12:41:56 +00001707}
1708
andrew2bc63a12016-01-11 15:59:17 -08001709int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope,
1710 const uint16_t index,
1711 char* name) {
1712 OSStatus err = noErr;
1713 UInt32 len = kAdmMaxDeviceNameSize;
1714 AudioDeviceID deviceIds[MaxNumberDevices];
xians@google.com68efa212011-08-11 12:41:56 +00001715
andrew2bc63a12016-01-11 15:59:17 -08001716 int numberDevices = GetNumberDevices(scope, deviceIds, MaxNumberDevices);
1717 if (numberDevices < 0) {
1718 return -1;
1719 } else if (numberDevices == 0) {
sazab4aa4eb2017-07-19 01:12:36 -07001720 LOG(LS_ERROR) << "No devices";
andrew2bc63a12016-01-11 15:59:17 -08001721 return -1;
1722 }
1723
1724  // If the index is below the number of devices, assume it's a "WebRTC ID";
1725  // otherwise assume it's a CoreAudio ID.
1726 AudioDeviceID usedID;
1727
1728 // Check if there is a default device
1729 bool isDefaultDevice = false;
1730 if (index == 0) {
1731 UInt32 hardwareProperty = 0;
1732 if (scope == kAudioDevicePropertyScopeOutput) {
1733 hardwareProperty = kAudioHardwarePropertyDefaultOutputDevice;
1734 } else {
1735 hardwareProperty = kAudioHardwarePropertyDefaultInputDevice;
xians@google.com68efa212011-08-11 12:41:56 +00001736 }
xians@google.com68efa212011-08-11 12:41:56 +00001737 AudioObjectPropertyAddress propertyAddress = {
andrew2bc63a12016-01-11 15:59:17 -08001738 hardwareProperty, kAudioObjectPropertyScopeGlobal,
1739 kAudioObjectPropertyElementMaster};
1740 UInt32 size = sizeof(UInt32);
1741 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
1742 kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size, &usedID));
1743 if (usedID == kAudioDeviceUnknown) {
sazab4aa4eb2017-07-19 01:12:36 -07001744 LOG(LS_WARNING) << "GetDeviceName(): Default device unknown";
andrew2bc63a12016-01-11 15:59:17 -08001745 } else {
1746 isDefaultDevice = true;
1747 }
1748 }
xians@google.com68efa212011-08-11 12:41:56 +00001749
andrew2bc63a12016-01-11 15:59:17 -08001750 AudioObjectPropertyAddress propertyAddress = {kAudioDevicePropertyDeviceName,
1751 scope, 0};
xians@google.com68efa212011-08-11 12:41:56 +00001752
andrew2bc63a12016-01-11 15:59:17 -08001753 if (isDefaultDevice) {
1754 char devName[len];
xians@google.com68efa212011-08-11 12:41:56 +00001755
andrew2bc63a12016-01-11 15:59:17 -08001756 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(usedID, &propertyAddress,
1757 0, NULL, &len, devName));
xians@google.com68efa212011-08-11 12:41:56 +00001758
andrew2bc63a12016-01-11 15:59:17 -08001759 sprintf(name, "default (%s)", devName);
1760 } else {
1761 if (index < numberDevices) {
1762 usedID = deviceIds[index];
1763 } else {
1764 usedID = index;
xians@google.com68efa212011-08-11 12:41:56 +00001765 }
1766
andrew2bc63a12016-01-11 15:59:17 -08001767 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(usedID, &propertyAddress,
1768 0, NULL, &len, name));
1769 }
1770
1771 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00001772}
1773
pbos@webrtc.org25509882013-04-09 10:30:35 +00001774int32_t AudioDeviceMac::InitDevice(const uint16_t userDeviceIndex,
1775 AudioDeviceID& deviceId,
andrew2bc63a12016-01-11 15:59:17 -08001776 const bool isInput) {
1777 OSStatus err = noErr;
1778 UInt32 size = 0;
1779 AudioObjectPropertyScope deviceScope;
1780 AudioObjectPropertySelector defaultDeviceSelector;
1781 AudioDeviceID deviceIds[MaxNumberDevices];
xians@google.com68efa212011-08-11 12:41:56 +00001782
andrew2bc63a12016-01-11 15:59:17 -08001783 if (isInput) {
1784 deviceScope = kAudioDevicePropertyScopeInput;
1785 defaultDeviceSelector = kAudioHardwarePropertyDefaultInputDevice;
1786 } else {
1787 deviceScope = kAudioDevicePropertyScopeOutput;
1788 defaultDeviceSelector = kAudioHardwarePropertyDefaultOutputDevice;
1789 }
1790
1791 AudioObjectPropertyAddress propertyAddress = {
1792 defaultDeviceSelector, kAudioObjectPropertyScopeGlobal,
1793 kAudioObjectPropertyElementMaster};
1794
1795 // Get the actual device IDs
1796 int numberDevices =
1797 GetNumberDevices(deviceScope, deviceIds, MaxNumberDevices);
1798 if (numberDevices < 0) {
1799 return -1;
1800 } else if (numberDevices == 0) {
sazab4aa4eb2017-07-19 01:12:36 -07001801 LOG(LS_ERROR) << "InitDevice(): No devices";
andrew2bc63a12016-01-11 15:59:17 -08001802 return -1;
1803 }
1804
1805 bool isDefaultDevice = false;
1806 deviceId = kAudioDeviceUnknown;
1807 if (userDeviceIndex == 0) {
1808 // Try to use default system device
1809 size = sizeof(AudioDeviceID);
1810 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
1811 kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size, &deviceId));
1812 if (deviceId == kAudioDeviceUnknown) {
sazab4aa4eb2017-07-19 01:12:36 -07001813 LOG(LS_WARNING) << "No default device exists";
andrew2bc63a12016-01-11 15:59:17 -08001814 } else {
1815 isDefaultDevice = true;
xians@google.com68efa212011-08-11 12:41:56 +00001816 }
andrew2bc63a12016-01-11 15:59:17 -08001817 }
xians@google.com68efa212011-08-11 12:41:56 +00001818
andrew2bc63a12016-01-11 15:59:17 -08001819 if (!isDefaultDevice) {
1820 deviceId = deviceIds[userDeviceIndex];
1821 }
xians@google.com68efa212011-08-11 12:41:56 +00001822
andrew2bc63a12016-01-11 15:59:17 -08001823 // Obtain device name and manufacturer for logging.
1824 // Also use this as a test to ensure a user-set device ID is valid.
1825 char devName[128];
1826 char devManf[128];
1827 memset(devName, 0, sizeof(devName));
1828 memset(devManf, 0, sizeof(devManf));
xians@google.com68efa212011-08-11 12:41:56 +00001829
andrew2bc63a12016-01-11 15:59:17 -08001830 propertyAddress.mSelector = kAudioDevicePropertyDeviceName;
1831 propertyAddress.mScope = deviceScope;
1832 propertyAddress.mElement = 0;
1833 size = sizeof(devName);
1834 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId, &propertyAddress,
1835 0, NULL, &size, devName));
xians@google.com68efa212011-08-11 12:41:56 +00001836
andrew2bc63a12016-01-11 15:59:17 -08001837 propertyAddress.mSelector = kAudioDevicePropertyDeviceManufacturer;
1838 size = sizeof(devManf);
1839 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId, &propertyAddress,
1840 0, NULL, &size, devManf));
xians@google.com68efa212011-08-11 12:41:56 +00001841
andrew2bc63a12016-01-11 15:59:17 -08001842 if (isInput) {
sazab4aa4eb2017-07-19 01:12:36 -07001843 LOG(LS_VERBOSE) << "Input device: " << devManf << " " << devName;
andrew2bc63a12016-01-11 15:59:17 -08001844 } else {
sazab4aa4eb2017-07-19 01:12:36 -07001845 LOG(LS_VERBOSE) << "Output device: " << devManf << " " << devName;
andrew2bc63a12016-01-11 15:59:17 -08001846 }
xians@google.com68efa212011-08-11 12:41:56 +00001847
andrew2bc63a12016-01-11 15:59:17 -08001848 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00001849}
1850
andrew2bc63a12016-01-11 15:59:17 -08001851OSStatus AudioDeviceMac::SetDesiredPlayoutFormat() {
1852 // Our preferred format to work with.
1853 _outDesiredFormat.mSampleRate = N_PLAY_SAMPLES_PER_SEC;
1854 _outDesiredFormat.mChannelsPerFrame = _playChannels;
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001855
andrew2bc63a12016-01-11 15:59:17 -08001856 if (_ptrAudioBuffer) {
1857 // Update audio buffer with the selected parameters.
1858 _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
1859 _ptrAudioBuffer->SetPlayoutChannels((uint8_t)_playChannels);
1860 }
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001861
andrew2bc63a12016-01-11 15:59:17 -08001862 _renderDelayOffsetSamples = _renderBufSizeSamples -
1863 N_BUFFERS_OUT * ENGINE_PLAY_BUF_SIZE_IN_SAMPLES *
1864 _outDesiredFormat.mChannelsPerFrame;
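  // This offset reserves headroom in the render ring buffer: the render
  // worker thread only refills when the writable space minus this offset can
  // hold another block, which caps buffered playout at roughly N_BUFFERS_OUT
  // engine-sized blocks (see RenderWorkerThread()).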
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001865
andrew2bc63a12016-01-11 15:59:17 -08001866 _outDesiredFormat.mBytesPerPacket =
1867 _outDesiredFormat.mChannelsPerFrame * sizeof(SInt16);
1868 // In uncompressed audio, a packet is one frame.
1869 _outDesiredFormat.mFramesPerPacket = 1;
1870 _outDesiredFormat.mBytesPerFrame =
1871 _outDesiredFormat.mChannelsPerFrame * sizeof(SInt16);
1872 _outDesiredFormat.mBitsPerChannel = sizeof(SInt16) * 8;
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001873
andrew2bc63a12016-01-11 15:59:17 -08001874 _outDesiredFormat.mFormatFlags =
1875 kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001876#ifdef WEBRTC_ARCH_BIG_ENDIAN
andrew2bc63a12016-01-11 15:59:17 -08001877 _outDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001878#endif
andrew2bc63a12016-01-11 15:59:17 -08001879 _outDesiredFormat.mFormatID = kAudioFormatLinearPCM;
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001880
andrew2bc63a12016-01-11 15:59:17 -08001881 OSStatus err = noErr;
1882 WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(
1883 &_outDesiredFormat, &_outStreamFormat, &_renderConverter));
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001884
henrika98680422017-08-31 06:47:32 -07001885  // Try to set the buffer size to the desired value, fixed here at 20 ms.
1886 const uint16_t kPlayBufDelayFixed = 20;
andrew2bc63a12016-01-11 15:59:17 -08001887 UInt32 bufByteCount = static_cast<UInt32>(
henrika98680422017-08-31 06:47:32 -07001888 (_outStreamFormat.mSampleRate / 1000.0) * kPlayBufDelayFixed *
andrew2bc63a12016-01-11 15:59:17 -08001889 _outStreamFormat.mChannelsPerFrame * sizeof(Float32));
1890 if (_outStreamFormat.mFramesPerPacket != 0) {
1891 if (bufByteCount % _outStreamFormat.mFramesPerPacket != 0) {
1892 bufByteCount = (static_cast<UInt32>(bufByteCount /
1893 _outStreamFormat.mFramesPerPacket) +
1894 1) *
1895 _outStreamFormat.mFramesPerPacket;
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001896 }
andrew2bc63a12016-01-11 15:59:17 -08001897 }
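  // Illustrative arithmetic (hypothetical device values): at 44.1 kHz stereo
  // Float32, 20 ms is 44.1 * 20 * 2 * 4 = 7056 bytes; with
  // mFramesPerPacket = 512 this is rounded up to (7056 / 512 + 1) * 512 =
  // 7168 by the check above.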
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001898
andrew2bc63a12016-01-11 15:59:17 -08001899 // Ensure the buffer size is within the range provided by the device.
1900 AudioObjectPropertyAddress propertyAddress = {
1901 kAudioDevicePropertyDataSource, kAudioDevicePropertyScopeOutput, 0};
1902 propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;
1903 AudioValueRange range;
1904 UInt32 size = sizeof(range);
1905 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
1906 _outputDeviceID, &propertyAddress, 0, NULL, &size, &range));
1907 if (range.mMinimum > bufByteCount) {
1908 bufByteCount = range.mMinimum;
1909 } else if (range.mMaximum < bufByteCount) {
1910 bufByteCount = range.mMaximum;
1911 }
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001912
andrew2bc63a12016-01-11 15:59:17 -08001913 propertyAddress.mSelector = kAudioDevicePropertyBufferSize;
1914 size = sizeof(bufByteCount);
1915 WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(
1916 _outputDeviceID, &propertyAddress, 0, NULL, size, &bufByteCount));
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001917
andrew2bc63a12016-01-11 15:59:17 -08001918 // Get render device latency.
1919 propertyAddress.mSelector = kAudioDevicePropertyLatency;
1920 UInt32 latency = 0;
1921 size = sizeof(UInt32);
1922 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
1923 _outputDeviceID, &propertyAddress, 0, NULL, &size, &latency));
1924 _renderLatencyUs =
1925 static_cast<uint32_t>((1.0e6 * latency) / _outStreamFormat.mSampleRate);
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001926
andrew2bc63a12016-01-11 15:59:17 -08001927 // Get render stream latency.
1928 propertyAddress.mSelector = kAudioDevicePropertyStreams;
1929 AudioStreamID stream = 0;
1930 size = sizeof(AudioStreamID);
1931 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
1932 _outputDeviceID, &propertyAddress, 0, NULL, &size, &stream));
1933 propertyAddress.mSelector = kAudioStreamPropertyLatency;
1934 size = sizeof(UInt32);
1935 latency = 0;
1936 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
1937 _outputDeviceID, &propertyAddress, 0, NULL, &size, &latency));
1938 _renderLatencyUs +=
1939 static_cast<uint32_t>((1.0e6 * latency) / _outStreamFormat.mSampleRate);
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001940
sazab4aa4eb2017-07-19 01:12:36 -07001941 LOG(LS_VERBOSE) << "initial playout status: _renderDelayOffsetSamples="
1942 << _renderDelayOffsetSamples << ", _renderDelayUs="
1943 << _renderDelayUs << ", _renderLatencyUs="
1944 << _renderLatencyUs;
andrew2bc63a12016-01-11 15:59:17 -08001945 return 0;
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001946}
1947
xians@google.com68efa212011-08-11 12:41:56 +00001948OSStatus AudioDeviceMac::objectListenerProc(
1949 AudioObjectID objectId,
1950 UInt32 numberAddresses,
1951 const AudioObjectPropertyAddress addresses[],
andrew2bc63a12016-01-11 15:59:17 -08001952 void* clientData) {
1953 AudioDeviceMac* ptrThis = (AudioDeviceMac*)clientData;
1954 RTC_DCHECK(ptrThis != NULL);
xians@google.com68efa212011-08-11 12:41:56 +00001955
andrew2bc63a12016-01-11 15:59:17 -08001956 ptrThis->implObjectListenerProc(objectId, numberAddresses, addresses);
xians@google.com68efa212011-08-11 12:41:56 +00001957
andrew2bc63a12016-01-11 15:59:17 -08001958 // AudioObjectPropertyListenerProc functions are supposed to return 0
1959 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00001960}
1961
1962OSStatus AudioDeviceMac::implObjectListenerProc(
1963 const AudioObjectID objectId,
1964 const UInt32 numberAddresses,
andrew2bc63a12016-01-11 15:59:17 -08001965 const AudioObjectPropertyAddress addresses[]) {
sazab4aa4eb2017-07-19 01:12:36 -07001966 LOG(LS_VERBOSE) << "AudioDeviceMac::implObjectListenerProc()";
andrew@webrtc.org6f69eb72013-06-07 17:56:50 +00001967
andrew2bc63a12016-01-11 15:59:17 -08001968 for (UInt32 i = 0; i < numberAddresses; i++) {
1969 if (addresses[i].mSelector == kAudioHardwarePropertyDevices) {
1970 HandleDeviceChange();
1971 } else if (addresses[i].mSelector == kAudioDevicePropertyStreamFormat) {
1972 HandleStreamFormatChange(objectId, addresses[i]);
1973 } else if (addresses[i].mSelector == kAudioDevicePropertyDataSource) {
1974 HandleDataSourceChange(objectId, addresses[i]);
1975 } else if (addresses[i].mSelector == kAudioDeviceProcessorOverload) {
1976 HandleProcessorOverload(addresses[i]);
xians@google.com68efa212011-08-11 12:41:56 +00001977 }
andrew2bc63a12016-01-11 15:59:17 -08001978 }
xians@google.com68efa212011-08-11 12:41:56 +00001979
andrew2bc63a12016-01-11 15:59:17 -08001980 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00001981}
1982
andrew2bc63a12016-01-11 15:59:17 -08001983int32_t AudioDeviceMac::HandleDeviceChange() {
1984 OSStatus err = noErr;
xians@google.com68efa212011-08-11 12:41:56 +00001985
sazab4aa4eb2017-07-19 01:12:36 -07001986 LOG(LS_VERBOSE) << "kAudioHardwarePropertyDevices";
xians@google.com68efa212011-08-11 12:41:56 +00001987
andrew2bc63a12016-01-11 15:59:17 -08001988 // A device has changed. Check if our registered devices have been removed.
1989 // Ensure the devices have been initialized, meaning the IDs are valid.
1990 if (MicrophoneIsInitialized()) {
1991 AudioObjectPropertyAddress propertyAddress = {
1992 kAudioDevicePropertyDeviceIsAlive, kAudioDevicePropertyScopeInput, 0};
1993 UInt32 deviceIsAlive = 1;
1994 UInt32 size = sizeof(UInt32);
1995 err = AudioObjectGetPropertyData(_inputDeviceID, &propertyAddress, 0, NULL,
1996 &size, &deviceIsAlive);
xians@google.com68efa212011-08-11 12:41:56 +00001997
andrew2bc63a12016-01-11 15:59:17 -08001998 if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0) {
sazab4aa4eb2017-07-19 01:12:36 -07001999 LOG(LS_WARNING) << "Capture device is not alive (probably removed)";
andrew2bc63a12016-01-11 15:59:17 -08002000 AtomicSet32(&_captureDeviceIsAlive, 0);
2001 _mixerManager.CloseMicrophone();
2002 if (_recError == 1) {
sazab4aa4eb2017-07-19 01:12:36 -07002003 LOG(LS_WARNING) << "pending recording error exists";
andrew2bc63a12016-01-11 15:59:17 -08002004 }
2005 _recError = 1; // triggers callback from module process thread
2006 } else if (err != noErr) {
sazab4aa4eb2017-07-19 01:12:36 -07002007 logCAMsg(rtc::LS_ERROR,
andrew2bc63a12016-01-11 15:59:17 -08002008 "Error in AudioDeviceGetProperty()", (const char*)&err);
2009 return -1;
xians@google.com68efa212011-08-11 12:41:56 +00002010 }
andrew2bc63a12016-01-11 15:59:17 -08002011 }
xians@google.com68efa212011-08-11 12:41:56 +00002012
andrew2bc63a12016-01-11 15:59:17 -08002013 if (SpeakerIsInitialized()) {
2014 AudioObjectPropertyAddress propertyAddress = {
2015 kAudioDevicePropertyDeviceIsAlive, kAudioDevicePropertyScopeOutput, 0};
2016 UInt32 deviceIsAlive = 1;
2017 UInt32 size = sizeof(UInt32);
2018 err = AudioObjectGetPropertyData(_outputDeviceID, &propertyAddress, 0, NULL,
2019 &size, &deviceIsAlive);
xians@google.com68efa212011-08-11 12:41:56 +00002020
andrew2bc63a12016-01-11 15:59:17 -08002021 if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0) {
sazab4aa4eb2017-07-19 01:12:36 -07002022 LOG(LS_WARNING) << "Render device is not alive (probably removed)";
andrew2bc63a12016-01-11 15:59:17 -08002023 AtomicSet32(&_renderDeviceIsAlive, 0);
2024 _mixerManager.CloseSpeaker();
2025 if (_playError == 1) {
sazab4aa4eb2017-07-19 01:12:36 -07002026 LOG(LS_WARNING) << "pending playout error exists";
andrew2bc63a12016-01-11 15:59:17 -08002027 }
2028 _playError = 1; // triggers callback from module process thread
2029 } else if (err != noErr) {
sazab4aa4eb2017-07-19 01:12:36 -07002030 logCAMsg(rtc::LS_ERROR,
andrew2bc63a12016-01-11 15:59:17 -08002031 "Error in AudioDeviceGetProperty()", (const char*)&err);
2032 return -1;
xians@google.com68efa212011-08-11 12:41:56 +00002033 }
andrew2bc63a12016-01-11 15:59:17 -08002034 }
xians@google.com68efa212011-08-11 12:41:56 +00002035
andrew2bc63a12016-01-11 15:59:17 -08002036 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002037}
2038
pbos@webrtc.org25509882013-04-09 10:30:35 +00002039int32_t AudioDeviceMac::HandleStreamFormatChange(
xians@google.com68efa212011-08-11 12:41:56 +00002040 const AudioObjectID objectId,
andrew2bc63a12016-01-11 15:59:17 -08002041 const AudioObjectPropertyAddress propertyAddress) {
2042 OSStatus err = noErr;
xians@google.com68efa212011-08-11 12:41:56 +00002043
sazab4aa4eb2017-07-19 01:12:36 -07002044 LOG(LS_VERBOSE) << "Stream format changed";
xians@google.com68efa212011-08-11 12:41:56 +00002045
andrew2bc63a12016-01-11 15:59:17 -08002046 if (objectId != _inputDeviceID && objectId != _outputDeviceID) {
xians@google.com68efa212011-08-11 12:41:56 +00002047 return 0;
andrew2bc63a12016-01-11 15:59:17 -08002048 }
2049
2050 // Get the new device format
2051 AudioStreamBasicDescription streamFormat;
2052 UInt32 size = sizeof(streamFormat);
2053 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
2054 objectId, &propertyAddress, 0, NULL, &size, &streamFormat));
2055
2056 if (streamFormat.mFormatID != kAudioFormatLinearPCM) {
sazab4aa4eb2017-07-19 01:12:36 -07002057 logCAMsg(rtc::LS_ERROR,
andrew2bc63a12016-01-11 15:59:17 -08002058 "Unacceptable input stream format -> mFormatID",
2059 (const char*)&streamFormat.mFormatID);
2060 return -1;
2061 }
2062
2063 if (streamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS) {
sazab4aa4eb2017-07-19 01:12:36 -07002064 LOG(LS_ERROR) << "Too many channels on device (mChannelsPerFrame = "
2065 << streamFormat.mChannelsPerFrame << ")";
andrew2bc63a12016-01-11 15:59:17 -08002066 return -1;
2067 }
2068
sazab4aa4eb2017-07-19 01:12:36 -07002069 LOG(LS_VERBOSE) << "Stream format:";
2070 LOG(LS_VERBOSE) << "mSampleRate = " << streamFormat.mSampleRate
2071 << ", mChannelsPerFrame = " << streamFormat.mChannelsPerFrame;
2072 LOG(LS_VERBOSE) << "mBytesPerPacket = " << streamFormat.mBytesPerPacket
2073 << ", mFramesPerPacket = " << streamFormat.mFramesPerPacket;
2074 LOG(LS_VERBOSE) << "mBytesPerFrame = " << streamFormat.mBytesPerFrame
2075 << ", mBitsPerChannel = " << streamFormat.mBitsPerChannel;
2076 LOG(LS_VERBOSE) << "mFormatFlags = " << streamFormat.mFormatFlags;
2077 logCAMsg(rtc::LS_VERBOSE, "mFormatID",
andrew2bc63a12016-01-11 15:59:17 -08002078 (const char*)&streamFormat.mFormatID);
2079
2080 if (propertyAddress.mScope == kAudioDevicePropertyScopeInput) {
2081 const int io_block_size_samples = streamFormat.mChannelsPerFrame *
2082 streamFormat.mSampleRate / 100 *
2083 N_BLOCKS_IO;
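    // mSampleRate / 100 is the number of frames in 10 ms of audio, so this is
    // the total sample count of N_BLOCKS_IO such blocks across all channels;
    // it must fit in the capture ring buffer sized at init time.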
2084 if (io_block_size_samples > _captureBufSizeSamples) {
sazab4aa4eb2017-07-19 01:12:36 -07002085 LOG(LS_ERROR) << "Input IO block size (" << io_block_size_samples
2086 << ") is larger than ring buffer ("
2087 << _captureBufSizeSamples << ")";
andrew2bc63a12016-01-11 15:59:17 -08002088 return -1;
2089 }
2090
2091 memcpy(&_inStreamFormat, &streamFormat, sizeof(streamFormat));
2092
2093 if (_inStreamFormat.mChannelsPerFrame >= 2 && (_recChannels == 2)) {
2094 _inDesiredFormat.mChannelsPerFrame = 2;
2095 } else {
2096 // Disable stereo recording when we only have one channel on the device.
2097 _inDesiredFormat.mChannelsPerFrame = 1;
2098 _recChannels = 1;
sazab4aa4eb2017-07-19 01:12:36 -07002099 LOG(LS_VERBOSE) << "Stereo recording unavailable on this device";
andrew2bc63a12016-01-11 15:59:17 -08002100 }
2101
2102 if (_ptrAudioBuffer) {
2103 // Update audio buffer with the selected parameters
2104 _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
2105 _ptrAudioBuffer->SetRecordingChannels((uint8_t)_recChannels);
2106 }
2107
2108 // Recreate the converter with the new format
2109 // TODO(xians): make this thread safe
2110 WEBRTC_CA_RETURN_ON_ERR(AudioConverterDispose(_captureConverter));
2111
2112 WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&streamFormat, &_inDesiredFormat,
2113 &_captureConverter));
2114 } else {
2115 memcpy(&_outStreamFormat, &streamFormat, sizeof(streamFormat));
2116
2117 // Our preferred format to work with
2118 if (_outStreamFormat.mChannelsPerFrame < 2) {
2119 _playChannels = 1;
sazab4aa4eb2017-07-19 01:12:36 -07002120 LOG(LS_VERBOSE) << "Stereo playout unavailable on this device";
andrew2bc63a12016-01-11 15:59:17 -08002121 }
2122 WEBRTC_CA_RETURN_ON_ERR(SetDesiredPlayoutFormat());
2123 }
2124 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002125}
2126
pbos@webrtc.org25509882013-04-09 10:30:35 +00002127int32_t AudioDeviceMac::HandleDataSourceChange(
xians@google.com68efa212011-08-11 12:41:56 +00002128 const AudioObjectID objectId,
andrew2bc63a12016-01-11 15:59:17 -08002129 const AudioObjectPropertyAddress propertyAddress) {
2130 OSStatus err = noErr;
xians@google.com68efa212011-08-11 12:41:56 +00002131
andrew2bc63a12016-01-11 15:59:17 -08002132 if (_macBookPro &&
2133 propertyAddress.mScope == kAudioDevicePropertyScopeOutput) {
sazab4aa4eb2017-07-19 01:12:36 -07002134 LOG(LS_VERBOSE) << "Data source changed";
xians@google.com68efa212011-08-11 12:41:56 +00002135
andrew2bc63a12016-01-11 15:59:17 -08002136 _macBookProPanRight = false;
2137 UInt32 dataSource = 0;
2138 UInt32 size = sizeof(UInt32);
2139 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
2140 objectId, &propertyAddress, 0, NULL, &size, &dataSource));
2141 if (dataSource == 'ispk') {
2142 _macBookProPanRight = true;
sazab4aa4eb2017-07-19 01:12:36 -07002143 LOG(LS_VERBOSE)
2144 << "MacBook Pro using internal speakers; stereo panning right";
andrew2bc63a12016-01-11 15:59:17 -08002145 } else {
sazab4aa4eb2017-07-19 01:12:36 -07002146 LOG(LS_VERBOSE) << "MacBook Pro not using internal speakers";
xians@google.com68efa212011-08-11 12:41:56 +00002147 }
andrew2bc63a12016-01-11 15:59:17 -08002148 }
xians@google.com68efa212011-08-11 12:41:56 +00002149
andrew2bc63a12016-01-11 15:59:17 -08002150 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002151}
pbos@webrtc.org25509882013-04-09 10:30:35 +00002152int32_t AudioDeviceMac::HandleProcessorOverload(
andrew2bc63a12016-01-11 15:59:17 -08002153 const AudioObjectPropertyAddress propertyAddress) {
2154 // TODO(xians): we probably want to notify the user in some way of the
2155 // overload. However, the Windows interpretations of these errors seem to
2156 // be more severe than what ProcessorOverload is thrown for.
2157 //
2158 // We don't log the notification, as it's sent from the HAL's IO thread. We
2159 // don't want to slow it down even further.
2160 if (propertyAddress.mScope == kAudioDevicePropertyScopeInput) {
sazab4aa4eb2017-07-19 01:12:36 -07002161    // LOG(LS_WARNING) << "Capture processor overload";
andrew2bc63a12016-01-11 15:59:17 -08002162 //_callback->ProblemIsReported(
2163 // SndCardStreamObserver::ERecordingProblem);
2164 } else {
sazab4aa4eb2017-07-19 01:12:36 -07002165 // LOG(LS_WARNING) << "Render processor overload";
andrew2bc63a12016-01-11 15:59:17 -08002166 //_callback->ProblemIsReported(
2167 // SndCardStreamObserver::EPlaybackProblem);
2168 }
xians@google.com68efa212011-08-11 12:41:56 +00002169
andrew2bc63a12016-01-11 15:59:17 -08002170 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002171}
2172
2173// ============================================================================
2174// Thread Methods
2175// ============================================================================
2176
andrew2bc63a12016-01-11 15:59:17 -08002177OSStatus AudioDeviceMac::deviceIOProc(AudioDeviceID,
2178 const AudioTimeStamp*,
xians@google.com68efa212011-08-11 12:41:56 +00002179 const AudioBufferList* inputData,
2180 const AudioTimeStamp* inputTime,
2181 AudioBufferList* outputData,
2182 const AudioTimeStamp* outputTime,
andrew2bc63a12016-01-11 15:59:17 -08002183 void* clientData) {
2184 AudioDeviceMac* ptrThis = (AudioDeviceMac*)clientData;
2185 RTC_DCHECK(ptrThis != NULL);
xians@google.com68efa212011-08-11 12:41:56 +00002186
andrew2bc63a12016-01-11 15:59:17 -08002187 ptrThis->implDeviceIOProc(inputData, inputTime, outputData, outputTime);
xians@google.com68efa212011-08-11 12:41:56 +00002188
andrew2bc63a12016-01-11 15:59:17 -08002189 // AudioDeviceIOProc functions are supposed to return 0
2190 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002191}
2192
2193OSStatus AudioDeviceMac::outConverterProc(AudioConverterRef,
andrew2bc63a12016-01-11 15:59:17 -08002194 UInt32* numberDataPackets,
2195 AudioBufferList* data,
2196 AudioStreamPacketDescription**,
2197 void* userData) {
2198 AudioDeviceMac* ptrThis = (AudioDeviceMac*)userData;
2199 RTC_DCHECK(ptrThis != NULL);
xians@google.com68efa212011-08-11 12:41:56 +00002200
andrew2bc63a12016-01-11 15:59:17 -08002201 return ptrThis->implOutConverterProc(numberDataPackets, data);
xians@google.com68efa212011-08-11 12:41:56 +00002202}
2203
andrew2bc63a12016-01-11 15:59:17 -08002204OSStatus AudioDeviceMac::inDeviceIOProc(AudioDeviceID,
2205 const AudioTimeStamp*,
xians@google.com68efa212011-08-11 12:41:56 +00002206 const AudioBufferList* inputData,
2207 const AudioTimeStamp* inputTime,
2208 AudioBufferList*,
andrew2bc63a12016-01-11 15:59:17 -08002209 const AudioTimeStamp*,
2210 void* clientData) {
2211 AudioDeviceMac* ptrThis = (AudioDeviceMac*)clientData;
2212 RTC_DCHECK(ptrThis != NULL);
xians@google.com68efa212011-08-11 12:41:56 +00002213
andrew2bc63a12016-01-11 15:59:17 -08002214 ptrThis->implInDeviceIOProc(inputData, inputTime);
xians@google.com68efa212011-08-11 12:41:56 +00002215
andrew2bc63a12016-01-11 15:59:17 -08002216 // AudioDeviceIOProc functions are supposed to return 0
2217 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002218}
2219
2220OSStatus AudioDeviceMac::inConverterProc(
2221 AudioConverterRef,
andrew2bc63a12016-01-11 15:59:17 -08002222 UInt32* numberDataPackets,
2223 AudioBufferList* data,
2224 AudioStreamPacketDescription** /*dataPacketDescription*/,
2225 void* userData) {
2226 AudioDeviceMac* ptrThis = static_cast<AudioDeviceMac*>(userData);
2227 RTC_DCHECK(ptrThis != NULL);
xians@google.com68efa212011-08-11 12:41:56 +00002228
andrew2bc63a12016-01-11 15:59:17 -08002229 return ptrThis->implInConverterProc(numberDataPackets, data);
xians@google.com68efa212011-08-11 12:41:56 +00002230}
2231
andrew2bc63a12016-01-11 15:59:17 -08002232OSStatus AudioDeviceMac::implDeviceIOProc(const AudioBufferList* inputData,
2233 const AudioTimeStamp* inputTime,
2234 AudioBufferList* outputData,
2235 const AudioTimeStamp* outputTime) {
2236 OSStatus err = noErr;
2237 UInt64 outputTimeNs = AudioConvertHostTimeToNanos(outputTime->mHostTime);
2238 UInt64 nowNs = AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());
xians@google.com68efa212011-08-11 12:41:56 +00002239
andrew2bc63a12016-01-11 15:59:17 -08002240 if (!_twoDevices && _recording) {
2241 implInDeviceIOProc(inputData, inputTime);
2242 }
xians@google.com68efa212011-08-11 12:41:56 +00002243
andrew2bc63a12016-01-11 15:59:17 -08002244 // Check if we should close down audio device
2245 // Double-checked locking optimization to remove locking overhead
2246 if (_doStop) {
2247 _critSect.Enter();
2248 if (_doStop) {
2249 if (_twoDevices || (!_recording && !_playing)) {
2250 // In the case of a shared device, the single driving ioProc
2251 // is stopped here
2252 WEBRTC_CA_LOG_ERR(AudioDeviceStop(_outputDeviceID, _deviceIOProcID));
2253 WEBRTC_CA_LOG_WARN(
2254 AudioDeviceDestroyIOProcID(_outputDeviceID, _deviceIOProcID));
2255 if (err == noErr) {
sazab4aa4eb2017-07-19 01:12:36 -07002256 LOG(LS_VERBOSE) << "Playout or shared device stopped";
xians@google.com68efa212011-08-11 12:41:56 +00002257 }
andrew2bc63a12016-01-11 15:59:17 -08002258 }
2259
2260 _doStop = false;
2261 _stopEvent.Set();
2262 _critSect.Leave();
2263 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002264 }
andrew2bc63a12016-01-11 15:59:17 -08002265 _critSect.Leave();
2266 }
xians@google.com68efa212011-08-11 12:41:56 +00002267
andrew2bc63a12016-01-11 15:59:17 -08002268 if (!_playing) {
2269 // This can be the case when a shared device is capturing but not
2270 // rendering. We allow the checks above before returning to avoid a
2271 // timeout when capturing is stopped.
xians@google.com68efa212011-08-11 12:41:56 +00002272 return 0;
andrew2bc63a12016-01-11 15:59:17 -08002273 }
2274
2275 RTC_DCHECK(_outStreamFormat.mBytesPerFrame != 0);
2276 UInt32 size =
2277 outputData->mBuffers->mDataByteSize / _outStreamFormat.mBytesPerFrame;
2278
2279 // TODO(xians): signal an error somehow?
2280 err = AudioConverterFillComplexBuffer(_renderConverter, outConverterProc,
2281 this, &size, outputData, NULL);
2282 if (err != noErr) {
2283 if (err == 1) {
2284 // This is our own error.
sazab4aa4eb2017-07-19 01:12:36 -07002285 LOG(LS_ERROR) << "Error in AudioConverterFillComplexBuffer()";
andrew2bc63a12016-01-11 15:59:17 -08002286 return 1;
2287 } else {
sazab4aa4eb2017-07-19 01:12:36 -07002288 logCAMsg(rtc::LS_ERROR,
andrew2bc63a12016-01-11 15:59:17 -08002289 "Error in AudioConverterFillComplexBuffer()", (const char*)&err);
2290 return 1;
2291 }
2292 }
2293
2294 PaRingBufferSize bufSizeSamples =
2295 PaUtil_GetRingBufferReadAvailable(_paRenderBuffer);
2296
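  // The render delay is the time until this output buffer reaches the
  // hardware plus the audio already queued in the ring buffer. Hypothetical
  // example: an output timestamp 8 ms ahead plus 960 buffered samples of
  // stereo 48 kHz audio (10 ms) gives about 18 ms.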
2297 int32_t renderDelayUs =
2298 static_cast<int32_t>(1e-3 * (outputTimeNs - nowNs) + 0.5);
2299 renderDelayUs += static_cast<int32_t>(
2300 (1.0e6 * bufSizeSamples) / _outDesiredFormat.mChannelsPerFrame /
2301 _outDesiredFormat.mSampleRate +
2302 0.5);
2303
2304 AtomicSet32(&_renderDelayUs, renderDelayUs);
2305
2306 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002307}
2308
andrew2bc63a12016-01-11 15:59:17 -08002309OSStatus AudioDeviceMac::implOutConverterProc(UInt32* numberDataPackets,
2310 AudioBufferList* data) {
henrikg91d6ede2015-09-17 00:24:34 -07002311 RTC_DCHECK(data->mNumberBuffers == 1);
andrew2bc63a12016-01-11 15:59:17 -08002312 PaRingBufferSize numSamples =
2313 *numberDataPackets * _outDesiredFormat.mChannelsPerFrame;
xians@google.com68efa212011-08-11 12:41:56 +00002314
andrew2bc63a12016-01-11 15:59:17 -08002315 data->mBuffers->mNumberChannels = _outDesiredFormat.mChannelsPerFrame;
2316 // Always give the converter as much as it wants, zero padding as required.
2317 data->mBuffers->mDataByteSize =
2318 *numberDataPackets * _outDesiredFormat.mBytesPerPacket;
2319 data->mBuffers->mData = _renderConvertData;
2320 memset(_renderConvertData, 0, sizeof(_renderConvertData));
xians@google.com68efa212011-08-11 12:41:56 +00002321
andrew2bc63a12016-01-11 15:59:17 -08002322 PaUtil_ReadRingBuffer(_paRenderBuffer, _renderConvertData, numSamples);
xians@google.com68efa212011-08-11 12:41:56 +00002323
andrew2bc63a12016-01-11 15:59:17 -08002324 kern_return_t kernErr = semaphore_signal_all(_renderSemaphore);
2325 if (kernErr != KERN_SUCCESS) {
sazab4aa4eb2017-07-19 01:12:36 -07002326 LOG(LS_ERROR) << "semaphore_signal_all() error: " << kernErr;
andrew2bc63a12016-01-11 15:59:17 -08002327 return 1;
2328 }
2329
2330 return 0;
2331}
2332
2333OSStatus AudioDeviceMac::implInDeviceIOProc(const AudioBufferList* inputData,
2334 const AudioTimeStamp* inputTime) {
2335 OSStatus err = noErr;
2336 UInt64 inputTimeNs = AudioConvertHostTimeToNanos(inputTime->mHostTime);
2337 UInt64 nowNs = AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());
2338
2339 // Check if we should close down audio device
2340 // Double-checked locking optimization to remove locking overhead
2341 if (_doStopRec) {
2342 _critSect.Enter();
2343 if (_doStopRec) {
2344 // This will be signalled only when a shared device is not in use.
2345 WEBRTC_CA_LOG_ERR(AudioDeviceStop(_inputDeviceID, _inDeviceIOProcID));
2346 WEBRTC_CA_LOG_WARN(
2347 AudioDeviceDestroyIOProcID(_inputDeviceID, _inDeviceIOProcID));
2348 if (err == noErr) {
sazab4aa4eb2017-07-19 01:12:36 -07002349 LOG(LS_VERBOSE) << "Recording device stopped";
andrew2bc63a12016-01-11 15:59:17 -08002350 }
2351
2352 _doStopRec = false;
2353 _stopEventRec.Set();
2354 _critSect.Leave();
2355 return 0;
2356 }
2357 _critSect.Leave();
2358 }
2359
2360 if (!_recording) {
2361 // Allow above checks to avoid a timeout on stopping capture.
2362 return 0;
2363 }
2364
2365 PaRingBufferSize bufSizeSamples =
2366 PaUtil_GetRingBufferReadAvailable(_paCaptureBuffer);
2367
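  // Mirror of the render-delay computation in implDeviceIOProc(): the time
  // elapsed since the input timestamp plus the audio already waiting in the
  // capture ring buffer.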
2368 int32_t captureDelayUs =
2369 static_cast<int32_t>(1e-3 * (nowNs - inputTimeNs) + 0.5);
2370 captureDelayUs += static_cast<int32_t>((1.0e6 * bufSizeSamples) /
2371 _inStreamFormat.mChannelsPerFrame /
2372 _inStreamFormat.mSampleRate +
2373 0.5);
2374
2375 AtomicSet32(&_captureDelayUs, captureDelayUs);
2376
2377 RTC_DCHECK(inputData->mNumberBuffers == 1);
2378 PaRingBufferSize numSamples = inputData->mBuffers->mDataByteSize *
2379 _inStreamFormat.mChannelsPerFrame /
2380 _inStreamFormat.mBytesPerPacket;
2381 PaUtil_WriteRingBuffer(_paCaptureBuffer, inputData->mBuffers->mData,
2382 numSamples);
2383
2384 kern_return_t kernErr = semaphore_signal_all(_captureSemaphore);
2385 if (kernErr != KERN_SUCCESS) {
sazab4aa4eb2017-07-19 01:12:36 -07002386 LOG(LS_ERROR) << "semaphore_signal_all() error: " << kernErr;
andrew2bc63a12016-01-11 15:59:17 -08002387 }
2388
2389 return err;
2390}
2391
2392OSStatus AudioDeviceMac::implInConverterProc(UInt32* numberDataPackets,
2393 AudioBufferList* data) {
2394 RTC_DCHECK(data->mNumberBuffers == 1);
2395 PaRingBufferSize numSamples =
2396 *numberDataPackets * _inStreamFormat.mChannelsPerFrame;
2397
2398 while (PaUtil_GetRingBufferReadAvailable(_paCaptureBuffer) < numSamples) {
2399 mach_timespec_t timeout;
2400 timeout.tv_sec = 0;
2401 timeout.tv_nsec = TIMER_PERIOD_MS;
2402
2403 kern_return_t kernErr = semaphore_timedwait(_captureSemaphore, timeout);
2404 if (kernErr == KERN_OPERATION_TIMED_OUT) {
2405 int32_t signal = AtomicGet32(&_captureDeviceIsAlive);
2406 if (signal == 0) {
2407 // The capture device is no longer alive; stop the worker thread.
2408 *numberDataPackets = 0;
xians@google.com68efa212011-08-11 12:41:56 +00002409 return 1;
andrew2bc63a12016-01-11 15:59:17 -08002410 }
2411 } else if (kernErr != KERN_SUCCESS) {
sazab4aa4eb2017-07-19 01:12:36 -07002412      LOG(LS_ERROR) << "semaphore_timedwait() error: " << kernErr;
xians@google.com68efa212011-08-11 12:41:56 +00002413 }
andrew2bc63a12016-01-11 15:59:17 -08002414 }
xians@google.com68efa212011-08-11 12:41:56 +00002415
andrew2bc63a12016-01-11 15:59:17 -08002416 // Pass the read pointer directly to the converter to avoid a memcpy.
2417 void* dummyPtr;
2418 PaRingBufferSize dummySize;
2419 PaUtil_GetRingBufferReadRegions(_paCaptureBuffer, numSamples,
2420 &data->mBuffers->mData, &numSamples,
2421 &dummyPtr, &dummySize);
2422 PaUtil_AdvanceRingBufferReadIndex(_paCaptureBuffer, numSamples);
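  // Note that &numSamples also receives the size of the first contiguous
  // region, so when the requested range wraps around the ring buffer only
  // that first region is handed to the converter; the converter can call
  // back here again for the remainder.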
2423
2424 data->mBuffers->mNumberChannels = _inStreamFormat.mChannelsPerFrame;
2425 *numberDataPackets = numSamples / _inStreamFormat.mChannelsPerFrame;
2426 data->mBuffers->mDataByteSize =
2427 *numberDataPackets * _inStreamFormat.mBytesPerPacket;
2428
2429 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002430}
2431
andrew2bc63a12016-01-11 15:59:17 -08002432bool AudioDeviceMac::RunRender(void* ptrThis) {
2433 return static_cast<AudioDeviceMac*>(ptrThis)->RenderWorkerThread();
xians@google.com68efa212011-08-11 12:41:56 +00002434}
2435
andrew2bc63a12016-01-11 15:59:17 -08002436bool AudioDeviceMac::RenderWorkerThread() {
2437 PaRingBufferSize numSamples =
2438 ENGINE_PLAY_BUF_SIZE_IN_SAMPLES * _outDesiredFormat.mChannelsPerFrame;
2439 while (PaUtil_GetRingBufferWriteAvailable(_paRenderBuffer) -
2440 _renderDelayOffsetSamples <
2441 numSamples) {
2442 mach_timespec_t timeout;
2443 timeout.tv_sec = 0;
2444 timeout.tv_nsec = TIMER_PERIOD_MS;
xians@google.com68efa212011-08-11 12:41:56 +00002445
andrew2bc63a12016-01-11 15:59:17 -08002446 kern_return_t kernErr = semaphore_timedwait(_renderSemaphore, timeout);
2447 if (kernErr == KERN_OPERATION_TIMED_OUT) {
2448 int32_t signal = AtomicGet32(&_renderDeviceIsAlive);
2449 if (signal == 0) {
2450 // The render device is no longer alive; stop the worker thread.
xians@google.com68efa212011-08-11 12:41:56 +00002451 return false;
andrew2bc63a12016-01-11 15:59:17 -08002452 }
2453 } else if (kernErr != KERN_SUCCESS) {
sazab4aa4eb2017-07-19 01:12:36 -07002454 LOG(LS_ERROR) << "semaphore_timedwait() error: " << kernErr;
xians@google.com68efa212011-08-11 12:41:56 +00002455 }
andrew2bc63a12016-01-11 15:59:17 -08002456 }
xians@google.com68efa212011-08-11 12:41:56 +00002457
andrew2bc63a12016-01-11 15:59:17 -08002458 int8_t playBuffer[4 * ENGINE_PLAY_BUF_SIZE_IN_SAMPLES];
xians@google.com68efa212011-08-11 12:41:56 +00002459
andrew2bc63a12016-01-11 15:59:17 -08002460 if (!_ptrAudioBuffer) {
sazab4aa4eb2017-07-19 01:12:36 -07002461    LOG(LS_ERROR) << "render AudioBuffer is invalid";
andrew2bc63a12016-01-11 15:59:17 -08002462 return false;
2463 }
2464
2465 // Ask for new PCM data to be played out using the AudioDeviceBuffer.
2466 uint32_t nSamples =
2467 _ptrAudioBuffer->RequestPlayoutData(ENGINE_PLAY_BUF_SIZE_IN_SAMPLES);
2468
2469 nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer);
2470 if (nSamples != ENGINE_PLAY_BUF_SIZE_IN_SAMPLES) {
sazab4aa4eb2017-07-19 01:12:36 -07002471    LOG(LS_ERROR) << "invalid number of output samples (" << nSamples << ")";
andrew2bc63a12016-01-11 15:59:17 -08002472 }
2473
2474 uint32_t nOutSamples = nSamples * _outDesiredFormat.mChannelsPerFrame;
2475
2476 SInt16* pPlayBuffer = (SInt16*)&playBuffer;
2477 if (_macBookProPanRight && (_playChannels == 2)) {
2478 // Mix entirely into the right channel and zero the left channel.
2479 SInt32 sampleInt32 = 0;
2480 for (uint32_t sampleIdx = 0; sampleIdx < nOutSamples; sampleIdx += 2) {
2481 sampleInt32 = pPlayBuffer[sampleIdx];
2482 sampleInt32 += pPlayBuffer[sampleIdx + 1];
2483 sampleInt32 /= 2;
2484
2485 if (sampleInt32 > 32767) {
2486 sampleInt32 = 32767;
2487 } else if (sampleInt32 < -32768) {
2488 sampleInt32 = -32768;
2489 }
2490
2491 pPlayBuffer[sampleIdx] = 0;
2492 pPlayBuffer[sampleIdx + 1] = static_cast<SInt16>(sampleInt32);
xians@google.com68efa212011-08-11 12:41:56 +00002493 }
andrew2bc63a12016-01-11 15:59:17 -08002494 }
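  // Downmix example (illustrative values): left = 1000, right = 3000 become
  // left = 0, right = (1000 + 3000) / 2 = 2000; the stereo signal is
  // collapsed into the right channel whenever the MacBook Pro internal
  // speakers are the active data source.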
xians@google.com68efa212011-08-11 12:41:56 +00002495
andrew2bc63a12016-01-11 15:59:17 -08002496 PaUtil_WriteRingBuffer(_paRenderBuffer, pPlayBuffer, nOutSamples);
xians@google.com68efa212011-08-11 12:41:56 +00002497
andrew2bc63a12016-01-11 15:59:17 -08002498 return true;
xians@google.com68efa212011-08-11 12:41:56 +00002499}
2500
andrew2bc63a12016-01-11 15:59:17 -08002501bool AudioDeviceMac::RunCapture(void* ptrThis) {
2502 return static_cast<AudioDeviceMac*>(ptrThis)->CaptureWorkerThread();
xians@google.com68efa212011-08-11 12:41:56 +00002503}
2504
bool AudioDeviceMac::CaptureWorkerThread() {
  OSStatus err = noErr;
  UInt32 noRecSamples =
      ENGINE_REC_BUF_SIZE_IN_SAMPLES * _inDesiredFormat.mChannelsPerFrame;
  SInt16 recordBuffer[noRecSamples];
  UInt32 size = ENGINE_REC_BUF_SIZE_IN_SAMPLES;

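  // Wrap recordBuffer in a single-buffer AudioBufferList so the capture
  // converter can write the converted samples directly into it.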
  AudioBufferList engineBuffer;
  engineBuffer.mNumberBuffers = 1;  // Interleaved channels.
  engineBuffer.mBuffers->mNumberChannels = _inDesiredFormat.mChannelsPerFrame;
  engineBuffer.mBuffers->mDataByteSize =
      _inDesiredFormat.mBytesPerPacket * noRecSamples;
  engineBuffer.mBuffers->mData = recordBuffer;

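  // Pull-model conversion: inConverterProc supplies raw device samples and the
  // converter fills engineBuffer in the desired format. The supplier callback
  // waits on the capture ring buffer, so this call effectively blocks until
  // enough data has been captured.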
  err = AudioConverterFillComplexBuffer(_captureConverter, inConverterProc,
                                        this, &size, &engineBuffer, NULL);
  if (err != noErr) {
    if (err == 1) {
      // This is our own error code, returned by inConverterProc (e.g. when
      // the capture device is no longer alive).
      return false;
    } else {
      logCAMsg(rtc::LS_ERROR, "Error in AudioConverterFillComplexBuffer()",
               (const char*)&err);
      return false;
    }
  }

  // TODO(xians): what if the returned size is incorrect?
  if (size == ENGINE_REC_BUF_SIZE_IN_SAMPLES) {
    uint32_t currentMicLevel(0);
    uint32_t newMicLevel(0);
    int32_t msecOnPlaySide;
    int32_t msecOnRecordSide;

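    // The delay estimates are updated atomically elsewhere by the audio
    // IOProcs; convert them from microseconds to milliseconds (rounded to
    // nearest) before reporting them via SetVQEData() below.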
    int32_t captureDelayUs = AtomicGet32(&_captureDelayUs);
    int32_t renderDelayUs = AtomicGet32(&_renderDelayUs);

    msecOnPlaySide =
        static_cast<int32_t>(1e-3 * (renderDelayUs + _renderLatencyUs) + 0.5);
    msecOnRecordSide =
        static_cast<int32_t>(1e-3 * (captureDelayUs + _captureLatencyUs) + 0.5);

    if (!_ptrAudioBuffer) {
      LOG(LS_ERROR) << "capture AudioBuffer is invalid";
      return false;
    }

    // Store the recorded buffer (no action is taken if the number of recorded
    // samples does not make up a full buffer).
    _ptrAudioBuffer->SetRecordedBuffer((int8_t*)&recordBuffer, (uint32_t)size);

    if (AGC()) {
      // Use mod to ensure we check the volume on the first pass.
      if (get_mic_volume_counter_ms_ % kGetMicVolumeIntervalMs == 0) {
        get_mic_volume_counter_ms_ = 0;
        // Store the current mic level in the audio buffer if AGC is enabled.
        if (MicrophoneVolume(currentMicLevel) == 0) {
          // This call does not affect the actual microphone volume.
          _ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);
        }
      }
      get_mic_volume_counter_ms_ += kBufferSizeMs;
    }

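    // Report the playout/recording delays (in ms) and a zero clock-drift
    // estimate, and flag whether a key was pressed during this block, before
    // delivering the samples to the registered callback.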
    _ptrAudioBuffer->SetVQEData(msecOnPlaySide, msecOnRecordSide, 0);

    _ptrAudioBuffer->SetTypingStatus(KeyPressed());

    // Deliver the recorded samples, at the specified sample rate and mic
    // level, to the observer via the registered callback.
    _ptrAudioBuffer->DeliverRecordedData();

2577 if (AGC()) {
2578 newMicLevel = _ptrAudioBuffer->NewMicLevel();
2579 if (newMicLevel != 0) {
2580 // The VQE will only deliver non-zero microphone levels when
2581 // a change is needed.
2582 // Set this new mic level (received from the observer as return
2583 // value in the callback).
sazab4aa4eb2017-07-19 01:12:36 -07002584 LOG(LS_VERBOSE) << "AGC change of volume: old=" << currentMicLevel
2585 << " => new=" << newMicLevel;
andrew2bc63a12016-01-11 15:59:17 -08002586 if (SetMicrophoneVolume(newMicLevel) == -1) {
sazab4aa4eb2017-07-19 01:12:36 -07002587 LOG(LS_WARNING)
2588 << "the required modification of the microphone volume failed";
andrew2bc63a12016-01-11 15:59:17 -08002589 }
2590 }
2591 }
2592 }
2593
2594 return true;
xians@google.com68efa212011-08-11 12:41:56 +00002595}
2596
bool AudioDeviceMac::KeyPressed() {
  bool key_down = false;
  // Loop through all Mac virtual key constant values.
  for (unsigned int key_index = 0; key_index < arraysize(prev_key_state_);
       ++key_index) {
    bool keyState =
        CGEventSourceKeyState(kCGEventSourceStateHIDSystemState, key_index);
    // A false -> true change in keymap means a key is pressed.
    key_down |= (keyState && !prev_key_state_[key_index]);
    // Save current state.
    prev_key_state_[key_index] = keyState;
  }
  return key_down;
}
}  // namespace webrtc