/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/audio_device/mac/audio_device_mac.h"
#include "modules/audio_device/audio_device_config.h"
#include "modules/audio_device/mac/portaudio/pa_ringbuffer.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
#include "rtc_base/platform_thread.h"
#include "system_wrappers/include/event_wrapper.h"

#include <ApplicationServices/ApplicationServices.h>
#include <libkern/OSAtomic.h>  // OSAtomicCompareAndSwap()
#include <mach/mach.h>         // mach_task_self()
#include <sys/sysctl.h>        // sysctlbyname()

namespace webrtc {

#define WEBRTC_CA_RETURN_ON_ERR(expr)                                  \
  do {                                                                 \
    err = expr;                                                        \
    if (err != noErr) {                                                \
      logCAMsg(rtc::LS_ERROR, "Error in " #expr, (const char*)&err);   \
      return -1;                                                       \
    }                                                                  \
  } while (0)

#define WEBRTC_CA_LOG_ERR(expr)                                        \
  do {                                                                 \
    err = expr;                                                        \
    if (err != noErr) {                                                \
      logCAMsg(rtc::LS_ERROR, "Error in " #expr, (const char*)&err);   \
    }                                                                  \
  } while (0)

#define WEBRTC_CA_LOG_WARN(expr)                                       \
  do {                                                                 \
    err = expr;                                                        \
    if (err != noErr) {                                                \
      logCAMsg(rtc::LS_WARNING, "Error in " #expr, (const char*)&err); \
    }                                                                  \
  } while (0)
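// The macros above assume an OSStatus variable named |err| in the calling
// scope. Typical call sites later in this file wrap CoreAudio calls directly,
// e.g. WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(...)).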

enum { MaxNumberDevices = 64 };

void AudioDeviceMac::AtomicSet32(int32_t* theValue, int32_t newValue) {
  while (1) {
    int32_t oldValue = *theValue;
    if (OSAtomicCompareAndSwap32Barrier(oldValue, newValue, theValue) == true) {
      return;
    }
  }
}

int32_t AudioDeviceMac::AtomicGet32(int32_t* theValue) {
  while (1) {
    int32_t value = *theValue;
    if (OSAtomicCompareAndSwap32Barrier(value, value, theValue) == true) {
      return value;
    }
  }
}
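
// Note on AtomicGet32() above: the value is compare-and-swapped with itself so
// that the read goes through OSAtomicCompareAndSwap32Barrier() and therefore
// pairs with the memory barrier taken by the writer in AtomicSet32().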

// CoreAudio errors are best interpreted as four character strings.
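// For example, kAudioHardwareUnknownPropertyError renders as the code 'who?'
// (going by the error constants in CoreAudio's AudioHardwareBase.h).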
void AudioDeviceMac::logCAMsg(const rtc::LoggingSeverity sev,
                              const char* msg,
                              const char* err) {
  RTC_DCHECK(msg != NULL);
  RTC_DCHECK(err != NULL);

#ifdef WEBRTC_ARCH_BIG_ENDIAN
  switch (sev) {
    case rtc::LS_ERROR:
      RTC_LOG(LS_ERROR) << msg << ": " << err[0] << err[1] << err[2] << err[3];
      break;
    case rtc::LS_WARNING:
      RTC_LOG(LS_WARNING) << msg << ": " << err[0] << err[1] << err[2]
                          << err[3];
      break;
    case rtc::LS_VERBOSE:
      RTC_LOG(LS_VERBOSE) << msg << ": " << err[0] << err[1] << err[2]
                          << err[3];
      break;
    default:
      break;
  }
#else
  // We need to flip the characters in this case.
  switch (sev) {
    case rtc::LS_ERROR:
      RTC_LOG(LS_ERROR) << msg << ": " << err[3] << err[2] << err[1] << err[0];
      break;
    case rtc::LS_WARNING:
      RTC_LOG(LS_WARNING) << msg << ": " << err[3] << err[2] << err[1]
                          << err[0];
      break;
    case rtc::LS_VERBOSE:
      RTC_LOG(LS_VERBOSE) << msg << ": " << err[3] << err[2] << err[1]
                          << err[0];
      break;
    default:
      break;
  }
#endif
}

AudioDeviceMac::AudioDeviceMac()
    : _ptrAudioBuffer(NULL),
      _stopEventRec(*EventWrapper::Create()),
      _stopEvent(*EventWrapper::Create()),
      _mixerManager(),
      _inputDeviceIndex(0),
      _outputDeviceIndex(0),
      _inputDeviceID(kAudioObjectUnknown),
      _outputDeviceID(kAudioObjectUnknown),
      _inputDeviceIsSpecified(false),
      _outputDeviceIsSpecified(false),
      _recChannels(N_REC_CHANNELS),
      _playChannels(N_PLAY_CHANNELS),
      _captureBufData(NULL),
      _renderBufData(NULL),
      _initialized(false),
      _isShutDown(false),
      _recording(false),
      _playing(false),
      _recIsInitialized(false),
      _playIsInitialized(false),
      _AGC(false),
      _renderDeviceIsAlive(1),
      _captureDeviceIsAlive(1),
      _twoDevices(true),
      _doStop(false),
      _doStopRec(false),
      _macBookPro(false),
      _macBookProPanRight(false),
      _captureLatencyUs(0),
      _renderLatencyUs(0),
      _captureDelayUs(0),
      _renderDelayUs(0),
      _renderDelayOffsetSamples(0),
      _paCaptureBuffer(NULL),
      _paRenderBuffer(NULL),
      _captureBufSizeSamples(0),
      _renderBufSizeSamples(0),
      prev_key_state_(),
      get_mic_volume_counter_ms_(0) {
  RTC_LOG(LS_INFO) << __FUNCTION__ << " created";

  RTC_DCHECK(&_stopEvent != NULL);
  RTC_DCHECK(&_stopEventRec != NULL);

  memset(_renderConvertData, 0, sizeof(_renderConvertData));
  memset(&_outStreamFormat, 0, sizeof(AudioStreamBasicDescription));
  memset(&_outDesiredFormat, 0, sizeof(AudioStreamBasicDescription));
  memset(&_inStreamFormat, 0, sizeof(AudioStreamBasicDescription));
  memset(&_inDesiredFormat, 0, sizeof(AudioStreamBasicDescription));
}

AudioDeviceMac::~AudioDeviceMac() {
  RTC_LOG(LS_INFO) << __FUNCTION__ << " destroyed";

  if (!_isShutDown) {
    Terminate();
  }

  RTC_DCHECK(!capture_worker_thread_.get());
  RTC_DCHECK(!render_worker_thread_.get());

  if (_paRenderBuffer) {
    delete _paRenderBuffer;
    _paRenderBuffer = NULL;
  }

  if (_paCaptureBuffer) {
    delete _paCaptureBuffer;
    _paCaptureBuffer = NULL;
  }

  if (_renderBufData) {
    delete[] _renderBufData;
    _renderBufData = NULL;
  }

  if (_captureBufData) {
    delete[] _captureBufData;
    _captureBufData = NULL;
  }

  kern_return_t kernErr = KERN_SUCCESS;
  kernErr = semaphore_destroy(mach_task_self(), _renderSemaphore);
  if (kernErr != KERN_SUCCESS) {
    RTC_LOG(LS_ERROR) << "semaphore_destroy() error: " << kernErr;
  }

  kernErr = semaphore_destroy(mach_task_self(), _captureSemaphore);
  if (kernErr != KERN_SUCCESS) {
    RTC_LOG(LS_ERROR) << "semaphore_destroy() error: " << kernErr;
  }

  delete &_stopEvent;
  delete &_stopEventRec;
}

// ============================================================================
//                                     API
// ============================================================================

void AudioDeviceMac::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
  rtc::CritScope lock(&_critSect);

  _ptrAudioBuffer = audioBuffer;

  // inform the AudioBuffer about default settings for this implementation
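  // (InitRecording() re-applies the recording parameters once the actual
  // capture format is known; see the SetRecordingSampleRate() call there.)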
  _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
  _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
  _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS);
  _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS);
}

int32_t AudioDeviceMac::ActiveAudioLayer(
    AudioDeviceModule::AudioLayer& audioLayer) const {
  audioLayer = AudioDeviceModule::kPlatformDefaultAudio;
  return 0;
}

AudioDeviceGeneric::InitStatus AudioDeviceMac::Init() {
  rtc::CritScope lock(&_critSect);

  if (_initialized) {
    return InitStatus::OK;
  }

  OSStatus err = noErr;

  _isShutDown = false;

  // PortAudio ring buffers require an elementCount which is a power of two.
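  // (PaUtil_InitializeRingBuffer() relies on this so it can wrap its
  // read/write indices with a bit mask instead of a modulo.)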
  if (_renderBufData == NULL) {
    UInt32 powerOfTwo = 1;
    while (powerOfTwo < PLAY_BUF_SIZE_IN_SAMPLES) {
      powerOfTwo <<= 1;
    }
    _renderBufSizeSamples = powerOfTwo;
    _renderBufData = new SInt16[_renderBufSizeSamples];
  }

  if (_paRenderBuffer == NULL) {
    _paRenderBuffer = new PaUtilRingBuffer;
    PaRingBufferSize bufSize = -1;
    bufSize = PaUtil_InitializeRingBuffer(
        _paRenderBuffer, sizeof(SInt16), _renderBufSizeSamples, _renderBufData);
    if (bufSize == -1) {
      RTC_LOG(LS_ERROR) << "PaUtil_InitializeRingBuffer() error";
      return InitStatus::PLAYOUT_ERROR;
    }
  }

  if (_captureBufData == NULL) {
    UInt32 powerOfTwo = 1;
    while (powerOfTwo < REC_BUF_SIZE_IN_SAMPLES) {
      powerOfTwo <<= 1;
    }
    _captureBufSizeSamples = powerOfTwo;
    _captureBufData = new Float32[_captureBufSizeSamples];
  }

  if (_paCaptureBuffer == NULL) {
    _paCaptureBuffer = new PaUtilRingBuffer;
    PaRingBufferSize bufSize = -1;
    bufSize =
        PaUtil_InitializeRingBuffer(_paCaptureBuffer, sizeof(Float32),
                                    _captureBufSizeSamples, _captureBufData);
    if (bufSize == -1) {
      RTC_LOG(LS_ERROR) << "PaUtil_InitializeRingBuffer() error";
      return InitStatus::RECORDING_ERROR;
    }
  }

  kern_return_t kernErr = KERN_SUCCESS;
  kernErr = semaphore_create(mach_task_self(), &_renderSemaphore,
                             SYNC_POLICY_FIFO, 0);
  if (kernErr != KERN_SUCCESS) {
    RTC_LOG(LS_ERROR) << "semaphore_create() error: " << kernErr;
    return InitStatus::OTHER_ERROR;
  }

  kernErr = semaphore_create(mach_task_self(), &_captureSemaphore,
                             SYNC_POLICY_FIFO, 0);
  if (kernErr != KERN_SUCCESS) {
    RTC_LOG(LS_ERROR) << "semaphore_create() error: " << kernErr;
    return InitStatus::OTHER_ERROR;
  }

  // Setting RunLoop to NULL here instructs HAL to manage its own thread for
  // notifications. This was the default behaviour on OS X 10.5 and earlier,
  // but now must be explicitly specified. HAL would otherwise try to use the
  // main thread to issue notifications.
  AudioObjectPropertyAddress propertyAddress = {
      kAudioHardwarePropertyRunLoop, kAudioObjectPropertyScopeGlobal,
      kAudioObjectPropertyElementMaster};
  CFRunLoopRef runLoop = NULL;
  UInt32 size = sizeof(CFRunLoopRef);
  int aoerr = AudioObjectSetPropertyData(
      kAudioObjectSystemObject, &propertyAddress, 0, NULL, size, &runLoop);
  if (aoerr != noErr) {
    RTC_LOG(LS_ERROR) << "Error in AudioObjectSetPropertyData: "
                      << (const char*)&aoerr;
    return InitStatus::OTHER_ERROR;
  }

  // Listen for any device changes.
  propertyAddress.mSelector = kAudioHardwarePropertyDevices;
  WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener(
      kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this));

  // Determine if this is a MacBook Pro
  _macBookPro = false;
  _macBookProPanRight = false;
  char buf[128];
  size_t length = sizeof(buf);
  memset(buf, 0, length);

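  // "hw.model" returns the Mac model identifier (a string such as
  // "MacBookPro11,3"), so a prefix match on "MacBookPro" is sufficient here.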
  int intErr = sysctlbyname("hw.model", buf, &length, NULL, 0);
  if (intErr != 0) {
    RTC_LOG(LS_ERROR) << "Error in sysctlbyname(): " << intErr;
  } else {
    RTC_LOG(LS_VERBOSE) << "Hardware model: " << buf;
    if (strncmp(buf, "MacBookPro", 10) == 0) {
      _macBookPro = true;
    }
  }

  get_mic_volume_counter_ms_ = 0;

  _initialized = true;

  return InitStatus::OK;
}

int32_t AudioDeviceMac::Terminate() {
  if (!_initialized) {
    return 0;
  }

  if (_recording) {
    RTC_LOG(LS_ERROR) << "Recording must be stopped";
    return -1;
  }

  if (_playing) {
    RTC_LOG(LS_ERROR) << "Playback must be stopped";
    return -1;
  }

  _critSect.Enter();

  _mixerManager.Close();

  OSStatus err = noErr;
  int retVal = 0;

  AudioObjectPropertyAddress propertyAddress = {
      kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal,
      kAudioObjectPropertyElementMaster};
  WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
      kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this));

  err = AudioHardwareUnload();
  if (err != noErr) {
    logCAMsg(rtc::LS_ERROR, "Error in AudioHardwareUnload()",
             (const char*)&err);
    retVal = -1;
  }

  _isShutDown = true;
  _initialized = false;
  _outputDeviceIsSpecified = false;
  _inputDeviceIsSpecified = false;

  _critSect.Leave();

  return retVal;
}

bool AudioDeviceMac::Initialized() const {
  return (_initialized);
}

int32_t AudioDeviceMac::SpeakerIsAvailable(bool& available) {
  bool wasInitialized = _mixerManager.SpeakerIsInitialized();

  // Make an attempt to open up the
  // output mixer corresponding to the currently selected output device.
  //
  if (!wasInitialized && InitSpeaker() == -1) {
    available = false;
    return 0;
  }

  // Given that InitSpeaker was successful, we know that a valid speaker
  // exists.
  available = true;

  // Close the initialized output mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseSpeaker();
  }

  return 0;
}

int32_t AudioDeviceMac::InitSpeaker() {
  rtc::CritScope lock(&_critSect);

  if (_playing) {
    return -1;
  }

  if (InitDevice(_outputDeviceIndex, _outputDeviceID, false) == -1) {
    return -1;
  }

  if (_inputDeviceID == _outputDeviceID) {
    _twoDevices = false;
  } else {
    _twoDevices = true;
  }

  if (_mixerManager.OpenSpeaker(_outputDeviceID) == -1) {
    return -1;
  }

  return 0;
}

int32_t AudioDeviceMac::MicrophoneIsAvailable(bool& available) {
  bool wasInitialized = _mixerManager.MicrophoneIsInitialized();

  // Make an attempt to open up the
  // input mixer corresponding to the currently selected input device.
  //
  if (!wasInitialized && InitMicrophone() == -1) {
    available = false;
    return 0;
  }

  // Given that InitMicrophone was successful, we know that a valid microphone
  // exists.
  available = true;

  // Close the initialized input mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseMicrophone();
  }

  return 0;
}

int32_t AudioDeviceMac::InitMicrophone() {
  rtc::CritScope lock(&_critSect);

  if (_recording) {
    return -1;
  }

  if (InitDevice(_inputDeviceIndex, _inputDeviceID, true) == -1) {
    return -1;
  }

  if (_inputDeviceID == _outputDeviceID) {
    _twoDevices = false;
  } else {
    _twoDevices = true;
  }

  if (_mixerManager.OpenMicrophone(_inputDeviceID) == -1) {
    return -1;
  }

  return 0;
}

bool AudioDeviceMac::SpeakerIsInitialized() const {
  return (_mixerManager.SpeakerIsInitialized());
}

bool AudioDeviceMac::MicrophoneIsInitialized() const {
  return (_mixerManager.MicrophoneIsInitialized());
}

int32_t AudioDeviceMac::SpeakerVolumeIsAvailable(bool& available) {
  bool wasInitialized = _mixerManager.SpeakerIsInitialized();

  // Make an attempt to open up the
  // output mixer corresponding to the currently selected output device.
  //
  if (!wasInitialized && InitSpeaker() == -1) {
    // If we end up here it means that the selected speaker has no volume
    // control.
    available = false;
    return 0;
  }

  // Given that InitSpeaker was successful, we know that a volume control
  // exists.
  available = true;

  // Close the initialized output mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseSpeaker();
  }

  return 0;
}

int32_t AudioDeviceMac::SetSpeakerVolume(uint32_t volume) {
  return (_mixerManager.SetSpeakerVolume(volume));
}

int32_t AudioDeviceMac::SpeakerVolume(uint32_t& volume) const {
  uint32_t level(0);

  if (_mixerManager.SpeakerVolume(level) == -1) {
    return -1;
  }

  volume = level;
  return 0;
}

int32_t AudioDeviceMac::MaxSpeakerVolume(uint32_t& maxVolume) const {
  uint32_t maxVol(0);

  if (_mixerManager.MaxSpeakerVolume(maxVol) == -1) {
    return -1;
  }

  maxVolume = maxVol;
  return 0;
}

int32_t AudioDeviceMac::MinSpeakerVolume(uint32_t& minVolume) const {
  uint32_t minVol(0);

  if (_mixerManager.MinSpeakerVolume(minVol) == -1) {
    return -1;
  }

  minVolume = minVol;
  return 0;
}

int32_t AudioDeviceMac::SpeakerMuteIsAvailable(bool& available) {
  bool isAvailable(false);
  bool wasInitialized = _mixerManager.SpeakerIsInitialized();

  // Make an attempt to open up the
  // output mixer corresponding to the currently selected output device.
  //
  if (!wasInitialized && InitSpeaker() == -1) {
    // If we end up here it means that the selected speaker has no volume
    // control, hence it is safe to state that there is no mute control
    // already at this stage.
    available = false;
    return 0;
  }

  // Check if the selected speaker has a mute control
  //
  _mixerManager.SpeakerMuteIsAvailable(isAvailable);

  available = isAvailable;

  // Close the initialized output mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseSpeaker();
  }

  return 0;
}

int32_t AudioDeviceMac::SetSpeakerMute(bool enable) {
  return (_mixerManager.SetSpeakerMute(enable));
}

int32_t AudioDeviceMac::SpeakerMute(bool& enabled) const {
  bool muted(0);

  if (_mixerManager.SpeakerMute(muted) == -1) {
    return -1;
  }

  enabled = muted;
  return 0;
}

int32_t AudioDeviceMac::MicrophoneMuteIsAvailable(bool& available) {
  bool isAvailable(false);
  bool wasInitialized = _mixerManager.MicrophoneIsInitialized();

  // Make an attempt to open up the
  // input mixer corresponding to the currently selected input device.
  //
  if (!wasInitialized && InitMicrophone() == -1) {
    // If we end up here it means that the selected microphone has no volume
    // control, hence it is safe to state that there is no mute control
    // already at this stage.
    available = false;
    return 0;
  }

  // Check if the selected microphone has a mute control
  //
  _mixerManager.MicrophoneMuteIsAvailable(isAvailable);
  available = isAvailable;

  // Close the initialized input mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseMicrophone();
  }

  return 0;
}

int32_t AudioDeviceMac::SetMicrophoneMute(bool enable) {
  return (_mixerManager.SetMicrophoneMute(enable));
}

int32_t AudioDeviceMac::MicrophoneMute(bool& enabled) const {
  bool muted(0);

  if (_mixerManager.MicrophoneMute(muted) == -1) {
    return -1;
  }

  enabled = muted;
  return 0;
}

int32_t AudioDeviceMac::StereoRecordingIsAvailable(bool& available) {
  bool isAvailable(false);
  bool wasInitialized = _mixerManager.MicrophoneIsInitialized();

  if (!wasInitialized && InitMicrophone() == -1) {
    // Cannot open the specified device
    available = false;
    return 0;
  }

  // Check if the selected microphone can record stereo
  //
  _mixerManager.StereoRecordingIsAvailable(isAvailable);
  available = isAvailable;

  // Close the initialized input mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseMicrophone();
  }

  return 0;
}

int32_t AudioDeviceMac::SetStereoRecording(bool enable) {
  if (enable)
    _recChannels = 2;
  else
    _recChannels = 1;

  return 0;
}

int32_t AudioDeviceMac::StereoRecording(bool& enabled) const {
  if (_recChannels == 2)
    enabled = true;
  else
    enabled = false;

  return 0;
}

int32_t AudioDeviceMac::StereoPlayoutIsAvailable(bool& available) {
  bool isAvailable(false);
  bool wasInitialized = _mixerManager.SpeakerIsInitialized();

  if (!wasInitialized && InitSpeaker() == -1) {
    // Cannot open the specified device
    available = false;
    return 0;
  }

  // Check if the selected speaker can play out stereo
  //
  _mixerManager.StereoPlayoutIsAvailable(isAvailable);
  available = isAvailable;

  // Close the initialized output mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseSpeaker();
  }

  return 0;
}

int32_t AudioDeviceMac::SetStereoPlayout(bool enable) {
  if (enable)
    _playChannels = 2;
  else
    _playChannels = 1;

  return 0;
}

int32_t AudioDeviceMac::StereoPlayout(bool& enabled) const {
  if (_playChannels == 2)
    enabled = true;
  else
    enabled = false;

  return 0;
}

int32_t AudioDeviceMac::SetAGC(bool enable) {
  _AGC = enable;

  return 0;
}

bool AudioDeviceMac::AGC() const {
  return _AGC;
}

int32_t AudioDeviceMac::MicrophoneVolumeIsAvailable(bool& available) {
  bool wasInitialized = _mixerManager.MicrophoneIsInitialized();

  // Make an attempt to open up the
  // input mixer corresponding to the currently selected input device.
  //
  if (!wasInitialized && InitMicrophone() == -1) {
    // If we end up here it means that the selected microphone has no volume
    // control.
    available = false;
    return 0;
  }

  // Given that InitMicrophone was successful, we know that a volume control
  // exists
  //
  available = true;

  // Close the initialized input mixer
  //
  if (!wasInitialized) {
    _mixerManager.CloseMicrophone();
  }

  return 0;
}

int32_t AudioDeviceMac::SetMicrophoneVolume(uint32_t volume) {
  return (_mixerManager.SetMicrophoneVolume(volume));
}

int32_t AudioDeviceMac::MicrophoneVolume(uint32_t& volume) const {
  uint32_t level(0);

  if (_mixerManager.MicrophoneVolume(level) == -1) {
    RTC_LOG(LS_WARNING) << "failed to retrieve current microphone level";
    return -1;
  }

  volume = level;
  return 0;
}

int32_t AudioDeviceMac::MaxMicrophoneVolume(uint32_t& maxVolume) const {
  uint32_t maxVol(0);

  if (_mixerManager.MaxMicrophoneVolume(maxVol) == -1) {
    return -1;
  }

  maxVolume = maxVol;
  return 0;
}

int32_t AudioDeviceMac::MinMicrophoneVolume(uint32_t& minVolume) const {
  uint32_t minVol(0);

  if (_mixerManager.MinMicrophoneVolume(minVol) == -1) {
    return -1;
  }

  minVolume = minVol;
  return 0;
}

int16_t AudioDeviceMac::PlayoutDevices() {
  AudioDeviceID playDevices[MaxNumberDevices];
  return GetNumberDevices(kAudioDevicePropertyScopeOutput, playDevices,
                          MaxNumberDevices);
}

int32_t AudioDeviceMac::SetPlayoutDevice(uint16_t index) {
  rtc::CritScope lock(&_critSect);

  if (_playIsInitialized) {
    return -1;
  }

  AudioDeviceID playDevices[MaxNumberDevices];
  uint32_t nDevices = GetNumberDevices(kAudioDevicePropertyScopeOutput,
                                       playDevices, MaxNumberDevices);
  RTC_LOG(LS_VERBOSE) << "number of available waveform-audio output devices is "
                      << nDevices;

  if (index > (nDevices - 1)) {
    RTC_LOG(LS_ERROR) << "device index is out of range [0," << (nDevices - 1)
                      << "]";
    return -1;
  }

  _outputDeviceIndex = index;
  _outputDeviceIsSpecified = true;

  return 0;
}

int32_t AudioDeviceMac::SetPlayoutDevice(
    AudioDeviceModule::WindowsDeviceType /*device*/) {
  RTC_LOG(LS_ERROR) << "WindowsDeviceType not supported";
  return -1;
}

int32_t AudioDeviceMac::PlayoutDeviceName(uint16_t index,
                                          char name[kAdmMaxDeviceNameSize],
                                          char guid[kAdmMaxGuidSize]) {
  const uint16_t nDevices(PlayoutDevices());

  if ((index > (nDevices - 1)) || (name == NULL)) {
    return -1;
  }

  memset(name, 0, kAdmMaxDeviceNameSize);

  if (guid != NULL) {
    memset(guid, 0, kAdmMaxGuidSize);
  }

  return GetDeviceName(kAudioDevicePropertyScopeOutput, index, name);
}

int32_t AudioDeviceMac::RecordingDeviceName(uint16_t index,
                                            char name[kAdmMaxDeviceNameSize],
                                            char guid[kAdmMaxGuidSize]) {
  const uint16_t nDevices(RecordingDevices());

  if ((index > (nDevices - 1)) || (name == NULL)) {
    return -1;
  }

  memset(name, 0, kAdmMaxDeviceNameSize);

  if (guid != NULL) {
    memset(guid, 0, kAdmMaxGuidSize);
  }

  return GetDeviceName(kAudioDevicePropertyScopeInput, index, name);
}

int16_t AudioDeviceMac::RecordingDevices() {
  AudioDeviceID recDevices[MaxNumberDevices];
  return GetNumberDevices(kAudioDevicePropertyScopeInput, recDevices,
                          MaxNumberDevices);
}

int32_t AudioDeviceMac::SetRecordingDevice(uint16_t index) {
  if (_recIsInitialized) {
    return -1;
  }

  AudioDeviceID recDevices[MaxNumberDevices];
  uint32_t nDevices = GetNumberDevices(kAudioDevicePropertyScopeInput,
                                       recDevices, MaxNumberDevices);
  RTC_LOG(LS_VERBOSE) << "number of available waveform-audio input devices is "
                      << nDevices;

  if (index > (nDevices - 1)) {
    RTC_LOG(LS_ERROR) << "device index is out of range [0," << (nDevices - 1)
                      << "]";
    return -1;
  }

  _inputDeviceIndex = index;
  _inputDeviceIsSpecified = true;

  return 0;
}

int32_t AudioDeviceMac::SetRecordingDevice(
    AudioDeviceModule::WindowsDeviceType /*device*/) {
  RTC_LOG(LS_ERROR) << "WindowsDeviceType not supported";
  return -1;
}

int32_t AudioDeviceMac::PlayoutIsAvailable(bool& available) {
  available = true;

  // Try to initialize the playout side
  if (InitPlayout() == -1) {
    available = false;
  }

  // We destroy the IOProc created by InitPlayout() in implDeviceIOProc().
  // We must actually start playout here in order to have the IOProc
  // deleted by calling StopPlayout().
  if (StartPlayout() == -1) {
    available = false;
  }

  // Cancel effect of initialization
  if (StopPlayout() == -1) {
    available = false;
  }

  return 0;
}

int32_t AudioDeviceMac::RecordingIsAvailable(bool& available) {
  available = true;

  // Try to initialize the recording side
  if (InitRecording() == -1) {
    available = false;
  }

  // We destroy the IOProc created by InitRecording() in implInDeviceIOProc().
  // We must actually start recording here in order to have the IOProc
  // deleted by calling StopRecording().
  if (StartRecording() == -1) {
    available = false;
  }

  // Cancel effect of initialization
  if (StopRecording() == -1) {
    available = false;
  }

  return 0;
}

int32_t AudioDeviceMac::InitPlayout() {
  RTC_LOG(LS_INFO) << "InitPlayout";
  rtc::CritScope lock(&_critSect);

  if (_playing) {
    return -1;
  }

  if (!_outputDeviceIsSpecified) {
    return -1;
  }

  if (_playIsInitialized) {
    return 0;
  }

  // Initialize the speaker (devices might have been added or removed)
  if (InitSpeaker() == -1) {
    RTC_LOG(LS_WARNING) << "InitSpeaker() failed";
  }

  if (!MicrophoneIsInitialized()) {
    // Make this call to check if we are using
    // one or two devices (_twoDevices)
    bool available = false;
    if (MicrophoneIsAvailable(available) == -1) {
      RTC_LOG(LS_WARNING) << "MicrophoneIsAvailable() failed";
    }
  }

  PaUtil_FlushRingBuffer(_paRenderBuffer);

  OSStatus err = noErr;
  UInt32 size = 0;
  _renderDelayOffsetSamples = 0;
  _renderDelayUs = 0;
  _renderLatencyUs = 0;
  _renderDeviceIsAlive = 1;
  _doStop = false;

  // The internal microphone of a MacBook Pro is located under the left speaker
  // grille. When the internal speakers are in use, we want to fully stereo
  // pan to the right.
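  // (The 'ispk' data-source value checked below is the four-character code
  // that the HAL reports for the built-in speakers.)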
  AudioObjectPropertyAddress propertyAddress = {
      kAudioDevicePropertyDataSource, kAudioDevicePropertyScopeOutput, 0};
  if (_macBookPro) {
    _macBookProPanRight = false;
    Boolean hasProperty =
        AudioObjectHasProperty(_outputDeviceID, &propertyAddress);
    if (hasProperty) {
      UInt32 dataSource = 0;
      size = sizeof(dataSource);
      WEBRTC_CA_LOG_WARN(AudioObjectGetPropertyData(
          _outputDeviceID, &propertyAddress, 0, NULL, &size, &dataSource));

      if (dataSource == 'ispk') {
        _macBookProPanRight = true;
        RTC_LOG(LS_VERBOSE)
            << "MacBook Pro using internal speakers; stereo panning right";
      } else {
        RTC_LOG(LS_VERBOSE) << "MacBook Pro not using internal speakers";
      }

      // Add a listener to determine if the status changes.
      WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(
          _outputDeviceID, &propertyAddress, &objectListenerProc, this));
    }
  }

  // Get current stream description
  propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
  memset(&_outStreamFormat, 0, sizeof(_outStreamFormat));
  size = sizeof(_outStreamFormat);
  WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
      _outputDeviceID, &propertyAddress, 0, NULL, &size, &_outStreamFormat));

  if (_outStreamFormat.mFormatID != kAudioFormatLinearPCM) {
    logCAMsg(rtc::LS_ERROR, "Unacceptable output stream format -> mFormatID",
             (const char*)&_outStreamFormat.mFormatID);
    return -1;
  }

  if (_outStreamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS) {
    RTC_LOG(LS_ERROR)
        << "Too many channels on output device (mChannelsPerFrame = "
        << _outStreamFormat.mChannelsPerFrame << ")";
    return -1;
  }

  if (_outStreamFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved) {
    RTC_LOG(LS_ERROR) << "Non-interleaved audio data is not supported. "
                      << "AudioHardware streams should not have this format.";
    return -1;
  }

  RTC_LOG(LS_VERBOSE) << "Output stream format:";
  RTC_LOG(LS_VERBOSE) << "mSampleRate = " << _outStreamFormat.mSampleRate
                      << ", mChannelsPerFrame = "
                      << _outStreamFormat.mChannelsPerFrame;
  RTC_LOG(LS_VERBOSE) << "mBytesPerPacket = "
                      << _outStreamFormat.mBytesPerPacket
                      << ", mFramesPerPacket = "
                      << _outStreamFormat.mFramesPerPacket;
  RTC_LOG(LS_VERBOSE) << "mBytesPerFrame = " << _outStreamFormat.mBytesPerFrame
                      << ", mBitsPerChannel = "
                      << _outStreamFormat.mBitsPerChannel;
  RTC_LOG(LS_VERBOSE) << "mFormatFlags = " << _outStreamFormat.mFormatFlags;
  logCAMsg(rtc::LS_VERBOSE, "mFormatID",
           (const char*)&_outStreamFormat.mFormatID);

  // Our preferred format to work with.
  if (_outStreamFormat.mChannelsPerFrame < 2) {
    // Disable stereo playout when we only have one channel on the device.
    _playChannels = 1;
    RTC_LOG(LS_VERBOSE) << "Stereo playout unavailable on this device";
  }
  WEBRTC_CA_RETURN_ON_ERR(SetDesiredPlayoutFormat());

  // Listen for format changes.
  propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
  WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(
      _outputDeviceID, &propertyAddress, &objectListenerProc, this));

  // Listen for processor overloads.
  propertyAddress.mSelector = kAudioDeviceProcessorOverload;
  WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(
      _outputDeviceID, &propertyAddress, &objectListenerProc, this));

  if (_twoDevices || !_recIsInitialized) {
    WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(
        _outputDeviceID, deviceIOProc, this, &_deviceIOProcID));
  }

  _playIsInitialized = true;

  return 0;
}

int32_t AudioDeviceMac::InitRecording() {
  RTC_LOG(LS_INFO) << "InitRecording";
  rtc::CritScope lock(&_critSect);

  if (_recording) {
    return -1;
  }

  if (!_inputDeviceIsSpecified) {
    return -1;
  }

  if (_recIsInitialized) {
    return 0;
  }

  // Initialize the microphone (devices might have been added or removed)
  if (InitMicrophone() == -1) {
    RTC_LOG(LS_WARNING) << "InitMicrophone() failed";
  }

  if (!SpeakerIsInitialized()) {
    // Make this call to check if we are using
    // one or two devices (_twoDevices)
    bool available = false;
    if (SpeakerIsAvailable(available) == -1) {
      RTC_LOG(LS_WARNING) << "SpeakerIsAvailable() failed";
    }
  }

  OSStatus err = noErr;
  UInt32 size = 0;

  PaUtil_FlushRingBuffer(_paCaptureBuffer);

  _captureDelayUs = 0;
  _captureLatencyUs = 0;
  _captureDeviceIsAlive = 1;
  _doStopRec = false;

  // Get current stream description
  AudioObjectPropertyAddress propertyAddress = {
      kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeInput, 0};
  memset(&_inStreamFormat, 0, sizeof(_inStreamFormat));
  size = sizeof(_inStreamFormat);
  WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
      _inputDeviceID, &propertyAddress, 0, NULL, &size, &_inStreamFormat));

  if (_inStreamFormat.mFormatID != kAudioFormatLinearPCM) {
    logCAMsg(rtc::LS_ERROR, "Unacceptable input stream format -> mFormatID",
             (const char*)&_inStreamFormat.mFormatID);
    return -1;
  }

  if (_inStreamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS) {
    RTC_LOG(LS_ERROR)
        << "Too many channels on input device (mChannelsPerFrame = "
        << _inStreamFormat.mChannelsPerFrame << ")";
    return -1;
  }

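  // _inStreamFormat.mSampleRate / 100 is the number of frames in one 10 ms
  // block, so this is the number of samples (across all channels) that the
  // device delivers per N_BLOCKS_IO blocks of 10 ms.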
  const int io_block_size_samples = _inStreamFormat.mChannelsPerFrame *
                                    _inStreamFormat.mSampleRate / 100 *
                                    N_BLOCKS_IO;
  if (io_block_size_samples > _captureBufSizeSamples) {
    RTC_LOG(LS_ERROR) << "Input IO block size (" << io_block_size_samples
                      << ") is larger than ring buffer ("
                      << _captureBufSizeSamples << ")";
    return -1;
  }

  RTC_LOG(LS_VERBOSE) << "Input stream format:";
  RTC_LOG(LS_VERBOSE) << "mSampleRate = " << _inStreamFormat.mSampleRate
                      << ", mChannelsPerFrame = "
                      << _inStreamFormat.mChannelsPerFrame;
  RTC_LOG(LS_VERBOSE) << "mBytesPerPacket = " << _inStreamFormat.mBytesPerPacket
                      << ", mFramesPerPacket = "
                      << _inStreamFormat.mFramesPerPacket;
  RTC_LOG(LS_VERBOSE) << "mBytesPerFrame = " << _inStreamFormat.mBytesPerFrame
                      << ", mBitsPerChannel = "
                      << _inStreamFormat.mBitsPerChannel;
  RTC_LOG(LS_VERBOSE) << "mFormatFlags = " << _inStreamFormat.mFormatFlags;
  logCAMsg(rtc::LS_VERBOSE, "mFormatID",
           (const char*)&_inStreamFormat.mFormatID);

  // Our preferred format to work with
  if (_inStreamFormat.mChannelsPerFrame >= 2 && (_recChannels == 2)) {
    _inDesiredFormat.mChannelsPerFrame = 2;
  } else {
    // Disable stereo recording when we only have one channel on the device.
    _inDesiredFormat.mChannelsPerFrame = 1;
    _recChannels = 1;
    RTC_LOG(LS_VERBOSE) << "Stereo recording unavailable on this device";
  }

  if (_ptrAudioBuffer) {
    // Update audio buffer with the selected parameters
    _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
    _ptrAudioBuffer->SetRecordingChannels((uint8_t)_recChannels);
  }

  _inDesiredFormat.mSampleRate = N_REC_SAMPLES_PER_SEC;
  _inDesiredFormat.mBytesPerPacket =
      _inDesiredFormat.mChannelsPerFrame * sizeof(SInt16);
  _inDesiredFormat.mFramesPerPacket = 1;
  _inDesiredFormat.mBytesPerFrame =
      _inDesiredFormat.mChannelsPerFrame * sizeof(SInt16);
  _inDesiredFormat.mBitsPerChannel = sizeof(SInt16) * 8;

  _inDesiredFormat.mFormatFlags =
      kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
#ifdef WEBRTC_ARCH_BIG_ENDIAN
  _inDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
#endif
  _inDesiredFormat.mFormatID = kAudioFormatLinearPCM;

  WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&_inStreamFormat, &_inDesiredFormat,
                                            &_captureConverter));

  // First try to set buffer size to desired value (10 ms * N_BLOCKS_IO)
  // TODO(xians): investigate this block.
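  // (For example, a 48 kHz stereo Float32 input gives
  // 48 * 10 * N_BLOCKS_IO * 2 * 4 = 3840 * N_BLOCKS_IO bytes, before rounding
  // to a whole number of packets and clamping to the device's allowed range.)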
  UInt32 bufByteCount =
      (UInt32)((_inStreamFormat.mSampleRate / 1000.0) * 10.0 * N_BLOCKS_IO *
               _inStreamFormat.mChannelsPerFrame * sizeof(Float32));
  if (_inStreamFormat.mFramesPerPacket != 0) {
    if (bufByteCount % _inStreamFormat.mFramesPerPacket != 0) {
      bufByteCount =
          ((UInt32)(bufByteCount / _inStreamFormat.mFramesPerPacket) + 1) *
          _inStreamFormat.mFramesPerPacket;
    }
  }

  // Ensure the buffer size is within the acceptable range provided by the
  // device.
  propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;
  AudioValueRange range;
  size = sizeof(range);
  WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
      _inputDeviceID, &propertyAddress, 0, NULL, &size, &range));
  if (range.mMinimum > bufByteCount) {
    bufByteCount = range.mMinimum;
  } else if (range.mMaximum < bufByteCount) {
    bufByteCount = range.mMaximum;
  }

  propertyAddress.mSelector = kAudioDevicePropertyBufferSize;
  size = sizeof(bufByteCount);
  WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(
      _inputDeviceID, &propertyAddress, 0, NULL, size, &bufByteCount));

  // Get capture device latency
  propertyAddress.mSelector = kAudioDevicePropertyLatency;
  UInt32 latency = 0;
  size = sizeof(UInt32);
  WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
      _inputDeviceID, &propertyAddress, 0, NULL, &size, &latency));
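  // kAudioDevicePropertyLatency is reported in frames; convert it to
  // microseconds using the capture sample rate.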
  _captureLatencyUs = (UInt32)((1.0e6 * latency) / _inStreamFormat.mSampleRate);

  // Get capture stream latency
  propertyAddress.mSelector = kAudioDevicePropertyStreams;
  AudioStreamID stream = 0;
  size = sizeof(AudioStreamID);
  WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
      _inputDeviceID, &propertyAddress, 0, NULL, &size, &stream));
  propertyAddress.mSelector = kAudioStreamPropertyLatency;
  size = sizeof(UInt32);
  latency = 0;
  WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
      _inputDeviceID, &propertyAddress, 0, NULL, &size, &latency));
  _captureLatencyUs +=
      (UInt32)((1.0e6 * latency) / _inStreamFormat.mSampleRate);

  // Listen for format changes
  // TODO(xians): should we be using kAudioDevicePropertyDeviceHasChanged?
  propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
  WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(
      _inputDeviceID, &propertyAddress, &objectListenerProc, this));

  // Listen for processor overloads
  propertyAddress.mSelector = kAudioDeviceProcessorOverload;
  WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(
      _inputDeviceID, &propertyAddress, &objectListenerProc, this));

  if (_twoDevices) {
    WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(
        _inputDeviceID, inDeviceIOProc, this, &_inDeviceIOProcID));
  } else if (!_playIsInitialized) {
    WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(
        _inputDeviceID, deviceIOProc, this, &_deviceIOProcID));
  }

  // Mark recording side as initialized
  _recIsInitialized = true;

  return 0;
}

int32_t AudioDeviceMac::StartRecording() {
  RTC_LOG(LS_INFO) << "StartRecording";
  rtc::CritScope lock(&_critSect);

  if (!_recIsInitialized) {
    return -1;
  }

  if (_recording) {
    return 0;
  }

  if (!_initialized) {
    RTC_LOG(LS_ERROR) << "Recording worker thread has not been started";
    return -1;
  }

  RTC_DCHECK(!capture_worker_thread_.get());
  capture_worker_thread_.reset(
      new rtc::PlatformThread(RunCapture, this, "CaptureWorkerThread"));
  RTC_DCHECK(capture_worker_thread_.get());
  capture_worker_thread_->Start();
  capture_worker_thread_->SetPriority(rtc::kRealtimePriority);
1323
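  // Start the capture device. With a shared device, the IOProc is started
  // here only if playout has not already started it.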
1324 OSStatus err = noErr;
1325 if (_twoDevices) {
1326 WEBRTC_CA_RETURN_ON_ERR(
1327 AudioDeviceStart(_inputDeviceID, _inDeviceIOProcID));
1328 } else if (!_playing) {
1329 WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_inputDeviceID, _deviceIOProcID));
1330 }
1331
1332 _recording = true;
1333
1334 return 0;
1335}
1336
1337int32_t AudioDeviceMac::StopRecording() {
Mirko Bonadei675513b2017-11-09 11:09:25 +01001338 RTC_LOG(LS_INFO) << "StopRecording";
kthelgasonff046c72017-03-31 02:03:55 -07001339 rtc::CritScope lock(&_critSect);
andrew2bc63a12016-01-11 15:59:17 -08001340
1341 if (!_recIsInitialized) {
1342 return 0;
1343 }
1344
1345 OSStatus err = noErr;
andrew2bc63a12016-01-11 15:59:17 -08001346 int32_t captureDeviceIsAlive = AtomicGet32(&_captureDeviceIsAlive);
henrika6b3e1a22017-09-25 16:34:30 +02001347 if (_twoDevices && captureDeviceIsAlive == 1) {
1348 // Recording side uses its own dedicated device and IOProc.
1349 if (_recording) {
andrew2bc63a12016-01-11 15:59:17 -08001350 _recording = false;
1351 _doStopRec = true; // Signal to io proc to stop audio device
1352 _critSect.Leave(); // Cannot be under lock, risk of deadlock
1353 if (kEventTimeout == _stopEventRec.Wait(2000)) {
kthelgasonff046c72017-03-31 02:03:55 -07001354 rtc::CritScope critScoped(&_critSect);
Mirko Bonadei675513b2017-11-09 11:09:25 +01001355 RTC_LOG(LS_WARNING) << "Timed out stopping the capture IOProc. "
1356 << "We may have failed to detect a device removal.";
andrew2bc63a12016-01-11 15:59:17 -08001357 WEBRTC_CA_LOG_WARN(AudioDeviceStop(_inputDeviceID, _inDeviceIOProcID));
1358 WEBRTC_CA_LOG_WARN(
Mirko Bonadei72c42502017-11-09 09:33:23 +01001359 AudioDeviceDestroyIOProcID(_inputDeviceID, _inDeviceIOProcID));
andrew2bc63a12016-01-11 15:59:17 -08001360 }
1361 _critSect.Enter();
1362 _doStopRec = false;
Mirko Bonadei675513b2017-11-09 11:09:25 +01001363 RTC_LOG(LS_INFO) << "Recording stopped (input device)";
henrika6b3e1a22017-09-25 16:34:30 +02001364 } else if (_recIsInitialized) {
1365 WEBRTC_CA_LOG_WARN(
1366 AudioDeviceDestroyIOProcID(_inputDeviceID, _inDeviceIOProcID));
Mirko Bonadei675513b2017-11-09 11:09:25 +01001367 RTC_LOG(LS_INFO) << "Recording uninitialized (input device)";
xians@google.com68efa212011-08-11 12:41:56 +00001368 }
andrew2bc63a12016-01-11 15:59:17 -08001369 } else {
1370 // We signal a stop for a shared device even when rendering has
1371 // not yet ended. This is to ensure the IOProc will return early as
1372 // intended (by checking |_recording|) before accessing
1373 // resources we free below (e.g. the capture converter).
1374 //
1375 // In the case of a shared device, the IOProc will verify
1376 // rendering has ended before stopping itself.
1377 if (_recording && captureDeviceIsAlive == 1) {
1378 _recording = false;
1379 _doStop = true; // Signal to io proc to stop audio device
1380 _critSect.Leave(); // Cannot be under lock, risk of deadlock
1381 if (kEventTimeout == _stopEvent.Wait(2000)) {
kthelgasonff046c72017-03-31 02:03:55 -07001382 rtc::CritScope critScoped(&_critSect);
Mirko Bonadei675513b2017-11-09 11:09:25 +01001383 RTC_LOG(LS_WARNING) << "Timed out stopping the shared IOProc. "
1384 << "We may have failed to detect a device removal.";
andrew2bc63a12016-01-11 15:59:17 -08001385 // We assume rendering on a shared device has stopped as well if
1386 // the IOProc times out.
1387 WEBRTC_CA_LOG_WARN(AudioDeviceStop(_outputDeviceID, _deviceIOProcID));
1388 WEBRTC_CA_LOG_WARN(
1389 AudioDeviceDestroyIOProcID(_outputDeviceID, _deviceIOProcID));
1390 }
1391 _critSect.Enter();
1392 _doStop = false;
Mirko Bonadei675513b2017-11-09 11:09:25 +01001393 RTC_LOG(LS_INFO) << "Recording stopped (shared device)";
henrika6b3e1a22017-09-25 16:34:30 +02001394 } else if (_recIsInitialized && !_playing && !_playIsInitialized) {
1395 WEBRTC_CA_LOG_WARN(
Mirko Bonadei72c42502017-11-09 09:33:23 +01001396 AudioDeviceDestroyIOProcID(_outputDeviceID, _deviceIOProcID));
Mirko Bonadei675513b2017-11-09 11:09:25 +01001397 RTC_LOG(LS_INFO) << "Recording uninitialized (shared device)";
xians@google.com68efa212011-08-11 12:41:56 +00001398 }
andrew2bc63a12016-01-11 15:59:17 -08001399 }
xians@google.com68efa212011-08-11 12:41:56 +00001400
andrew2bc63a12016-01-11 15:59:17 -08001401 // Setting this signal will allow the worker thread to be stopped.
1402 AtomicSet32(&_captureDeviceIsAlive, 0);
xians@google.com68efa212011-08-11 12:41:56 +00001403
andrew2bc63a12016-01-11 15:59:17 -08001404 if (capture_worker_thread_.get()) {
1405 _critSect.Leave();
1406 capture_worker_thread_->Stop();
1407 capture_worker_thread_.reset();
1408 _critSect.Enter();
1409 }
tommi@webrtc.orgd43bdf52015-02-03 16:29:57 +00001410
andrew2bc63a12016-01-11 15:59:17 -08001411 WEBRTC_CA_LOG_WARN(AudioConverterDispose(_captureConverter));
1412
1413 // Remove listeners.
1414 AudioObjectPropertyAddress propertyAddress = {
1415 kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeInput, 0};
1416 WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
1417 _inputDeviceID, &propertyAddress, &objectListenerProc, this));
1418
1419 propertyAddress.mSelector = kAudioDeviceProcessorOverload;
1420 WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
1421 _inputDeviceID, &propertyAddress, &objectListenerProc, this));
1422
1423 _recIsInitialized = false;
1424 _recording = false;
1425
1426 return 0;
1427}
1428
1429bool AudioDeviceMac::RecordingIsInitialized() const {
1430 return (_recIsInitialized);
1431}
1432
1433bool AudioDeviceMac::Recording() const {
1434 return (_recording);
1435}
1436
1437bool AudioDeviceMac::PlayoutIsInitialized() const {
1438 return (_playIsInitialized);
1439}
1440
1441int32_t AudioDeviceMac::StartPlayout() {
Mirko Bonadei675513b2017-11-09 11:09:25 +01001442 RTC_LOG(LS_INFO) << "StartPlayout";
kthelgasonff046c72017-03-31 02:03:55 -07001443 rtc::CritScope lock(&_critSect);
andrew2bc63a12016-01-11 15:59:17 -08001444
1445 if (!_playIsInitialized) {
1446 return -1;
1447 }
1448
1449 if (_playing) {
1450 return 0;
1451 }
1452
1453 RTC_DCHECK(!render_worker_thread_.get());
1454 render_worker_thread_.reset(
1455 new rtc::PlatformThread(RunRender, this, "RenderWorkerThread"));
1456 render_worker_thread_->Start();
1457 render_worker_thread_->SetPriority(rtc::kRealtimePriority);
1458
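  // Start the output device unless a shared IOProc is already being driven by
  // the recording side.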
1459 if (_twoDevices || !_recording) {
xians@google.com68efa212011-08-11 12:41:56 +00001460 OSStatus err = noErr;
andrew2bc63a12016-01-11 15:59:17 -08001461 WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_outputDeviceID, _deviceIOProcID));
1462 }
1463 _playing = true;
xians@google.com68efa212011-08-11 12:41:56 +00001464
andrew2bc63a12016-01-11 15:59:17 -08001465 return 0;
1466}
xians@google.com68efa212011-08-11 12:41:56 +00001467
andrew2bc63a12016-01-11 15:59:17 -08001468int32_t AudioDeviceMac::StopPlayout() {
Mirko Bonadei675513b2017-11-09 11:09:25 +01001469 RTC_LOG(LS_INFO) << "StopPlayout";
kthelgasonff046c72017-03-31 02:03:55 -07001470 rtc::CritScope lock(&_critSect);
andrew2bc63a12016-01-11 15:59:17 -08001471
1472 if (!_playIsInitialized) {
xians@google.com68efa212011-08-11 12:41:56 +00001473 return 0;
andrew2bc63a12016-01-11 15:59:17 -08001474 }
xians@google.com68efa212011-08-11 12:41:56 +00001475
andrew2bc63a12016-01-11 15:59:17 -08001476 OSStatus err = noErr;
andrew2bc63a12016-01-11 15:59:17 -08001477 int32_t renderDeviceIsAlive = AtomicGet32(&_renderDeviceIsAlive);
1478 if (_playing && renderDeviceIsAlive == 1) {
1479 // We signal a stop for a shared device even when capturing has not
1480 // yet ended. This is to ensure the IOProc will return early as
1481 // intended (by checking |_playing|) before accessing resources we
1482 // free below (e.g. the render converter).
1483 //
1484 // In the case of a shared device, the IOProc will verify capturing
1485 // has ended before stopping itself.
xians@google.com68efa212011-08-11 12:41:56 +00001486 _playing = false;
andrew2bc63a12016-01-11 15:59:17 -08001487 _doStop = true; // Signal to io proc to stop audio device
1488 _critSect.Leave(); // Cannot be under lock, risk of deadlock
1489 if (kEventTimeout == _stopEvent.Wait(2000)) {
kthelgasonff046c72017-03-31 02:03:55 -07001490 rtc::CritScope critScoped(&_critSect);
Mirko Bonadei675513b2017-11-09 11:09:25 +01001491 RTC_LOG(LS_WARNING) << "Timed out stopping the render IOProc. "
1492 << "We may have failed to detect a device removal.";
xians@google.com68efa212011-08-11 12:41:56 +00001493
andrew2bc63a12016-01-11 15:59:17 -08001494 // We assume capturing on a shared device has stopped as well if the
1495 // IOProc times out.
1496 WEBRTC_CA_LOG_WARN(AudioDeviceStop(_outputDeviceID, _deviceIOProcID));
1497 WEBRTC_CA_LOG_WARN(
1498 AudioDeviceDestroyIOProcID(_outputDeviceID, _deviceIOProcID));
1499 }
1500 _critSect.Enter();
1501 _doStop = false;
Mirko Bonadei675513b2017-11-09 11:09:25 +01001502 RTC_LOG(LS_INFO) << "Playout stopped";
henrika6b3e1a22017-09-25 16:34:30 +02001503 } else if (_twoDevices && _playIsInitialized) {
1504 WEBRTC_CA_LOG_WARN(
Mirko Bonadei72c42502017-11-09 09:33:23 +01001505 AudioDeviceDestroyIOProcID(_outputDeviceID, _deviceIOProcID));
Mirko Bonadei675513b2017-11-09 11:09:25 +01001506 RTC_LOG(LS_INFO) << "Playout uninitialized (output device)";
henrika6b3e1a22017-09-25 16:34:30 +02001507 } else if (!_twoDevices && _playIsInitialized && !_recIsInitialized) {
1508 WEBRTC_CA_LOG_WARN(
Mirko Bonadei72c42502017-11-09 09:33:23 +01001509 AudioDeviceDestroyIOProcID(_outputDeviceID, _deviceIOProcID));
Mirko Bonadei675513b2017-11-09 11:09:25 +01001510 RTC_LOG(LS_INFO) << "Playout uninitialized (shared device)";
andrew2bc63a12016-01-11 15:59:17 -08001511 }
1512
1513 // Setting this signal will allow the worker thread to be stopped.
1514 AtomicSet32(&_renderDeviceIsAlive, 0);
1515 if (render_worker_thread_.get()) {
1516 _critSect.Leave();
1517 render_worker_thread_->Stop();
1518 render_worker_thread_.reset();
1519 _critSect.Enter();
1520 }
1521
1522 WEBRTC_CA_LOG_WARN(AudioConverterDispose(_renderConverter));
1523
1524 // Remove listeners.
1525 AudioObjectPropertyAddress propertyAddress = {
1526 kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeOutput, 0};
1527 WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
1528 _outputDeviceID, &propertyAddress, &objectListenerProc, this));
1529
1530 propertyAddress.mSelector = kAudioDeviceProcessorOverload;
1531 WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
1532 _outputDeviceID, &propertyAddress, &objectListenerProc, this));
1533
1534 if (_macBookPro) {
1535 Boolean hasProperty =
1536 AudioObjectHasProperty(_outputDeviceID, &propertyAddress);
1537 if (hasProperty) {
1538 propertyAddress.mSelector = kAudioDevicePropertyDataSource;
1539 WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
1540 _outputDeviceID, &propertyAddress, &objectListenerProc, this));
1541 }
1542 }
1543
1544 _playIsInitialized = false;
1545 _playing = false;
1546
1547 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00001548}
1549
andrew2bc63a12016-01-11 15:59:17 -08001550int32_t AudioDeviceMac::PlayoutDelay(uint16_t& delayMS) const {
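  // Report the latest render delay estimate plus the fixed device and stream
  // latency, converted from microseconds to milliseconds with rounding.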
1551 int32_t renderDelayUs = AtomicGet32(&_renderDelayUs);
1552 delayMS =
1553 static_cast<uint16_t>(1e-3 * (renderDelayUs + _renderLatencyUs) + 0.5);
1554 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00001555}
1556
andrew2bc63a12016-01-11 15:59:17 -08001557bool AudioDeviceMac::Playing() const {
1558 return (_playing);
xians@google.com68efa212011-08-11 12:41:56 +00001559}
1560
xians@google.com68efa212011-08-11 12:41:56 +00001561// ============================================================================
1562// Private Methods
1563// ============================================================================
1564
andrew2bc63a12016-01-11 15:59:17 -08001565int32_t AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope,
1566 AudioDeviceID scopedDeviceIds[],
1567 const uint32_t deviceListLength) {
1568 OSStatus err = noErr;
xians@google.com68efa212011-08-11 12:41:56 +00001569
andrew2bc63a12016-01-11 15:59:17 -08001570 AudioObjectPropertyAddress propertyAddress = {
1571 kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal,
1572 kAudioObjectPropertyElementMaster};
1573 UInt32 size = 0;
1574 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyDataSize(
1575 kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size));
1576 if (size == 0) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01001577 RTC_LOG(LS_WARNING) << "No devices";
andrew2bc63a12016-01-11 15:59:17 -08001578 return 0;
1579 }
xians@google.com68efa212011-08-11 12:41:56 +00001580
andrew2bc63a12016-01-11 15:59:17 -08001581 AudioDeviceID* deviceIds = (AudioDeviceID*)malloc(size);
1582 UInt32 numberDevices = size / sizeof(AudioDeviceID);
1583 AudioBufferList* bufferList = NULL;
1584 UInt32 numberScopedDevices = 0;
xians@google.com68efa212011-08-11 12:41:56 +00001585
andrew2bc63a12016-01-11 15:59:17 -08001586 // First check if there is a default device and list it
1587 UInt32 hardwareProperty = 0;
1588 if (scope == kAudioDevicePropertyScopeOutput) {
1589 hardwareProperty = kAudioHardwarePropertyDefaultOutputDevice;
1590 } else {
1591 hardwareProperty = kAudioHardwarePropertyDefaultInputDevice;
1592 }
xians@google.com68efa212011-08-11 12:41:56 +00001593
andrew2bc63a12016-01-11 15:59:17 -08001594 AudioObjectPropertyAddress propertyAddressDefault = {
1595 hardwareProperty, kAudioObjectPropertyScopeGlobal,
1596 kAudioObjectPropertyElementMaster};
xians@google.com68efa212011-08-11 12:41:56 +00001597
andrew2bc63a12016-01-11 15:59:17 -08001598 AudioDeviceID usedID;
1599 UInt32 uintSize = sizeof(UInt32);
1600 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,
1601 &propertyAddressDefault, 0,
1602 NULL, &uintSize, &usedID));
1603 if (usedID != kAudioDeviceUnknown) {
1604 scopedDeviceIds[numberScopedDevices] = usedID;
1605 numberScopedDevices++;
1606 } else {
Mirko Bonadei675513b2017-11-09 11:09:25 +01001607 RTC_LOG(LS_WARNING) << "GetNumberDevices(): Default device unknown";
andrew2bc63a12016-01-11 15:59:17 -08001608 }
xians@google.com68efa212011-08-11 12:41:56 +00001609
andrew2bc63a12016-01-11 15:59:17 -08001610 // Then list the rest of the devices
1611 bool listOK = true;
xians@google.com68efa212011-08-11 12:41:56 +00001612
andrew2bc63a12016-01-11 15:59:17 -08001613 WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(
1614 kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size, deviceIds));
1615 if (err != noErr) {
1616 listOK = false;
1617 } else {
1618 propertyAddress.mSelector = kAudioDevicePropertyStreamConfiguration;
1619 propertyAddress.mScope = scope;
1620 propertyAddress.mElement = 0;
1621 for (UInt32 i = 0; i < numberDevices; i++) {
1622 // Check for channels in the requested scope
1623 WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyDataSize(
1624 deviceIds[i], &propertyAddress, 0, NULL, &size));
1625 if (err == kAudioHardwareBadDeviceError) {
1626 // This device doesn't actually exist; continue iterating.
1627 continue;
1628 } else if (err != noErr) {
xians@google.com68efa212011-08-11 12:41:56 +00001629 listOK = false;
andrew2bc63a12016-01-11 15:59:17 -08001630 break;
1631 }
xians@google.com68efa212011-08-11 12:41:56 +00001632
andrew2bc63a12016-01-11 15:59:17 -08001633 bufferList = (AudioBufferList*)malloc(size);
1634 WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(
1635 deviceIds[i], &propertyAddress, 0, NULL, &size, bufferList));
1636 if (err != noErr) {
1637 listOK = false;
1638 break;
1639 }
xians@google.com68efa212011-08-11 12:41:56 +00001640
andrew2bc63a12016-01-11 15:59:17 -08001641 if (bufferList->mNumberBuffers > 0) {
1642 if (numberScopedDevices >= deviceListLength) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01001643 RTC_LOG(LS_ERROR) << "Device list is not long enough";
andrew2bc63a12016-01-11 15:59:17 -08001644 listOK = false;
1645 break;
xians@google.com68efa212011-08-11 12:41:56 +00001646 }
1647
andrew2bc63a12016-01-11 15:59:17 -08001648 scopedDeviceIds[numberScopedDevices] = deviceIds[i];
1649 numberScopedDevices++;
1650 }
xians@google.com68efa212011-08-11 12:41:56 +00001651
andrew2bc63a12016-01-11 15:59:17 -08001652 free(bufferList);
1653 bufferList = NULL;
1654 } // for
1655 }
1656
1657 if (!listOK) {
1658 if (deviceIds) {
1659 free(deviceIds);
1660 deviceIds = NULL;
xians@google.com68efa212011-08-11 12:41:56 +00001661 }
1662
andrew2bc63a12016-01-11 15:59:17 -08001663 if (bufferList) {
1664 free(bufferList);
1665 bufferList = NULL;
xians@google.com68efa212011-08-11 12:41:56 +00001666 }
1667
andrew2bc63a12016-01-11 15:59:17 -08001668 return -1;
1669 }
1670
1671 // Happy ending
1672 if (deviceIds) {
1673 free(deviceIds);
1674 deviceIds = NULL;
1675 }
1676
1677 return numberScopedDevices;
xians@google.com68efa212011-08-11 12:41:56 +00001678}
1679
andrew2bc63a12016-01-11 15:59:17 -08001680int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope,
1681 const uint16_t index,
1682 char* name) {
1683 OSStatus err = noErr;
1684 UInt32 len = kAdmMaxDeviceNameSize;
1685 AudioDeviceID deviceIds[MaxNumberDevices];
xians@google.com68efa212011-08-11 12:41:56 +00001686
andrew2bc63a12016-01-11 15:59:17 -08001687 int numberDevices = GetNumberDevices(scope, deviceIds, MaxNumberDevices);
1688 if (numberDevices < 0) {
1689 return -1;
1690 } else if (numberDevices == 0) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01001691 RTC_LOG(LS_ERROR) << "No devices";
andrew2bc63a12016-01-11 15:59:17 -08001692 return -1;
1693 }
1694
1695 // If the index is below the number of devices, treat it as a WebRTC
1696 // device index; otherwise treat it as a CoreAudio device ID.
1697 AudioDeviceID usedID;
1698
1699 // Check if there is a default device
1700 bool isDefaultDevice = false;
1701 if (index == 0) {
1702 UInt32 hardwareProperty = 0;
1703 if (scope == kAudioDevicePropertyScopeOutput) {
1704 hardwareProperty = kAudioHardwarePropertyDefaultOutputDevice;
1705 } else {
1706 hardwareProperty = kAudioHardwarePropertyDefaultInputDevice;
xians@google.com68efa212011-08-11 12:41:56 +00001707 }
xians@google.com68efa212011-08-11 12:41:56 +00001708 AudioObjectPropertyAddress propertyAddress = {
andrew2bc63a12016-01-11 15:59:17 -08001709 hardwareProperty, kAudioObjectPropertyScopeGlobal,
1710 kAudioObjectPropertyElementMaster};
1711 UInt32 size = sizeof(UInt32);
1712 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
1713 kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size, &usedID));
1714 if (usedID == kAudioDeviceUnknown) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01001715 RTC_LOG(LS_WARNING) << "GetDeviceName(): Default device unknown";
andrew2bc63a12016-01-11 15:59:17 -08001716 } else {
1717 isDefaultDevice = true;
1718 }
1719 }
xians@google.com68efa212011-08-11 12:41:56 +00001720
andrew2bc63a12016-01-11 15:59:17 -08001721 AudioObjectPropertyAddress propertyAddress = {kAudioDevicePropertyDeviceName,
1722 scope, 0};
xians@google.com68efa212011-08-11 12:41:56 +00001723
andrew2bc63a12016-01-11 15:59:17 -08001724 if (isDefaultDevice) {
1725 char devName[len];
xians@google.com68efa212011-08-11 12:41:56 +00001726
andrew2bc63a12016-01-11 15:59:17 -08001727 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(usedID, &propertyAddress,
1728 0, NULL, &len, devName));
xians@google.com68efa212011-08-11 12:41:56 +00001729
andrew2bc63a12016-01-11 15:59:17 -08001730 sprintf(name, "default (%s)", devName);
1731 } else {
1732 if (index < numberDevices) {
1733 usedID = deviceIds[index];
1734 } else {
1735 usedID = index;
xians@google.com68efa212011-08-11 12:41:56 +00001736 }
1737
andrew2bc63a12016-01-11 15:59:17 -08001738 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(usedID, &propertyAddress,
1739 0, NULL, &len, name));
1740 }
1741
1742 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00001743}
1744
pbos@webrtc.org25509882013-04-09 10:30:35 +00001745int32_t AudioDeviceMac::InitDevice(const uint16_t userDeviceIndex,
1746 AudioDeviceID& deviceId,
andrew2bc63a12016-01-11 15:59:17 -08001747 const bool isInput) {
1748 OSStatus err = noErr;
1749 UInt32 size = 0;
1750 AudioObjectPropertyScope deviceScope;
1751 AudioObjectPropertySelector defaultDeviceSelector;
1752 AudioDeviceID deviceIds[MaxNumberDevices];
xians@google.com68efa212011-08-11 12:41:56 +00001753
andrew2bc63a12016-01-11 15:59:17 -08001754 if (isInput) {
1755 deviceScope = kAudioDevicePropertyScopeInput;
1756 defaultDeviceSelector = kAudioHardwarePropertyDefaultInputDevice;
1757 } else {
1758 deviceScope = kAudioDevicePropertyScopeOutput;
1759 defaultDeviceSelector = kAudioHardwarePropertyDefaultOutputDevice;
1760 }
1761
1762 AudioObjectPropertyAddress propertyAddress = {
1763 defaultDeviceSelector, kAudioObjectPropertyScopeGlobal,
1764 kAudioObjectPropertyElementMaster};
1765
1766 // Get the actual device IDs
1767 int numberDevices =
1768 GetNumberDevices(deviceScope, deviceIds, MaxNumberDevices);
1769 if (numberDevices < 0) {
1770 return -1;
1771 } else if (numberDevices == 0) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01001772 RTC_LOG(LS_ERROR) << "InitDevice(): No devices";
andrew2bc63a12016-01-11 15:59:17 -08001773 return -1;
1774 }
1775
1776 bool isDefaultDevice = false;
1777 deviceId = kAudioDeviceUnknown;
1778 if (userDeviceIndex == 0) {
1779 // Try to use default system device
1780 size = sizeof(AudioDeviceID);
1781 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
1782 kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size, &deviceId));
1783 if (deviceId == kAudioDeviceUnknown) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01001784 RTC_LOG(LS_WARNING) << "No default device exists";
andrew2bc63a12016-01-11 15:59:17 -08001785 } else {
1786 isDefaultDevice = true;
xians@google.com68efa212011-08-11 12:41:56 +00001787 }
andrew2bc63a12016-01-11 15:59:17 -08001788 }
xians@google.com68efa212011-08-11 12:41:56 +00001789
andrew2bc63a12016-01-11 15:59:17 -08001790 if (!isDefaultDevice) {
1791 deviceId = deviceIds[userDeviceIndex];
1792 }
xians@google.com68efa212011-08-11 12:41:56 +00001793
andrew2bc63a12016-01-11 15:59:17 -08001794 // Obtain device name and manufacturer for logging.
1795 // Also use this as a test to ensure a user-set device ID is valid.
1796 char devName[128];
1797 char devManf[128];
1798 memset(devName, 0, sizeof(devName));
1799 memset(devManf, 0, sizeof(devManf));
xians@google.com68efa212011-08-11 12:41:56 +00001800
andrew2bc63a12016-01-11 15:59:17 -08001801 propertyAddress.mSelector = kAudioDevicePropertyDeviceName;
1802 propertyAddress.mScope = deviceScope;
1803 propertyAddress.mElement = 0;
1804 size = sizeof(devName);
1805 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId, &propertyAddress,
1806 0, NULL, &size, devName));
xians@google.com68efa212011-08-11 12:41:56 +00001807
andrew2bc63a12016-01-11 15:59:17 -08001808 propertyAddress.mSelector = kAudioDevicePropertyDeviceManufacturer;
1809 size = sizeof(devManf);
1810 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId, &propertyAddress,
1811 0, NULL, &size, devManf));
xians@google.com68efa212011-08-11 12:41:56 +00001812
andrew2bc63a12016-01-11 15:59:17 -08001813 if (isInput) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01001814 RTC_LOG(LS_INFO) << "Input device: " << devManf << " " << devName;
andrew2bc63a12016-01-11 15:59:17 -08001815 } else {
Mirko Bonadei675513b2017-11-09 11:09:25 +01001816 RTC_LOG(LS_INFO) << "Output device: " << devManf << " " << devName;
andrew2bc63a12016-01-11 15:59:17 -08001817 }
xians@google.com68efa212011-08-11 12:41:56 +00001818
andrew2bc63a12016-01-11 15:59:17 -08001819 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00001820}
1821
andrew2bc63a12016-01-11 15:59:17 -08001822OSStatus AudioDeviceMac::SetDesiredPlayoutFormat() {
1823 // Our preferred format to work with.
1824 _outDesiredFormat.mSampleRate = N_PLAY_SAMPLES_PER_SEC;
1825 _outDesiredFormat.mChannelsPerFrame = _playChannels;
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001826
andrew2bc63a12016-01-11 15:59:17 -08001827 if (_ptrAudioBuffer) {
1828 // Update audio buffer with the selected parameters.
1829 _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
1830 _ptrAudioBuffer->SetPlayoutChannels((uint8_t)_playChannels);
1831 }
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001832
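  // Ring-buffer headroom: the portion of the render ring buffer beyond
  // N_BUFFERS_OUT playout blocks. RenderWorkerThread() subtracts this from the
  // writable space before topping up the buffer.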
Mirko Bonadei72c42502017-11-09 09:33:23 +01001833 _renderDelayOffsetSamples =
1834 _renderBufSizeSamples - N_BUFFERS_OUT * ENGINE_PLAY_BUF_SIZE_IN_SAMPLES *
andrew2bc63a12016-01-11 15:59:17 -08001835 _outDesiredFormat.mChannelsPerFrame;
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001836
andrew2bc63a12016-01-11 15:59:17 -08001837 _outDesiredFormat.mBytesPerPacket =
1838 _outDesiredFormat.mChannelsPerFrame * sizeof(SInt16);
1839 // In uncompressed audio, a packet is one frame.
1840 _outDesiredFormat.mFramesPerPacket = 1;
1841 _outDesiredFormat.mBytesPerFrame =
1842 _outDesiredFormat.mChannelsPerFrame * sizeof(SInt16);
1843 _outDesiredFormat.mBitsPerChannel = sizeof(SInt16) * 8;
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001844
andrew2bc63a12016-01-11 15:59:17 -08001845 _outDesiredFormat.mFormatFlags =
1846 kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001847#ifdef WEBRTC_ARCH_BIG_ENDIAN
andrew2bc63a12016-01-11 15:59:17 -08001848 _outDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001849#endif
andrew2bc63a12016-01-11 15:59:17 -08001850 _outDesiredFormat.mFormatID = kAudioFormatLinearPCM;
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001851
andrew2bc63a12016-01-11 15:59:17 -08001852 OSStatus err = noErr;
1853 WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(
1854 &_outDesiredFormat, &_outStreamFormat, &_renderConverter));
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001855
henrika98680422017-08-31 06:47:32 -07001856 // Try to set the IO buffer size to the desired value of 20 ms.
1857 const uint16_t kPlayBufDelayFixed = 20;
andrew2bc63a12016-01-11 15:59:17 -08001858 UInt32 bufByteCount = static_cast<UInt32>(
henrika98680422017-08-31 06:47:32 -07001859 (_outStreamFormat.mSampleRate / 1000.0) * kPlayBufDelayFixed *
andrew2bc63a12016-01-11 15:59:17 -08001860 _outStreamFormat.mChannelsPerFrame * sizeof(Float32));
1861 if (_outStreamFormat.mFramesPerPacket != 0) {
1862 if (bufByteCount % _outStreamFormat.mFramesPerPacket != 0) {
1863 bufByteCount = (static_cast<UInt32>(bufByteCount /
1864 _outStreamFormat.mFramesPerPacket) +
1865 1) *
1866 _outStreamFormat.mFramesPerPacket;
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001867 }
andrew2bc63a12016-01-11 15:59:17 -08001868 }
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001869
andrew2bc63a12016-01-11 15:59:17 -08001870 // Ensure the buffer size is within the range provided by the device.
1871 AudioObjectPropertyAddress propertyAddress = {
1872 kAudioDevicePropertyDataSource, kAudioDevicePropertyScopeOutput, 0};
1873 propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;
1874 AudioValueRange range;
1875 UInt32 size = sizeof(range);
1876 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
1877 _outputDeviceID, &propertyAddress, 0, NULL, &size, &range));
1878 if (range.mMinimum > bufByteCount) {
1879 bufByteCount = range.mMinimum;
1880 } else if (range.mMaximum < bufByteCount) {
1881 bufByteCount = range.mMaximum;
1882 }
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001883
andrew2bc63a12016-01-11 15:59:17 -08001884 propertyAddress.mSelector = kAudioDevicePropertyBufferSize;
1885 size = sizeof(bufByteCount);
1886 WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(
1887 _outputDeviceID, &propertyAddress, 0, NULL, size, &bufByteCount));
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001888
andrew2bc63a12016-01-11 15:59:17 -08001889 // Get render device latency.
1890 propertyAddress.mSelector = kAudioDevicePropertyLatency;
1891 UInt32 latency = 0;
1892 size = sizeof(UInt32);
1893 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
1894 _outputDeviceID, &propertyAddress, 0, NULL, &size, &latency));
1895 _renderLatencyUs =
1896 static_cast<uint32_t>((1.0e6 * latency) / _outStreamFormat.mSampleRate);
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001897
andrew2bc63a12016-01-11 15:59:17 -08001898 // Get render stream latency.
1899 propertyAddress.mSelector = kAudioDevicePropertyStreams;
1900 AudioStreamID stream = 0;
1901 size = sizeof(AudioStreamID);
1902 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
1903 _outputDeviceID, &propertyAddress, 0, NULL, &size, &stream));
1904 propertyAddress.mSelector = kAudioStreamPropertyLatency;
1905 size = sizeof(UInt32);
1906 latency = 0;
1907 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
1908 _outputDeviceID, &propertyAddress, 0, NULL, &size, &latency));
1909 _renderLatencyUs +=
1910 static_cast<uint32_t>((1.0e6 * latency) / _outStreamFormat.mSampleRate);
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001911
Mirko Bonadei675513b2017-11-09 11:09:25 +01001912 RTC_LOG(LS_VERBOSE) << "initial playout status: _renderDelayOffsetSamples="
1913 << _renderDelayOffsetSamples
1914 << ", _renderDelayUs=" << _renderDelayUs
1915 << ", _renderLatencyUs=" << _renderLatencyUs;
andrew2bc63a12016-01-11 15:59:17 -08001916 return 0;
braveyao@webrtc.org346a64b2015-03-21 01:05:56 +00001917}
1918
xians@google.com68efa212011-08-11 12:41:56 +00001919OSStatus AudioDeviceMac::objectListenerProc(
1920 AudioObjectID objectId,
1921 UInt32 numberAddresses,
1922 const AudioObjectPropertyAddress addresses[],
andrew2bc63a12016-01-11 15:59:17 -08001923 void* clientData) {
1924 AudioDeviceMac* ptrThis = (AudioDeviceMac*)clientData;
1925 RTC_DCHECK(ptrThis != NULL);
xians@google.com68efa212011-08-11 12:41:56 +00001926
andrew2bc63a12016-01-11 15:59:17 -08001927 ptrThis->implObjectListenerProc(objectId, numberAddresses, addresses);
xians@google.com68efa212011-08-11 12:41:56 +00001928
andrew2bc63a12016-01-11 15:59:17 -08001929 // AudioObjectPropertyListenerProc functions are supposed to return 0
1930 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00001931}
1932
1933OSStatus AudioDeviceMac::implObjectListenerProc(
1934 const AudioObjectID objectId,
1935 const UInt32 numberAddresses,
andrew2bc63a12016-01-11 15:59:17 -08001936 const AudioObjectPropertyAddress addresses[]) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01001937 RTC_LOG(LS_VERBOSE) << "AudioDeviceMac::implObjectListenerProc()";
andrew@webrtc.org6f69eb72013-06-07 17:56:50 +00001938
andrew2bc63a12016-01-11 15:59:17 -08001939 for (UInt32 i = 0; i < numberAddresses; i++) {
1940 if (addresses[i].mSelector == kAudioHardwarePropertyDevices) {
1941 HandleDeviceChange();
1942 } else if (addresses[i].mSelector == kAudioDevicePropertyStreamFormat) {
1943 HandleStreamFormatChange(objectId, addresses[i]);
1944 } else if (addresses[i].mSelector == kAudioDevicePropertyDataSource) {
1945 HandleDataSourceChange(objectId, addresses[i]);
1946 } else if (addresses[i].mSelector == kAudioDeviceProcessorOverload) {
1947 HandleProcessorOverload(addresses[i]);
xians@google.com68efa212011-08-11 12:41:56 +00001948 }
andrew2bc63a12016-01-11 15:59:17 -08001949 }
xians@google.com68efa212011-08-11 12:41:56 +00001950
andrew2bc63a12016-01-11 15:59:17 -08001951 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00001952}
1953
andrew2bc63a12016-01-11 15:59:17 -08001954int32_t AudioDeviceMac::HandleDeviceChange() {
1955 OSStatus err = noErr;
xians@google.com68efa212011-08-11 12:41:56 +00001956
Mirko Bonadei675513b2017-11-09 11:09:25 +01001957 RTC_LOG(LS_VERBOSE) << "kAudioHardwarePropertyDevices";
xians@google.com68efa212011-08-11 12:41:56 +00001958
andrew2bc63a12016-01-11 15:59:17 -08001959 // A device has changed. Check if our registered devices have been removed.
1960 // Ensure the devices have been initialized, meaning the IDs are valid.
1961 if (MicrophoneIsInitialized()) {
1962 AudioObjectPropertyAddress propertyAddress = {
1963 kAudioDevicePropertyDeviceIsAlive, kAudioDevicePropertyScopeInput, 0};
1964 UInt32 deviceIsAlive = 1;
1965 UInt32 size = sizeof(UInt32);
1966 err = AudioObjectGetPropertyData(_inputDeviceID, &propertyAddress, 0, NULL,
1967 &size, &deviceIsAlive);
xians@google.com68efa212011-08-11 12:41:56 +00001968
andrew2bc63a12016-01-11 15:59:17 -08001969 if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01001970 RTC_LOG(LS_WARNING) << "Capture device is not alive (probably removed)";
andrew2bc63a12016-01-11 15:59:17 -08001971 AtomicSet32(&_captureDeviceIsAlive, 0);
1972 _mixerManager.CloseMicrophone();
andrew2bc63a12016-01-11 15:59:17 -08001973 } else if (err != noErr) {
Mirko Bonadei72c42502017-11-09 09:33:23 +01001974 logCAMsg(rtc::LS_ERROR, "Error in AudioDeviceGetProperty()",
1975 (const char*)&err);
andrew2bc63a12016-01-11 15:59:17 -08001976 return -1;
xians@google.com68efa212011-08-11 12:41:56 +00001977 }
andrew2bc63a12016-01-11 15:59:17 -08001978 }
xians@google.com68efa212011-08-11 12:41:56 +00001979
andrew2bc63a12016-01-11 15:59:17 -08001980 if (SpeakerIsInitialized()) {
1981 AudioObjectPropertyAddress propertyAddress = {
1982 kAudioDevicePropertyDeviceIsAlive, kAudioDevicePropertyScopeOutput, 0};
1983 UInt32 deviceIsAlive = 1;
1984 UInt32 size = sizeof(UInt32);
1985 err = AudioObjectGetPropertyData(_outputDeviceID, &propertyAddress, 0, NULL,
1986 &size, &deviceIsAlive);
xians@google.com68efa212011-08-11 12:41:56 +00001987
andrew2bc63a12016-01-11 15:59:17 -08001988 if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01001989 RTC_LOG(LS_WARNING) << "Render device is not alive (probably removed)";
andrew2bc63a12016-01-11 15:59:17 -08001990 AtomicSet32(&_renderDeviceIsAlive, 0);
1991 _mixerManager.CloseSpeaker();
andrew2bc63a12016-01-11 15:59:17 -08001992 } else if (err != noErr) {
Mirko Bonadei72c42502017-11-09 09:33:23 +01001993 logCAMsg(rtc::LS_ERROR, "Error in AudioDeviceGetProperty()",
1994 (const char*)&err);
andrew2bc63a12016-01-11 15:59:17 -08001995 return -1;
xians@google.com68efa212011-08-11 12:41:56 +00001996 }
andrew2bc63a12016-01-11 15:59:17 -08001997 }
xians@google.com68efa212011-08-11 12:41:56 +00001998
andrew2bc63a12016-01-11 15:59:17 -08001999 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002000}
2001
pbos@webrtc.org25509882013-04-09 10:30:35 +00002002int32_t AudioDeviceMac::HandleStreamFormatChange(
xians@google.com68efa212011-08-11 12:41:56 +00002003 const AudioObjectID objectId,
andrew2bc63a12016-01-11 15:59:17 -08002004 const AudioObjectPropertyAddress propertyAddress) {
2005 OSStatus err = noErr;
xians@google.com68efa212011-08-11 12:41:56 +00002006
Mirko Bonadei675513b2017-11-09 11:09:25 +01002007 RTC_LOG(LS_VERBOSE) << "Stream format changed";
xians@google.com68efa212011-08-11 12:41:56 +00002008
andrew2bc63a12016-01-11 15:59:17 -08002009 if (objectId != _inputDeviceID && objectId != _outputDeviceID) {
xians@google.com68efa212011-08-11 12:41:56 +00002010 return 0;
andrew2bc63a12016-01-11 15:59:17 -08002011 }
2012
2013 // Get the new device format
2014 AudioStreamBasicDescription streamFormat;
2015 UInt32 size = sizeof(streamFormat);
2016 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
2017 objectId, &propertyAddress, 0, NULL, &size, &streamFormat));
2018
2019 if (streamFormat.mFormatID != kAudioFormatLinearPCM) {
Mirko Bonadei72c42502017-11-09 09:33:23 +01002020 logCAMsg(rtc::LS_ERROR, "Unacceptable input stream format -> mFormatID",
andrew2bc63a12016-01-11 15:59:17 -08002021 (const char*)&streamFormat.mFormatID);
2022 return -1;
2023 }
2024
2025 if (streamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01002026 RTC_LOG(LS_ERROR) << "Too many channels on device (mChannelsPerFrame = "
2027 << streamFormat.mChannelsPerFrame << ")";
andrew2bc63a12016-01-11 15:59:17 -08002028 return -1;
2029 }
2030
Peter Hanspersd9317052017-10-06 14:13:51 +02002031 if (_ptrAudioBuffer && streamFormat.mChannelsPerFrame != _recChannels) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01002032 RTC_LOG(LS_ERROR) << "Changing channels not supported (mChannelsPerFrame = "
2033 << streamFormat.mChannelsPerFrame << ")";
Peter Hanspersd9317052017-10-06 14:13:51 +02002034 return -1;
2035 }
2036
Mirko Bonadei675513b2017-11-09 11:09:25 +01002037 RTC_LOG(LS_VERBOSE) << "Stream format:";
2038 RTC_LOG(LS_VERBOSE) << "mSampleRate = " << streamFormat.mSampleRate
2039 << ", mChannelsPerFrame = "
2040 << streamFormat.mChannelsPerFrame;
2041 RTC_LOG(LS_VERBOSE) << "mBytesPerPacket = " << streamFormat.mBytesPerPacket
2042 << ", mFramesPerPacket = "
2043 << streamFormat.mFramesPerPacket;
2044 RTC_LOG(LS_VERBOSE) << "mBytesPerFrame = " << streamFormat.mBytesPerFrame
2045 << ", mBitsPerChannel = " << streamFormat.mBitsPerChannel;
2046 RTC_LOG(LS_VERBOSE) << "mFormatFlags = " << streamFormat.mFormatFlags;
Mirko Bonadei72c42502017-11-09 09:33:23 +01002047 logCAMsg(rtc::LS_VERBOSE, "mFormatID", (const char*)&streamFormat.mFormatID);
andrew2bc63a12016-01-11 15:59:17 -08002048
2049 if (propertyAddress.mScope == kAudioDevicePropertyScopeInput) {
2050 const int io_block_size_samples = streamFormat.mChannelsPerFrame *
2051 streamFormat.mSampleRate / 100 *
2052 N_BLOCKS_IO;
2053 if (io_block_size_samples > _captureBufSizeSamples) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01002054 RTC_LOG(LS_ERROR) << "Input IO block size (" << io_block_size_samples
2055 << ") is larger than ring buffer ("
2056 << _captureBufSizeSamples << ")";
andrew2bc63a12016-01-11 15:59:17 -08002057 return -1;
2058 }
2059
2060 memcpy(&_inStreamFormat, &streamFormat, sizeof(streamFormat));
2061
2062 if (_inStreamFormat.mChannelsPerFrame >= 2 && (_recChannels == 2)) {
2063 _inDesiredFormat.mChannelsPerFrame = 2;
2064 } else {
2065 // Disable stereo recording when we only have one channel on the device.
2066 _inDesiredFormat.mChannelsPerFrame = 1;
2067 _recChannels = 1;
Mirko Bonadei675513b2017-11-09 11:09:25 +01002068 RTC_LOG(LS_VERBOSE) << "Stereo recording unavailable on this device";
andrew2bc63a12016-01-11 15:59:17 -08002069 }
2070
andrew2bc63a12016-01-11 15:59:17 -08002071 // Recreate the converter with the new format
2072 // TODO(xians): make this thread safe
2073 WEBRTC_CA_RETURN_ON_ERR(AudioConverterDispose(_captureConverter));
2074
2075 WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&streamFormat, &_inDesiredFormat,
2076 &_captureConverter));
2077 } else {
2078 memcpy(&_outStreamFormat, &streamFormat, sizeof(streamFormat));
2079
2080 // Our preferred format to work with
2081 if (_outStreamFormat.mChannelsPerFrame < 2) {
2082 _playChannels = 1;
Mirko Bonadei675513b2017-11-09 11:09:25 +01002083 RTC_LOG(LS_VERBOSE) << "Stereo playout unavailable on this device";
andrew2bc63a12016-01-11 15:59:17 -08002084 }
2085 WEBRTC_CA_RETURN_ON_ERR(SetDesiredPlayoutFormat());
2086 }
2087 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002088}
2089
pbos@webrtc.org25509882013-04-09 10:30:35 +00002090int32_t AudioDeviceMac::HandleDataSourceChange(
xians@google.com68efa212011-08-11 12:41:56 +00002091 const AudioObjectID objectId,
andrew2bc63a12016-01-11 15:59:17 -08002092 const AudioObjectPropertyAddress propertyAddress) {
2093 OSStatus err = noErr;
xians@google.com68efa212011-08-11 12:41:56 +00002094
andrew2bc63a12016-01-11 15:59:17 -08002095 if (_macBookPro &&
2096 propertyAddress.mScope == kAudioDevicePropertyScopeOutput) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01002097 RTC_LOG(LS_VERBOSE) << "Data source changed";
xians@google.com68efa212011-08-11 12:41:56 +00002098
andrew2bc63a12016-01-11 15:59:17 -08002099 _macBookProPanRight = false;
2100 UInt32 dataSource = 0;
2101 UInt32 size = sizeof(UInt32);
2102 WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
2103 objectId, &propertyAddress, 0, NULL, &size, &dataSource));
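    // 'ispk' is the data-source code the HAL reports for the built-in
    // internal speakers.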
2104 if (dataSource == 'ispk') {
2105 _macBookProPanRight = true;
Mirko Bonadei675513b2017-11-09 11:09:25 +01002106 RTC_LOG(LS_VERBOSE)
sazab4aa4eb2017-07-19 01:12:36 -07002107 << "MacBook Pro using internal speakers; stereo panning right";
andrew2bc63a12016-01-11 15:59:17 -08002108 } else {
Mirko Bonadei675513b2017-11-09 11:09:25 +01002109 RTC_LOG(LS_VERBOSE) << "MacBook Pro not using internal speakers";
xians@google.com68efa212011-08-11 12:41:56 +00002110 }
andrew2bc63a12016-01-11 15:59:17 -08002111 }
xians@google.com68efa212011-08-11 12:41:56 +00002112
andrew2bc63a12016-01-11 15:59:17 -08002113 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002114}
pbos@webrtc.org25509882013-04-09 10:30:35 +00002115int32_t AudioDeviceMac::HandleProcessorOverload(
andrew2bc63a12016-01-11 15:59:17 -08002116 const AudioObjectPropertyAddress propertyAddress) {
2117 // TODO(xians): we probably want to notify the user in some way of the
2118 // overload. However, the Windows interpretations of these errors seem to
2119 // be more severe than what ProcessorOverload is thrown for.
2120 //
2121 // We don't log the notification, as it's sent from the HAL's IO thread. We
2122 // don't want to slow it down even further.
2123 if (propertyAddress.mScope == kAudioDevicePropertyScopeInput) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01002124 // RTC_LOG(LS_WARNING) << "Capture processor overload";
andrew2bc63a12016-01-11 15:59:17 -08002125 //_callback->ProblemIsReported(
2126 // SndCardStreamObserver::ERecordingProblem);
2127 } else {
Mirko Bonadei675513b2017-11-09 11:09:25 +01002128 // RTC_LOG(LS_WARNING) << "Render processor overload";
andrew2bc63a12016-01-11 15:59:17 -08002129 //_callback->ProblemIsReported(
2130 // SndCardStreamObserver::EPlaybackProblem);
2131 }
xians@google.com68efa212011-08-11 12:41:56 +00002132
andrew2bc63a12016-01-11 15:59:17 -08002133 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002134}
2135
2136// ============================================================================
2137// Thread Methods
2138// ============================================================================
2139
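// The static callbacks below are registered with CoreAudio and the
// AudioConverter API; each one simply forwards to the matching instance
// method through the clientData/userData pointer.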
andrew2bc63a12016-01-11 15:59:17 -08002140OSStatus AudioDeviceMac::deviceIOProc(AudioDeviceID,
2141 const AudioTimeStamp*,
xians@google.com68efa212011-08-11 12:41:56 +00002142 const AudioBufferList* inputData,
2143 const AudioTimeStamp* inputTime,
2144 AudioBufferList* outputData,
2145 const AudioTimeStamp* outputTime,
andrew2bc63a12016-01-11 15:59:17 -08002146 void* clientData) {
2147 AudioDeviceMac* ptrThis = (AudioDeviceMac*)clientData;
2148 RTC_DCHECK(ptrThis != NULL);
xians@google.com68efa212011-08-11 12:41:56 +00002149
andrew2bc63a12016-01-11 15:59:17 -08002150 ptrThis->implDeviceIOProc(inputData, inputTime, outputData, outputTime);
xians@google.com68efa212011-08-11 12:41:56 +00002151
andrew2bc63a12016-01-11 15:59:17 -08002152 // AudioDeviceIOProc functions are supposed to return 0
2153 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002154}
2155
2156OSStatus AudioDeviceMac::outConverterProc(AudioConverterRef,
andrew2bc63a12016-01-11 15:59:17 -08002157 UInt32* numberDataPackets,
2158 AudioBufferList* data,
2159 AudioStreamPacketDescription**,
2160 void* userData) {
2161 AudioDeviceMac* ptrThis = (AudioDeviceMac*)userData;
2162 RTC_DCHECK(ptrThis != NULL);
xians@google.com68efa212011-08-11 12:41:56 +00002163
andrew2bc63a12016-01-11 15:59:17 -08002164 return ptrThis->implOutConverterProc(numberDataPackets, data);
xians@google.com68efa212011-08-11 12:41:56 +00002165}
2166
andrew2bc63a12016-01-11 15:59:17 -08002167OSStatus AudioDeviceMac::inDeviceIOProc(AudioDeviceID,
2168 const AudioTimeStamp*,
xians@google.com68efa212011-08-11 12:41:56 +00002169 const AudioBufferList* inputData,
2170 const AudioTimeStamp* inputTime,
2171 AudioBufferList*,
andrew2bc63a12016-01-11 15:59:17 -08002172 const AudioTimeStamp*,
2173 void* clientData) {
2174 AudioDeviceMac* ptrThis = (AudioDeviceMac*)clientData;
2175 RTC_DCHECK(ptrThis != NULL);
xians@google.com68efa212011-08-11 12:41:56 +00002176
andrew2bc63a12016-01-11 15:59:17 -08002177 ptrThis->implInDeviceIOProc(inputData, inputTime);
xians@google.com68efa212011-08-11 12:41:56 +00002178
andrew2bc63a12016-01-11 15:59:17 -08002179 // AudioDeviceIOProc functions are supposed to return 0
2180 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002181}
2182
2183OSStatus AudioDeviceMac::inConverterProc(
2184 AudioConverterRef,
andrew2bc63a12016-01-11 15:59:17 -08002185 UInt32* numberDataPackets,
2186 AudioBufferList* data,
2187 AudioStreamPacketDescription** /*dataPacketDescription*/,
2188 void* userData) {
2189 AudioDeviceMac* ptrThis = static_cast<AudioDeviceMac*>(userData);
2190 RTC_DCHECK(ptrThis != NULL);
xians@google.com68efa212011-08-11 12:41:56 +00002191
andrew2bc63a12016-01-11 15:59:17 -08002192 return ptrThis->implInConverterProc(numberDataPackets, data);
xians@google.com68efa212011-08-11 12:41:56 +00002193}
2194
andrew2bc63a12016-01-11 15:59:17 -08002195OSStatus AudioDeviceMac::implDeviceIOProc(const AudioBufferList* inputData,
2196 const AudioTimeStamp* inputTime,
2197 AudioBufferList* outputData,
2198 const AudioTimeStamp* outputTime) {
2199 OSStatus err = noErr;
2200 UInt64 outputTimeNs = AudioConvertHostTimeToNanos(outputTime->mHostTime);
2201 UInt64 nowNs = AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());
xians@google.com68efa212011-08-11 12:41:56 +00002202
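  // With a shared device, this single IOProc also feeds the capture path.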
andrew2bc63a12016-01-11 15:59:17 -08002203 if (!_twoDevices && _recording) {
2204 implInDeviceIOProc(inputData, inputTime);
2205 }
xians@google.com68efa212011-08-11 12:41:56 +00002206
andrew2bc63a12016-01-11 15:59:17 -08002207 // Check if we should close down audio device
2208 // Double-checked locking optimization to remove locking overhead
2209 if (_doStop) {
2210 _critSect.Enter();
2211 if (_doStop) {
2212 if (_twoDevices || (!_recording && !_playing)) {
2213 // In the case of a shared device, the single driving ioProc
2214 // is stopped here
2215 WEBRTC_CA_LOG_ERR(AudioDeviceStop(_outputDeviceID, _deviceIOProcID));
2216 WEBRTC_CA_LOG_WARN(
2217 AudioDeviceDestroyIOProcID(_outputDeviceID, _deviceIOProcID));
2218 if (err == noErr) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01002219 RTC_LOG(LS_VERBOSE) << "Playout or shared device stopped";
xians@google.com68efa212011-08-11 12:41:56 +00002220 }
andrew2bc63a12016-01-11 15:59:17 -08002221 }
2222
2223 _doStop = false;
2224 _stopEvent.Set();
2225 _critSect.Leave();
2226 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002227 }
andrew2bc63a12016-01-11 15:59:17 -08002228 _critSect.Leave();
2229 }
xians@google.com68efa212011-08-11 12:41:56 +00002230
andrew2bc63a12016-01-11 15:59:17 -08002231 if (!_playing) {
2232 // This can be the case when a shared device is capturing but not
2233 // rendering. We allow the checks above before returning to avoid a
2234 // timeout when capturing is stopped.
xians@google.com68efa212011-08-11 12:41:56 +00002235 return 0;
andrew2bc63a12016-01-11 15:59:17 -08002236 }
2237
2238 RTC_DCHECK(_outStreamFormat.mBytesPerFrame != 0);
2239 UInt32 size =
2240 outputData->mBuffers->mDataByteSize / _outStreamFormat.mBytesPerFrame;
2241
2242 // TODO(xians): signal an error somehow?
2243 err = AudioConverterFillComplexBuffer(_renderConverter, outConverterProc,
2244 this, &size, outputData, NULL);
2245 if (err != noErr) {
2246 if (err == 1) {
2247 // This is our own error.
Mirko Bonadei675513b2017-11-09 11:09:25 +01002248 RTC_LOG(LS_ERROR) << "Error in AudioConverterFillComplexBuffer()";
andrew2bc63a12016-01-11 15:59:17 -08002249 return 1;
2250 } else {
Mirko Bonadei72c42502017-11-09 09:33:23 +01002251 logCAMsg(rtc::LS_ERROR, "Error in AudioConverterFillComplexBuffer()",
2252 (const char*)&err);
andrew2bc63a12016-01-11 15:59:17 -08002253 return 1;
2254 }
2255 }
2256
2257 PaRingBufferSize bufSizeSamples =
2258 PaUtil_GetRingBufferReadAvailable(_paRenderBuffer);
2259
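  // Render delay = time until this output buffer is due at the hardware plus
  // the audio already queued in the render ring buffer, in microseconds.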
2260 int32_t renderDelayUs =
2261 static_cast<int32_t>(1e-3 * (outputTimeNs - nowNs) + 0.5);
2262 renderDelayUs += static_cast<int32_t>(
2263 (1.0e6 * bufSizeSamples) / _outDesiredFormat.mChannelsPerFrame /
2264 _outDesiredFormat.mSampleRate +
2265 0.5);
2266
2267 AtomicSet32(&_renderDelayUs, renderDelayUs);
2268
2269 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002270}
2271
andrew2bc63a12016-01-11 15:59:17 -08002272OSStatus AudioDeviceMac::implOutConverterProc(UInt32* numberDataPackets,
2273 AudioBufferList* data) {
henrikg91d6ede2015-09-17 00:24:34 -07002274 RTC_DCHECK(data->mNumberBuffers == 1);
andrew2bc63a12016-01-11 15:59:17 -08002275 PaRingBufferSize numSamples =
2276 *numberDataPackets * _outDesiredFormat.mChannelsPerFrame;
xians@google.com68efa212011-08-11 12:41:56 +00002277
andrew2bc63a12016-01-11 15:59:17 -08002278 data->mBuffers->mNumberChannels = _outDesiredFormat.mChannelsPerFrame;
2279 // Always give the converter as much as it wants, zero padding as required.
2280 data->mBuffers->mDataByteSize =
2281 *numberDataPackets * _outDesiredFormat.mBytesPerPacket;
2282 data->mBuffers->mData = _renderConvertData;
2283 memset(_renderConvertData, 0, sizeof(_renderConvertData));
xians@google.com68efa212011-08-11 12:41:56 +00002284
andrew2bc63a12016-01-11 15:59:17 -08002285 PaUtil_ReadRingBuffer(_paRenderBuffer, _renderConvertData, numSamples);
xians@google.com68efa212011-08-11 12:41:56 +00002286
andrew2bc63a12016-01-11 15:59:17 -08002287 kern_return_t kernErr = semaphore_signal_all(_renderSemaphore);
2288 if (kernErr != KERN_SUCCESS) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01002289 RTC_LOG(LS_ERROR) << "semaphore_signal_all() error: " << kernErr;
andrew2bc63a12016-01-11 15:59:17 -08002290 return 1;
2291 }
2292
2293 return 0;
2294}
2295
2296OSStatus AudioDeviceMac::implInDeviceIOProc(const AudioBufferList* inputData,
2297 const AudioTimeStamp* inputTime) {
2298 OSStatus err = noErr;
2299 UInt64 inputTimeNs = AudioConvertHostTimeToNanos(inputTime->mHostTime);
2300 UInt64 nowNs = AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());
2301
2302 // Check if we should close down audio device
2303 // Double-checked locking optimization to remove locking overhead
2304 if (_doStopRec) {
2305 _critSect.Enter();
2306 if (_doStopRec) {
2307 // This will be signalled only when a shared device is not in use.
2308 WEBRTC_CA_LOG_ERR(AudioDeviceStop(_inputDeviceID, _inDeviceIOProcID));
2309 WEBRTC_CA_LOG_WARN(
2310 AudioDeviceDestroyIOProcID(_inputDeviceID, _inDeviceIOProcID));
2311 if (err == noErr) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01002312 RTC_LOG(LS_VERBOSE) << "Recording device stopped";
andrew2bc63a12016-01-11 15:59:17 -08002313 }
2314
2315 _doStopRec = false;
2316 _stopEventRec.Set();
2317 _critSect.Leave();
2318 return 0;
2319 }
2320 _critSect.Leave();
2321 }
2322
2323 if (!_recording) {
2324 // Allow above checks to avoid a timeout on stopping capture.
2325 return 0;
2326 }
2327
2328 PaRingBufferSize bufSizeSamples =
2329 PaUtil_GetRingBufferReadAvailable(_paCaptureBuffer);
2330
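  // Capture delay = time elapsed since this input buffer was recorded plus
  // the samples still waiting in the capture ring buffer, in microseconds.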
2331 int32_t captureDelayUs =
2332 static_cast<int32_t>(1e-3 * (nowNs - inputTimeNs) + 0.5);
2333 captureDelayUs += static_cast<int32_t>((1.0e6 * bufSizeSamples) /
2334 _inStreamFormat.mChannelsPerFrame /
2335 _inStreamFormat.mSampleRate +
2336 0.5);
2337
2338 AtomicSet32(&_captureDelayUs, captureDelayUs);
2339
2340 RTC_DCHECK(inputData->mNumberBuffers == 1);
2341 PaRingBufferSize numSamples = inputData->mBuffers->mDataByteSize *
2342 _inStreamFormat.mChannelsPerFrame /
2343 _inStreamFormat.mBytesPerPacket;
2344 PaUtil_WriteRingBuffer(_paCaptureBuffer, inputData->mBuffers->mData,
2345 numSamples);
2346
2347 kern_return_t kernErr = semaphore_signal_all(_captureSemaphore);
2348 if (kernErr != KERN_SUCCESS) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01002349 RTC_LOG(LS_ERROR) << "semaphore_signal_all() error: " << kernErr;
andrew2bc63a12016-01-11 15:59:17 -08002350 }
2351
2352 return err;
2353}
2354
2355OSStatus AudioDeviceMac::implInConverterProc(UInt32* numberDataPackets,
2356 AudioBufferList* data) {
2357 RTC_DCHECK(data->mNumberBuffers == 1);
2358 PaRingBufferSize numSamples =
2359 *numberDataPackets * _inStreamFormat.mChannelsPerFrame;
2360
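  // Block until the converter's request can be satisfied, waking periodically
  // so a removed capture device can be detected.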
2361 while (PaUtil_GetRingBufferReadAvailable(_paCaptureBuffer) < numSamples) {
2362 mach_timespec_t timeout;
2363 timeout.tv_sec = 0;
2364 timeout.tv_nsec = TIMER_PERIOD_MS;
2365
2366 kern_return_t kernErr = semaphore_timedwait(_captureSemaphore, timeout);
2367 if (kernErr == KERN_OPERATION_TIMED_OUT) {
2368 int32_t signal = AtomicGet32(&_captureDeviceIsAlive);
2369 if (signal == 0) {
2370 // The capture device is no longer alive; stop the worker thread.
2371 *numberDataPackets = 0;
xians@google.com68efa212011-08-11 12:41:56 +00002372 return 1;
andrew2bc63a12016-01-11 15:59:17 -08002373 }
2374 } else if (kernErr != KERN_SUCCESS) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01002375 RTC_LOG(LS_ERROR) << "semaphore_wait() error: " << kernErr;
xians@google.com68efa212011-08-11 12:41:56 +00002376 }
andrew2bc63a12016-01-11 15:59:17 -08002377 }
xians@google.com68efa212011-08-11 12:41:56 +00002378
andrew2bc63a12016-01-11 15:59:17 -08002379 // Pass the read pointer directly to the converter to avoid a memcpy.
2380 void* dummyPtr;
2381 PaRingBufferSize dummySize;
2382 PaUtil_GetRingBufferReadRegions(_paCaptureBuffer, numSamples,
2383 &data->mBuffers->mData, &numSamples,
2384 &dummyPtr, &dummySize);
2385 PaUtil_AdvanceRingBufferReadIndex(_paCaptureBuffer, numSamples);
2386
2387 data->mBuffers->mNumberChannels = _inStreamFormat.mChannelsPerFrame;
2388 *numberDataPackets = numSamples / _inStreamFormat.mChannelsPerFrame;
2389 data->mBuffers->mDataByteSize =
2390 *numberDataPackets * _inStreamFormat.mBytesPerPacket;
2391
2392 return 0;
xians@google.com68efa212011-08-11 12:41:56 +00002393}
2394
andrew2bc63a12016-01-11 15:59:17 -08002395bool AudioDeviceMac::RunRender(void* ptrThis) {
2396 return static_cast<AudioDeviceMac*>(ptrThis)->RenderWorkerThread();
xians@google.com68efa212011-08-11 12:41:56 +00002397}
2398
andrew2bc63a12016-01-11 15:59:17 -08002399bool AudioDeviceMac::RenderWorkerThread() {
2400 PaRingBufferSize numSamples =
2401 ENGINE_PLAY_BUF_SIZE_IN_SAMPLES * _outDesiredFormat.mChannelsPerFrame;
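  // Wait until the render ring buffer has room for another playout block
  // (beyond the delay offset), waking periodically to detect device removal.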
2402 while (PaUtil_GetRingBufferWriteAvailable(_paRenderBuffer) -
2403 _renderDelayOffsetSamples <
2404 numSamples) {
2405 mach_timespec_t timeout;
2406 timeout.tv_sec = 0;
2407 timeout.tv_nsec = TIMER_PERIOD_MS;
xians@google.com68efa212011-08-11 12:41:56 +00002408
andrew2bc63a12016-01-11 15:59:17 -08002409 kern_return_t kernErr = semaphore_timedwait(_renderSemaphore, timeout);
2410 if (kernErr == KERN_OPERATION_TIMED_OUT) {
2411 int32_t signal = AtomicGet32(&_renderDeviceIsAlive);
2412 if (signal == 0) {
2413 // The render device is no longer alive; stop the worker thread.
xians@google.com68efa212011-08-11 12:41:56 +00002414 return false;
andrew2bc63a12016-01-11 15:59:17 -08002415 }
2416 } else if (kernErr != KERN_SUCCESS) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01002417 RTC_LOG(LS_ERROR) << "semaphore_timedwait() error: " << kernErr;
xians@google.com68efa212011-08-11 12:41:56 +00002418 }
andrew2bc63a12016-01-11 15:59:17 -08002419 }
xians@google.com68efa212011-08-11 12:41:56 +00002420
andrew2bc63a12016-01-11 15:59:17 -08002421 int8_t playBuffer[4 * ENGINE_PLAY_BUF_SIZE_IN_SAMPLES];
xians@google.com68efa212011-08-11 12:41:56 +00002422
andrew2bc63a12016-01-11 15:59:17 -08002423 if (!_ptrAudioBuffer) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01002424 RTC_LOG(LS_ERROR) << "render AudioBuffer is invalid";
andrew2bc63a12016-01-11 15:59:17 -08002425 return false;
2426 }
2427
2428 // Ask for new PCM data to be played out using the AudioDeviceBuffer.
2429 uint32_t nSamples =
2430 _ptrAudioBuffer->RequestPlayoutData(ENGINE_PLAY_BUF_SIZE_IN_SAMPLES);
2431
2432 nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer);
2433 if (nSamples != ENGINE_PLAY_BUF_SIZE_IN_SAMPLES) {
Mirko Bonadei675513b2017-11-09 11:09:25 +01002434 RTC_LOG(LS_ERROR) << "invalid number of output samples (" << nSamples << ")";
andrew2bc63a12016-01-11 15:59:17 -08002435 }
2436
2437 uint32_t nOutSamples = nSamples * _outDesiredFormat.mChannelsPerFrame;
2438
2439 SInt16* pPlayBuffer = (SInt16*)&playBuffer;
2440 if (_macBookProPanRight && (_playChannels == 2)) {
2441 // Mix entirely into the right channel and zero the left channel.
2442 SInt32 sampleInt32 = 0;
2443 for (uint32_t sampleIdx = 0; sampleIdx < nOutSamples; sampleIdx += 2) {
2444 sampleInt32 = pPlayBuffer[sampleIdx];
2445 sampleInt32 += pPlayBuffer[sampleIdx + 1];
2446 sampleInt32 /= 2;
2447
2448 if (sampleInt32 > 32767) {
2449 sampleInt32 = 32767;
2450 } else if (sampleInt32 < -32768) {
2451 sampleInt32 = -32768;
2452 }
2453
2454 pPlayBuffer[sampleIdx] = 0;
2455 pPlayBuffer[sampleIdx + 1] = static_cast<SInt16>(sampleInt32);
xians@google.com68efa212011-08-11 12:41:56 +00002456 }
andrew2bc63a12016-01-11 15:59:17 -08002457 }
xians@google.com68efa212011-08-11 12:41:56 +00002458
andrew2bc63a12016-01-11 15:59:17 -08002459 PaUtil_WriteRingBuffer(_paRenderBuffer, pPlayBuffer, nOutSamples);
xians@google.com68efa212011-08-11 12:41:56 +00002460
andrew2bc63a12016-01-11 15:59:17 -08002461 return true;
xians@google.com68efa212011-08-11 12:41:56 +00002462}
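
// Illustrative sketch (hypothetical helper, not called by this file): the
// MacBook Pro pan-right fix-up above, factored out. Each interleaved stereo
// pair is averaged into the right channel and the left channel is silenced,
// with the result clamped to the 16-bit range.
namespace {
void PanStereoToRightChannel(SInt16* interleaved, uint32_t numSamples) {
  for (uint32_t i = 0; i + 1 < numSamples; i += 2) {
    SInt32 mixed = (static_cast<SInt32>(interleaved[i]) +
                    static_cast<SInt32>(interleaved[i + 1])) /
                   2;
    if (mixed > 32767) {
      mixed = 32767;
    } else if (mixed < -32768) {
      mixed = -32768;
    }
    interleaved[i] = 0;                               // Left channel muted.
    interleaved[i + 1] = static_cast<SInt16>(mixed);  // Right carries the mix.
  }
}
}  // namespace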

bool AudioDeviceMac::RunCapture(void* ptrThis) {
  return static_cast<AudioDeviceMac*>(ptrThis)->CaptureWorkerThread();
}
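
// Illustrative sketch (assumption, not part of this file): RunRender() and
// RunCapture() are bool-returning trampolines; a thread runner of roughly
// this shape calls them repeatedly and exits once they return false (e.g.
// when the device is no longer alive). The runner below is hypothetical.
namespace {
void RunWorkerLoop(bool (*run)(void*), void* obj) {
  // Keep pumping audio until the run function asks the thread to stop.
  while (run(obj)) {
  }
}
}  // namespace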

bool AudioDeviceMac::CaptureWorkerThread() {
  OSStatus err = noErr;
  UInt32 noRecSamples =
      ENGINE_REC_BUF_SIZE_IN_SAMPLES * _inDesiredFormat.mChannelsPerFrame;
  SInt16 recordBuffer[noRecSamples];
  UInt32 size = ENGINE_REC_BUF_SIZE_IN_SAMPLES;

  AudioBufferList engineBuffer;
  engineBuffer.mNumberBuffers = 1;  // Interleaved channels.
  engineBuffer.mBuffers->mNumberChannels = _inDesiredFormat.mChannelsPerFrame;
  engineBuffer.mBuffers->mDataByteSize =
      _inDesiredFormat.mBytesPerPacket * noRecSamples;
  engineBuffer.mBuffers->mData = recordBuffer;

  err = AudioConverterFillComplexBuffer(_captureConverter, inConverterProc,
                                        this, &size, &engineBuffer, NULL);
  if (err != noErr) {
    if (err == 1) {
      // This is our own error code; the converter input proc has asked the
      // worker thread to stop.
      return false;
    } else {
      logCAMsg(rtc::LS_ERROR, "Error in AudioConverterFillComplexBuffer()",
               (const char*)&err);
      return false;
    }
  }

  // TODO(xians): what if the returned size is incorrect?
  if (size == ENGINE_REC_BUF_SIZE_IN_SAMPLES) {
    uint32_t currentMicLevel(0);
    uint32_t newMicLevel(0);
    int32_t msecOnPlaySide;
    int32_t msecOnRecordSide;

    int32_t captureDelayUs = AtomicGet32(&_captureDelayUs);
    int32_t renderDelayUs = AtomicGet32(&_renderDelayUs);

    msecOnPlaySide =
        static_cast<int32_t>(1e-3 * (renderDelayUs + _renderLatencyUs) + 0.5);
    msecOnRecordSide =
        static_cast<int32_t>(1e-3 * (captureDelayUs + _captureLatencyUs) + 0.5);

    if (!_ptrAudioBuffer) {
      RTC_LOG(LS_ERROR) << "capture AudioBuffer is invalid";
      return false;
    }

    // Store the recorded buffer. No action is taken if the number of recorded
    // samples is not a full buffer.
    _ptrAudioBuffer->SetRecordedBuffer((int8_t*)&recordBuffer, (uint32_t)size);

    if (AGC()) {
      // Use mod to ensure we check the volume on the first pass.
      if (get_mic_volume_counter_ms_ % kGetMicVolumeIntervalMs == 0) {
        get_mic_volume_counter_ms_ = 0;
        // Store the current mic level in the audio buffer if AGC is enabled.
        if (MicrophoneVolume(currentMicLevel) == 0) {
          // This call does not affect the actual microphone volume.
          _ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);
        }
      }
      get_mic_volume_counter_ms_ += kBufferSizeMs;
    }

    _ptrAudioBuffer->SetVQEData(msecOnPlaySide, msecOnRecordSide, 0);

    _ptrAudioBuffer->SetTypingStatus(KeyPressed());

    // Deliver the recorded samples (at the specified sample rate, mic level,
    // etc.) to the observer via the registered callback.
    _ptrAudioBuffer->DeliverRecordedData();

    if (AGC()) {
      newMicLevel = _ptrAudioBuffer->NewMicLevel();
      if (newMicLevel != 0) {
        // The VQE delivers a non-zero microphone level only when a change is
        // needed. Apply the new level (received from the observer as the
        // return value of the callback).
        RTC_LOG(LS_VERBOSE) << "AGC change of volume: old=" << currentMicLevel
                            << " => new=" << newMicLevel;
        if (SetMicrophoneVolume(newMicLevel) == -1) {
          RTC_LOG(LS_WARNING)
              << "the required modification of the microphone volume failed";
        }
      }
    }
  }

  return true;
}
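
// Illustrative sketch (hypothetical helper): the microsecond-to-millisecond
// conversion used for the VQE delay estimates above, i.e.
// round((delayUs + latencyUs) / 1000).
namespace {
int32_t TotalDelayMs(int32_t delayUs, int32_t latencyUs) {
  return static_cast<int32_t>(1e-3 * (delayUs + latencyUs) + 0.5);
}
}  // namespace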

bool AudioDeviceMac::KeyPressed() {
  bool key_down = false;
  // Loop through all Mac virtual key constant values.
  for (unsigned int key_index = 0; key_index < arraysize(prev_key_state_);
       ++key_index) {
    bool keyState =
        CGEventSourceKeyState(kCGEventSourceStateHIDSystemState, key_index);
    // A false -> true change in the keymap means a key was pressed.
    key_down |= (keyState && !prev_key_state_[key_index]);
    // Save the current state.
    prev_key_state_[key_index] = keyState;
  }
  return key_down;
}
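
// Illustrative sketch (hypothetical, not part of the class): the same
// false -> true edge detection for a single virtual key code. The HID system
// state is queried so key presses are observed regardless of which
// application currently has focus.
namespace {
bool KeyJustPressed(CGKeyCode keyCode, bool* previousState) {
  const bool isDown =
      CGEventSourceKeyState(kCGEventSourceStateHIDSystemState, keyCode);
  const bool edge = isDown && !*previousState;
  *previousState = isDown;
  return edge;
}
}  // namespace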
}  // namespace webrtc