/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "trace.h"
#include "critical_section_wrapper.h"
#include "audio_device_buffer.h"
#include "audio_device_utility.h"
#include "audio_device_config.h"

#include <stdlib.h>
#include <string.h>
#include <cassert>

#include "signal_processing_library.h"

namespace webrtc {

// ----------------------------------------------------------------------------
// ctor
// ----------------------------------------------------------------------------

AudioDeviceBuffer::AudioDeviceBuffer() :
    _id(-1),
    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _critSectCb(*CriticalSectionWrapper::CreateCriticalSection()),
    _ptrCbAudioTransport(NULL),
    _recSampleRate(0),
    _playSampleRate(0),
    _recChannels(0),
    _playChannels(0),
    _recChannel(AudioDeviceModule::kChannelBoth),
    _recBytesPerSample(0),
    _playBytesPerSample(0),
    _recSamples(0),
    _recSize(0),
    _playSamples(0),
    _playSize(0),
    _recFile(*FileWrapper::Create()),
    _playFile(*FileWrapper::Create()),
    _currentMicLevel(0),
    _newMicLevel(0),
    _playDelayMS(0),
    _recDelayMS(0),
    _clockDrift(0),
    _measureDelay(false),  // should always be 'false' (EXPERIMENTAL)
    _pulseList(),
    _lastPulseTime(AudioDeviceUtility::GetTimeInMS())
{
    // valid ID will be set later by SetId, use -1 for now
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s created", __FUNCTION__);
    memset(_recBuffer, 0, kMaxBufferSizeBytes);
    memset(_playBuffer, 0, kMaxBufferSizeBytes);
}

// ----------------------------------------------------------------------------
// dtor
// ----------------------------------------------------------------------------

AudioDeviceBuffer::~AudioDeviceBuffer()
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed", __FUNCTION__);
    {
        CriticalSectionScoped lock(&_critSect);

        _recFile.Flush();
        _recFile.CloseFile();
        delete &_recFile;

        _playFile.Flush();
        _playFile.CloseFile();
        delete &_playFile;

        _EmptyList();
    }

    delete &_critSect;
    delete &_critSectCb;
}

// ----------------------------------------------------------------------------
// SetId
// ----------------------------------------------------------------------------

void AudioDeviceBuffer::SetId(WebRtc_UWord32 id)
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "AudioDeviceBuffer::SetId(id=%d)", id);
    _id = id;
}

// ----------------------------------------------------------------------------
// RegisterAudioCallback
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::RegisterAudioCallback(AudioTransport* audioCallback)
{
    CriticalSectionScoped lock(&_critSectCb);
    _ptrCbAudioTransport = audioCallback;

    return 0;
}

// ----------------------------------------------------------------------------
// InitPlayout
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::InitPlayout()
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);

    CriticalSectionScoped lock(&_critSect);

    if (_measureDelay)
    {
        _EmptyList();
        _lastPulseTime = AudioDeviceUtility::GetTimeInMS();
    }

    return 0;
}

// ----------------------------------------------------------------------------
// InitRecording
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::InitRecording()
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);

    CriticalSectionScoped lock(&_critSect);

    if (_measureDelay)
    {
        _EmptyList();
        _lastPulseTime = AudioDeviceUtility::GetTimeInMS();
    }

    return 0;
}

// ----------------------------------------------------------------------------
// SetRecordingSampleRate
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::SetRecordingSampleRate(WebRtc_UWord32 fsHz)
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "AudioDeviceBuffer::SetRecordingSampleRate(fsHz=%u)", fsHz);

    CriticalSectionScoped lock(&_critSect);
    _recSampleRate = fsHz;
    return 0;
}

// ----------------------------------------------------------------------------
// SetPlayoutSampleRate
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::SetPlayoutSampleRate(WebRtc_UWord32 fsHz)
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "AudioDeviceBuffer::SetPlayoutSampleRate(fsHz=%u)", fsHz);

    CriticalSectionScoped lock(&_critSect);
    _playSampleRate = fsHz;
    return 0;
}

// ----------------------------------------------------------------------------
// RecordingSampleRate
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::RecordingSampleRate() const
{
    return _recSampleRate;
}

// ----------------------------------------------------------------------------
// PlayoutSampleRate
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::PlayoutSampleRate() const
{
    return _playSampleRate;
}

// ----------------------------------------------------------------------------
// SetRecordingChannels
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::SetRecordingChannels(WebRtc_UWord8 channels)
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "AudioDeviceBuffer::SetRecordingChannels(channels=%u)", channels);

    CriticalSectionScoped lock(&_critSect);
    _recChannels = channels;
    _recBytesPerSample = 2*channels;  // 16 bits per sample in mono, 32 bits in stereo
    return 0;
}

// ----------------------------------------------------------------------------
// SetPlayoutChannels
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::SetPlayoutChannels(WebRtc_UWord8 channels)
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "AudioDeviceBuffer::SetPlayoutChannels(channels=%u)", channels);

    CriticalSectionScoped lock(&_critSect);
    _playChannels = channels;
    // 16 bits per sample in mono, 32 bits in stereo
    _playBytesPerSample = 2*channels;
    return 0;
}

// ----------------------------------------------------------------------------
// SetRecordingChannel
//
// Select which channel to use while recording.
// This API requires that stereo is enabled.
//
// Note that the nChannel parameter in RecordedDataIsAvailable will be
// set to 2 even for kChannelLeft and kChannelRight. However, nBytesPerSample
// will be 2 instead of 4 for these cases.
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::SetRecordingChannel(const AudioDeviceModule::ChannelType channel)
{
    CriticalSectionScoped lock(&_critSect);

    if (_recChannels == 1)
    {
        return -1;
    }

    if (channel == AudioDeviceModule::kChannelBoth)
    {
        // two bytes per channel
        _recBytesPerSample = 4;
    }
    else
    {
        // only utilize one out of two possible channels (left or right)
        _recBytesPerSample = 2;
    }
    _recChannel = channel;

    return 0;
}
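
// A minimal usage sketch of the channel selection described above; the caller
// object 'audioBuffer' is hypothetical:
//
//   audioBuffer.SetRecordingChannels(2);  // stereo must be enabled first
//   audioBuffer.SetRecordingChannel(AudioDeviceModule::kChannelLeft);
//   // RecordedDataIsAvailable() will still report nChannels=2, but
//   // nBytesPerSample drops from 4 to 2 and only left samples are delivered.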

// ----------------------------------------------------------------------------
// RecordingChannel
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::RecordingChannel(AudioDeviceModule::ChannelType& channel) const
{
    channel = _recChannel;
    return 0;
}

// ----------------------------------------------------------------------------
// RecordingChannels
// ----------------------------------------------------------------------------

WebRtc_UWord8 AudioDeviceBuffer::RecordingChannels() const
{
    return _recChannels;
}

// ----------------------------------------------------------------------------
// PlayoutChannels
// ----------------------------------------------------------------------------

WebRtc_UWord8 AudioDeviceBuffer::PlayoutChannels() const
{
    return _playChannels;
}

// ----------------------------------------------------------------------------
// SetCurrentMicLevel
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::SetCurrentMicLevel(WebRtc_UWord32 level)
{
    _currentMicLevel = level;
    return 0;
}

// ----------------------------------------------------------------------------
// NewMicLevel
// ----------------------------------------------------------------------------

WebRtc_UWord32 AudioDeviceBuffer::NewMicLevel() const
{
    return _newMicLevel;
}

// ----------------------------------------------------------------------------
// SetVQEData
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::SetVQEData(WebRtc_UWord32 playDelayMS, WebRtc_UWord32 recDelayMS, WebRtc_Word32 clockDrift)
{
    if ((playDelayMS + recDelayMS) > 300)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "too long delay (play:%i rec:%i)", playDelayMS, recDelayMS);
    }

    _playDelayMS = playDelayMS;
    _recDelayMS = recDelayMS;
    _clockDrift = clockDrift;

    return 0;
}
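
// A minimal usage sketch: the delays set here are summed into totalDelayMS and
// forwarded with every RecordedDataIsAvailable() callback, so a device
// implementation would typically refresh them once per 10 ms block before
// delivering recorded data. The caller object 'audioBuffer' and the delay
// variables are hypothetical:
//
//   audioBuffer.SetVQEData(playoutDelayMS, recordingDelayMS, clockDrift);
//   audioBuffer.SetRecordedBuffer(captureBuffer, nSamples);
//   audioBuffer.DeliverRecordedData();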

// ----------------------------------------------------------------------------
// StartInputFileRecording
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::StartInputFileRecording(
    const char fileName[kAdmMaxFileNameSize])
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);

    CriticalSectionScoped lock(&_critSect);

    _recFile.Flush();
    _recFile.CloseFile();

    return (_recFile.OpenFile(fileName, false, false, false));
}

// ----------------------------------------------------------------------------
// StopInputFileRecording
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::StopInputFileRecording()
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);

    CriticalSectionScoped lock(&_critSect);

    _recFile.Flush();
    _recFile.CloseFile();

    return 0;
}

// ----------------------------------------------------------------------------
// StartOutputFileRecording
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::StartOutputFileRecording(
    const char fileName[kAdmMaxFileNameSize])
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);

    CriticalSectionScoped lock(&_critSect);

    _playFile.Flush();
    _playFile.CloseFile();

    return (_playFile.OpenFile(fileName, false, false, false));
}

// ----------------------------------------------------------------------------
// StopOutputFileRecording
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::StopOutputFileRecording()
{
    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);

    CriticalSectionScoped lock(&_critSect);

    _playFile.Flush();
    _playFile.CloseFile();

    return 0;
}

// ----------------------------------------------------------------------------
// SetRecordedBuffer
//
// Store recorded audio buffer in local memory ready for the actual
// "delivery" using a callback.
//
// This method can also parse out left or right channel from a stereo
// input signal, i.e., emulate mono.
//
// Examples:
//
// 16-bit, 48kHz mono,   10ms => nSamples=480 => _recSize=2*480=960 bytes
// 16-bit, 48kHz stereo, 10ms => nSamples=480 => _recSize=4*480=1920 bytes
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::SetRecordedBuffer(const void* audioBuffer,
                                                   WebRtc_UWord32 nSamples)
{
    CriticalSectionScoped lock(&_critSect);

    if (_recBytesPerSample == 0)
    {
        assert(false);
        return -1;
    }

    _recSamples = nSamples;
    _recSize = _recBytesPerSample*nSamples;  // {2,4}*nSamples
    if (_recSize > kMaxBufferSizeBytes)
    {
        assert(false);
        return -1;
    }

    if (nSamples != _recSamples)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "invalid number of recorded samples (%d)", nSamples);
        return -1;
    }

    if (_recChannel == AudioDeviceModule::kChannelBoth)
    {
        // (default) copy the complete input buffer to the local buffer
        memcpy(&_recBuffer[0], audioBuffer, _recSize);
    }
    else
    {
        WebRtc_Word16* ptr16In = (WebRtc_Word16*)audioBuffer;
        WebRtc_Word16* ptr16Out = (WebRtc_Word16*)&_recBuffer[0];

        if (AudioDeviceModule::kChannelRight == _recChannel)
        {
            ptr16In++;
        }

        // extract left or right channel from input buffer to the local buffer
        for (WebRtc_UWord32 i = 0; i < _recSamples; i++)
        {
            *ptr16Out = *ptr16In;
            ptr16Out++;
            ptr16In++;
            ptr16In++;
        }
    }

    if (_recFile.Open())
    {
        // write to binary file in mono or stereo (interleaved)
        _recFile.Write(&_recBuffer[0], _recSize);
    }

    return 0;
}

// ----------------------------------------------------------------------------
// DeliverRecordedData
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::DeliverRecordedData()
{
    CriticalSectionScoped lock(&_critSectCb);

    // Ensure that user has initialized all essential members
    if ((_recSampleRate == 0)     ||
        (_recSamples == 0)        ||
        (_recBytesPerSample == 0) ||
        (_recChannels == 0))
    {
        assert(false);
        return -1;
    }

    if (_ptrCbAudioTransport == NULL)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to deliver recorded data (AudioTransport does not exist)");
        return 0;
    }

    WebRtc_Word32 res(0);
    WebRtc_UWord32 newMicLevel(0);
    WebRtc_UWord32 totalDelayMS = _playDelayMS + _recDelayMS;

    if (_measureDelay)
    {
        CriticalSectionScoped lock(&_critSect);

        memset(&_recBuffer[0], 0, _recSize);
        WebRtc_UWord32 time = AudioDeviceUtility::GetTimeInMS();
        if (time - _lastPulseTime > 500)
        {
            _pulseList.PushBack(time);
            _lastPulseTime = time;

            WebRtc_Word16* ptr16 = (WebRtc_Word16*)&_recBuffer[0];
            *ptr16 = 30000;
        }
    }

    res = _ptrCbAudioTransport->RecordedDataIsAvailable(&_recBuffer[0],
                                                        _recSamples,
                                                        _recBytesPerSample,
                                                        _recChannels,
                                                        _recSampleRate,
                                                        totalDelayMS,
                                                        _clockDrift,
                                                        _currentMicLevel,
                                                        newMicLevel);
    if (res != -1)
    {
        _newMicLevel = newMicLevel;
    }

    return 0;
}

// ----------------------------------------------------------------------------
// RequestPlayoutData
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::RequestPlayoutData(WebRtc_UWord32 nSamples)
{
    {
        CriticalSectionScoped lock(&_critSect);

        // Ensure that user has initialized all essential members
        if ((_playBytesPerSample == 0) ||
            (_playChannels == 0)       ||
            (_playSampleRate == 0))
        {
            assert(false);
            return -1;
        }

        _playSamples = nSamples;
        _playSize = _playBytesPerSample * nSamples;  // {2,4}*nSamples
        if (_playSize > kMaxBufferSizeBytes)
        {
            assert(false);
            return -1;
        }

        if (nSamples != _playSamples)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "invalid number of samples to be played out (%d)", nSamples);
            return -1;
        }
    }

    WebRtc_UWord32 nSamplesOut(0);

    CriticalSectionScoped lock(&_critSectCb);

    if (_ptrCbAudioTransport == NULL)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to feed data to playout (AudioTransport does not exist)");
        return 0;
    }

    if (_ptrCbAudioTransport)
    {
        WebRtc_UWord32 res(0);

        res = _ptrCbAudioTransport->NeedMorePlayData(_playSamples,
                                                     _playBytesPerSample,
                                                     _playChannels,
                                                     _playSampleRate,
                                                     &_playBuffer[0],
                                                     nSamplesOut);
        if (res != 0)
        {
            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "NeedMorePlayData() failed");
        }

        // --- Experimental delay-measurement implementation
        // *** not to be used in released code ***

        if (_measureDelay)
        {
            CriticalSectionScoped lock(&_critSect);

            WebRtc_Word16 maxAbs = WebRtcSpl_MaxAbsValueW16((const WebRtc_Word16*)&_playBuffer[0], (WebRtc_Word16)nSamplesOut*_playChannels);
            if (maxAbs > 1000)
            {
                WebRtc_UWord32 nowTime = AudioDeviceUtility::GetTimeInMS();

                if (!_pulseList.Empty())
                {
                    ListItem* item = _pulseList.First();
                    if (item)
                    {
                        WebRtc_Word16 maxIndex = WebRtcSpl_MaxAbsIndexW16((const WebRtc_Word16*)&_playBuffer[0], (WebRtc_Word16)nSamplesOut*_playChannels);
                        WebRtc_UWord32 pulseTime = item->GetUnsignedItem();
                        WebRtc_UWord32 diff = nowTime - pulseTime + (10*maxIndex)/(nSamplesOut*_playChannels);
                        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "diff time in playout delay (%d)", diff);
                    }
                    _pulseList.PopFront();
                }
            }
        }
    }

    return nSamplesOut;
}

// ----------------------------------------------------------------------------
// GetPlayoutData
// ----------------------------------------------------------------------------

WebRtc_Word32 AudioDeviceBuffer::GetPlayoutData(void* audioBuffer)
{
    CriticalSectionScoped lock(&_critSect);

    if (_playSize > kMaxBufferSizeBytes)
    {
        WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "_playSize %i exceeds "
                     "kMaxBufferSizeBytes in AudioDeviceBuffer::GetPlayoutData", _playSize);
        assert(false);
        return -1;
    }

    memcpy(audioBuffer, &_playBuffer[0], _playSize);

    if (_playFile.Open())
    {
        // write to binary file in mono or stereo (interleaved)
        _playFile.Write(&_playBuffer[0], _playSize);
    }

    return _playSamples;
}
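
// A minimal render-side usage sketch: a device implementation first asks the
// registered AudioTransport to produce nSamples via RequestPlayoutData() and
// then copies the result into its own buffer with GetPlayoutData(). The
// objects 'audioBuffer' and 'deviceBuffer' are hypothetical:
//
//   WebRtc_Word32 nSamplesOut = audioBuffer.RequestPlayoutData(nSamples);
//   if (nSamplesOut > 0)
//   {
//       audioBuffer.GetPlayoutData(deviceBuffer);
//   }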

// ----------------------------------------------------------------------------
// _EmptyList
// ----------------------------------------------------------------------------

void AudioDeviceBuffer::_EmptyList()
{
    while (!_pulseList.Empty())
    {
        ListItem* item = _pulseList.First();
        if (item)
        {
            // WebRtc_UWord32 ts = item->GetUnsignedItem();
        }
        _pulseList.PopFront();
    }
}

}  // namespace webrtc