/*
 *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_PLAYER_H_
#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_PLAYER_H_

#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <SLES/OpenSLES_AndroidConfiguration.h>

#include <memory>

#include "webrtc/base/thread_checker.h"
#include "webrtc/modules/audio_device/android/audio_common.h"
#include "webrtc/modules/audio_device/android/audio_manager.h"
#include "webrtc/modules/audio_device/android/opensles_common.h"
#include "webrtc/modules/audio_device/include/audio_device_defines.h"
#include "webrtc/modules/audio_device/audio_device_generic.h"
#include "webrtc/modules/utility/include/helpers_android.h"

namespace webrtc {

class FineAudioBuffer;

// Implements 16-bit mono PCM audio output support for Android using the
// C based OpenSL ES API. No calls from C/C++ to Java using JNI are made.
//
// An instance must be created and destroyed on one and the same thread.
// All public methods must also be called on the same thread. A thread checker
// will RTC_DCHECK if any method is called on an invalid thread. Decoded audio
// buffers are requested on a dedicated internal thread managed by the OpenSL
// ES layer.
//
// The existing design forces the user to call InitPlayout() after
// StopPlayout() to be able to call StartPlayout() again. This is in line with
// how the Java-based implementation works.
//
// OpenSL ES is a native C API which has no Dalvik-related overhead such as
// garbage collection pauses, and it supports reduced audio output latency.
// If the device doesn't claim this feature but supports API level 9 (Android
// platform version 2.3) or later, then we can still use the OpenSL ES APIs but
// the output latency may be higher.
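//
// A minimal usage sketch (illustration only, not part of this header). It
// assumes a valid |audio_manager| and an AudioDeviceBuffer, here called
// |audio_buffer|; both names are placeholders:
//
//   OpenSLESPlayer player(audio_manager);
//   player.AttachAudioBuffer(audio_buffer);
//   player.Init();
//   player.InitPlayout();
//   player.StartPlayout();
//   // ... audio is rendered on the internal OpenSL ES thread ...
//   player.StopPlayout();
//   // InitPlayout() must be called again before the next StartPlayout().
//   player.InitPlayout();
//   player.StartPlayout();
//   player.StopPlayout();
//   player.Terminate();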
class OpenSLESPlayer {
 public:
  // Beginning with API level 17 (Android 4.2), a buffer count of 2 or more is
  // required for lower latency. Beginning with API level 18 (Android 4.3), a
  // buffer count of 1 is sufficient for lower latency. In addition, the buffer
  // size and sample rate must be compatible with the device's native output
  // configuration provided via the audio manager at construction.
  // TODO(henrika): perhaps set this value dynamically based on OS version.
  static const int kNumOfOpenSLESBuffers = 2;

  explicit OpenSLESPlayer(AudioManager* audio_manager);
  ~OpenSLESPlayer();

  int Init();
  int Terminate();

  int InitPlayout();
  bool PlayoutIsInitialized() const { return initialized_; }

  int StartPlayout();
  int StopPlayout();
  bool Playing() const { return playing_; }

  int SpeakerVolumeIsAvailable(bool& available);
  int SetSpeakerVolume(uint32_t volume);
  int SpeakerVolume(uint32_t& volume) const;
  int MaxSpeakerVolume(uint32_t& maxVolume) const;
  int MinSpeakerVolume(uint32_t& minVolume) const;

  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);

 private:
  // These callback methods are called when data is required for playout.
  // They are both called from an internal "OpenSL ES thread" which is not
  // attached to the Dalvik VM.
  static void SimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf caller,
                                        void* context);
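  // Sketch for illustration only (the actual wiring lives in the .cc file):
  // a callback like this is typically registered on the simple buffer queue
  // once it has been created, e.g.:
  //   (*simple_buffer_queue_)->RegisterCallback(
  //       simple_buffer_queue_, SimpleBufferQueueCallback, this);
  // The static callback is then expected to cast |context| back to
  // OpenSLESPlayer* and forward the call to FillBufferQueue() below.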
  void FillBufferQueue();
  // Reads audio data in PCM format using the AudioDeviceBuffer.
  // Can be called both on the main thread (during StartPlayout()) and from the
  // internal audio thread while output streaming is active.
  // If the |silence| flag is set, the audio is filled with zeros instead of
  // asking the WebRTC layer for real audio data. This procedure is also known
  // as audio priming.
  void EnqueuePlayoutData(bool silence);
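  // A possible priming sequence (illustration only; the actual implementation
  // lives in the .cc file): before the play state is changed to playing, each
  // buffer in the queue could be enqueued with silence so that playout starts
  // without an initial underrun, e.g.:
  //   for (int i = 0; i < kNumOfOpenSLESBuffers; ++i)
  //     EnqueuePlayoutData(true);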

  // Allocate memory for audio buffers which will be used to render audio
  // via the SLAndroidSimpleBufferQueueItf interface.
  void AllocateDataBuffers();

  // Obtains the SL Engine Interface from the existing global Engine object.
  // The interface exposes creation methods of all the OpenSL ES object types.
  // This method defines the |engine_| member variable.
  bool ObtainEngineInterface();
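  // As an illustration (not a guaranteed implementation), the engine interface
  // can be obtained from an SL engine object via GetInterface(). Assuming the
  // audio manager exposes the global engine object through an accessor, here
  // hypothetically called GetOpenSLEngine(), the call could look like:
  //   SLObjectItf engine_object = audio_manager_->GetOpenSLEngine();
  //   (*engine_object)->GetInterface(engine_object, SL_IID_ENGINE, &engine_);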

  // Creates/destroys the output mix object.
  bool CreateMix();
  void DestroyMix();
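  // For illustration only: an output mix is normally created from the engine
  // interface and then realized synchronously, roughly along the lines of:
  //   SLObjectItf output_mix = nullptr;
  //   (*engine_)->CreateOutputMix(engine_, &output_mix, 0, nullptr, nullptr);
  //   (*output_mix)->Realize(output_mix, SL_BOOLEAN_FALSE);
  // Error handling is omitted here; both calls return an SLresult.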

  // Creates/destroys the audio player and its simple buffer queue.
  // Also creates the volume object.
  bool CreateAudioPlayer();
  void DestroyAudioPlayer();

  SLuint32 GetPlayState() const;

  // Ensures that methods are called from the same thread as this object is
  // created on.
  rtc::ThreadChecker thread_checker_;

  // Stores the thread ID in the first call to SimpleBufferQueueCallback() from
  // the internal non-application thread, which is not attached to the Dalvik
  // JVM. Detached during construction of this object.
  rtc::ThreadChecker thread_checker_opensles_;

  // Raw pointer to the audio manager injected at construction. Used to cache
  // audio parameters and to access the global SL engine object needed by the
  // ObtainEngineInterface() method. The audio manager outlives any instance of
  // this class.
  AudioManager* audio_manager_;

  // Contains audio parameters provided to this class at construction by the
  // AudioManager.
  const AudioParameters audio_parameters_;

  // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
  // AudioDeviceModuleImpl class and attached via AudioDeviceModule::Create().
  AudioDeviceBuffer* audio_device_buffer_;

  bool initialized_;
  bool playing_;

  // PCM-type format definition.
  // TODO(henrika): add support for SLAndroidDataFormat_PCM_EX (android-21) if
  // 32-bit float representation is needed.
  SLDataFormat_PCM pcm_format_;
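  // Sketch of what this descriptor typically contains for 16-bit mono PCM at
  // 48 kHz (illustration only; the real values are derived from the audio
  // parameters at runtime, and OpenSL ES expects the rate in milliHertz):
  //   pcm_format_.formatType = SL_DATAFORMAT_PCM;
  //   pcm_format_.numChannels = 1;
  //   pcm_format_.samplesPerSec = SL_SAMPLINGRATE_48;  // 48000 * 1000 mHz.
  //   pcm_format_.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
  //   pcm_format_.containerSize = SL_PCMSAMPLEFORMAT_FIXED_16;
  //   pcm_format_.channelMask = SL_SPEAKER_FRONT_CENTER;
  //   pcm_format_.endianness = SL_BYTEORDER_LITTLEENDIAN;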

  // Queue of audio buffers to be used by the player object for rendering
  // audio. They will be used in a round-robin fashion and the size of each
  // buffer is given by FineAudioBuffer::RequiredBufferSizeBytes().
  std::unique_ptr<SLint8[]> audio_buffers_[kNumOfOpenSLESBuffers];

  // FineAudioBuffer takes an AudioDeviceBuffer which delivers audio data
  // in chunks of 10ms. It then allows for this data to be pulled in a finer
  // or coarser granularity. I.e., by interacting with this class instead of
  // directly with the AudioDeviceBuffer, one can ask for any number of audio
  // samples.
  // Example: the native buffer size can be 192 audio frames at a 48kHz sample
  // rate. WebRTC will provide 480 audio frames per 10ms, but OpenSL ES asks
  // for 192 in each callback (one every 4th ms). This class can then ask for
  // 192, and the FineAudioBuffer will only need to ask WebRTC for new data
  // roughly every other callback (480 / 192 = 2.5) while caching the
  // non-utilized audio.
  std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;

  // Keeps track of the active audio buffer 'n' in the audio_buffers_[n] queue.
  // Example (kNumOfOpenSLESBuffers = 2): counts 0, 1, 0, 1, ...
  int buffer_index_;

  // This interface exposes creation methods for all the OpenSL ES object
  // types. It is the OpenSL ES API entry point.
  SLEngineItf engine_;

  // Output mix object to be used by the player object.
  webrtc::ScopedSLObjectItf output_mix_;

  // The audio player media object plays out audio to the speakers. It also
  // supports volume control.
  webrtc::ScopedSLObjectItf player_object_;

  // This interface is supported on the audio player and it controls the state
  // of the audio player.
  SLPlayItf player_;

  // The Android Simple Buffer Queue interface is supported on the audio player
  // and it provides methods to send audio data from the source to the audio
  // player for rendering.
  SLAndroidSimpleBufferQueueItf simple_buffer_queue_;
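  // For illustration only: rendering data via this interface amounts to an
  // Enqueue() call on the next buffer in the round-robin queue, for example:
  //   SLint8* buffer = audio_buffers_[buffer_index_].get();
  //   (*simple_buffer_queue_)->Enqueue(simple_buffer_queue_, buffer,
  //                                    num_bytes_per_buffer);
  //   buffer_index_ = (buffer_index_ + 1) % kNumOfOpenSLESBuffers;
  // where |num_bytes_per_buffer| is a hypothetical name for the buffer size
  // given by FineAudioBuffer::RequiredBufferSizeBytes().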

  // This interface exposes controls for manipulating the object's audio volume
  // properties. This interface is supported on the Audio Player object.
  SLVolumeItf volume_;

  // Last time the OpenSL ES layer asked for audio data to play out.
  uint32_t last_play_time_;
};

}  // namespace webrtc

#endif  // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_PLAYER_H_