/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_OUTPUT_H_
#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_OUTPUT_H_

#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <SLES/OpenSLES_AndroidConfiguration.h>

#include "webrtc/modules/audio_device/android/audio_manager_jni.h"
#include "webrtc/modules/audio_device/android/low_latency_event.h"
#include "webrtc/modules/audio_device/android/opensles_common.h"
#include "webrtc/modules/audio_device/include/audio_device_defines.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

namespace webrtc {

class AudioDeviceBuffer;
class CriticalSectionWrapper;
class FineAudioBuffer;
class SingleRwFifo;
class ThreadWrapper;

// OpenSL ES implementation that facilitates playing PCM data to an Android
// device. This class is thread-compatible, i.e. given an instance of this
// class, calls to non-const methods require exclusive access to the object.
// See the usage sketch below for a typical call sequence.
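//
// A minimal usage sketch, assuming the surrounding code owns an
// AudioDeviceBuffer and serializes the calls as required above (the method
// names come from this header; the ordering shown is illustrative only):
//
//   OpenSlesOutput output(0);                // id
//   output.AttachAudioBuffer(audio_buffer);  // caller-owned AudioDeviceBuffer
//   if (output.Init() == 0 && output.InitPlayout() == 0) {
//     output.StartPlayout();
//     // ... audio is pulled from |audio_buffer| and played out ...
//     output.StopPlayout();
//   }
//   output.Terminate();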
class OpenSlesOutput : public webrtc_opensl::PlayoutDelayProvider {
 public:
  explicit OpenSlesOutput(const int32_t id);
  virtual ~OpenSlesOutput();

  // Main initialization and termination
  int32_t Init();
  int32_t Terminate();
  bool Initialized() const { return initialized_; }

  // Device enumeration
  int16_t PlayoutDevices() { return 1; }

  int32_t PlayoutDeviceName(uint16_t index,
                            char name[kAdmMaxDeviceNameSize],
                            char guid[kAdmMaxGuidSize]);

  // Device selection
  int32_t SetPlayoutDevice(uint16_t index);
  int32_t SetPlayoutDevice(
      AudioDeviceModule::WindowsDeviceType device) { return 0; }

  // Audio transport initialization
  int32_t PlayoutIsAvailable(bool& available);  // NOLINT
  int32_t InitPlayout();
  bool PlayoutIsInitialized() const { return play_initialized_; }

  // Audio transport control
  int32_t StartPlayout();
  int32_t StopPlayout();
  bool Playing() const { return playing_; }

  // Audio mixer initialization
  int32_t SpeakerIsAvailable(bool& available);  // NOLINT
  int32_t InitSpeaker();
  bool SpeakerIsInitialized() const { return speaker_initialized_; }

  // Speaker volume controls
  int32_t SpeakerVolumeIsAvailable(bool& available);  // NOLINT
  int32_t SetSpeakerVolume(uint32_t volume);
  int32_t SpeakerVolume(uint32_t& volume) const { return 0; }  // NOLINT
  int32_t MaxSpeakerVolume(uint32_t& maxVolume) const;  // NOLINT
  int32_t MinSpeakerVolume(uint32_t& minVolume) const;  // NOLINT
  int32_t SpeakerVolumeStepSize(uint16_t& stepSize) const;  // NOLINT

  // Speaker mute control
  int32_t SpeakerMuteIsAvailable(bool& available);  // NOLINT
  int32_t SetSpeakerMute(bool enable) { return -1; }
  int32_t SpeakerMute(bool& enabled) const { return -1; }  // NOLINT

  // Stereo support
  int32_t StereoPlayoutIsAvailable(bool& available);  // NOLINT
  int32_t SetStereoPlayout(bool enable);
  int32_t StereoPlayout(bool& enabled) const;  // NOLINT

  // Delay information and control
  int32_t SetPlayoutBuffer(const AudioDeviceModule::BufferType type,
                           uint16_t sizeMS) { return -1; }
  int32_t PlayoutBuffer(AudioDeviceModule::BufferType& type,  // NOLINT
                        uint16_t& sizeMS) const;
  int32_t PlayoutDelay(uint16_t& delayMS) const;  // NOLINT

  // Error and warning information
  bool PlayoutWarning() const { return false; }
  bool PlayoutError() const { return false; }
  void ClearPlayoutWarning() {}
  void ClearPlayoutError() {}

  // Attach audio buffer
  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);

  // Speaker audio routing
  int32_t SetLoudspeakerStatus(bool enable);
  int32_t GetLoudspeakerStatus(bool& enable) const;  // NOLINT

 protected:
  virtual int PlayoutDelayMs();

 private:
  enum {
    kNumInterfaces = 3,
    // TODO(xians): Reduce the number of buffers to improve the latency.
    //              Currently 30ms worth of buffers are needed due to audio
    //              pipeline processing jitter. Note: kNumOpenSlBuffers must
    //              not be changed.
    // According to the OpenSL ES documentation in the NDK, the lower output
    // latency path is used only if the application requests a buffer count of
    // 2 or more. Use the minimum number of buffers to keep the delay as low
    // as possible.
    kNumOpenSlBuffers = 2,
    // NetEq delivers frames on a 10ms basis. This means that every 10ms there
    // will be a time-consuming task. Keeping 10ms worth of buffers ensures
    // that there is 10ms to perform the time-consuming task without running
    // into underflow.
    // In addition to the 10ms that needs to be stored for NetEq processing,
    // there will be jitter in the audio pipeline due to the acquisition of
    // locks.
    // Note: The buffers in the OpenSL queue do not count towards the 10ms of
    // frames needed since OpenSL needs to have them ready for playout.
    kNum10MsToBuffer = 4,
  };
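
  // A hedged reading of the constants above, for orientation only (the
  // authoritative math lives in CalculateNumFifoBuffersNeeded() and
  // TotalBuffersUsed() in the .cc file): the buffers held by OpenSL
  // (kNumOpenSlBuffers) and the 10ms frames kept in |fifo_| (roughly
  // kNum10MsToBuffer of them) add up to the total amount of audio queued
  // ahead of the device, e.g. something like
  //   total_buffers = kNumOpenSlBuffers + num_fifo_buffers_needed_;
  // so shrinking either constant lowers delay but increases the risk of
  // underrun.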

  bool InitSampleRate();
  bool SetLowLatency();
  void UpdatePlayoutDelay();
  // It might be possible to dynamically add or remove buffers based on how
  // close to depletion the fifo is. Fewer buffers mean lower delay, but too
  // few buffers will cause underrun. Dynamically changing the number of
  // buffers would also greatly increase code complexity.
  void CalculateNumFifoBuffersNeeded();
  void AllocateBuffers();
  int TotalBuffersUsed() const;
  bool EnqueueAllBuffers();
  // This function also configures the audio player, e.g. which sample rate to
  // use, so it should be called when starting playout.
  bool CreateAudioPlayer();
  void DestroyAudioPlayer();

  // When an underrun happens there won't be a new frame ready for playout that
  // can be retrieved yet. Since the OpenSL thread must return ASAP, there will
  // be one less buffer available to OpenSL. This function handles this case
  // gracefully by restarting the audio, pushing silent frames to OpenSL for
  // playout. This will sound like a click. Underruns are also logged to make
  // it possible to identify these types of audio artifacts.
  // This function returns true if there has been an underrun. Further
  // processing of audio data should be avoided until this function returns
  // false again.
  // The function needs to be protected by |crit_sect_|.
  bool HandleUnderrun(int event_id, int event_msg);
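
  // Purely illustrative restatement of the contract above (the real call site
  // is in the .cc file and may differ; |event_id| and |event_msg| are supplied
  // by the caller):
  //   CriticalSectionScoped lock(crit_sect_.get());
  //   if (HandleUnderrun(event_id, event_msg)) {
  //     // Underrun: audio is being restarted with silence, so skip normal
  //     // processing until HandleUnderrun() reports false again.
  //     return;
  //   }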

  static void PlayerSimpleBufferQueueCallback(
      SLAndroidSimpleBufferQueueItf queueItf,
      void* pContext);
  // This function must not take any locks or do any heavy work. It is a
  // requirement for the OpenSL implementation to work as intended. The reason
  // for this is that taking locks exposes the OpenSL thread to the risk of
  // priority inversion.
  void PlayerSimpleBufferQueueCallbackHandler(
      SLAndroidSimpleBufferQueueItf queueItf);
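
  // For orientation, the usual OpenSL ES trampoline that this pair of
  // functions implies (a sketch under that assumption; the real definition is
  // in the .cc file):
  //   void OpenSlesOutput::PlayerSimpleBufferQueueCallback(
  //       SLAndroidSimpleBufferQueueItf queueItf, void* pContext) {
  //     OpenSlesOutput* audio_device = static_cast<OpenSlesOutput*>(pContext);
  //     audio_device->PlayerSimpleBufferQueueCallbackHandler(queueItf);
  //   }
  // This assumes |pContext| is the OpenSlesOutput instance registered with the
  // buffer queue, which lets the static callback stay lock-free.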

  bool StartCbThreads();
  void StopCbThreads();
  static bool CbThread(void* context);
  // This function must be protected against data races with threads calling
  // this class's public functions. It is a requirement for this class to be
  // thread-compatible.
  bool CbThreadImpl();

  // Java API handle
  AudioManagerJni audio_manager_;

  int id_;
  bool initialized_;
  bool speaker_initialized_;
  bool play_initialized_;

  // Members that are read/write accessed concurrently by the process thread
  // and threads calling public functions of this class.
  scoped_ptr<ThreadWrapper> play_thread_;  // Processing thread
  scoped_ptr<CriticalSectionWrapper> crit_sect_;
  // This member controls the starting and stopping of playing audio to the
  // device.
  bool playing_;

  // Only one thread, T1, may push and only one thread, T2, may pull. T1 may or
  // may not be the same thread as T2. T1 is the process thread and T2 is the
  // OpenSL thread.
  scoped_ptr<SingleRwFifo> fifo_;
  int num_fifo_buffers_needed_;
  LowLatencyEvent event_;
  int number_underruns_;

  // OpenSL handles
  SLObjectItf sles_engine_;
  SLEngineItf sles_engine_itf_;
  SLObjectItf sles_player_;
  SLPlayItf sles_player_itf_;
  SLAndroidSimpleBufferQueueItf sles_player_sbq_itf_;
  SLObjectItf sles_output_mixer_;

  // Audio buffers
  AudioDeviceBuffer* audio_buffer_;
  scoped_ptr<FineAudioBuffer> fine_buffer_;
  scoped_array<scoped_array<int8_t> > play_buf_;
  // Index in |play_buf_| pointing to the audio buffer that will be ready the
  // next time PlayerSimpleBufferQueueCallbackHandler is invoked.
  // Ready means the buffer is ready to be played out to the device.
  int active_queue_;

  // Audio settings
  uint32_t speaker_sampling_rate_;
  int buffer_size_samples_;
  int buffer_size_bytes_;

  // Audio status
  uint16_t playout_delay_;
};

}  // namespace webrtc

#endif  // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_OUTPUT_H_