blob: bc46f03cbcc68d89309f602b8dd7c2cce1b8887f [file] [log] [blame]
henrika8324b522015-03-27 10:56:23 +01001/*
2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_
12#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_
13
14#include <jni.h>
15
henrikab2619892015-05-18 16:49:16 +020016#include "webrtc/base/scoped_ptr.h"
henrika8324b522015-03-27 10:56:23 +010017#include "webrtc/base/thread_checker.h"
18#include "webrtc/modules/audio_device/android/audio_common.h"
19#include "webrtc/modules/audio_device/include/audio_device_defines.h"
20#include "webrtc/modules/audio_device/audio_device_generic.h"
21#include "webrtc/modules/utility/interface/helpers_android.h"
henrikab2619892015-05-18 16:49:16 +020022#include "webrtc/modules/utility/interface/jvm_android.h"
henrika8324b522015-03-27 10:56:23 +010023
24namespace webrtc {
25
// Helper class that stores native audio parameters (sample rate, number of
// channels and buffer sizes). One frame contains one sample per channel;
// all buffer sizes are measured in number of frames unless stated otherwise.
class AudioParameters {
 public:
  enum { kBitsPerSample = 16 };
  AudioParameters()
      : sample_rate_(0),
        channels_(0),
        frames_per_buffer_(0),
        frames_per_10ms_buffer_(0),
        bits_per_sample_(kBitsPerSample) {}
  AudioParameters(int sample_rate, int channels, int frames_per_buffer)
      : bits_per_sample_(kBitsPerSample) {
    // Reuse reset() so the derivation of frames_per_10ms_buffer_ is
    // implemented in exactly one place.
    reset(sample_rate, channels, frames_per_buffer);
  }
  // Re-initializes all size related members. frames_per_10ms_buffer_ is
  // derived from the sample rate since 10ms corresponds to 1/100 second.
  void reset(int sample_rate, int channels, int frames_per_buffer) {
    sample_rate_ = sample_rate;
    channels_ = channels;
    frames_per_buffer_ = frames_per_buffer;
    frames_per_10ms_buffer_ = (sample_rate / 100);
  }
  int sample_rate() const { return sample_rate_; }
  int channels() const { return channels_; }
  int frames_per_buffer() const { return frames_per_buffer_; }
  int frames_per_10ms_buffer() const { return frames_per_10ms_buffer_; }
  int bits_per_sample() const { return bits_per_sample_; }
  // True when all parameters required for audio streaming have been set.
  bool is_valid() const {
    return ((sample_rate_ > 0) && (channels_ > 0) && (frames_per_buffer_ > 0));
  }
  // Number of bytes in one frame (all channels, 16 bits per sample).
  int GetBytesPerFrame() const { return channels_ * bits_per_sample_ / 8; }
  // Number of bytes in one native buffer of frames_per_buffer_ frames.
  int GetBytesPerBuffer() const {
    return frames_per_buffer_ * GetBytesPerFrame();
  }
  // Number of bytes in one 10ms buffer of frames_per_10ms_buffer_ frames.
  int GetBytesPer10msBuffer() const {
    return frames_per_10ms_buffer_ * GetBytesPerFrame();
  }
  // Duration of one native buffer in milliseconds. Returns 0.0 when the
  // sample rate has not been set yet (avoids division by zero).
  float GetBufferSizeInMilliseconds() const {
    if (sample_rate_ == 0)
      return 0.0f;
    return frames_per_buffer_ / (sample_rate_ / 1000.0f);
  }

 private:
  int sample_rate_;
  int channels_;
  // Lowest possible size of native audio buffer. Measured in number of frames.
  // This size is injected into the OpenSL ES output (since it does not "talk
  // Java") implementation but is currently not utilized by the Java
  // implementation since it acquires the same value internally.
  int frames_per_buffer_;
  int frames_per_10ms_buffer_;
  int bits_per_sample_;
};
79
// Implements support for functions in the WebRTC audio stack for Android that
// rely on the AudioManager in android.media. It also populates an
// AudioParameters structure with native audio parameters detected at
// construction. This class does not make any audio-related modifications
// unless Init() is called. Caching audio parameters makes no changes but only
// reads data from the Java side.
class AudioManager {
 public:
  // Wraps the Java specific parts of the AudioManager into one helper class.
  // Stores method IDs for all supported methods at construction and then
  // allows calls like JavaAudioManager::Close() while hiding the Java/JNI
  // parts that are associated with this call.
  class JavaAudioManager {
   public:
    JavaAudioManager(NativeRegistration* native_registration,
                     rtc::scoped_ptr<GlobalRef> audio_manager);
    ~JavaAudioManager();

    bool Init();
    void Close();
    void SetCommunicationMode(bool enable);

   private:
    // Global reference to the Java WebRtcAudioManager instance; keeps the
    // object alive for the lifetime of this wrapper.
    rtc::scoped_ptr<GlobalRef> audio_manager_;
    // Cached Java method IDs, resolved once at construction.
    jmethodID init_;
    jmethodID dispose_;
    jmethodID set_communication_mode_;
  };

  AudioManager();
  ~AudioManager();

  // Sets the currently active audio layer combination. Must be called before
  // Init().
  void SetActiveAudioLayer(AudioDeviceModule::AudioLayer audio_layer);

  // Initializes the audio manager and stores the current audio mode.
  bool Init();
  // Revert any setting done by Init().
  bool Close();

  // Sets audio mode to AudioManager.MODE_IN_COMMUNICATION if |enable| is true.
  // Restores audio mode that was stored in Init() if |enable| is false.
  void SetCommunicationMode(bool enable);

  // Native audio parameters stored during construction.
  const AudioParameters& GetPlayoutAudioParameters();
  const AudioParameters& GetRecordAudioParameters();

  // Returns true if the device supports a built-in Acoustic Echo Canceler.
  // Some devices can also be blacklisted for use in combination with an AEC
  // and these devices will return false.
  // Can currently only be used in combination with a Java based audio backend
  // for the recording side (i.e. using the android.media.AudioRecord API).
  bool IsAcousticEchoCancelerSupported() const;

  // Returns true if the device supports the low-latency audio paths in
  // combination with OpenSL ES.
  bool IsLowLatencyPlayoutSupported() const;

  // Returns the estimated total delay of this device. Unit is in milliseconds.
  // The value is set once at construction and never changes after that.
  // Possible values are webrtc::kLowLatencyModeDelayEstimateInMilliseconds and
  // webrtc::kHighLatencyModeDelayEstimateInMilliseconds.
  int GetDelayEstimateInMilliseconds() const;

 private:
  // Called from Java side so we can cache the native audio parameters.
  // This method will be called by the WebRtcAudioManager constructor, i.e.
  // on the same thread that this object is created on. The |native_audio_manager|
  // argument carries a pointer back to this instance, which is used to forward
  // the call to the non-static OnCacheAudioParameters() below.
  static void JNICALL CacheAudioParameters(JNIEnv* env,
                                           jobject obj,
                                           jint sample_rate,
                                           jint channels,
                                           jboolean hardware_aec,
                                           jboolean low_latency_output,
                                           jint output_buffer_size,
                                           jint input_buffer_size,
                                           jlong native_audio_manager);
  void OnCacheAudioParameters(JNIEnv* env,
                              jint sample_rate,
                              jint channels,
                              jboolean hardware_aec,
                              jboolean low_latency_output,
                              jint output_buffer_size,
                              jint input_buffer_size);

  // Stores thread ID in the constructor.
  // We can then use ThreadChecker::CalledOnValidThread() to ensure that
  // other methods are called from the same thread.
  rtc::ThreadChecker thread_checker_;

  // Calls AttachCurrentThread() if this thread is not attached at construction.
  // Also ensures that DetachCurrentThread() is called at destruction.
  AttachCurrentThreadIfNeeded attach_thread_if_needed_;

  // Wraps the JNI interface pointer and methods associated with it.
  rtc::scoped_ptr<JNIEnvironment> j_environment_;

  // Contains factory method for creating the Java object.
  rtc::scoped_ptr<NativeRegistration> j_native_registration_;

  // Wraps the Java specific parts of the AudioManager.
  rtc::scoped_ptr<AudioManager::JavaAudioManager> j_audio_manager_;

  // Selected audio layer combination; see SetActiveAudioLayer().
  AudioDeviceModule::AudioLayer audio_layer_;

  // Set to true by Init() and false by Close().
  bool initialized_;

  // True if device supports hardware (or built-in) AEC.
  bool hardware_aec_;

  // True if device supports the low-latency OpenSL ES audio path.
  bool low_latency_playout_;

  // The delay estimate can take one of two fixed values depending on if the
  // device supports low-latency output or not.
  int delay_estimate_in_milliseconds_;

  // Contains native parameters (e.g. sample rate, channel configuration).
  // Set at construction in OnCacheAudioParameters() which is called from
  // Java on the same thread as this object is created on.
  AudioParameters playout_parameters_;
  AudioParameters record_parameters_;
};
206
207} // namespace webrtc
208
209#endif // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_