/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_processing/audio_processing_impl.h"

#include <assert.h>

#include "webrtc/base/platform_file.h"
#include "webrtc/common_audio/include/audio_util.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_processing/agc/agc_manager_direct.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.h"
#include "webrtc/common_audio/channel_buffer.h"
#include "webrtc/modules/audio_processing/common.h"
#include "webrtc/modules/audio_processing/echo_cancellation_impl.h"
#include "webrtc/modules/audio_processing/echo_control_mobile_impl.h"
#include "webrtc/modules/audio_processing/gain_control_impl.h"
#include "webrtc/modules/audio_processing/high_pass_filter_impl.h"
#include "webrtc/modules/audio_processing/level_estimator_impl.h"
#include "webrtc/modules/audio_processing/noise_suppression_impl.h"
#include "webrtc/modules/audio_processing/processing_component.h"
#include "webrtc/modules/audio_processing/transient/transient_suppressor.h"
#include "webrtc/modules/audio_processing/voice_detection_impl.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/file_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"

#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
// Files generated at build-time by the protobuf compiler.
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h"
#else
#include "webrtc/audio_processing/debug.pb.h"
#endif
#endif  // WEBRTC_AUDIOPROC_DEBUG_DUMP

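// Helper used throughout this file: evaluates |expr| and returns its error
// code from the enclosing function if it is anything other than kNoError.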
#define RETURN_ON_ERR(expr)  \
  do {                       \
    int err = (expr);        \
    if (err != kNoError) {   \
      return err;            \
    }                        \
  } while (0)

namespace webrtc {

// Throughout webrtc, it's assumed that success is represented by zero.
static_assert(AudioProcessing::kNoError == 0, "kNoError must be zero");

// This class has two main functionalities:
//
// 1) It is returned instead of the real GainControl after the new AGC has been
//    enabled in order to prevent an outside user from overriding compression
//    settings. It doesn't do anything in its implementation, except for
//    delegating the const methods and Enable calls to the real GainControl, so
//    AGC can still be disabled.
//
// 2) It is injected into AgcManagerDirect and implements volume callbacks for
//    getting and setting the volume level. It just caches this value to be
//    used in VoiceEngine later.
class GainControlForNewAgc : public GainControl, public VolumeCallbacks {
 public:
  explicit GainControlForNewAgc(GainControlImpl* gain_control)
      : real_gain_control_(gain_control),
        volume_(0) {
  }

  // GainControl implementation.
  int Enable(bool enable) override {
    return real_gain_control_->Enable(enable);
  }
  bool is_enabled() const override { return real_gain_control_->is_enabled(); }
  int set_stream_analog_level(int level) override {
    volume_ = level;
    return AudioProcessing::kNoError;
  }
  int stream_analog_level() override { return volume_; }
  int set_mode(Mode mode) override { return AudioProcessing::kNoError; }
  Mode mode() const override { return GainControl::kAdaptiveAnalog; }
  int set_target_level_dbfs(int level) override {
    return AudioProcessing::kNoError;
  }
  int target_level_dbfs() const override {
    return real_gain_control_->target_level_dbfs();
  }
  int set_compression_gain_db(int gain) override {
    return AudioProcessing::kNoError;
  }
  int compression_gain_db() const override {
    return real_gain_control_->compression_gain_db();
  }
  int enable_limiter(bool enable) override { return AudioProcessing::kNoError; }
  bool is_limiter_enabled() const override {
    return real_gain_control_->is_limiter_enabled();
  }
  int set_analog_level_limits(int minimum, int maximum) override {
    return AudioProcessing::kNoError;
  }
  int analog_level_minimum() const override {
    return real_gain_control_->analog_level_minimum();
  }
  int analog_level_maximum() const override {
    return real_gain_control_->analog_level_maximum();
  }
  bool stream_is_saturated() const override {
    return real_gain_control_->stream_is_saturated();
  }

  // VolumeCallbacks implementation.
  void SetMicVolume(int volume) override { volume_ = volume; }
  int GetMicVolume() override { return volume_; }

 private:
  GainControl* real_gain_control_;
  int volume_;
};

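// A minimal usage sketch of the factory functions below (assumed, not
// normative: the caller owns the returned pointer, feeds 10 ms frames, and
// supplies the render stream plus the device delay; error handling omitted):
//
//   AudioProcessing* apm = AudioProcessing::Create();
//   apm->gain_control()->Enable(true);
//   // For each 10 ms frame:
//   apm->AnalyzeReverseStream(&render_frame);
//   apm->set_stream_delay_ms(delay_ms);
//   apm->ProcessStream(&capture_frame);
//   ...
//   delete apm;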
AudioProcessing* AudioProcessing::Create() {
  Config config;
  return Create(config, nullptr);
}

AudioProcessing* AudioProcessing::Create(const Config& config) {
  return Create(config, nullptr);
}

AudioProcessing* AudioProcessing::Create(const Config& config,
                                         NonlinearBeamformer* beamformer) {
  AudioProcessingImpl* apm = new AudioProcessingImpl(config, beamformer);
  if (apm->Initialize() != kNoError) {
    delete apm;
    apm = NULL;
  }

  return apm;
}

AudioProcessingImpl::AudioProcessingImpl(const Config& config)
    : AudioProcessingImpl(config, nullptr) {}

AudioProcessingImpl::AudioProcessingImpl(const Config& config,
                                         NonlinearBeamformer* beamformer)
    : echo_cancellation_(NULL),
      echo_control_mobile_(NULL),
      gain_control_(NULL),
      high_pass_filter_(NULL),
      level_estimator_(NULL),
      noise_suppression_(NULL),
      voice_detection_(NULL),
      crit_(CriticalSectionWrapper::CreateCriticalSection()),
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
      debug_file_(FileWrapper::Create()),
      event_msg_(new audioproc::Event()),
#endif
      fwd_in_format_(kSampleRate16kHz, 1),
      fwd_proc_format_(kSampleRate16kHz),
      fwd_out_format_(kSampleRate16kHz, 1),
      rev_in_format_(kSampleRate16kHz, 1),
      rev_proc_format_(kSampleRate16kHz, 1),
      split_rate_(kSampleRate16kHz),
      stream_delay_ms_(0),
      delay_offset_ms_(0),
      was_stream_delay_set_(false),
      output_will_be_muted_(false),
      key_pressed_(false),
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
      use_new_agc_(false),
#else
      use_new_agc_(config.Get<ExperimentalAgc>().enabled),
#endif
      transient_suppressor_enabled_(config.Get<ExperimentalNs>().enabled),
      beamformer_enabled_(config.Get<Beamforming>().enabled),
      beamformer_(beamformer),
      array_geometry_(config.Get<Beamforming>().array_geometry),
      supports_48kHz_(config.Get<AudioProcessing48kHzSupport>().enabled) {
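  // Construct each processing component and register it in component_list_,
  // which InitializeLocked() and SetExtraOptions() iterate over.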
  echo_cancellation_ = new EchoCancellationImpl(this, crit_);
  component_list_.push_back(echo_cancellation_);

  echo_control_mobile_ = new EchoControlMobileImpl(this, crit_);
  component_list_.push_back(echo_control_mobile_);

  gain_control_ = new GainControlImpl(this, crit_);
  component_list_.push_back(gain_control_);

  high_pass_filter_ = new HighPassFilterImpl(this, crit_);
  component_list_.push_back(high_pass_filter_);

  level_estimator_ = new LevelEstimatorImpl(this, crit_);
  component_list_.push_back(level_estimator_);

  noise_suppression_ = new NoiseSuppressionImpl(this, crit_);
  component_list_.push_back(noise_suppression_);

  voice_detection_ = new VoiceDetectionImpl(this, crit_);
  component_list_.push_back(voice_detection_);

  gain_control_for_new_agc_.reset(new GainControlForNewAgc(gain_control_));

  SetExtraOptions(config);
}

AudioProcessingImpl::~AudioProcessingImpl() {
  {
    CriticalSectionScoped crit_scoped(crit_);
    // Depends on gain_control_ and gain_control_for_new_agc_.
    agc_manager_.reset();
    // Depends on gain_control_.
    gain_control_for_new_agc_.reset();
    while (!component_list_.empty()) {
      ProcessingComponent* component = component_list_.front();
      component->Destroy();
      delete component;
      component_list_.pop_front();
    }

#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
    if (debug_file_->Open()) {
      debug_file_->CloseFile();
    }
#endif
  }
  delete crit_;
  crit_ = NULL;
}

int AudioProcessingImpl::Initialize() {
  CriticalSectionScoped crit_scoped(crit_);
  return InitializeLocked();
}

int AudioProcessingImpl::set_sample_rate_hz(int rate) {
  CriticalSectionScoped crit_scoped(crit_);
  return InitializeLocked(rate,
                          rate,
                          rev_in_format_.rate(),
                          fwd_in_format_.num_channels(),
                          fwd_out_format_.num_channels(),
                          rev_in_format_.num_channels());
}

int AudioProcessingImpl::Initialize(int input_sample_rate_hz,
                                    int output_sample_rate_hz,
                                    int reverse_sample_rate_hz,
                                    ChannelLayout input_layout,
                                    ChannelLayout output_layout,
                                    ChannelLayout reverse_layout) {
  CriticalSectionScoped crit_scoped(crit_);
  return InitializeLocked(input_sample_rate_hz,
                          output_sample_rate_hz,
                          reverse_sample_rate_hz,
                          ChannelsFromLayout(input_layout),
                          ChannelsFromLayout(output_layout),
                          ChannelsFromLayout(reverse_layout));
}

int AudioProcessingImpl::InitializeLocked() {
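  // When the beamformer is enabled, the capture buffer must carry all input
  // channels: the beamformer mixes them down to a single channel during
  // processing (see ProcessStreamLocked()).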
  const int fwd_audio_buffer_channels = beamformer_enabled_ ?
                                        fwd_in_format_.num_channels() :
                                        fwd_out_format_.num_channels();
  render_audio_.reset(new AudioBuffer(rev_in_format_.samples_per_channel(),
                                      rev_in_format_.num_channels(),
                                      rev_proc_format_.samples_per_channel(),
                                      rev_proc_format_.num_channels(),
                                      rev_proc_format_.samples_per_channel()));
  capture_audio_.reset(new AudioBuffer(fwd_in_format_.samples_per_channel(),
                                       fwd_in_format_.num_channels(),
                                       fwd_proc_format_.samples_per_channel(),
                                       fwd_audio_buffer_channels,
                                       fwd_out_format_.samples_per_channel()));

  // Initialize all components.
  for (auto item : component_list_) {
    int err = item->Initialize();
    if (err != kNoError) {
      return err;
    }
  }

  int err = InitializeExperimentalAgc();
  if (err != kNoError) {
    return err;
  }

  err = InitializeTransient();
  if (err != kNoError) {
    return err;
  }

  InitializeBeamformer();

#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
  if (debug_file_->Open()) {
    int err = WriteInitMessage();
    if (err != kNoError) {
      return err;
    }
  }
#endif

  return kNoError;
}

int AudioProcessingImpl::InitializeLocked(int input_sample_rate_hz,
                                          int output_sample_rate_hz,
                                          int reverse_sample_rate_hz,
                                          int num_input_channels,
                                          int num_output_channels,
                                          int num_reverse_channels) {
  if (input_sample_rate_hz <= 0 ||
      output_sample_rate_hz <= 0 ||
      reverse_sample_rate_hz <= 0) {
    return kBadSampleRateError;
  }
  if (num_output_channels > num_input_channels) {
    return kBadNumberChannelsError;
  }
  // Only mono and stereo supported currently.
  if (num_input_channels > 2 || num_input_channels < 1 ||
      num_output_channels > 2 || num_output_channels < 1 ||
      num_reverse_channels > 2 || num_reverse_channels < 1) {
    return kBadNumberChannelsError;
  }
  if (beamformer_enabled_ &&
      (static_cast<size_t>(num_input_channels) != array_geometry_.size() ||
       num_output_channels > 1)) {
    return kBadNumberChannelsError;
  }

  fwd_in_format_.set(input_sample_rate_hz, num_input_channels);
  fwd_out_format_.set(output_sample_rate_hz, num_output_channels);
  rev_in_format_.set(reverse_sample_rate_hz, num_reverse_channels);

  // We process at the closest native rate >= min(input rate, output rate)...
  int min_proc_rate = std::min(fwd_in_format_.rate(), fwd_out_format_.rate());
  int fwd_proc_rate;
  if (supports_48kHz_ && min_proc_rate > kSampleRate32kHz) {
    fwd_proc_rate = kSampleRate48kHz;
  } else if (min_proc_rate > kSampleRate16kHz) {
    fwd_proc_rate = kSampleRate32kHz;
  } else if (min_proc_rate > kSampleRate8kHz) {
    fwd_proc_rate = kSampleRate16kHz;
  } else {
    fwd_proc_rate = kSampleRate8kHz;
  }
  // ...with one exception: AECM only runs at 8 and 16 kHz, so cap the
  // processing rate at 16 kHz whenever it is enabled.
  if (echo_control_mobile_->is_enabled() && min_proc_rate > kSampleRate16kHz) {
    fwd_proc_rate = kSampleRate16kHz;
  }

  fwd_proc_format_.set(fwd_proc_rate);

  // We normally process the reverse stream at 16 kHz. Unless...
  int rev_proc_rate = kSampleRate16kHz;
  if (fwd_proc_format_.rate() == kSampleRate8kHz) {
    // ...the forward stream is at 8 kHz.
    rev_proc_rate = kSampleRate8kHz;
  } else {
    if (rev_in_format_.rate() == kSampleRate32kHz) {
      // ...or the input is at 32 kHz, in which case we use the splitting
      // filter rather than the resampler.
      rev_proc_rate = kSampleRate32kHz;
    }
  }

  // Always downmix the reverse stream to mono for analysis. This has been
  // demonstrated to work well for AEC in most practical scenarios.
  rev_proc_format_.set(rev_proc_rate, 1);

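  // Super-wideband rates are handled as 16 kHz bands (two bands at 32 kHz,
  // three at 48 kHz); split_rate_ is the per-band rate used by the band-based
  // components.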
  if (fwd_proc_format_.rate() == kSampleRate32kHz ||
      fwd_proc_format_.rate() == kSampleRate48kHz) {
    split_rate_ = kSampleRate16kHz;
  } else {
    split_rate_ = fwd_proc_format_.rate();
  }

  return InitializeLocked();
}

// Calls InitializeLocked() if any of the audio parameters have changed from
// their current values.
int AudioProcessingImpl::MaybeInitializeLocked(int input_sample_rate_hz,
                                               int output_sample_rate_hz,
                                               int reverse_sample_rate_hz,
                                               int num_input_channels,
                                               int num_output_channels,
                                               int num_reverse_channels) {
  if (input_sample_rate_hz == fwd_in_format_.rate() &&
      output_sample_rate_hz == fwd_out_format_.rate() &&
      reverse_sample_rate_hz == rev_in_format_.rate() &&
      num_input_channels == fwd_in_format_.num_channels() &&
      num_output_channels == fwd_out_format_.num_channels() &&
      num_reverse_channels == rev_in_format_.num_channels()) {
    return kNoError;
  }
  return InitializeLocked(input_sample_rate_hz,
                          output_sample_rate_hz,
                          reverse_sample_rate_hz,
                          num_input_channels,
                          num_output_channels,
                          num_reverse_channels);
}

void AudioProcessingImpl::SetExtraOptions(const Config& config) {
  CriticalSectionScoped crit_scoped(crit_);
  for (auto item : component_list_) {
    item->SetExtraOptions(config);
  }

  if (transient_suppressor_enabled_ != config.Get<ExperimentalNs>().enabled) {
    transient_suppressor_enabled_ = config.Get<ExperimentalNs>().enabled;
    InitializeTransient();
  }
}

int AudioProcessingImpl::input_sample_rate_hz() const {
  CriticalSectionScoped crit_scoped(crit_);
  return fwd_in_format_.rate();
}

int AudioProcessingImpl::sample_rate_hz() const {
  CriticalSectionScoped crit_scoped(crit_);
  return fwd_in_format_.rate();
}

int AudioProcessingImpl::proc_sample_rate_hz() const {
  return fwd_proc_format_.rate();
}

int AudioProcessingImpl::proc_split_sample_rate_hz() const {
  return split_rate_;
}

int AudioProcessingImpl::num_reverse_channels() const {
  return rev_proc_format_.num_channels();
}

int AudioProcessingImpl::num_input_channels() const {
  return fwd_in_format_.num_channels();
}

int AudioProcessingImpl::num_output_channels() const {
  return fwd_out_format_.num_channels();
}

void AudioProcessingImpl::set_output_will_be_muted(bool muted) {
  output_will_be_muted_ = muted;
  CriticalSectionScoped lock(crit_);
  if (agc_manager_.get()) {
    agc_manager_->SetCaptureMuted(output_will_be_muted_);
  }
}

bool AudioProcessingImpl::output_will_be_muted() const {
  return output_will_be_muted_;
}

int AudioProcessingImpl::ProcessStream(const float* const* src,
                                       int samples_per_channel,
                                       int input_sample_rate_hz,
                                       ChannelLayout input_layout,
                                       int output_sample_rate_hz,
                                       ChannelLayout output_layout,
                                       float* const* dest) {
  CriticalSectionScoped crit_scoped(crit_);
  if (!src || !dest) {
    return kNullPointerError;
  }

  RETURN_ON_ERR(MaybeInitializeLocked(input_sample_rate_hz,
                                      output_sample_rate_hz,
                                      rev_in_format_.rate(),
                                      ChannelsFromLayout(input_layout),
                                      ChannelsFromLayout(output_layout),
                                      rev_in_format_.num_channels()));
  if (samples_per_channel != fwd_in_format_.samples_per_channel()) {
    return kBadDataLengthError;
  }

#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
  if (debug_file_->Open()) {
    event_msg_->set_type(audioproc::Event::STREAM);
    audioproc::Stream* msg = event_msg_->mutable_stream();
    const size_t channel_size =
        sizeof(float) * fwd_in_format_.samples_per_channel();
    for (int i = 0; i < fwd_in_format_.num_channels(); ++i)
      msg->add_input_channel(src[i], channel_size);
  }
#endif

  capture_audio_->CopyFrom(src, samples_per_channel, input_layout);
  RETURN_ON_ERR(ProcessStreamLocked());
  capture_audio_->CopyTo(fwd_out_format_.samples_per_channel(),
                         output_layout,
                         dest);

#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
  if (debug_file_->Open()) {
    audioproc::Stream* msg = event_msg_->mutable_stream();
    const size_t channel_size =
        sizeof(float) * fwd_out_format_.samples_per_channel();
    for (int i = 0; i < fwd_out_format_.num_channels(); ++i)
      msg->add_output_channel(dest[i], channel_size);
    RETURN_ON_ERR(WriteMessageToDebugFile());
  }
#endif

  return kNoError;
}

int AudioProcessingImpl::ProcessStream(AudioFrame* frame) {
  CriticalSectionScoped crit_scoped(crit_);
  if (!frame) {
    return kNullPointerError;
  }
  // Must be a native rate.
  if (frame->sample_rate_hz_ != kSampleRate8kHz &&
      frame->sample_rate_hz_ != kSampleRate16kHz &&
      frame->sample_rate_hz_ != kSampleRate32kHz &&
      frame->sample_rate_hz_ != kSampleRate48kHz) {
    return kBadSampleRateError;
  }
  if (echo_control_mobile_->is_enabled() &&
      frame->sample_rate_hz_ > kSampleRate16kHz) {
    LOG(LS_ERROR) << "AECM only supports 16 or 8 kHz sample rates";
    return kUnsupportedComponentError;
  }

  // TODO(ajm): The input and output rates and channels are currently
  // constrained to be identical in the int16 interface.
  RETURN_ON_ERR(MaybeInitializeLocked(frame->sample_rate_hz_,
                                      frame->sample_rate_hz_,
                                      rev_in_format_.rate(),
                                      frame->num_channels_,
                                      frame->num_channels_,
                                      rev_in_format_.num_channels()));
  if (frame->samples_per_channel_ != fwd_in_format_.samples_per_channel()) {
    return kBadDataLengthError;
  }

#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
  if (debug_file_->Open()) {
    event_msg_->set_type(audioproc::Event::STREAM);
    audioproc::Stream* msg = event_msg_->mutable_stream();
    const size_t data_size = sizeof(int16_t) *
                             frame->samples_per_channel_ *
                             frame->num_channels_;
    msg->set_input_data(frame->data_, data_size);
  }
#endif

  capture_audio_->DeinterleaveFrom(frame);
  RETURN_ON_ERR(ProcessStreamLocked());
  capture_audio_->InterleaveTo(frame, output_copy_needed(is_data_processed()));

#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
  if (debug_file_->Open()) {
    audioproc::Stream* msg = event_msg_->mutable_stream();
    const size_t data_size = sizeof(int16_t) *
                             frame->samples_per_channel_ *
                             frame->num_channels_;
    msg->set_output_data(frame->data_, data_size);
    RETURN_ON_ERR(WriteMessageToDebugFile());
  }
#endif

  return kNoError;
}

int AudioProcessingImpl::ProcessStreamLocked() {
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
  if (debug_file_->Open()) {
    audioproc::Stream* msg = event_msg_->mutable_stream();
    msg->set_delay(stream_delay_ms_);
    msg->set_drift(echo_cancellation_->stream_drift_samples());
    msg->set_level(gain_control()->stream_analog_level());
    msg->set_keypress(key_pressed_);
  }
#endif

  AudioBuffer* ca = capture_audio_.get();  // For brevity.
  if (use_new_agc_ && gain_control_->is_enabled()) {
    agc_manager_->AnalyzePreProcess(ca->channels()[0],
                                    ca->num_channels(),
                                    fwd_proc_format_.samples_per_channel());
  }

  bool data_processed = is_data_processed();
  if (analysis_needed(data_processed)) {
    ca->SplitIntoFrequencyBands();
  }

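  // When compiled in and enabled, the beamformer combines the split-band
  // input channels into a single enhanced channel, so the components below
  // operate on mono audio.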
#ifdef WEBRTC_BEAMFORMER
  if (beamformer_enabled_) {
    beamformer_->ProcessChunk(ca->split_data_f(), ca->split_data_f());
    ca->set_num_channels(1);
  }
#endif

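  // Capture-side chain: high-pass filter, AGC and NS analysis passes, AEC,
  // NS, AECM, voice detection and, further below, gain control.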
  RETURN_ON_ERR(high_pass_filter_->ProcessCaptureAudio(ca));
  RETURN_ON_ERR(gain_control_->AnalyzeCaptureAudio(ca));
  RETURN_ON_ERR(noise_suppression_->AnalyzeCaptureAudio(ca));
  RETURN_ON_ERR(echo_cancellation_->ProcessCaptureAudio(ca));

  if (echo_control_mobile_->is_enabled() && noise_suppression_->is_enabled()) {
    ca->CopyLowPassToReference();
  }
  RETURN_ON_ERR(noise_suppression_->ProcessCaptureAudio(ca));
  RETURN_ON_ERR(echo_control_mobile_->ProcessCaptureAudio(ca));
  RETURN_ON_ERR(voice_detection_->ProcessCaptureAudio(ca));

  if (use_new_agc_ &&
      gain_control_->is_enabled() &&
      (!beamformer_enabled_ || beamformer_->is_target_present())) {
    agc_manager_->Process(ca->split_bands_const(0)[kBand0To8kHz],
                          ca->num_frames_per_band(),
                          split_rate_);
  }
  RETURN_ON_ERR(gain_control_->ProcessCaptureAudio(ca));

  if (synthesis_needed(data_processed)) {
    ca->MergeFrequencyBands();
  }

  // TODO(aluebs): Investigate if the transient suppression placement should be
  // before or after the AGC.
  if (transient_suppressor_enabled_) {
    float voice_probability =
        agc_manager_.get() ? agc_manager_->voice_probability() : 1.f;

    transient_suppressor_->Suppress(ca->channels_f()[0],
                                    ca->num_frames(),
                                    ca->num_channels(),
                                    ca->split_bands_const_f(0)[kBand0To8kHz],
                                    ca->num_frames_per_band(),
                                    ca->keyboard_data(),
                                    ca->num_keyboard_frames(),
                                    voice_probability,
                                    key_pressed_);
  }

  // The level estimator operates on the recombined data.
  RETURN_ON_ERR(level_estimator_->ProcessStream(ca));

  was_stream_delay_set_ = false;
  return kNoError;
}

int AudioProcessingImpl::AnalyzeReverseStream(const float* const* data,
                                              int samples_per_channel,
                                              int sample_rate_hz,
                                              ChannelLayout layout) {
  CriticalSectionScoped crit_scoped(crit_);
  if (data == NULL) {
    return kNullPointerError;
  }

  const int num_channels = ChannelsFromLayout(layout);
  RETURN_ON_ERR(MaybeInitializeLocked(fwd_in_format_.rate(),
                                      fwd_out_format_.rate(),
                                      sample_rate_hz,
                                      fwd_in_format_.num_channels(),
                                      fwd_out_format_.num_channels(),
                                      num_channels));
  if (samples_per_channel != rev_in_format_.samples_per_channel()) {
    return kBadDataLengthError;
  }

#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
  if (debug_file_->Open()) {
    event_msg_->set_type(audioproc::Event::REVERSE_STREAM);
    audioproc::ReverseStream* msg = event_msg_->mutable_reverse_stream();
    const size_t channel_size =
        sizeof(float) * rev_in_format_.samples_per_channel();
    for (int i = 0; i < num_channels; ++i)
      msg->add_channel(data[i], channel_size);
    RETURN_ON_ERR(WriteMessageToDebugFile());
  }
#endif

  render_audio_->CopyFrom(data, samples_per_channel, layout);
  return AnalyzeReverseStreamLocked();
}

int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) {
  CriticalSectionScoped crit_scoped(crit_);
  if (frame == NULL) {
    return kNullPointerError;
  }
  // Must be a native rate.
  if (frame->sample_rate_hz_ != kSampleRate8kHz &&
      frame->sample_rate_hz_ != kSampleRate16kHz &&
      frame->sample_rate_hz_ != kSampleRate32kHz &&
      frame->sample_rate_hz_ != kSampleRate48kHz) {
    return kBadSampleRateError;
  }
  // This interface does not tolerate different forward and reverse rates.
  if (frame->sample_rate_hz_ != fwd_in_format_.rate()) {
    return kBadSampleRateError;
  }

  RETURN_ON_ERR(MaybeInitializeLocked(fwd_in_format_.rate(),
                                      fwd_out_format_.rate(),
                                      frame->sample_rate_hz_,
                                      fwd_in_format_.num_channels(),
                                      fwd_in_format_.num_channels(),
                                      frame->num_channels_));
  if (frame->samples_per_channel_ != rev_in_format_.samples_per_channel()) {
    return kBadDataLengthError;
  }

#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
  if (debug_file_->Open()) {
    event_msg_->set_type(audioproc::Event::REVERSE_STREAM);
    audioproc::ReverseStream* msg = event_msg_->mutable_reverse_stream();
    const size_t data_size = sizeof(int16_t) *
                             frame->samples_per_channel_ *
                             frame->num_channels_;
    msg->set_data(frame->data_, data_size);
    RETURN_ON_ERR(WriteMessageToDebugFile());
  }
#endif

  render_audio_->DeinterleaveFrom(frame);
  return AnalyzeReverseStreamLocked();
}

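// The render (far-end) audio is only analyzed, never modified: it provides
// the reference signal for the echo cancellers and, with the legacy AGC, the
// render-side level analysis.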
int AudioProcessingImpl::AnalyzeReverseStreamLocked() {
  AudioBuffer* ra = render_audio_.get();  // For brevity.
  if (rev_proc_format_.rate() == kSampleRate32kHz) {
    ra->SplitIntoFrequencyBands();
  }

  RETURN_ON_ERR(echo_cancellation_->ProcessRenderAudio(ra));
  RETURN_ON_ERR(echo_control_mobile_->ProcessRenderAudio(ra));
  if (!use_new_agc_) {
    RETURN_ON_ERR(gain_control_->ProcessRenderAudio(ra));
  }

  return kNoError;
}

int AudioProcessingImpl::set_stream_delay_ms(int delay) {
  Error retval = kNoError;
  was_stream_delay_set_ = true;
  delay += delay_offset_ms_;

  if (delay < 0) {
    delay = 0;
    retval = kBadStreamParameterWarning;
  }

  // TODO(ajm): the max is rather arbitrarily chosen; investigate.
  if (delay > 500) {
    delay = 500;
    retval = kBadStreamParameterWarning;
  }

  stream_delay_ms_ = delay;
  return retval;
}

int AudioProcessingImpl::stream_delay_ms() const {
  return stream_delay_ms_;
}

bool AudioProcessingImpl::was_stream_delay_set() const {
  return was_stream_delay_set_;
}

void AudioProcessingImpl::set_stream_key_pressed(bool key_pressed) {
  key_pressed_ = key_pressed;
}

bool AudioProcessingImpl::stream_key_pressed() const {
  return key_pressed_;
}

void AudioProcessingImpl::set_delay_offset_ms(int offset) {
  CriticalSectionScoped crit_scoped(crit_);
  delay_offset_ms_ = offset;
}

int AudioProcessingImpl::delay_offset_ms() const {
  return delay_offset_ms_;
}

int AudioProcessingImpl::StartDebugRecording(
    const char filename[AudioProcessing::kMaxFilenameSize]) {
  CriticalSectionScoped crit_scoped(crit_);
  assert(kMaxFilenameSize == FileWrapper::kMaxFileNameSize);

  if (filename == NULL) {
    return kNullPointerError;
  }

#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
  // Stop any ongoing recording.
  if (debug_file_->Open()) {
    if (debug_file_->CloseFile() == -1) {
      return kFileError;
    }
  }

  if (debug_file_->OpenFile(filename, false) == -1) {
    debug_file_->CloseFile();
    return kFileError;
  }

  int err = WriteInitMessage();
  if (err != kNoError) {
    return err;
  }
  return kNoError;
#else
  return kUnsupportedFunctionError;
#endif  // WEBRTC_AUDIOPROC_DEBUG_DUMP
}

int AudioProcessingImpl::StartDebugRecording(FILE* handle) {
  CriticalSectionScoped crit_scoped(crit_);

  if (handle == NULL) {
    return kNullPointerError;
  }

#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
  // Stop any ongoing recording.
  if (debug_file_->Open()) {
    if (debug_file_->CloseFile() == -1) {
      return kFileError;
    }
  }

  if (debug_file_->OpenFromFileHandle(handle, true, false) == -1) {
    return kFileError;
  }

  int err = WriteInitMessage();
  if (err != kNoError) {
    return err;
  }
  return kNoError;
#else
  return kUnsupportedFunctionError;
#endif  // WEBRTC_AUDIOPROC_DEBUG_DUMP
}

int AudioProcessingImpl::StartDebugRecordingForPlatformFile(
    rtc::PlatformFile handle) {
  FILE* stream = rtc::FdopenPlatformFileForWriting(handle);
  return StartDebugRecording(stream);
}

int AudioProcessingImpl::StopDebugRecording() {
  CriticalSectionScoped crit_scoped(crit_);

#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
  // We just return if recording hasn't started.
  if (debug_file_->Open()) {
    if (debug_file_->CloseFile() == -1) {
      return kFileError;
    }
  }
  return kNoError;
#else
  return kUnsupportedFunctionError;
#endif  // WEBRTC_AUDIOPROC_DEBUG_DUMP
}

EchoCancellation* AudioProcessingImpl::echo_cancellation() const {
  return echo_cancellation_;
}

EchoControlMobile* AudioProcessingImpl::echo_control_mobile() const {
  return echo_control_mobile_;
}

GainControl* AudioProcessingImpl::gain_control() const {
  if (use_new_agc_) {
    return gain_control_for_new_agc_.get();
  }
  return gain_control_;
}

HighPassFilter* AudioProcessingImpl::high_pass_filter() const {
  return high_pass_filter_;
}

LevelEstimator* AudioProcessingImpl::level_estimator() const {
  return level_estimator_;
}

NoiseSuppression* AudioProcessingImpl::noise_suppression() const {
  return noise_suppression_;
}

VoiceDetection* AudioProcessingImpl::voice_detection() const {
  return voice_detection_;
}

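// The helpers below decide whether the capture data is actually modified, and
// therefore whether it needs to be copied back, band-split, or recombined.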
bool AudioProcessingImpl::is_data_processed() const {
  if (beamformer_enabled_) {
    return true;
  }

  int enabled_count = 0;
  for (auto item : component_list_) {
    if (item->is_component_enabled()) {
      enabled_count++;
    }
  }

  // Data is unchanged if no components are enabled, or if only
  // level_estimator_ or voice_detection_ is enabled.
  if (enabled_count == 0) {
    return false;
  } else if (enabled_count == 1) {
    if (level_estimator_->is_enabled() || voice_detection_->is_enabled()) {
      return false;
    }
  } else if (enabled_count == 2) {
    if (level_estimator_->is_enabled() && voice_detection_->is_enabled()) {
      return false;
    }
  }
  return true;
}

bool AudioProcessingImpl::output_copy_needed(bool is_data_processed) const {
  // A copy back to the output is needed if the channel count changed, if any
  // enabled component modified the data, or if the transient suppressor will
  // modify it.
  return ((fwd_out_format_.num_channels() != fwd_in_format_.num_channels()) ||
          is_data_processed || transient_suppressor_enabled_);
}

bool AudioProcessingImpl::synthesis_needed(bool is_data_processed) const {
  return (is_data_processed && (fwd_proc_format_.rate() == kSampleRate32kHz ||
                                fwd_proc_format_.rate() == kSampleRate48kHz));
}

bool AudioProcessingImpl::analysis_needed(bool is_data_processed) const {
  if (!is_data_processed && !voice_detection_->is_enabled() &&
      !transient_suppressor_enabled_) {
    // Only level_estimator_ is enabled.
    return false;
  } else if (fwd_proc_format_.rate() == kSampleRate32kHz ||
             fwd_proc_format_.rate() == kSampleRate48kHz) {
    // Something besides level_estimator_ is enabled, and we have super-wb.
    return true;
  }
  return false;
}

int AudioProcessingImpl::InitializeExperimentalAgc() {
  if (use_new_agc_) {
    if (!agc_manager_.get()) {
      agc_manager_.reset(
          new AgcManagerDirect(gain_control_, gain_control_for_new_agc_.get()));
    }
    agc_manager_->Initialize();
    agc_manager_->SetCaptureMuted(output_will_be_muted_);
  }
  return kNoError;
}

int AudioProcessingImpl::InitializeTransient() {
  if (transient_suppressor_enabled_) {
    if (!transient_suppressor_.get()) {
      transient_suppressor_.reset(new TransientSuppressor());
    }
    transient_suppressor_->Initialize(fwd_proc_format_.rate(),
                                      split_rate_,
                                      fwd_out_format_.num_channels());
  }
  return kNoError;
}

void AudioProcessingImpl::InitializeBeamformer() {
  if (beamformer_enabled_) {
#ifdef WEBRTC_BEAMFORMER
    if (!beamformer_) {
      beamformer_.reset(new NonlinearBeamformer(array_geometry_));
    }
    beamformer_->Initialize(kChunkSizeMs, split_rate_);
#else
    assert(false);
#endif
  }
}

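// Debug-dump helpers: each recorded event is written to the debug file as a
// length-prefixed audioproc protobuf message.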
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
int AudioProcessingImpl::WriteMessageToDebugFile() {
  int32_t size = event_msg_->ByteSize();
  if (size <= 0) {
    return kUnspecifiedError;
  }
#if defined(WEBRTC_ARCH_BIG_ENDIAN)
  // TODO(ajm): Use little-endian "on the wire". For the moment, we can be
  // pretty safe in assuming little-endian.
#endif

  if (!event_msg_->SerializeToString(&event_str_)) {
    return kUnspecifiedError;
  }

  // Write message preceded by its size.
  if (!debug_file_->Write(&size, sizeof(int32_t))) {
    return kFileError;
  }
  if (!debug_file_->Write(event_str_.data(), event_str_.length())) {
    return kFileError;
  }

  event_msg_->Clear();

  return kNoError;
}

int AudioProcessingImpl::WriteInitMessage() {
  event_msg_->set_type(audioproc::Event::INIT);
  audioproc::Init* msg = event_msg_->mutable_init();
  msg->set_sample_rate(fwd_in_format_.rate());
  msg->set_num_input_channels(fwd_in_format_.num_channels());
  msg->set_num_output_channels(fwd_out_format_.num_channels());
  msg->set_num_reverse_channels(rev_in_format_.num_channels());
  msg->set_reverse_sample_rate(rev_in_format_.rate());
  msg->set_output_sample_rate(fwd_out_format_.rate());

  int err = WriteMessageToDebugFile();
  if (err != kNoError) {
    return err;
  }

  return kNoError;
}
#endif  // WEBRTC_AUDIOPROC_DEBUG_DUMP

}  // namespace webrtc