/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_processing/echo_cancellation_impl.h"

#include <assert.h>
#include <string.h>

extern "C" {
#include "webrtc/modules/audio_processing/aec/aec_core.h"
}
#include "webrtc/modules/audio_processing/aec/echo_cancellation.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"

namespace webrtc {

typedef void Handle;

namespace {
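// Maps the publicly exposed suppression level to the NLP mode used by the
// underlying AEC; returns -1 for an unknown level.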
int16_t MapSetting(EchoCancellation::SuppressionLevel level) {
  switch (level) {
    case EchoCancellation::kLowSuppression:
      return kAecNlpConservative;
    case EchoCancellation::kModerateSuppression:
      return kAecNlpModerate;
    case EchoCancellation::kHighSuppression:
      return kAecNlpAggressive;
  }
  assert(false);
  return -1;
}

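// Translates error codes reported by the AEC into the corresponding
// AudioProcessing error codes; unrecognized codes map to kUnspecifiedError.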
AudioProcessing::Error MapError(int err) {
  switch (err) {
    case AEC_UNSUPPORTED_FUNCTION_ERROR:
      return AudioProcessing::kUnsupportedFunctionError;
    case AEC_BAD_PARAMETER_ERROR:
      return AudioProcessing::kBadParameterError;
    case AEC_BAD_PARAMETER_WARNING:
      return AudioProcessing::kBadStreamParameterWarning;
    default:
      // AEC_UNSPECIFIED_ERROR
      // AEC_UNINITIALIZED_ERROR
      // AEC_NULL_POINTER_ERROR
      return AudioProcessing::kUnspecifiedError;
  }
}

// Maximum length that a frame of samples can have.
static const size_t kMaxAllowedValuesOfSamplesPerFrame = 160;
// Maximum number of frames to buffer in the render queue.
// TODO(peah): Decrease this once we properly handle hugely unbalanced
// reverse and forward call numbers.
static const size_t kMaxNumFramesToBuffer = 100;
}  // namespace

EchoCancellationImpl::EchoCancellationImpl(const AudioProcessing* apm,
                                           rtc::CriticalSection* crit_render,
                                           rtc::CriticalSection* crit_capture)
    : ProcessingComponent(),
      apm_(apm),
      crit_render_(crit_render),
      crit_capture_(crit_capture),
      drift_compensation_enabled_(false),
      metrics_enabled_(false),
      suppression_level_(kModerateSuppression),
      stream_drift_samples_(0),
      was_stream_drift_set_(false),
      stream_has_echo_(false),
      delay_logging_enabled_(false),
      extended_filter_enabled_(false),
      delay_agnostic_enabled_(false),
      render_queue_element_max_size_(0) {
  RTC_DCHECK(apm);
  RTC_DCHECK(crit_render);
  RTC_DCHECK(crit_capture);
}

EchoCancellationImpl::~EchoCancellationImpl() {}

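// Queues one block of render (far-end) audio. The samples are only buffered
// in the render queue here; they are handed to the AEC on the capture side
// via ReadQueuedRenderData().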
int EchoCancellationImpl::ProcessRenderAudio(const AudioBuffer* audio) {
  rtc::CritScope cs_render(crit_render_);
  if (!is_component_enabled()) {
    return AudioProcessing::kNoError;
  }

  assert(audio->num_frames_per_band() <= 160);
  assert(audio->num_channels() == apm_->num_reverse_channels());

  int err = AudioProcessing::kNoError;

  // The ordering convention must be followed to pass to the correct AEC.
  size_t handle_index = 0;
  render_queue_buffer_.clear();
  for (int i = 0; i < apm_->num_output_channels(); i++) {
    for (int j = 0; j < audio->num_channels(); j++) {
      Handle* my_handle = static_cast<Handle*>(handle(handle_index));
      // Retrieve any error code produced by the buffering of the farend
      // signal.
      err = WebRtcAec_GetBufferFarendError(
          my_handle, audio->split_bands_const_f(j)[kBand0To8kHz],
          audio->num_frames_per_band());

      if (err != AudioProcessing::kNoError) {
        return MapError(err);  // TODO(ajm): warning possible?
      }

      // Buffer the samples in the render queue.
      render_queue_buffer_.insert(render_queue_buffer_.end(),
                                  audio->split_bands_const_f(j)[kBand0To8kHz],
                                  (audio->split_bands_const_f(j)[kBand0To8kHz] +
                                   audio->num_frames_per_band()));
    }
  }

  // Insert the samples into the queue.
  if (!render_signal_queue_->Insert(&render_queue_buffer_)) {
    // The data queue is full and needs to be emptied.
    ReadQueuedRenderData();

    // Retry the insert (should always work).
    RTC_DCHECK_EQ(render_signal_queue_->Insert(&render_queue_buffer_), true);
  }

  return AudioProcessing::kNoError;
}

// Read chunks of data that were received and queued on the render side from
// a queue. All the data chunks are buffered into the farend signal of the AEC.
void EchoCancellationImpl::ReadQueuedRenderData() {
  rtc::CritScope cs_capture(crit_capture_);
  if (!is_component_enabled()) {
    return;
  }

  while (render_signal_queue_->Remove(&capture_queue_buffer_)) {
    size_t handle_index = 0;
    int buffer_index = 0;
    const int num_frames_per_band =
        capture_queue_buffer_.size() /
        (apm_->num_output_channels() * apm_->num_reverse_channels());
    for (int i = 0; i < apm_->num_output_channels(); i++) {
      for (int j = 0; j < apm_->num_reverse_channels(); j++) {
        Handle* my_handle = static_cast<Handle*>(handle(handle_index));
        WebRtcAec_BufferFarend(my_handle, &capture_queue_buffer_[buffer_index],
                               num_frames_per_band);

        buffer_index += num_frames_per_band;
        handle_index++;
      }
    }
  }
}

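// Runs the AEC on one block of capture (near-end) audio for every
// capture/render channel pair and records whether echo was detected in any
// of them. Requires the stream delay (and, if drift compensation is enabled,
// the stream drift) to have been set for this block.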
int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) {
  rtc::CritScope cs_capture(crit_capture_);
  if (!is_component_enabled()) {
    return AudioProcessing::kNoError;
  }

  if (!apm_->was_stream_delay_set()) {
    return AudioProcessing::kStreamParameterNotSetError;
  }

  if (drift_compensation_enabled_ && !was_stream_drift_set_) {
    return AudioProcessing::kStreamParameterNotSetError;
  }

  assert(audio->num_frames_per_band() <= 160);
  assert(audio->num_channels() == apm_->num_output_channels());

  int err = AudioProcessing::kNoError;

  // The ordering convention must be followed to pass to the correct AEC.
  size_t handle_index = 0;
  stream_has_echo_ = false;
  for (int i = 0; i < audio->num_channels(); i++) {
    for (int j = 0; j < apm_->num_reverse_channels(); j++) {
      Handle* my_handle = handle(handle_index);
      err = WebRtcAec_Process(my_handle, audio->split_bands_const_f(i),
                              audio->num_bands(), audio->split_bands_f(i),
                              audio->num_frames_per_band(),
                              apm_->stream_delay_ms(), stream_drift_samples_);

      if (err != AudioProcessing::kNoError) {
        err = MapError(err);
        // TODO(ajm): Figure out how to return warnings properly.
        if (err != AudioProcessing::kBadStreamParameterWarning) {
          return err;
        }
      }

      int status = 0;
      err = WebRtcAec_get_echo_status(my_handle, &status);
      if (err != AudioProcessing::kNoError) {
        return MapError(err);
      }

      if (status == 1) {
        stream_has_echo_ = true;
      }

      handle_index++;
    }
  }

  was_stream_drift_set_ = false;
  return AudioProcessing::kNoError;
}

int EchoCancellationImpl::Enable(bool enable) {
  // Run in a single-threaded manner.
  rtc::CritScope cs_render(crit_render_);
  rtc::CritScope cs_capture(crit_capture_);
  // Ensure AEC and AECM are not both enabled.
  // The is_enabled call is safe from a deadlock perspective
  // as both locks are already held in the correct order.
  if (enable && apm_->echo_control_mobile()->is_enabled()) {
    return AudioProcessing::kBadParameterError;
  }

  return EnableComponent(enable);
}

bool EchoCancellationImpl::is_enabled() const {
  rtc::CritScope cs(crit_capture_);
  return is_component_enabled();
}

int EchoCancellationImpl::set_suppression_level(SuppressionLevel level) {
  {
    if (MapSetting(level) == -1) {
      return AudioProcessing::kBadParameterError;
    }
    rtc::CritScope cs(crit_capture_);
    suppression_level_ = level;
  }
  return Configure();
}

EchoCancellation::SuppressionLevel EchoCancellationImpl::suppression_level()
    const {
  rtc::CritScope cs(crit_capture_);
  return suppression_level_;
}

int EchoCancellationImpl::enable_drift_compensation(bool enable) {
  {
    rtc::CritScope cs(crit_capture_);
    drift_compensation_enabled_ = enable;
  }
  return Configure();
}

bool EchoCancellationImpl::is_drift_compensation_enabled() const {
  rtc::CritScope cs(crit_capture_);
  return drift_compensation_enabled_;
}

void EchoCancellationImpl::set_stream_drift_samples(int drift) {
  rtc::CritScope cs(crit_capture_);
  was_stream_drift_set_ = true;
  stream_drift_samples_ = drift;
}

int EchoCancellationImpl::stream_drift_samples() const {
  rtc::CritScope cs(crit_capture_);
  return stream_drift_samples_;
}

int EchoCancellationImpl::enable_metrics(bool enable) {
  {
    rtc::CritScope cs(crit_capture_);
    metrics_enabled_ = enable;
  }
  return Configure();
}

bool EchoCancellationImpl::are_metrics_enabled() const {
  rtc::CritScope cs(crit_capture_);
  return metrics_enabled_;
}

// TODO(ajm): We currently just use the metrics from the first AEC. Think more
// about the best way to extend this to multi-channel.
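// Copies the echo metrics (RERL, ERL, ERLE and A_NLP) reported by the first
// AEC handle into |metrics|.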
int EchoCancellationImpl::GetMetrics(Metrics* metrics) {
  rtc::CritScope cs(crit_capture_);
  if (metrics == NULL) {
    return AudioProcessing::kNullPointerError;
  }

  if (!is_component_enabled() || !metrics_enabled_) {
    return AudioProcessing::kNotEnabledError;
  }

  AecMetrics my_metrics;
  memset(&my_metrics, 0, sizeof(my_metrics));
  memset(metrics, 0, sizeof(Metrics));

  Handle* my_handle = static_cast<Handle*>(handle(0));
  int err = WebRtcAec_GetMetrics(my_handle, &my_metrics);
  if (err != AudioProcessing::kNoError) {
    return MapError(err);
  }

  metrics->residual_echo_return_loss.instant = my_metrics.rerl.instant;
  metrics->residual_echo_return_loss.average = my_metrics.rerl.average;
  metrics->residual_echo_return_loss.maximum = my_metrics.rerl.max;
  metrics->residual_echo_return_loss.minimum = my_metrics.rerl.min;

  metrics->echo_return_loss.instant = my_metrics.erl.instant;
  metrics->echo_return_loss.average = my_metrics.erl.average;
  metrics->echo_return_loss.maximum = my_metrics.erl.max;
  metrics->echo_return_loss.minimum = my_metrics.erl.min;

  metrics->echo_return_loss_enhancement.instant = my_metrics.erle.instant;
  metrics->echo_return_loss_enhancement.average = my_metrics.erle.average;
  metrics->echo_return_loss_enhancement.maximum = my_metrics.erle.max;
  metrics->echo_return_loss_enhancement.minimum = my_metrics.erle.min;

  metrics->a_nlp.instant = my_metrics.aNlp.instant;
  metrics->a_nlp.average = my_metrics.aNlp.average;
  metrics->a_nlp.maximum = my_metrics.aNlp.max;
  metrics->a_nlp.minimum = my_metrics.aNlp.min;

  return AudioProcessing::kNoError;
}

bool EchoCancellationImpl::stream_has_echo() const {
  rtc::CritScope cs(crit_capture_);
  return stream_has_echo_;
}

int EchoCancellationImpl::enable_delay_logging(bool enable) {
  {
    rtc::CritScope cs(crit_capture_);
    delay_logging_enabled_ = enable;
  }
  return Configure();
}

bool EchoCancellationImpl::is_delay_logging_enabled() const {
  rtc::CritScope cs(crit_capture_);
  return delay_logging_enabled_;
}

bool EchoCancellationImpl::is_delay_agnostic_enabled() const {
  rtc::CritScope cs(crit_capture_);
  return delay_agnostic_enabled_;
}

bool EchoCancellationImpl::is_extended_filter_enabled() const {
  rtc::CritScope cs(crit_capture_);
  return extended_filter_enabled_;
}

// TODO(bjornv): How should we handle the multi-channel case?
int EchoCancellationImpl::GetDelayMetrics(int* median, int* std) {
  rtc::CritScope cs(crit_capture_);
  float fraction_poor_delays = 0;
  return GetDelayMetrics(median, std, &fraction_poor_delays);
}

int EchoCancellationImpl::GetDelayMetrics(int* median, int* std,
                                          float* fraction_poor_delays) {
  rtc::CritScope cs(crit_capture_);
  if (median == NULL) {
    return AudioProcessing::kNullPointerError;
  }
  if (std == NULL) {
    return AudioProcessing::kNullPointerError;
  }

  if (!is_component_enabled() || !delay_logging_enabled_) {
    return AudioProcessing::kNotEnabledError;
  }

  Handle* my_handle = static_cast<Handle*>(handle(0));
  const int err =
      WebRtcAec_GetDelayMetrics(my_handle, median, std, fraction_poor_delays);
  if (err != AudioProcessing::kNoError) {
    return MapError(err);
  }

  return AudioProcessing::kNoError;
}

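// Exposes the underlying AecCore of the first AEC handle; returns NULL when
// the component is disabled.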
struct AecCore* EchoCancellationImpl::aec_core() const {
  rtc::CritScope cs(crit_capture_);
  if (!is_component_enabled()) {
    return NULL;
  }
  Handle* my_handle = static_cast<Handle*>(handle(0));
  return WebRtcAec_aec_core(my_handle);
}

int EchoCancellationImpl::Initialize() {
  int err = ProcessingComponent::Initialize();
  {
    rtc::CritScope cs(crit_capture_);
    if (err != AudioProcessing::kNoError || !is_component_enabled()) {
      return err;
    }
  }

  AllocateRenderQueue();

  return AudioProcessing::kNoError;
}

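// (Re)allocates the queue used to pass render audio to the capture thread.
// The queue is only reallocated when the required element size has grown;
// otherwise the existing queue is cleared.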
void EchoCancellationImpl::AllocateRenderQueue() {
  const size_t new_render_queue_element_max_size = std::max<size_t>(
      static_cast<size_t>(1),
      kMaxAllowedValuesOfSamplesPerFrame * num_handles_required());

  rtc::CritScope cs_render(crit_render_);
  rtc::CritScope cs_capture(crit_capture_);

  // Reallocate the queue if the queue item size is too small to fit the
  // data to put in the queue.
  if (render_queue_element_max_size_ < new_render_queue_element_max_size) {
    render_queue_element_max_size_ = new_render_queue_element_max_size;

    std::vector<float> template_queue_element(render_queue_element_max_size_);

    render_signal_queue_.reset(
        new SwapQueue<std::vector<float>, RenderQueueItemVerifier<float>>(
            kMaxNumFramesToBuffer, template_queue_element,
            RenderQueueItemVerifier<float>(render_queue_element_max_size_)));

    render_queue_buffer_.resize(render_queue_element_max_size_);
    capture_queue_buffer_.resize(render_queue_element_max_size_);
  } else {
    render_signal_queue_->Clear();
  }
}

void EchoCancellationImpl::SetExtraOptions(const Config& config) {
  {
    rtc::CritScope cs(crit_capture_);
    extended_filter_enabled_ = config.Get<ExtendedFilter>().enabled;
    delay_agnostic_enabled_ = config.Get<DelayAgnostic>().enabled;
  }
  Configure();
}

void* EchoCancellationImpl::CreateHandle() const {
  return WebRtcAec_Create();
}

void EchoCancellationImpl::DestroyHandle(void* handle) const {
  assert(handle != NULL);
  WebRtcAec_Free(static_cast<Handle*>(handle));
}

int EchoCancellationImpl::InitializeHandle(void* handle) const {
  // Not locked as it only relies on APM public API which is threadsafe.

  assert(handle != NULL);
  // TODO(ajm): Drift compensation is disabled in practice. If restored, it
  // should be managed internally and not depend on the hardware sample rate.
  // For now, just hardcode a 48 kHz value.
  return WebRtcAec_Init(static_cast<Handle*>(handle),
                        apm_->proc_sample_rate_hz(), 48000);
}

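// Pushes the currently configured settings (metrics, NLP mode, drift
// compensation, delay logging, extended filter and delay-agnostic mode) into
// the given AEC handle.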
int EchoCancellationImpl::ConfigureHandle(void* handle) const {
  rtc::CritScope cs_render(crit_render_);
  rtc::CritScope cs_capture(crit_capture_);
  assert(handle != NULL);
  AecConfig config;
  config.metricsMode = metrics_enabled_;
  config.nlpMode = MapSetting(suppression_level_);
  config.skewMode = drift_compensation_enabled_;
  config.delay_logging = delay_logging_enabled_;
  WebRtcAec_enable_extended_filter(
      WebRtcAec_aec_core(static_cast<Handle*>(handle)),
      extended_filter_enabled_ ? 1 : 0);
  WebRtcAec_enable_delay_agnostic(
      WebRtcAec_aec_core(static_cast<Handle*>(handle)),
      delay_agnostic_enabled_ ? 1 : 0);
  return WebRtcAec_set_config(static_cast<Handle*>(handle), config);
}

int EchoCancellationImpl::num_handles_required() const {
  // Not locked as it only relies on APM public API which is threadsafe.
  return apm_->num_output_channels() * apm_->num_reverse_channels();
}

int EchoCancellationImpl::GetHandleError(void* handle) const {
  // Not locked as it does not rely on anything in the state.
  assert(handle != NULL);
  return AudioProcessing::kUnspecifiedError;
}
}  // namespace webrtc