/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_processing/echo_cancellation_impl.h"

#include <assert.h>
#include <string.h>

extern "C" {
#include "webrtc/modules/audio_processing/aec/aec_core.h"
}
#include "webrtc/modules/audio_processing/aec/echo_cancellation.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"

namespace webrtc {

typedef void Handle;

namespace {
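// Maps the AudioProcessing suppression level to the corresponding AEC NLP
// mode.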
int16_t MapSetting(EchoCancellation::SuppressionLevel level) {
  switch (level) {
    case EchoCancellation::kLowSuppression:
      return kAecNlpConservative;
    case EchoCancellation::kModerateSuppression:
      return kAecNlpModerate;
    case EchoCancellation::kHighSuppression:
      return kAecNlpAggressive;
  }
  assert(false);
  return -1;
}

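// Maps AEC error codes to the corresponding AudioProcessing error codes.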
AudioProcessing::Error MapError(int err) {
  switch (err) {
    case AEC_UNSUPPORTED_FUNCTION_ERROR:
      return AudioProcessing::kUnsupportedFunctionError;
    case AEC_BAD_PARAMETER_ERROR:
      return AudioProcessing::kBadParameterError;
    case AEC_BAD_PARAMETER_WARNING:
      return AudioProcessing::kBadStreamParameterWarning;
    default:
      // AEC_UNSPECIFIED_ERROR
      // AEC_UNINITIALIZED_ERROR
      // AEC_NULL_POINTER_ERROR
      return AudioProcessing::kUnspecifiedError;
  }
}

// Maximum length that a frame of samples can have.
static const size_t kMaxAllowedValuesOfSamplesPerFrame = 160;
// Maximum number of frames to buffer in the render queue.
// TODO(peah): Decrease this once we properly handle hugely unbalanced
// reverse and forward call numbers.
static const size_t kMaxNumFramesToBuffer = 100;
}  // namespace

EchoCancellationImpl::EchoCancellationImpl(const AudioProcessing* apm,
                                           CriticalSectionWrapper* crit)
    : ProcessingComponent(),
      apm_(apm),
      crit_(crit),
      drift_compensation_enabled_(false),
      metrics_enabled_(false),
      suppression_level_(kModerateSuppression),
      stream_drift_samples_(0),
      was_stream_drift_set_(false),
      stream_has_echo_(false),
      delay_logging_enabled_(false),
      extended_filter_enabled_(false),
      delay_agnostic_enabled_(false),
      render_queue_element_max_size_(0) {}

EchoCancellationImpl::~EchoCancellationImpl() {}

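// Checks for far-end buffering errors reported by each AEC instance and
// queues the lowest split band of the render audio for consumption on the
// capture side.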
int EchoCancellationImpl::ProcessRenderAudio(const AudioBuffer* audio) {
  if (!is_component_enabled()) {
    return apm_->kNoError;
  }

  assert(audio->num_frames_per_band() <= 160);
  assert(audio->num_channels() == apm_->num_reverse_channels());

  int err = apm_->kNoError;

  // The ordering convention must be followed to pass data to the correct AEC.
  size_t handle_index = 0;
  render_queue_buffer_.clear();
  for (int i = 0; i < apm_->num_output_channels(); i++) {
    for (int j = 0; j < audio->num_channels(); j++) {
      Handle* my_handle = static_cast<Handle*>(handle(handle_index));
      // Retrieve any error code produced by the buffering of the far-end
      // signal.
      err = WebRtcAec_GetBufferFarendError(
          my_handle, audio->split_bands_const_f(j)[kBand0To8kHz],
          audio->num_frames_per_band());

      if (err != apm_->kNoError) {
        return MapError(err);  // TODO(ajm): warning possible?
      }

      // Buffer the samples in the render queue.
      render_queue_buffer_.insert(render_queue_buffer_.end(),
                                  audio->split_bands_const_f(j)[kBand0To8kHz],
                                  (audio->split_bands_const_f(j)[kBand0To8kHz] +
                                   audio->num_frames_per_band()));
    }
  }

  // Insert the samples into the queue.
  if (!render_signal_queue_->Insert(&render_queue_buffer_)) {
    ReadQueuedRenderData();

    // Retry the insert (should always work).
    RTC_DCHECK_EQ(render_signal_queue_->Insert(&render_queue_buffer_), true);
  }

  return apm_->kNoError;
}

// Reads chunks of data that were received and queued on the render side from
// the queue and buffers them into the far-end signal of the AEC.
void EchoCancellationImpl::ReadQueuedRenderData() {
  if (!is_component_enabled()) {
    return;
  }

  while (render_signal_queue_->Remove(&capture_queue_buffer_)) {
    size_t handle_index = 0;
    int buffer_index = 0;
    const int num_frames_per_band =
        capture_queue_buffer_.size() /
        (apm_->num_output_channels() * apm_->num_reverse_channels());
    for (int i = 0; i < apm_->num_output_channels(); i++) {
      for (int j = 0; j < apm_->num_reverse_channels(); j++) {
        Handle* my_handle = static_cast<Handle*>(handle(handle_index));
        WebRtcAec_BufferFarend(my_handle, &capture_queue_buffer_[buffer_index],
                               num_frames_per_band);

        buffer_index += num_frames_per_band;
        handle_index++;
      }
    }
  }
}

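// Runs the AEC on the capture-side (near-end) audio. Requires that the stream
// delay has been set and, when drift compensation is enabled, that the stream
// drift has been set for this capture frame.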
int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) {
  if (!is_component_enabled()) {
    return apm_->kNoError;
  }

  if (!apm_->was_stream_delay_set()) {
    return apm_->kStreamParameterNotSetError;
  }

  if (drift_compensation_enabled_ && !was_stream_drift_set_) {
    return apm_->kStreamParameterNotSetError;
  }

  assert(audio->num_frames_per_band() <= 160);
  assert(audio->num_channels() == apm_->num_output_channels());

  int err = apm_->kNoError;

  // The ordering convention must be followed to pass data to the correct AEC.
  size_t handle_index = 0;
  stream_has_echo_ = false;
  for (int i = 0; i < audio->num_channels(); i++) {
    for (int j = 0; j < apm_->num_reverse_channels(); j++) {
      Handle* my_handle = handle(handle_index);
      err = WebRtcAec_Process(
          my_handle,
          audio->split_bands_const_f(i),
          audio->num_bands(),
          audio->split_bands_f(i),
          audio->num_frames_per_band(),
          apm_->stream_delay_ms(),
          stream_drift_samples_);

      if (err != apm_->kNoError) {
        err = MapError(err);
        // TODO(ajm): Figure out how to return warnings properly.
        if (err != apm_->kBadStreamParameterWarning) {
          return err;
        }
      }

      int status = 0;
      err = WebRtcAec_get_echo_status(my_handle, &status);
      if (err != apm_->kNoError) {
        return MapError(err);
      }

      if (status == 1) {
        stream_has_echo_ = true;
      }

      handle_index++;
    }
  }

  was_stream_drift_set_ = false;
  return apm_->kNoError;
}

int EchoCancellationImpl::Enable(bool enable) {
  CriticalSectionScoped crit_scoped(crit_);
  // Ensure AEC and AECM are not both enabled.
  if (enable && apm_->echo_control_mobile()->is_enabled()) {
    return apm_->kBadParameterError;
  }

  return EnableComponent(enable);
}

bool EchoCancellationImpl::is_enabled() const {
  return is_component_enabled();
}

int EchoCancellationImpl::set_suppression_level(SuppressionLevel level) {
  CriticalSectionScoped crit_scoped(crit_);
  if (MapSetting(level) == -1) {
    return apm_->kBadParameterError;
  }

  suppression_level_ = level;
  return Configure();
}

EchoCancellation::SuppressionLevel EchoCancellationImpl::suppression_level()
    const {
  return suppression_level_;
}

int EchoCancellationImpl::enable_drift_compensation(bool enable) {
  CriticalSectionScoped crit_scoped(crit_);
  drift_compensation_enabled_ = enable;
  return Configure();
}

bool EchoCancellationImpl::is_drift_compensation_enabled() const {
  return drift_compensation_enabled_;
}

void EchoCancellationImpl::set_stream_drift_samples(int drift) {
  was_stream_drift_set_ = true;
  stream_drift_samples_ = drift;
}

int EchoCancellationImpl::stream_drift_samples() const {
  return stream_drift_samples_;
}

int EchoCancellationImpl::enable_metrics(bool enable) {
  CriticalSectionScoped crit_scoped(crit_);
  metrics_enabled_ = enable;
  return Configure();
}

bool EchoCancellationImpl::are_metrics_enabled() const {
  return metrics_enabled_;
}

// TODO(ajm): we currently just use the metrics from the first AEC. Think more
// about the best way to extend this to multi-channel.
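// Copies the latest AEC metrics (RERL, ERL, ERLE and A_NLP) into |metrics|.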
int EchoCancellationImpl::GetMetrics(Metrics* metrics) {
  CriticalSectionScoped crit_scoped(crit_);
  if (metrics == NULL) {
    return apm_->kNullPointerError;
  }

  if (!is_component_enabled() || !metrics_enabled_) {
    return apm_->kNotEnabledError;
  }

  AecMetrics my_metrics;
  memset(&my_metrics, 0, sizeof(my_metrics));
  memset(metrics, 0, sizeof(Metrics));

  Handle* my_handle = static_cast<Handle*>(handle(0));
  int err = WebRtcAec_GetMetrics(my_handle, &my_metrics);
  if (err != apm_->kNoError) {
    return MapError(err);
  }

  metrics->residual_echo_return_loss.instant = my_metrics.rerl.instant;
  metrics->residual_echo_return_loss.average = my_metrics.rerl.average;
  metrics->residual_echo_return_loss.maximum = my_metrics.rerl.max;
  metrics->residual_echo_return_loss.minimum = my_metrics.rerl.min;

  metrics->echo_return_loss.instant = my_metrics.erl.instant;
  metrics->echo_return_loss.average = my_metrics.erl.average;
  metrics->echo_return_loss.maximum = my_metrics.erl.max;
  metrics->echo_return_loss.minimum = my_metrics.erl.min;

  metrics->echo_return_loss_enhancement.instant = my_metrics.erle.instant;
  metrics->echo_return_loss_enhancement.average = my_metrics.erle.average;
  metrics->echo_return_loss_enhancement.maximum = my_metrics.erle.max;
  metrics->echo_return_loss_enhancement.minimum = my_metrics.erle.min;

  metrics->a_nlp.instant = my_metrics.aNlp.instant;
  metrics->a_nlp.average = my_metrics.aNlp.average;
  metrics->a_nlp.maximum = my_metrics.aNlp.max;
  metrics->a_nlp.minimum = my_metrics.aNlp.min;

  return apm_->kNoError;
}

bool EchoCancellationImpl::stream_has_echo() const {
  return stream_has_echo_;
}

int EchoCancellationImpl::enable_delay_logging(bool enable) {
  CriticalSectionScoped crit_scoped(crit_);
  delay_logging_enabled_ = enable;
  return Configure();
}

bool EchoCancellationImpl::is_delay_logging_enabled() const {
  return delay_logging_enabled_;
}

bool EchoCancellationImpl::is_delay_agnostic_enabled() const {
  return delay_agnostic_enabled_;
}

bool EchoCancellationImpl::is_extended_filter_enabled() const {
  return extended_filter_enabled_;
}

// TODO(bjornv): How should we handle the multi-channel case?
int EchoCancellationImpl::GetDelayMetrics(int* median, int* std) {
  float fraction_poor_delays = 0;
  return GetDelayMetrics(median, std, &fraction_poor_delays);
}

int EchoCancellationImpl::GetDelayMetrics(int* median, int* std,
                                          float* fraction_poor_delays) {
  CriticalSectionScoped crit_scoped(crit_);
  if (median == NULL) {
    return apm_->kNullPointerError;
  }
  if (std == NULL) {
    return apm_->kNullPointerError;
  }

  if (!is_component_enabled() || !delay_logging_enabled_) {
    return apm_->kNotEnabledError;
  }

  Handle* my_handle = static_cast<Handle*>(handle(0));
  const int err =
      WebRtcAec_GetDelayMetrics(my_handle, median, std, fraction_poor_delays);
  if (err != apm_->kNoError) {
    return MapError(err);
  }

  return apm_->kNoError;
}

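// Returns a pointer to the low-level AEC state of the first handle, or NULL
// if the component is disabled.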
struct AecCore* EchoCancellationImpl::aec_core() const {
  CriticalSectionScoped crit_scoped(crit_);
  if (!is_component_enabled()) {
    return NULL;
  }
  Handle* my_handle = static_cast<Handle*>(handle(0));
  return WebRtcAec_aec_core(my_handle);
}

int EchoCancellationImpl::Initialize() {
  int err = ProcessingComponent::Initialize();
  if (err != apm_->kNoError || !is_component_enabled()) {
    return err;
  }

  AllocateRenderQueue();

  return apm_->kNoError;
}

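// (Re)allocates the render queue so that each queue element can hold one
// frame of the lowest band for every required AEC handle.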
void EchoCancellationImpl::AllocateRenderQueue() {
  const size_t new_render_queue_element_max_size = std::max<size_t>(
      static_cast<size_t>(1),
      kMaxAllowedValuesOfSamplesPerFrame * num_handles_required());

  // Reallocate the queue if the queue item size is too small to hold the
  // data that is to be queued.
  if (render_queue_element_max_size_ < new_render_queue_element_max_size) {
    render_queue_element_max_size_ = new_render_queue_element_max_size;

    std::vector<float> template_queue_element(render_queue_element_max_size_);

    render_signal_queue_.reset(
        new SwapQueue<std::vector<float>, RenderQueueItemVerifier<float>>(
            kMaxNumFramesToBuffer, template_queue_element,
            RenderQueueItemVerifier<float>(render_queue_element_max_size_)));

    render_queue_buffer_.resize(render_queue_element_max_size_);
    capture_queue_buffer_.resize(render_queue_element_max_size_);
  } else {
    render_signal_queue_->Clear();
  }
}

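// Reads the extended filter and delay agnostic settings from the config and
// applies them to the AEC.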
void EchoCancellationImpl::SetExtraOptions(const Config& config) {
  extended_filter_enabled_ = config.Get<ExtendedFilter>().enabled;
  delay_agnostic_enabled_ = config.Get<DelayAgnostic>().enabled;
  Configure();
}

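// Creates the underlying AEC state for a single handle.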
void* EchoCancellationImpl::CreateHandle() const {
  return WebRtcAec_Create();
}

void EchoCancellationImpl::DestroyHandle(void* handle) const {
  assert(handle != NULL);
  WebRtcAec_Free(static_cast<Handle*>(handle));
}

int EchoCancellationImpl::InitializeHandle(void* handle) const {
  assert(handle != NULL);
  // TODO(ajm): Drift compensation is disabled in practice. If restored, it
  // should be managed internally and not depend on the hardware sample rate.
  // For now, just hardcode a 48 kHz value.
  return WebRtcAec_Init(static_cast<Handle*>(handle),
                        apm_->proc_sample_rate_hz(),
                        48000);
}

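// Pushes the current settings (metrics, NLP mode, drift compensation, delay
// logging, extended filter and delay agnostic mode) to a single AEC handle.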
int EchoCancellationImpl::ConfigureHandle(void* handle) const {
  assert(handle != NULL);
  AecConfig config;
  config.metricsMode = metrics_enabled_;
  config.nlpMode = MapSetting(suppression_level_);
  config.skewMode = drift_compensation_enabled_;
  config.delay_logging = delay_logging_enabled_;

  WebRtcAec_enable_extended_filter(
      WebRtcAec_aec_core(static_cast<Handle*>(handle)),
      extended_filter_enabled_ ? 1 : 0);
  WebRtcAec_enable_delay_agnostic(
      WebRtcAec_aec_core(static_cast<Handle*>(handle)),
      delay_agnostic_enabled_ ? 1 : 0);
  return WebRtcAec_set_config(static_cast<Handle*>(handle), config);
}

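// One AEC handle is required per (output channel, reverse channel) pair.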
int EchoCancellationImpl::num_handles_required() const {
  return apm_->num_output_channels() * apm_->num_reverse_channels();
}

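// Handle-specific errors are mapped where they occur; here an unspecified
// error is always reported.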
int EchoCancellationImpl::GetHandleError(void* handle) const {
  assert(handle != NULL);
  return AudioProcessing::kUnspecifiedError;
}
}  // namespace webrtc