/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
| 10 | |
bjornv@webrtc.org | 21a2fc9 | 2013-02-15 17:01:03 +0000 | [diff] [blame] | 11 | #include "webrtc/modules/audio_processing/echo_cancellation_impl.h" |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 12 | |
bjornv@webrtc.org | 21a2fc9 | 2013-02-15 17:01:03 +0000 | [diff] [blame] | 13 | #include <assert.h> |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 14 | #include <string.h> |
| 15 | |
andrew@webrtc.org | 1760a17 | 2013-09-25 23:17:38 +0000 | [diff] [blame] | 16 | extern "C" { |
| 17 | #include "webrtc/modules/audio_processing/aec/aec_core.h" |
| 18 | } |
Henrik Kjellander | 9b72af9 | 2015-11-11 20:16:11 +0100 | [diff] [blame] | 19 | #include "webrtc/modules/audio_processing/aec/echo_cancellation.h" |
bjornv@webrtc.org | 21a2fc9 | 2013-02-15 17:01:03 +0000 | [diff] [blame] | 20 | #include "webrtc/modules/audio_processing/audio_buffer.h" |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 21 | |
namespace webrtc {

// Opaque alias for the underlying C AEC instance; created via
// WebRtcAec_Create() and released via WebRtcAec_Free() (see
// CreateHandle()/DestroyHandle() below).
typedef void Handle;

namespace {
pbos@webrtc.org | b7192b8 | 2013-04-10 07:50:54 +0000 | [diff] [blame] | 27 | int16_t MapSetting(EchoCancellation::SuppressionLevel level) { |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 28 | switch (level) { |
| 29 | case EchoCancellation::kLowSuppression: |
| 30 | return kAecNlpConservative; |
| 31 | case EchoCancellation::kModerateSuppression: |
| 32 | return kAecNlpModerate; |
| 33 | case EchoCancellation::kHighSuppression: |
| 34 | return kAecNlpAggressive; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 35 | } |
andrew@webrtc.org | 648af74 | 2012-02-08 01:57:29 +0000 | [diff] [blame] | 36 | assert(false); |
mflodman@webrtc.org | 657b2a4 | 2012-02-06 11:06:01 +0000 | [diff] [blame] | 37 | return -1; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 38 | } |
| 39 | |
andrew@webrtc.org | 648af74 | 2012-02-08 01:57:29 +0000 | [diff] [blame] | 40 | AudioProcessing::Error MapError(int err) { |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 41 | switch (err) { |
| 42 | case AEC_UNSUPPORTED_FUNCTION_ERROR: |
| 43 | return AudioProcessing::kUnsupportedFunctionError; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 44 | case AEC_BAD_PARAMETER_ERROR: |
| 45 | return AudioProcessing::kBadParameterError; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 46 | case AEC_BAD_PARAMETER_WARNING: |
| 47 | return AudioProcessing::kBadStreamParameterWarning; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 48 | default: |
| 49 | // AEC_UNSPECIFIED_ERROR |
| 50 | // AEC_UNINITIALIZED_ERROR |
| 51 | // AEC_NULL_POINTER_ERROR |
| 52 | return AudioProcessing::kUnspecifiedError; |
| 53 | } |
| 54 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 55 | |
// Maximum length that a frame of samples can have. Together with the
// number of AEC handles this bounds the element size of the render
// SwapQueue (see AllocateRenderQueue()).
static const size_t kMaxAllowedValuesOfSamplesPerFrame = 160;
// Maximum number of frames to buffer in the render queue.
// TODO(peah): Decrease this once we properly handle hugely unbalanced
// reverse and forward call numbers.
static const size_t kMaxNumFramesToBuffer = 100;
}  // namespace
peah | fa6228e | 2015-11-16 16:27:42 -0800 | [diff] [blame] | 63 | |
// All three pointers are borrowed, not owned; they are assumed to
// outlive this object (TODO confirm against the owning APM). The render
// and capture locks serialize the two audio paths independently.
EchoCancellationImpl::EchoCancellationImpl(const AudioProcessing* apm,
                                           rtc::CriticalSection* crit_render,
                                           rtc::CriticalSection* crit_capture)
    : ProcessingComponent(),
      apm_(apm),
      crit_render_(crit_render),
      crit_capture_(crit_capture),
      drift_compensation_enabled_(false),
      metrics_enabled_(false),
      suppression_level_(kModerateSuppression),
      stream_drift_samples_(0),
      was_stream_drift_set_(false),
      stream_has_echo_(false),
      delay_logging_enabled_(false),
      extended_filter_enabled_(false),
      delay_agnostic_enabled_(false),
      // 0 forces AllocateRenderQueue() to size the queue on first use.
      render_queue_element_max_size_(0) {
  RTC_DCHECK(apm);
  RTC_DCHECK(crit_render);
  RTC_DCHECK(crit_capture);
}
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 85 | |
| 86 | EchoCancellationImpl::~EchoCancellationImpl() {} |
| 87 | |
| 88 | int EchoCancellationImpl::ProcessRenderAudio(const AudioBuffer* audio) { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 89 | rtc::CritScope cs_render(crit_render_); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 90 | if (!is_component_enabled()) { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 91 | return AudioProcessing::kNoError; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 92 | } |
| 93 | |
aluebs@webrtc.org | d35a5c3 | 2015-02-10 22:52:15 +0000 | [diff] [blame] | 94 | assert(audio->num_frames_per_band() <= 160); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 95 | assert(audio->num_channels() == apm_->num_reverse_channels()); |
| 96 | |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 97 | int err = AudioProcessing::kNoError; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 98 | |
| 99 | // The ordering convention must be followed to pass to the correct AEC. |
| 100 | size_t handle_index = 0; |
peah | fa6228e | 2015-11-16 16:27:42 -0800 | [diff] [blame] | 101 | render_queue_buffer_.clear(); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 102 | for (int i = 0; i < apm_->num_output_channels(); i++) { |
| 103 | for (int j = 0; j < audio->num_channels(); j++) { |
| 104 | Handle* my_handle = static_cast<Handle*>(handle(handle_index)); |
peah | fa6228e | 2015-11-16 16:27:42 -0800 | [diff] [blame] | 105 | // Retrieve any error code produced by the buffering of the farend |
| 106 | // signal |
| 107 | err = WebRtcAec_GetBufferFarendError( |
| 108 | my_handle, audio->split_bands_const_f(j)[kBand0To8kHz], |
Peter Kasting | dce40cf | 2015-08-24 14:52:23 -0700 | [diff] [blame] | 109 | audio->num_frames_per_band()); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 110 | |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 111 | if (err != AudioProcessing::kNoError) { |
peah | c12be39 | 2015-11-09 23:53:50 -0800 | [diff] [blame] | 112 | return MapError(err); // TODO(ajm): warning possible? |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 113 | } |
| 114 | |
peah | fa6228e | 2015-11-16 16:27:42 -0800 | [diff] [blame] | 115 | // Buffer the samples in the render queue. |
| 116 | render_queue_buffer_.insert(render_queue_buffer_.end(), |
| 117 | audio->split_bands_const_f(j)[kBand0To8kHz], |
| 118 | (audio->split_bands_const_f(j)[kBand0To8kHz] + |
| 119 | audio->num_frames_per_band())); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 120 | } |
| 121 | } |
| 122 | |
peah | fa6228e | 2015-11-16 16:27:42 -0800 | [diff] [blame] | 123 | // Insert the samples into the queue. |
| 124 | if (!render_signal_queue_->Insert(&render_queue_buffer_)) { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 125 | // The data queue is full and needs to be emptied. |
peah | fa6228e | 2015-11-16 16:27:42 -0800 | [diff] [blame] | 126 | ReadQueuedRenderData(); |
| 127 | |
| 128 | // Retry the insert (should always work). |
| 129 | RTC_DCHECK_EQ(render_signal_queue_->Insert(&render_queue_buffer_), true); |
| 130 | } |
| 131 | |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 132 | return AudioProcessing::kNoError; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 133 | } |
| 134 | |
// Read chunks of data that were received and queued on the render side from
// a queue. All the data chunks are buffered into the farend signal of the AEC.
// Runs on the capture path; drains the queue completely on each call.
void EchoCancellationImpl::ReadQueuedRenderData() {
  rtc::CritScope cs_capture(crit_capture_);
  if (!is_component_enabled()) {
    return;
  }

  while (render_signal_queue_->Remove(&capture_queue_buffer_)) {
    size_t handle_index = 0;
    int buffer_index = 0;
    // Each queue element holds one band frame per
    // (output channel, reverse channel) pair, laid out consecutively,
    // so the per-band frame length is recovered by division.
    const int num_frames_per_band =
        capture_queue_buffer_.size() /
        (apm_->num_output_channels() * apm_->num_reverse_channels());
    for (int i = 0; i < apm_->num_output_channels(); i++) {
      for (int j = 0; j < apm_->num_reverse_channels(); j++) {
        // Feed each AEC handle its slice of the buffered farend signal.
        Handle* my_handle = static_cast<Handle*>(handle(handle_index));
        WebRtcAec_BufferFarend(my_handle, &capture_queue_buffer_[buffer_index],
                               num_frames_per_band);

        buffer_index += num_frames_per_band;
        handle_index++;
      }
    }
  }
}
| 161 | |
// Runs echo cancellation on the capture (near-end) signal in-place via
// WebRtcAec_Process, one AEC handle per (capture, reverse) channel pair.
// Requires the stream delay (and, if drift compensation is on, the drift)
// to have been set for this frame. Updates stream_has_echo_ from the
// per-handle echo status. Returns an AudioProcessing error code.
int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) {
  rtc::CritScope cs_capture(crit_capture_);
  if (!is_component_enabled()) {
    return AudioProcessing::kNoError;
  }

  if (!apm_->was_stream_delay_set()) {
    return AudioProcessing::kStreamParameterNotSetError;
  }

  if (drift_compensation_enabled_ && !was_stream_drift_set_) {
    return AudioProcessing::kStreamParameterNotSetError;
  }

  assert(audio->num_frames_per_band() <= 160);
  assert(audio->num_channels() == apm_->num_output_channels());

  int err = AudioProcessing::kNoError;

  // The ordering convention must be followed to pass to the correct AEC.
  size_t handle_index = 0;
  stream_has_echo_ = false;
  for (int i = 0; i < audio->num_channels(); i++) {
    for (int j = 0; j < apm_->num_reverse_channels(); j++) {
      Handle* my_handle = handle(handle_index);
      err = WebRtcAec_Process(my_handle, audio->split_bands_const_f(i),
                              audio->num_bands(), audio->split_bands_f(i),
                              audio->num_frames_per_band(),
                              apm_->stream_delay_ms(), stream_drift_samples_);

      if (err != AudioProcessing::kNoError) {
        err = MapError(err);
        // TODO(ajm): Figure out how to return warnings properly.
        // Only hard errors abort; a bad-stream-parameter warning lets
        // processing continue for the remaining handles.
        if (err != AudioProcessing::kBadStreamParameterWarning) {
          return err;
        }
      }

      int status = 0;
      err = WebRtcAec_get_echo_status(my_handle, &status);
      if (err != AudioProcessing::kNoError) {
        return MapError(err);
      }

      // Any single handle reporting echo marks the whole stream.
      if (status == 1) {
        stream_has_echo_ = true;
      }

      handle_index++;
    }
  }

  // Drift must be re-supplied before the next capture frame.
  was_stream_drift_set_ = false;
  return AudioProcessing::kNoError;
}
| 217 | |
// Enables or disables the AEC component. Fails if the mobile AEC (AECM)
// is already enabled, since the two cancellers are mutually exclusive.
int EchoCancellationImpl::Enable(bool enable) {
  // Run in a single-threaded manner.
  rtc::CritScope cs_render(crit_render_);
  rtc::CritScope cs_capture(crit_capture_);
  // Ensure AEC and AECM are not both enabled.
  // The is_enabled call is safe from a deadlock perspective
  // as both locks are already held in the correct order.
  if (enable && apm_->echo_control_mobile()->is_enabled()) {
    return AudioProcessing::kBadParameterError;
  }

  return EnableComponent(enable);
}
| 231 | |
| 232 | bool EchoCancellationImpl::is_enabled() const { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 233 | rtc::CritScope cs(crit_capture_); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 234 | return is_component_enabled(); |
| 235 | } |
| 236 | |
| 237 | int EchoCancellationImpl::set_suppression_level(SuppressionLevel level) { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 238 | { |
| 239 | if (MapSetting(level) == -1) { |
| 240 | return AudioProcessing::kBadParameterError; |
| 241 | } |
| 242 | rtc::CritScope cs(crit_capture_); |
| 243 | suppression_level_ = level; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 244 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 245 | return Configure(); |
| 246 | } |
| 247 | |
| 248 | EchoCancellation::SuppressionLevel EchoCancellationImpl::suppression_level() |
| 249 | const { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 250 | rtc::CritScope cs(crit_capture_); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 251 | return suppression_level_; |
| 252 | } |
| 253 | |
| 254 | int EchoCancellationImpl::enable_drift_compensation(bool enable) { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 255 | { |
| 256 | rtc::CritScope cs(crit_capture_); |
| 257 | drift_compensation_enabled_ = enable; |
| 258 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 259 | return Configure(); |
| 260 | } |
| 261 | |
| 262 | bool EchoCancellationImpl::is_drift_compensation_enabled() const { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 263 | rtc::CritScope cs(crit_capture_); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 264 | return drift_compensation_enabled_; |
| 265 | } |
| 266 | |
andrew@webrtc.org | 6be1e93 | 2013-03-01 18:47:28 +0000 | [diff] [blame] | 267 | void EchoCancellationImpl::set_stream_drift_samples(int drift) { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 268 | rtc::CritScope cs(crit_capture_); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 269 | was_stream_drift_set_ = true; |
| 270 | stream_drift_samples_ = drift; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 271 | } |
| 272 | |
| 273 | int EchoCancellationImpl::stream_drift_samples() const { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 274 | rtc::CritScope cs(crit_capture_); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 275 | return stream_drift_samples_; |
| 276 | } |
| 277 | |
| 278 | int EchoCancellationImpl::enable_metrics(bool enable) { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 279 | { |
| 280 | rtc::CritScope cs(crit_capture_); |
| 281 | metrics_enabled_ = enable; |
| 282 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 283 | return Configure(); |
| 284 | } |
| 285 | |
| 286 | bool EchoCancellationImpl::are_metrics_enabled() const { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 287 | rtc::CritScope cs(crit_capture_); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 288 | return metrics_enabled_; |
| 289 | } |
| 290 | |
// TODO(ajm): we currently just use the metrics from the first AEC. Think more
// about the best way to extend this to multi-channel.
// Copies the echo metrics (RERL, ERL, ERLE, A_NLP) from the first AEC
// handle into |metrics|. Requires both the component and metrics
// collection to be enabled. Returns an AudioProcessing error code.
int EchoCancellationImpl::GetMetrics(Metrics* metrics) {
  rtc::CritScope cs(crit_capture_);
  if (metrics == NULL) {
    return AudioProcessing::kNullPointerError;
  }

  if (!is_component_enabled() || !metrics_enabled_) {
    return AudioProcessing::kNotEnabledError;
  }

  // Zero both structs so any field not filled in below reads as 0.
  AecMetrics my_metrics;
  memset(&my_metrics, 0, sizeof(my_metrics));
  memset(metrics, 0, sizeof(Metrics));

  Handle* my_handle = static_cast<Handle*>(handle(0));
  int err = WebRtcAec_GetMetrics(my_handle, &my_metrics);
  if (err != AudioProcessing::kNoError) {
    return MapError(err);
  }

  metrics->residual_echo_return_loss.instant = my_metrics.rerl.instant;
  metrics->residual_echo_return_loss.average = my_metrics.rerl.average;
  metrics->residual_echo_return_loss.maximum = my_metrics.rerl.max;
  metrics->residual_echo_return_loss.minimum = my_metrics.rerl.min;

  metrics->echo_return_loss.instant = my_metrics.erl.instant;
  metrics->echo_return_loss.average = my_metrics.erl.average;
  metrics->echo_return_loss.maximum = my_metrics.erl.max;
  metrics->echo_return_loss.minimum = my_metrics.erl.min;

  metrics->echo_return_loss_enhancement.instant = my_metrics.erle.instant;
  metrics->echo_return_loss_enhancement.average = my_metrics.erle.average;
  metrics->echo_return_loss_enhancement.maximum = my_metrics.erle.max;
  metrics->echo_return_loss_enhancement.minimum = my_metrics.erle.min;

  metrics->a_nlp.instant = my_metrics.aNlp.instant;
  metrics->a_nlp.average = my_metrics.aNlp.average;
  metrics->a_nlp.maximum = my_metrics.aNlp.max;
  metrics->a_nlp.minimum = my_metrics.aNlp.min;

  return AudioProcessing::kNoError;
}
| 335 | |
| 336 | bool EchoCancellationImpl::stream_has_echo() const { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 337 | rtc::CritScope cs(crit_capture_); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 338 | return stream_has_echo_; |
| 339 | } |
| 340 | |
bjornv@google.com | 1ba3dbe | 2011-10-03 08:18:10 +0000 | [diff] [blame] | 341 | int EchoCancellationImpl::enable_delay_logging(bool enable) { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 342 | { |
| 343 | rtc::CritScope cs(crit_capture_); |
| 344 | delay_logging_enabled_ = enable; |
| 345 | } |
bjornv@google.com | 1ba3dbe | 2011-10-03 08:18:10 +0000 | [diff] [blame] | 346 | return Configure(); |
| 347 | } |
| 348 | |
| 349 | bool EchoCancellationImpl::is_delay_logging_enabled() const { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 350 | rtc::CritScope cs(crit_capture_); |
bjornv@google.com | 1ba3dbe | 2011-10-03 08:18:10 +0000 | [diff] [blame] | 351 | return delay_logging_enabled_; |
| 352 | } |
| 353 | |
Minyue | 13b96ba | 2015-10-03 00:39:14 +0200 | [diff] [blame] | 354 | bool EchoCancellationImpl::is_delay_agnostic_enabled() const { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 355 | rtc::CritScope cs(crit_capture_); |
Minyue | 13b96ba | 2015-10-03 00:39:14 +0200 | [diff] [blame] | 356 | return delay_agnostic_enabled_; |
| 357 | } |
| 358 | |
| 359 | bool EchoCancellationImpl::is_extended_filter_enabled() const { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 360 | rtc::CritScope cs(crit_capture_); |
Minyue | 13b96ba | 2015-10-03 00:39:14 +0200 | [diff] [blame] | 361 | return extended_filter_enabled_; |
| 362 | } |
| 363 | |
// TODO(bjornv): How should we handle the multi-channel case?
// Two-argument convenience overload; discards the fraction-of-poor-delays
// statistic.
// NOTE(review): this takes crit_capture_ and then calls the three-argument
// overload, which locks it again — relies on the lock being recursive;
// confirm against rtc::CriticalSection's semantics.
int EchoCancellationImpl::GetDelayMetrics(int* median, int* std) {
  rtc::CritScope cs(crit_capture_);
  float fraction_poor_delays = 0;
  return GetDelayMetrics(median, std, &fraction_poor_delays);
}
| 370 | |
// Reports the AEC delay-estimate statistics (median, standard deviation,
// fraction of poor delays) from the first AEC handle. Requires the
// component and delay logging to be enabled. Returns an AudioProcessing
// error code.
int EchoCancellationImpl::GetDelayMetrics(int* median, int* std,
                                          float* fraction_poor_delays) {
  rtc::CritScope cs(crit_capture_);
  if (median == NULL) {
    return AudioProcessing::kNullPointerError;
  }
  if (std == NULL) {
    return AudioProcessing::kNullPointerError;
  }

  if (!is_component_enabled() || !delay_logging_enabled_) {
    return AudioProcessing::kNotEnabledError;
  }

  Handle* my_handle = static_cast<Handle*>(handle(0));
  const int err =
      WebRtcAec_GetDelayMetrics(my_handle, median, std, fraction_poor_delays);
  if (err != AudioProcessing::kNoError) {
    return MapError(err);
  }

  return AudioProcessing::kNoError;
}
| 394 | |
bjornv@webrtc.org | 91d11b3 | 2013-03-05 16:53:09 +0000 | [diff] [blame] | 395 | struct AecCore* EchoCancellationImpl::aec_core() const { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 396 | rtc::CritScope cs(crit_capture_); |
bjornv@webrtc.org | 91d11b3 | 2013-03-05 16:53:09 +0000 | [diff] [blame] | 397 | if (!is_component_enabled()) { |
| 398 | return NULL; |
| 399 | } |
| 400 | Handle* my_handle = static_cast<Handle*>(handle(0)); |
| 401 | return WebRtcAec_aec_core(my_handle); |
| 402 | } |
| 403 | |
// Re-initializes the component via the ProcessingComponent base, then
// (re)allocates the render queue when the component is enabled. Returns
// an AudioProcessing error code.
int EchoCancellationImpl::Initialize() {
  int err = ProcessingComponent::Initialize();
  {
    // The enabled-state check is guarded by the capture lock; the scope
    // releases it before AllocateRenderQueue(), which takes both locks.
    rtc::CritScope cs(crit_capture_);
    if (err != AudioProcessing::kNoError || !is_component_enabled()) {
      return err;
    }
  }

  AllocateRenderQueue();

  return AudioProcessing::kNoError;
}
| 417 | |
// (Re)allocates the render-signal SwapQueue and its staging buffers.
// Grows the queue element size when the required capacity increases;
// otherwise just flushes any stale queued content.
void EchoCancellationImpl::AllocateRenderQueue() {
  // Element size: one max-length band frame per AEC handle (at least 1 so
  // the queue is always constructible).
  const size_t new_render_queue_element_max_size = std::max<size_t>(
      static_cast<size_t>(1),
      kMaxAllowedValuesOfSamplesPerFrame * num_handles_required());

  rtc::CritScope cs_render(crit_render_);
  rtc::CritScope cs_capture(crit_capture_);

  // Reallocate the queue if the queue item size is too small to fit the
  // data to put in the queue.
  if (render_queue_element_max_size_ < new_render_queue_element_max_size) {
    render_queue_element_max_size_ = new_render_queue_element_max_size;

    std::vector<float> template_queue_element(render_queue_element_max_size_);

    render_signal_queue_.reset(
        new SwapQueue<std::vector<float>, RenderQueueItemVerifier<float>>(
            kMaxNumFramesToBuffer, template_queue_element,
            RenderQueueItemVerifier<float>(render_queue_element_max_size_)));

    // Keep the staging buffers the same size as a queue element.
    render_queue_buffer_.resize(render_queue_element_max_size_);
    capture_queue_buffer_.resize(render_queue_element_max_size_);
  } else {
    render_signal_queue_->Clear();
  }
}
| 444 | |
andrew@webrtc.org | 1760a17 | 2013-09-25 23:17:38 +0000 | [diff] [blame] | 445 | void EchoCancellationImpl::SetExtraOptions(const Config& config) { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 446 | { |
| 447 | rtc::CritScope cs(crit_capture_); |
| 448 | extended_filter_enabled_ = config.Get<ExtendedFilter>().enabled; |
| 449 | delay_agnostic_enabled_ = config.Get<DelayAgnostic>().enabled; |
| 450 | } |
andrew@webrtc.org | 1760a17 | 2013-09-25 23:17:38 +0000 | [diff] [blame] | 451 | Configure(); |
| 452 | } |
| 453 | |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 454 | void* EchoCancellationImpl::CreateHandle() const { |
Bjorn Volcker | 9345e86 | 2015-06-10 21:43:36 +0200 | [diff] [blame] | 455 | return WebRtcAec_Create(); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 456 | } |
| 457 | |
bjornv@webrtc.org | 5964fe0 | 2014-04-22 06:52:28 +0000 | [diff] [blame] | 458 | void EchoCancellationImpl::DestroyHandle(void* handle) const { |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 459 | assert(handle != NULL); |
bjornv@webrtc.org | 5964fe0 | 2014-04-22 06:52:28 +0000 | [diff] [blame] | 460 | WebRtcAec_Free(static_cast<Handle*>(handle)); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 461 | } |
| 462 | |
// Initializes a single AEC handle with the current processing sample rate.
int EchoCancellationImpl::InitializeHandle(void* handle) const {
  // Not locked as it only relies on APM public API which is threadsafe.

  assert(handle != NULL);
  // TODO(ajm): Drift compensation is disabled in practice. If restored, it
  // should be managed internally and not depend on the hardware sample rate.
  // For now, just hardcode a 48 kHz value.
  return WebRtcAec_Init(static_cast<Handle*>(handle),
                        apm_->proc_sample_rate_hz(), 48000);
}
| 473 | |
// Pushes the currently cached settings (metrics, NLP mode, skew/drift,
// delay logging, extended filter, delay-agnostic mode) down into one AEC
// handle. Returns the raw WebRtcAec_set_config result.
int EchoCancellationImpl::ConfigureHandle(void* handle) const {
  rtc::CritScope cs_render(crit_render_);
  rtc::CritScope cs_capture(crit_capture_);
  assert(handle != NULL);
  AecConfig config;
  config.metricsMode = metrics_enabled_;
  config.nlpMode = MapSetting(suppression_level_);
  config.skewMode = drift_compensation_enabled_;
  config.delay_logging = delay_logging_enabled_;
  // These two options are toggled directly on the AEC core rather than
  // through AecConfig.
  WebRtcAec_enable_extended_filter(
      WebRtcAec_aec_core(static_cast<Handle*>(handle)),
      extended_filter_enabled_ ? 1 : 0);
  WebRtcAec_enable_delay_agnostic(
      WebRtcAec_aec_core(static_cast<Handle*>(handle)),
      delay_agnostic_enabled_ ? 1 : 0);
  return WebRtcAec_set_config(static_cast<Handle*>(handle), config);
}
| 491 | |
| 492 | int EchoCancellationImpl::num_handles_required() const { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 493 | // Not locked as it only relies on APM public API which is threadsafe. |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 494 | return apm_->num_output_channels() * |
| 495 | apm_->num_reverse_channels(); |
| 496 | } |
| 497 | |
| 498 | int EchoCancellationImpl::GetHandleError(void* handle) const { |
peah | df3efa8 | 2015-11-28 12:35:15 -0800 | [diff] [blame^] | 499 | // Not locked as it does not rely on anything in the state. |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 500 | assert(handle != NULL); |
peah | c12be39 | 2015-11-09 23:53:50 -0800 | [diff] [blame] | 501 | return AudioProcessing::kUnspecifiedError; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 502 | } |
| 503 | } // namespace webrtc |