/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_processing/echo_cancellation_impl.h"

#include <assert.h>
#include <string.h>

#include "webrtc/modules/audio_processing/aec/aec_core.h"
#include "webrtc/modules/audio_processing/aec/echo_cancellation.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"

namespace webrtc {

namespace {
int16_t MapSetting(EchoCancellation::SuppressionLevel level) {
  switch (level) {
    case EchoCancellation::kLowSuppression:
      return kAecNlpConservative;
    case EchoCancellation::kModerateSuppression:
      return kAecNlpModerate;
    case EchoCancellation::kHighSuppression:
      return kAecNlpAggressive;
  }
  assert(false);
  return -1;
}

AudioProcessing::Error MapError(int err) {
  switch (err) {
    case AEC_UNSUPPORTED_FUNCTION_ERROR:
      return AudioProcessing::kUnsupportedFunctionError;
    case AEC_BAD_PARAMETER_ERROR:
      return AudioProcessing::kBadParameterError;
    case AEC_BAD_PARAMETER_WARNING:
      return AudioProcessing::kBadStreamParameterWarning;
    default:
      // AEC_UNSPECIFIED_ERROR
      // AEC_UNINITIALIZED_ERROR
      // AEC_NULL_POINTER_ERROR
      return AudioProcessing::kUnspecifiedError;
  }
}

// Maximum length that a frame of samples can have.
static const size_t kMaxAllowedValuesOfSamplesPerFrame = 160;
// Maximum number of frames to buffer in the render queue.
// TODO(peah): Decrease this once we properly handle hugely unbalanced
// reverse and forward call numbers.
static const size_t kMaxNumFramesToBuffer = 100;
}  // namespace

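// Owns and manages the life cycle of a single AEC state instance. One
// canceller is used for each render/capture channel pair.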
class EchoCancellationImpl::Canceller {
 public:
  Canceller() {
    state_ = WebRtcAec_Create();
    RTC_DCHECK(state_);
  }

  ~Canceller() {
    RTC_CHECK(state_);
    WebRtcAec_Free(state_);
  }

  void* state() { return state_; }

  void Initialize(int sample_rate_hz) {
    // TODO(ajm): Drift compensation is disabled in practice. If restored, it
    // should be managed internally and not depend on the hardware sample rate.
    // For now, just hardcode a 48 kHz value.
    const int error = WebRtcAec_Init(state_, sample_rate_hz, 48000);
    RTC_DCHECK_EQ(0, error);
  }

 private:
  void* state_;
};

EchoCancellationImpl::EchoCancellationImpl(const AudioProcessing* apm,
                                           rtc::CriticalSection* crit_render,
                                           rtc::CriticalSection* crit_capture)
    : apm_(apm),
      crit_render_(crit_render),
      crit_capture_(crit_capture),
      drift_compensation_enabled_(false),
      metrics_enabled_(false),
      suppression_level_(kModerateSuppression),
      stream_drift_samples_(0),
      was_stream_drift_set_(false),
      stream_has_echo_(false),
      delay_logging_enabled_(false),
      extended_filter_enabled_(false),
      delay_agnostic_enabled_(false),
      aec3_enabled_(false),
      render_queue_element_max_size_(0) {
  RTC_DCHECK(apm);
  RTC_DCHECK(crit_render);
  RTC_DCHECK(crit_capture);
}

EchoCancellationImpl::~EchoCancellationImpl() {}

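// Buffers the split-band render (far-end) audio for each render/capture
// channel pair and pushes it onto the render queue, from which it is later
// consumed on the capture side by ReadQueuedRenderData().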
int EchoCancellationImpl::ProcessRenderAudio(const AudioBuffer* audio) {
  rtc::CritScope cs_render(crit_render_);
  if (!enabled_) {
    return AudioProcessing::kNoError;
  }

  RTC_DCHECK_GE(160u, audio->num_frames_per_band());
  RTC_DCHECK_EQ(audio->num_channels(), apm_->num_reverse_channels());
  RTC_DCHECK_GE(cancellers_.size(),
                apm_->num_output_channels() * audio->num_channels());

  int err = AudioProcessing::kNoError;

  // The ordering convention must be followed to pass to the correct AEC.
  size_t handle_index = 0;
  render_queue_buffer_.clear();
  for (size_t i = 0; i < apm_->num_output_channels(); i++) {
    for (size_t j = 0; j < audio->num_channels(); j++) {
      // Retrieve any error code produced by the buffering of the farend
      // signal.
      err = WebRtcAec_GetBufferFarendError(
          cancellers_[handle_index++]->state(),
          audio->split_bands_const_f(j)[kBand0To8kHz],
          audio->num_frames_per_band());

      if (err != AudioProcessing::kNoError) {
        return MapError(err);  // TODO(ajm): warning possible?
      }

      // Buffer the samples in the render queue.
      render_queue_buffer_.insert(render_queue_buffer_.end(),
                                  audio->split_bands_const_f(j)[kBand0To8kHz],
                                  (audio->split_bands_const_f(j)[kBand0To8kHz] +
                                   audio->num_frames_per_band()));
    }
  }

  // Insert the samples into the queue.
  if (!render_signal_queue_->Insert(&render_queue_buffer_)) {
    // The data queue is full and needs to be emptied.
    ReadQueuedRenderData();

    // Retry the insert (should always work). Keep the call outside of the
    // DCHECK so that it is not compiled away in release builds.
    const bool result = render_signal_queue_->Insert(&render_queue_buffer_);
    RTC_DCHECK(result);
  }

  return AudioProcessing::kNoError;
}

// Read chunks of data that were received and queued on the render side from
// a queue. All the data chunks are buffered into the farend signal of the AEC.
void EchoCancellationImpl::ReadQueuedRenderData() {
  rtc::CritScope cs_capture(crit_capture_);
  if (!enabled_) {
    return;
  }

  while (render_signal_queue_->Remove(&capture_queue_buffer_)) {
    size_t handle_index = 0;
    size_t buffer_index = 0;
    const size_t num_frames_per_band =
        capture_queue_buffer_.size() /
        (apm_->num_output_channels() * apm_->num_reverse_channels());
    for (size_t i = 0; i < apm_->num_output_channels(); i++) {
      for (size_t j = 0; j < apm_->num_reverse_channels(); j++) {
        WebRtcAec_BufferFarend(cancellers_[handle_index++]->state(),
                               &capture_queue_buffer_[buffer_index],
                               num_frames_per_band);

        buffer_index += num_frames_per_band;
      }
    }
  }
}

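// Runs the AEC on the capture (near-end) audio. Requires that the stream
// delay, and the stream drift if drift compensation is enabled, have been set
// for the current frame.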
int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) {
  rtc::CritScope cs_capture(crit_capture_);
  if (!enabled_) {
    return AudioProcessing::kNoError;
  }

  if (!apm_->was_stream_delay_set()) {
    return AudioProcessing::kStreamParameterNotSetError;
  }

  if (drift_compensation_enabled_ && !was_stream_drift_set_) {
    return AudioProcessing::kStreamParameterNotSetError;
  }

  RTC_DCHECK_GE(160u, audio->num_frames_per_band());
  RTC_DCHECK_EQ(audio->num_channels(), apm_->num_proc_channels());

  int err = AudioProcessing::kNoError;

  // The ordering convention must be followed to pass to the correct AEC.
  size_t handle_index = 0;
  stream_has_echo_ = false;
  for (size_t i = 0; i < audio->num_channels(); i++) {
    for (size_t j = 0; j < apm_->num_reverse_channels(); j++) {
      err = WebRtcAec_Process(cancellers_[handle_index]->state(),
                              audio->split_bands_const_f(i), audio->num_bands(),
                              audio->split_bands_f(i),
                              audio->num_frames_per_band(),
                              apm_->stream_delay_ms(), stream_drift_samples_);

      if (err != AudioProcessing::kNoError) {
        err = MapError(err);
        // TODO(ajm): Figure out how to return warnings properly.
        if (err != AudioProcessing::kBadStreamParameterWarning) {
          return err;
        }
      }

      int status = 0;
      err = WebRtcAec_get_echo_status(cancellers_[handle_index]->state(),
                                      &status);
      if (err != AudioProcessing::kNoError) {
        return MapError(err);
      }

      if (status == 1) {
        stream_has_echo_ = true;
      }

      handle_index++;
    }
  }

  was_stream_drift_set_ = false;
  return AudioProcessing::kNoError;
}

int EchoCancellationImpl::Enable(bool enable) {
  // Run in a single-threaded manner.
  rtc::CritScope cs_render(crit_render_);
  rtc::CritScope cs_capture(crit_capture_);
  // Ensure AEC and AECM are not both enabled.
  // The is_enabled call is safe from a deadlock perspective
  // as both locks are already held in the correct order.
  if (enable && apm_->echo_control_mobile()->is_enabled()) {
    return AudioProcessing::kBadParameterError;
  }

  if (enable && !enabled_) {
    enabled_ = enable;  // Must be set before Initialize() is called.
    Initialize();
  } else {
    enabled_ = enable;
  }
  return AudioProcessing::kNoError;
}

bool EchoCancellationImpl::is_enabled() const {
  rtc::CritScope cs(crit_capture_);
  return enabled_;
}

int EchoCancellationImpl::set_suppression_level(SuppressionLevel level) {
  {
    if (MapSetting(level) == -1) {
      return AudioProcessing::kBadParameterError;
    }
    rtc::CritScope cs(crit_capture_);
    suppression_level_ = level;
  }
  return Configure();
}

EchoCancellation::SuppressionLevel EchoCancellationImpl::suppression_level()
    const {
  rtc::CritScope cs(crit_capture_);
  return suppression_level_;
}

int EchoCancellationImpl::enable_drift_compensation(bool enable) {
  {
    rtc::CritScope cs(crit_capture_);
    drift_compensation_enabled_ = enable;
  }
  return Configure();
}

bool EchoCancellationImpl::is_drift_compensation_enabled() const {
  rtc::CritScope cs(crit_capture_);
  return drift_compensation_enabled_;
}

void EchoCancellationImpl::set_stream_drift_samples(int drift) {
  rtc::CritScope cs(crit_capture_);
  was_stream_drift_set_ = true;
  stream_drift_samples_ = drift;
}

int EchoCancellationImpl::stream_drift_samples() const {
  rtc::CritScope cs(crit_capture_);
  return stream_drift_samples_;
}

int EchoCancellationImpl::enable_metrics(bool enable) {
  {
    rtc::CritScope cs(crit_capture_);
    metrics_enabled_ = enable;
  }
  return Configure();
}

bool EchoCancellationImpl::are_metrics_enabled() const {
  rtc::CritScope cs(crit_capture_);
  return metrics_enabled_;
}

// TODO(ajm): we currently just use the metrics from the first AEC. Think more
// about the best way to extend this to multi-channel.
int EchoCancellationImpl::GetMetrics(Metrics* metrics) {
  rtc::CritScope cs(crit_capture_);
  if (metrics == NULL) {
    return AudioProcessing::kNullPointerError;
  }

  if (!enabled_ || !metrics_enabled_) {
    return AudioProcessing::kNotEnabledError;
  }

  AecMetrics my_metrics;
  memset(&my_metrics, 0, sizeof(my_metrics));
  memset(metrics, 0, sizeof(Metrics));

  const int err = WebRtcAec_GetMetrics(cancellers_[0]->state(), &my_metrics);
  if (err != AudioProcessing::kNoError) {
    return MapError(err);
  }

  metrics->residual_echo_return_loss.instant = my_metrics.rerl.instant;
  metrics->residual_echo_return_loss.average = my_metrics.rerl.average;
  metrics->residual_echo_return_loss.maximum = my_metrics.rerl.max;
  metrics->residual_echo_return_loss.minimum = my_metrics.rerl.min;

  metrics->echo_return_loss.instant = my_metrics.erl.instant;
  metrics->echo_return_loss.average = my_metrics.erl.average;
  metrics->echo_return_loss.maximum = my_metrics.erl.max;
  metrics->echo_return_loss.minimum = my_metrics.erl.min;

  metrics->echo_return_loss_enhancement.instant = my_metrics.erle.instant;
  metrics->echo_return_loss_enhancement.average = my_metrics.erle.average;
  metrics->echo_return_loss_enhancement.maximum = my_metrics.erle.max;
  metrics->echo_return_loss_enhancement.minimum = my_metrics.erle.min;

  metrics->a_nlp.instant = my_metrics.aNlp.instant;
  metrics->a_nlp.average = my_metrics.aNlp.average;
  metrics->a_nlp.maximum = my_metrics.aNlp.max;
  metrics->a_nlp.minimum = my_metrics.aNlp.min;

  return AudioProcessing::kNoError;
}

bool EchoCancellationImpl::stream_has_echo() const {
  rtc::CritScope cs(crit_capture_);
  return stream_has_echo_;
}

int EchoCancellationImpl::enable_delay_logging(bool enable) {
  {
    rtc::CritScope cs(crit_capture_);
    delay_logging_enabled_ = enable;
  }
  return Configure();
}

bool EchoCancellationImpl::is_delay_logging_enabled() const {
  rtc::CritScope cs(crit_capture_);
  return delay_logging_enabled_;
}

bool EchoCancellationImpl::is_delay_agnostic_enabled() const {
  rtc::CritScope cs(crit_capture_);
  return delay_agnostic_enabled_;
}

bool EchoCancellationImpl::is_aec3_enabled() const {
  rtc::CritScope cs(crit_capture_);
  return aec3_enabled_;
}

bool EchoCancellationImpl::is_extended_filter_enabled() const {
  rtc::CritScope cs(crit_capture_);
  return extended_filter_enabled_;
}

// TODO(bjornv): How should we handle the multi-channel case?
int EchoCancellationImpl::GetDelayMetrics(int* median, int* std) {
  rtc::CritScope cs(crit_capture_);
  float fraction_poor_delays = 0;
  return GetDelayMetrics(median, std, &fraction_poor_delays);
}

int EchoCancellationImpl::GetDelayMetrics(int* median, int* std,
                                          float* fraction_poor_delays) {
  rtc::CritScope cs(crit_capture_);
  if (median == NULL) {
    return AudioProcessing::kNullPointerError;
  }
  if (std == NULL) {
    return AudioProcessing::kNullPointerError;
  }

  if (!enabled_ || !delay_logging_enabled_) {
    return AudioProcessing::kNotEnabledError;
  }

  const int err = WebRtcAec_GetDelayMetrics(cancellers_[0]->state(), median,
                                            std, fraction_poor_delays);
  if (err != AudioProcessing::kNoError) {
    return MapError(err);
  }

  return AudioProcessing::kNoError;
}

struct AecCore* EchoCancellationImpl::aec_core() const {
  rtc::CritScope cs(crit_capture_);
  if (!enabled_) {
    return NULL;
  }
  return WebRtcAec_aec_core(cancellers_[0]->state());
}

void EchoCancellationImpl::Initialize() {
  rtc::CritScope cs_render(crit_render_);
  rtc::CritScope cs_capture(crit_capture_);
  if (!enabled_) {
    return;
  }

  if (num_handles_required() > cancellers_.size()) {
    const size_t cancellers_old_size = cancellers_.size();
    cancellers_.resize(num_handles_required());

    for (size_t i = cancellers_old_size; i < cancellers_.size(); ++i) {
      cancellers_[i].reset(new Canceller());
    }
  }

  const int sample_rate_hz = apm_->proc_sample_rate_hz();
  for (auto& canceller : cancellers_) {
    canceller->Initialize(sample_rate_hz);
  }

  Configure();

  AllocateRenderQueue();
}

int EchoCancellationImpl::GetSystemDelayInSamples() const {
  rtc::CritScope cs(crit_capture_);
  RTC_DCHECK(enabled_);
  // Report the delay for the first AEC component.
  return WebRtcAec_system_delay(WebRtcAec_aec_core(cancellers_[0]->state()));
}

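// (Re)allocates the swap queue used to pass render audio from the render side
// to the capture side, together with the local buffers used when writing to
// and reading from it. The queue is only grown, never shrunk.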
void EchoCancellationImpl::AllocateRenderQueue() {
  const size_t new_render_queue_element_max_size = std::max<size_t>(
      static_cast<size_t>(1),
      kMaxAllowedValuesOfSamplesPerFrame * num_handles_required());

  rtc::CritScope cs_render(crit_render_);
  rtc::CritScope cs_capture(crit_capture_);

  // Reallocate the queue if the queue item size is too small to fit the
  // data to put in the queue.
  if (render_queue_element_max_size_ < new_render_queue_element_max_size) {
    render_queue_element_max_size_ = new_render_queue_element_max_size;

    std::vector<float> template_queue_element(render_queue_element_max_size_);

    render_signal_queue_.reset(
        new SwapQueue<std::vector<float>, RenderQueueItemVerifier<float>>(
            kMaxNumFramesToBuffer, template_queue_element,
            RenderQueueItemVerifier<float>(render_queue_element_max_size_)));

    render_queue_buffer_.resize(render_queue_element_max_size_);
    capture_queue_buffer_.resize(render_queue_element_max_size_);
  } else {
    render_signal_queue_->Clear();
  }
}

void EchoCancellationImpl::SetExtraOptions(const Config& config) {
  {
    rtc::CritScope cs(crit_capture_);
    extended_filter_enabled_ = config.Get<ExtendedFilter>().enabled;
    delay_agnostic_enabled_ = config.Get<DelayAgnostic>().enabled;
    aec3_enabled_ = config.Get<EchoCanceller3>().enabled;
  }
  Configure();
}

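// Pushes the current settings (metrics, NLP mode, drift compensation, delay
// logging, extended filter, delay-agnostic mode and AEC3) to every AEC
// instance.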
int EchoCancellationImpl::Configure() {
  rtc::CritScope cs_render(crit_render_);
  rtc::CritScope cs_capture(crit_capture_);
  AecConfig config;
  config.metricsMode = metrics_enabled_;
  config.nlpMode = MapSetting(suppression_level_);
  config.skewMode = drift_compensation_enabled_;
  config.delay_logging = delay_logging_enabled_;

  int error = AudioProcessing::kNoError;
  for (auto& canceller : cancellers_) {
    WebRtcAec_enable_extended_filter(WebRtcAec_aec_core(canceller->state()),
                                     extended_filter_enabled_ ? 1 : 0);
    WebRtcAec_enable_delay_agnostic(WebRtcAec_aec_core(canceller->state()),
                                    delay_agnostic_enabled_ ? 1 : 0);
    WebRtcAec_enable_aec3(WebRtcAec_aec_core(canceller->state()),
                          aec3_enabled_ ? 1 : 0);
    const int handle_error = WebRtcAec_set_config(canceller->state(), config);
    if (handle_error != AudioProcessing::kNoError) {
      // Remember the failure so that it is reported to the caller.
      error = handle_error;
    }
  }
  return error;
}

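// One canceller is required for every combination of capture output channel
// and reverse (render) channel.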
size_t EchoCancellationImpl::num_handles_required() const {
  // Not locked as it only relies on APM public API which is threadsafe.
  return apm_->num_output_channels() * apm_->num_reverse_channels();
}

}  // namespace webrtc