/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_processing/audio_buffer.h"

#include "webrtc/common_audio/resampler/push_sinc_resampler.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_processing/channel_buffer.h"
#include "webrtc/modules/audio_processing/common.h"

namespace webrtc {
namespace {

bool HasKeyboardChannel(AudioProcessing::ChannelLayout layout) {
  switch (layout) {
    case AudioProcessing::kMono:
    case AudioProcessing::kStereo:
      return false;
    case AudioProcessing::kMonoAndKeyboard:
    case AudioProcessing::kStereoAndKeyboard:
      return true;
  }
  assert(false);
  return false;
}

int KeyboardChannelIndex(AudioProcessing::ChannelLayout layout) {
  switch (layout) {
    case AudioProcessing::kMono:
    case AudioProcessing::kStereo:
      assert(false);
      return -1;
    case AudioProcessing::kMonoAndKeyboard:
      return 1;
    case AudioProcessing::kStereoAndKeyboard:
      return 2;
  }
  assert(false);
  return -1;
}

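// Averaging downmix shared by the float and int16_t paths: each output
// sample is the mean of the corresponding left/right pair, e.g. left = 1000
// and right = 3000 mix to 2000.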
template <typename T>
void StereoToMono(const T* left, const T* right, T* out,
                  int samples_per_channel) {
  for (int i = 0; i < samples_per_channel; ++i)
    out[i] = (left[i] + right[i]) / 2;
}

}  // namespace

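// The buffer owns three views of one audio chunk: the input format, the
// processing format (possibly downmixed and resampled) and the output format.
// A hypothetical configuration, assuming the usual 10 ms chunk constants from
// common.h (kSamplesPer48kHzChannel == 480, kSamplesPer32kHzChannel == 320):
//
//   AudioBuffer ab(480,   // 48 kHz stereo capture in,
//                  2,
//                  320,   // processed as 32 kHz mono,
//                  1,
//                  480);  // written back out at 48 kHz.
//
// This would allocate the mono downmix buffer, one input and one output
// PushSincResampler per processed channel, and the two 16 kHz split bands.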
AudioBuffer::AudioBuffer(int input_samples_per_channel,
                         int num_input_channels,
                         int process_samples_per_channel,
                         int num_process_channels,
                         int output_samples_per_channel)
    : input_samples_per_channel_(input_samples_per_channel),
      num_input_channels_(num_input_channels),
      proc_samples_per_channel_(process_samples_per_channel),
      num_proc_channels_(num_process_channels),
      output_samples_per_channel_(output_samples_per_channel),
      num_bands_(1),
      samples_per_split_channel_(proc_samples_per_channel_),
      mixed_low_pass_valid_(false),
      reference_copied_(false),
      activity_(AudioFrame::kVadUnknown),
      keyboard_data_(NULL),
      channels_(new IFChannelBuffer(proc_samples_per_channel_,
                                    num_proc_channels_)) {
  assert(input_samples_per_channel_ > 0);
  assert(proc_samples_per_channel_ > 0);
  assert(output_samples_per_channel_ > 0);
  assert(num_input_channels_ > 0 && num_input_channels_ <= 2);
  assert(num_proc_channels_ <= num_input_channels);

  if (num_input_channels_ == 2 && num_proc_channels_ == 1) {
    input_buffer_.reset(new ChannelBuffer<float>(input_samples_per_channel_,
                                                 num_proc_channels_));
  }

  if (input_samples_per_channel_ != proc_samples_per_channel_ ||
      output_samples_per_channel_ != proc_samples_per_channel_) {
    // Create an intermediate buffer for resampling.
    process_buffer_.reset(new ChannelBuffer<float>(proc_samples_per_channel_,
                                                   num_proc_channels_));
  }

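  // When the input or output rate differs from the processing rate, each
  // processed channel gets its own PushSincResampler for that conversion.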
  if (input_samples_per_channel_ != proc_samples_per_channel_) {
    input_resamplers_.reserve(num_proc_channels_);
    for (int i = 0; i < num_proc_channels_; ++i) {
      input_resamplers_.push_back(
          new PushSincResampler(input_samples_per_channel_,
                                proc_samples_per_channel_));
    }
  }

  if (output_samples_per_channel_ != proc_samples_per_channel_) {
    output_resamplers_.reserve(num_proc_channels_);
    for (int i = 0; i < num_proc_channels_; ++i) {
      output_resamplers_.push_back(
          new PushSincResampler(proc_samples_per_channel_,
                                output_samples_per_channel_));
    }
  }

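  // 32 kHz and 48 kHz processing is done in 16 kHz bands: allocate one
  // IFChannelBuffer per band (two or three) plus the splitting filter, and
  // the scratch pointer arrays used by the per-channel band accessors.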
  if (proc_samples_per_channel_ == kSamplesPer32kHzChannel ||
      proc_samples_per_channel_ == kSamplesPer48kHzChannel) {
    samples_per_split_channel_ = kSamplesPer16kHzChannel;
    num_bands_ = proc_samples_per_channel_ / samples_per_split_channel_;
    split_channels_.push_back(new IFChannelBuffer(samples_per_split_channel_,
                                                  num_proc_channels_));
    split_channels_.push_back(new IFChannelBuffer(samples_per_split_channel_,
                                                  num_proc_channels_));
    splitting_filter_.reset(new SplittingFilter(num_proc_channels_));
    if (proc_samples_per_channel_ == kSamplesPer48kHzChannel) {
      split_channels_.push_back(new IFChannelBuffer(samples_per_split_channel_,
                                                    num_proc_channels_));
    }
  }
  bands_.reset(new int16_t*[num_proc_channels_ * kMaxNumBands]);
  bands_f_.reset(new float*[num_proc_channels_ * kMaxNumBands]);
}

AudioBuffer::~AudioBuffer() {}

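// Imports a deinterleaved float capture frame: grab the keyboard channel if
// present, downmix stereo to mono if needed, resample to the processing rate
// and scale into the int16_t numeric range (still stored as float) expected
// by the processing modules.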
void AudioBuffer::CopyFrom(const float* const* data,
                           int samples_per_channel,
                           AudioProcessing::ChannelLayout layout) {
  assert(samples_per_channel == input_samples_per_channel_);
  assert(ChannelsFromLayout(layout) == num_input_channels_);
  InitForNewData();

  if (HasKeyboardChannel(layout)) {
    keyboard_data_ = data[KeyboardChannelIndex(layout)];
  }

  // Downmix.
  const float* const* data_ptr = data;
  if (num_input_channels_ == 2 && num_proc_channels_ == 1) {
    StereoToMono(data[0],
                 data[1],
                 input_buffer_->channel(0),
                 input_samples_per_channel_);
    data_ptr = input_buffer_->channels();
  }

  // Resample.
  if (input_samples_per_channel_ != proc_samples_per_channel_) {
    for (int i = 0; i < num_proc_channels_; ++i) {
      input_resamplers_[i]->Resample(data_ptr[i],
                                     input_samples_per_channel_,
                                     process_buffer_->channel(i),
                                     proc_samples_per_channel_);
    }
    data_ptr = process_buffer_->channels();
  }

  // Convert to the S16 range.
  for (int i = 0; i < num_proc_channels_; ++i) {
    FloatToFloatS16(data_ptr[i], proc_samples_per_channel_,
                    channels_->fbuf()->channel(i));
  }
}

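// Exports the processed data back to a deinterleaved float buffer: scale from
// the int16_t numeric range back to float and, if required, resample to the
// output rate via the intermediate process buffer.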
void AudioBuffer::CopyTo(int samples_per_channel,
                         AudioProcessing::ChannelLayout layout,
                         float* const* data) {
  assert(samples_per_channel == output_samples_per_channel_);
  assert(ChannelsFromLayout(layout) == num_proc_channels_);

  // Convert to the float range.
  float* const* data_ptr = data;
  if (output_samples_per_channel_ != proc_samples_per_channel_) {
    // Convert to an intermediate buffer for subsequent resampling.
    data_ptr = process_buffer_->channels();
  }
  for (int i = 0; i < num_proc_channels_; ++i) {
    FloatS16ToFloat(channels_->fbuf()->channel(i), proc_samples_per_channel_,
                    data_ptr[i]);
  }

  // Resample.
  if (output_samples_per_channel_ != proc_samples_per_channel_) {
    for (int i = 0; i < num_proc_channels_; ++i) {
      output_resamplers_[i]->Resample(data_ptr[i],
                                      proc_samples_per_channel_,
                                      data[i],
                                      output_samples_per_channel_);
    }
  }
}

void AudioBuffer::InitForNewData() {
  keyboard_data_ = NULL;
  mixed_low_pass_valid_ = false;
  reference_copied_ = false;
  activity_ = AudioFrame::kVadUnknown;
}

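// The accessors below come in int16_t and float flavors backed by the same
// IFChannelBuffer; the non-const variants invalidate the cached mixed
// low-pass data because the caller may rewrite the samples.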
const int16_t* AudioBuffer::data_const(int channel) const {
  return channels_const()[channel];
}

int16_t* AudioBuffer::data(int channel) {
  return channels()[channel];
}

const int16_t* const* AudioBuffer::channels_const() const {
  return channels_->ibuf_const()->channels();
}

int16_t* const* AudioBuffer::channels() {
  mixed_low_pass_valid_ = false;
  return channels_->ibuf()->channels();
}

const int16_t* const* AudioBuffer::split_bands_const(int channel) const {
  // This is necessary to make sure that the int16_t data is up to date in the
  // IFChannelBuffer.
  // TODO(aluebs): Having to depend on this to get the updated data is bug
  // prone. One solution is to have ChannelBuffer track the bands as well.
  for (int i = 0; i < kMaxNumBands; ++i) {
    int16_t* const* channels =
        const_cast<int16_t* const*>(split_channels_const(static_cast<Band>(i)));
    bands_[kMaxNumBands * channel + i] = channels ? channels[channel] : NULL;
  }
  return &bands_[kMaxNumBands * channel];
}

int16_t* const* AudioBuffer::split_bands(int channel) {
  mixed_low_pass_valid_ = false;
  // This is necessary to make sure that the int16_t data is up to date and the
  // float data is marked as invalid in the IFChannelBuffer.
  for (int i = 0; i < kMaxNumBands; ++i) {
    int16_t* const* channels = split_channels(static_cast<Band>(i));
    bands_[kMaxNumBands * channel + i] = channels ? channels[channel] : NULL;
  }
  return &bands_[kMaxNumBands * channel];
}

const int16_t* const* AudioBuffer::split_channels_const(Band band) const {
  if (split_channels_.size() > static_cast<size_t>(band)) {
    return split_channels_[band]->ibuf_const()->channels();
  } else {
    return band == kBand0To8kHz ? channels_->ibuf_const()->channels() : NULL;
  }
}

int16_t* const* AudioBuffer::split_channels(Band band) {
  mixed_low_pass_valid_ = false;
  if (split_channels_.size() > static_cast<size_t>(band)) {
    return split_channels_[band]->ibuf()->channels();
  } else {
    return band == kBand0To8kHz ? channels_->ibuf()->channels() : NULL;
  }
}

const float* AudioBuffer::data_const_f(int channel) const {
  return channels_const_f()[channel];
}

float* AudioBuffer::data_f(int channel) {
  return channels_f()[channel];
}

const float* const* AudioBuffer::channels_const_f() const {
  return channels_->fbuf_const()->channels();
}

float* const* AudioBuffer::channels_f() {
  mixed_low_pass_valid_ = false;
  return channels_->fbuf()->channels();
}

const float* const* AudioBuffer::split_bands_const_f(int channel) const {
  // This is necessary to make sure that the float data is up to date in the
  // IFChannelBuffer.
  for (int i = 0; i < kMaxNumBands; ++i) {
    float* const* channels =
        const_cast<float* const*>(split_channels_const_f(static_cast<Band>(i)));
    bands_f_[kMaxNumBands * channel + i] = channels ? channels[channel] : NULL;
  }
  return &bands_f_[kMaxNumBands * channel];
}

float* const* AudioBuffer::split_bands_f(int channel) {
  mixed_low_pass_valid_ = false;
  // This is necessary to make sure that the float data is up to date and the
  // int16_t data is marked as invalid in the IFChannelBuffer.
  for (int i = 0; i < kMaxNumBands; ++i) {
    float* const* channels = split_channels_f(static_cast<Band>(i));
    bands_f_[kMaxNumBands * channel + i] = channels ? channels[channel] : NULL;
  }
  return &bands_f_[kMaxNumBands * channel];
}

const float* const* AudioBuffer::split_channels_const_f(Band band) const {
  if (split_channels_.size() > static_cast<size_t>(band)) {
    return split_channels_[band]->fbuf_const()->channels();
  } else {
    return band == kBand0To8kHz ? channels_->fbuf_const()->channels() : NULL;
  }
}

float* const* AudioBuffer::split_channels_f(Band band) {
  mixed_low_pass_valid_ = false;
  if (split_channels_.size() > static_cast<size_t>(band)) {
    return split_channels_[band]->fbuf()->channels();
  } else {
    return band == kBand0To8kHz ? channels_->fbuf()->channels() : NULL;
  }
}

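// Returns the low band (or the full band when no splitting is active) mixed
// down to mono, computing and caching it lazily when processing is stereo.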
const int16_t* AudioBuffer::mixed_low_pass_data() {
  // Currently only mixing stereo to mono is supported.
  assert(num_proc_channels_ == 1 || num_proc_channels_ == 2);

  if (num_proc_channels_ == 1) {
    return split_bands_const(0)[kBand0To8kHz];
  }

  if (!mixed_low_pass_valid_) {
    if (!mixed_low_pass_channels_.get()) {
      mixed_low_pass_channels_.reset(
          new ChannelBuffer<int16_t>(samples_per_split_channel_, 1));
    }
    StereoToMono(split_bands_const(0)[kBand0To8kHz],
                 split_bands_const(1)[kBand0To8kHz],
                 mixed_low_pass_channels_->data(),
                 samples_per_split_channel_);
    mixed_low_pass_valid_ = true;
  }
  return mixed_low_pass_channels_->data();
}

const int16_t* AudioBuffer::low_pass_reference(int channel) const {
  if (!reference_copied_) {
    return NULL;
  }

  return low_pass_reference_channels_->channel(channel);
}

const float* AudioBuffer::keyboard_data() const {
  return keyboard_data_;
}

void AudioBuffer::set_activity(AudioFrame::VADActivity activity) {
  activity_ = activity;
}

AudioFrame::VADActivity AudioBuffer::activity() const {
  return activity_;
}

int AudioBuffer::num_channels() const {
  return num_proc_channels_;
}

int AudioBuffer::samples_per_channel() const {
  return proc_samples_per_channel_;
}

int AudioBuffer::samples_per_split_channel() const {
  return samples_per_split_channel_;
}

int AudioBuffer::samples_per_keyboard_channel() const {
  // We don't resample the keyboard channel.
  return input_samples_per_channel_;
}

int AudioBuffer::num_bands() const {
  return num_bands_;
}

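// Imports an interleaved int16_t AudioFrame. For the stereo-in/mono-out case
// the downmix is folded into the copy (average of each L/R pair); otherwise
// samples are deinterleaved channel by channel, e.g. L0 R0 L1 R1 ... becomes
// {L0, L1, ...} and {R0, R1, ...}.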
// TODO(andrew): Do deinterleaving and mixing in one step?
void AudioBuffer::DeinterleaveFrom(AudioFrame* frame) {
  assert(proc_samples_per_channel_ == input_samples_per_channel_);
  assert(frame->num_channels_ == num_input_channels_);
  assert(frame->samples_per_channel_ == proc_samples_per_channel_);
  InitForNewData();
  activity_ = frame->vad_activity_;

  if (num_input_channels_ == 2 && num_proc_channels_ == 1) {
    // Downmix directly; no explicit deinterleaving needed.
    int16_t* downmixed = channels_->ibuf()->channel(0);
    for (int i = 0; i < input_samples_per_channel_; ++i) {
      downmixed[i] = (frame->data_[i * 2] + frame->data_[i * 2 + 1]) / 2;
    }
  } else {
    assert(num_proc_channels_ == num_input_channels_);
    int16_t* interleaved = frame->data_;
    for (int i = 0; i < num_proc_channels_; ++i) {
      int16_t* deinterleaved = channels_->ibuf()->channel(i);
      int interleaved_idx = i;
      for (int j = 0; j < proc_samples_per_channel_; ++j) {
        deinterleaved[j] = interleaved[interleaved_idx];
        interleaved_idx += num_proc_channels_;
      }
    }
  }
}

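// Writes the processed int16_t data back into the frame in interleaved order;
// skipped entirely when no module modified the data.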
void AudioBuffer::InterleaveTo(AudioFrame* frame, bool data_changed) const {
  assert(proc_samples_per_channel_ == output_samples_per_channel_);
  assert(num_proc_channels_ == num_input_channels_);
  assert(frame->num_channels_ == num_proc_channels_);
  assert(frame->samples_per_channel_ == proc_samples_per_channel_);
  frame->vad_activity_ = activity_;

  if (!data_changed) {
    return;
  }

  int16_t* interleaved = frame->data_;
  for (int i = 0; i < num_proc_channels_; i++) {
    int16_t* deinterleaved = channels_->ibuf()->channel(i);
    int interleaved_idx = i;
    for (int j = 0; j < proc_samples_per_channel_; j++) {
      interleaved[interleaved_idx] = deinterleaved[j];
      interleaved_idx += num_proc_channels_;
    }
  }
}

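// Snapshots the low band of every processed channel into a lazily allocated
// reference buffer, retrievable later through low_pass_reference().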
void AudioBuffer::CopyLowPassToReference() {
  reference_copied_ = true;
  if (!low_pass_reference_channels_.get()) {
    low_pass_reference_channels_.reset(
        new ChannelBuffer<int16_t>(samples_per_split_channel_,
                                   num_proc_channels_));
  }
  for (int i = 0; i < num_proc_channels_; i++) {
    low_pass_reference_channels_->CopyFrom(split_bands_const(i)[kBand0To8kHz],
                                           i);
  }
}

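// Band splitting and merging run the analysis and synthesis sides of the
// splitting filter between the fullband channels_ buffer and the per-band
// split_channels_ buffers.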
void AudioBuffer::SplitIntoFrequencyBands() {
  splitting_filter_->Analysis(channels_.get(),
                              split_channels_.get());
}

void AudioBuffer::MergeFrequencyBands() {
  splitting_filter_->Synthesis(split_channels_.get(),
                               channels_.get());
}

}  // namespace webrtc