/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "audio/utility/audio_frame_operations.h"
#include "modules/include/module_common_types.h"
#include "rtc_base/checks.h"
#include "test/gtest.h"

namespace webrtc {
namespace {

class AudioFrameOperationsTest : public ::testing::Test {
 protected:
  AudioFrameOperationsTest() {
    // Set typical values.
    frame_.samples_per_channel_ = 320;
    frame_.num_channels_ = 2;
  }

  AudioFrame frame_;
};

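// Fills |frame| with interleaved four-channel data: |ch1|..|ch4| are written
// to channels 0..3 of every sample.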
void SetFrameData(int16_t ch1,
                  int16_t ch2,
                  int16_t ch3,
                  int16_t ch4,
                  AudioFrame* frame) {
  int16_t* frame_data = frame->mutable_data();
  for (size_t i = 0; i < frame->samples_per_channel_ * 4; i += 4) {
    frame_data[i] = ch1;
    frame_data[i + 1] = ch2;
    frame_data[i + 2] = ch3;
    frame_data[i + 3] = ch4;
  }
}

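// Fills |frame| with interleaved stereo data: |left| on channel 0 and |right|
// on channel 1.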
void SetFrameData(int16_t left, int16_t right, AudioFrame* frame) {
  int16_t* frame_data = frame->mutable_data();
  for (size_t i = 0; i < frame->samples_per_channel_ * 2; i += 2) {
    frame_data[i] = left;
    frame_data[i + 1] = right;
  }
}

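// Fills every sample of |frame|, across all channels, with |data|.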
void SetFrameData(int16_t data, AudioFrame* frame) {
  int16_t* frame_data = frame->mutable_data();
  for (size_t i = 0;
       i < frame->samples_per_channel_ * frame->num_channels_; i++) {
    frame_data[i] = data;
  }
}

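// Checks that the two frames have the same channel count, length, sample data
// and muted state.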
void VerifyFramesAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) {
  EXPECT_EQ(frame1.num_channels_, frame2.num_channels_);
  EXPECT_EQ(frame1.samples_per_channel_,
            frame2.samples_per_channel_);
  const int16_t* frame1_data = frame1.data();
  const int16_t* frame2_data = frame2.data();
  for (size_t i = 0; i < frame1.samples_per_channel_ * frame1.num_channels_;
       i++) {
    EXPECT_EQ(frame1_data[i], frame2_data[i]);
  }
  EXPECT_EQ(frame1.muted(), frame2.muted());
}

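// Initializes |frame| as mono or stereo with |samples_per_channel| samples,
// filling it with |left_data| (and |right_data| on the right channel when
// stereo).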
void InitFrame(AudioFrame* frame, size_t channels, size_t samples_per_channel,
               int16_t left_data, int16_t right_data) {
  RTC_DCHECK(frame);
  RTC_DCHECK_GE(2, channels);
  RTC_DCHECK_GE(AudioFrame::kMaxDataSizeSamples,
                samples_per_channel * channels);
  frame->samples_per_channel_ = samples_per_channel;
  frame->num_channels_ = channels;
  if (channels == 2) {
    SetFrameData(left_data, right_data, frame);
  } else if (channels == 1) {
    SetFrameData(left_data, frame);
  }
}

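// Returns the sample at position |index| of |channel| from the interleaved
// frame data.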
int16_t GetChannelData(const AudioFrame& frame, size_t channel, size_t index) {
  RTC_DCHECK_LT(channel, frame.num_channels_);
  RTC_DCHECK_LT(index, frame.samples_per_channel_);
  return frame.data()[index * frame.num_channels_ + channel];
}

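// Checks that every sample of |channel| lies within [min, max].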
void VerifyFrameDataBounds(const AudioFrame& frame, size_t channel, int16_t max,
                           int16_t min) {
  for (size_t i = 0; i < frame.samples_per_channel_; ++i) {
    int16_t s = GetChannelData(frame, channel, i);
    EXPECT_LE(min, s);
    EXPECT_GE(max, s);
  }
}

TEST_F(AudioFrameOperationsTest, MonoToStereoFailsWithBadParameters) {
  EXPECT_EQ(-1, AudioFrameOperations::MonoToStereo(&frame_));

  frame_.samples_per_channel_ = AudioFrame::kMaxDataSizeSamples;
  frame_.num_channels_ = 1;
  EXPECT_EQ(-1, AudioFrameOperations::MonoToStereo(&frame_));
}

TEST_F(AudioFrameOperationsTest, MonoToStereoSucceeds) {
  frame_.num_channels_ = 1;
  SetFrameData(1, &frame_);

  EXPECT_EQ(0, AudioFrameOperations::MonoToStereo(&frame_));

  AudioFrame stereo_frame;
  stereo_frame.samples_per_channel_ = 320;
  stereo_frame.num_channels_ = 2;
  SetFrameData(1, 1, &stereo_frame);
  VerifyFramesAreEqual(stereo_frame, frame_);
}

TEST_F(AudioFrameOperationsTest, MonoToStereoMuted) {
  frame_.num_channels_ = 1;
  ASSERT_TRUE(frame_.muted());
  EXPECT_EQ(0, AudioFrameOperations::MonoToStereo(&frame_));
  EXPECT_TRUE(frame_.muted());
}

TEST_F(AudioFrameOperationsTest, MonoToStereoBufferSucceeds) {
  AudioFrame target_frame;
  frame_.num_channels_ = 1;
  SetFrameData(4, &frame_);

  target_frame.num_channels_ = 2;
  target_frame.samples_per_channel_ = frame_.samples_per_channel_;

  AudioFrameOperations::MonoToStereo(frame_.data(), frame_.samples_per_channel_,
                                     target_frame.mutable_data());

  AudioFrame stereo_frame;
  stereo_frame.samples_per_channel_ = 320;
  stereo_frame.num_channels_ = 2;
  SetFrameData(4, 4, &stereo_frame);
  VerifyFramesAreEqual(stereo_frame, target_frame);
}

TEST_F(AudioFrameOperationsTest, StereoToMonoFailsWithBadParameters) {
  frame_.num_channels_ = 1;
  EXPECT_EQ(-1, AudioFrameOperations::StereoToMono(&frame_));
}

TEST_F(AudioFrameOperationsTest, StereoToMonoSucceeds) {
  SetFrameData(4, 2, &frame_);
  EXPECT_EQ(0, AudioFrameOperations::StereoToMono(&frame_));

  AudioFrame mono_frame;
  mono_frame.samples_per_channel_ = 320;
  mono_frame.num_channels_ = 1;
  SetFrameData(3, &mono_frame);
  VerifyFramesAreEqual(mono_frame, frame_);
}

TEST_F(AudioFrameOperationsTest, StereoToMonoMuted) {
  ASSERT_TRUE(frame_.muted());
  EXPECT_EQ(0, AudioFrameOperations::StereoToMono(&frame_));
  EXPECT_TRUE(frame_.muted());
}

TEST_F(AudioFrameOperationsTest, StereoToMonoBufferSucceeds) {
  AudioFrame target_frame;
  SetFrameData(4, 2, &frame_);

  target_frame.num_channels_ = 1;
  target_frame.samples_per_channel_ = frame_.samples_per_channel_;

  AudioFrameOperations::StereoToMono(frame_.data(), frame_.samples_per_channel_,
                                     target_frame.mutable_data());

  AudioFrame mono_frame;
  mono_frame.samples_per_channel_ = 320;
  mono_frame.num_channels_ = 1;
  SetFrameData(3, &mono_frame);
  VerifyFramesAreEqual(mono_frame, target_frame);
}

TEST_F(AudioFrameOperationsTest, StereoToMonoDoesNotWrapAround) {
  SetFrameData(-32768, -32768, &frame_);
  EXPECT_EQ(0, AudioFrameOperations::StereoToMono(&frame_));

  AudioFrame mono_frame;
  mono_frame.samples_per_channel_ = 320;
  mono_frame.num_channels_ = 1;
  SetFrameData(-32768, &mono_frame);
  VerifyFramesAreEqual(mono_frame, frame_);
}

TEST_F(AudioFrameOperationsTest, QuadToMonoFailsWithBadParameters) {
  frame_.num_channels_ = 1;
  EXPECT_EQ(-1, AudioFrameOperations::QuadToMono(&frame_));
  frame_.num_channels_ = 2;
  EXPECT_EQ(-1, AudioFrameOperations::QuadToMono(&frame_));
}

TEST_F(AudioFrameOperationsTest, QuadToMonoSucceeds) {
  frame_.num_channels_ = 4;
  SetFrameData(4, 2, 6, 8, &frame_);

  EXPECT_EQ(0, AudioFrameOperations::QuadToMono(&frame_));

  AudioFrame mono_frame;
  mono_frame.samples_per_channel_ = 320;
  mono_frame.num_channels_ = 1;
  SetFrameData(5, &mono_frame);
  VerifyFramesAreEqual(mono_frame, frame_);
}

TEST_F(AudioFrameOperationsTest, QuadToMonoMuted) {
  frame_.num_channels_ = 4;
  ASSERT_TRUE(frame_.muted());
  EXPECT_EQ(0, AudioFrameOperations::QuadToMono(&frame_));
  EXPECT_TRUE(frame_.muted());
}

TEST_F(AudioFrameOperationsTest, QuadToMonoBufferSucceeds) {
  AudioFrame target_frame;
  frame_.num_channels_ = 4;
  SetFrameData(4, 2, 6, 8, &frame_);

  target_frame.num_channels_ = 1;
  target_frame.samples_per_channel_ = frame_.samples_per_channel_;

  AudioFrameOperations::QuadToMono(frame_.data(), frame_.samples_per_channel_,
                                   target_frame.mutable_data());
  AudioFrame mono_frame;
  mono_frame.samples_per_channel_ = 320;
  mono_frame.num_channels_ = 1;
  SetFrameData(5, &mono_frame);
  VerifyFramesAreEqual(mono_frame, target_frame);
}

TEST_F(AudioFrameOperationsTest, QuadToMonoDoesNotWrapAround) {
  frame_.num_channels_ = 4;
  SetFrameData(-32768, -32768, -32768, -32768, &frame_);
  EXPECT_EQ(0, AudioFrameOperations::QuadToMono(&frame_));

  AudioFrame mono_frame;
  mono_frame.samples_per_channel_ = 320;
  mono_frame.num_channels_ = 1;
  SetFrameData(-32768, &mono_frame);
  VerifyFramesAreEqual(mono_frame, frame_);
}

TEST_F(AudioFrameOperationsTest, QuadToStereoFailsWithBadParameters) {
  frame_.num_channels_ = 1;
  EXPECT_EQ(-1, AudioFrameOperations::QuadToStereo(&frame_));
  frame_.num_channels_ = 2;
  EXPECT_EQ(-1, AudioFrameOperations::QuadToStereo(&frame_));
}

TEST_F(AudioFrameOperationsTest, QuadToStereoSucceeds) {
  frame_.num_channels_ = 4;
  SetFrameData(4, 2, 6, 8, &frame_);
  EXPECT_EQ(0, AudioFrameOperations::QuadToStereo(&frame_));

  AudioFrame stereo_frame;
  stereo_frame.samples_per_channel_ = 320;
  stereo_frame.num_channels_ = 2;
  SetFrameData(3, 7, &stereo_frame);
  VerifyFramesAreEqual(stereo_frame, frame_);
}

TEST_F(AudioFrameOperationsTest, QuadToStereoMuted) {
  frame_.num_channels_ = 4;
  ASSERT_TRUE(frame_.muted());
  EXPECT_EQ(0, AudioFrameOperations::QuadToStereo(&frame_));
  EXPECT_TRUE(frame_.muted());
}

TEST_F(AudioFrameOperationsTest, QuadToStereoBufferSucceeds) {
  AudioFrame target_frame;
  frame_.num_channels_ = 4;
  SetFrameData(4, 2, 6, 8, &frame_);

  target_frame.num_channels_ = 2;
  target_frame.samples_per_channel_ = frame_.samples_per_channel_;

  AudioFrameOperations::QuadToStereo(frame_.data(), frame_.samples_per_channel_,
                                     target_frame.mutable_data());
  AudioFrame stereo_frame;
  stereo_frame.samples_per_channel_ = 320;
  stereo_frame.num_channels_ = 2;
  SetFrameData(3, 7, &stereo_frame);
  VerifyFramesAreEqual(stereo_frame, target_frame);
}

TEST_F(AudioFrameOperationsTest, QuadToStereoDoesNotWrapAround) {
  frame_.num_channels_ = 4;
  SetFrameData(-32768, -32768, -32768, -32768, &frame_);
  EXPECT_EQ(0, AudioFrameOperations::QuadToStereo(&frame_));

  AudioFrame stereo_frame;
  stereo_frame.samples_per_channel_ = 320;
  stereo_frame.num_channels_ = 2;
  SetFrameData(-32768, -32768, &stereo_frame);
  VerifyFramesAreEqual(stereo_frame, frame_);
}

TEST_F(AudioFrameOperationsTest, SwapStereoChannelsSucceedsOnStereo) {
  SetFrameData(0, 1, &frame_);

  AudioFrame swapped_frame;
  swapped_frame.samples_per_channel_ = 320;
  swapped_frame.num_channels_ = 2;
  SetFrameData(1, 0, &swapped_frame);

  AudioFrameOperations::SwapStereoChannels(&frame_);
  VerifyFramesAreEqual(swapped_frame, frame_);
}

TEST_F(AudioFrameOperationsTest, SwapStereoChannelsMuted) {
  ASSERT_TRUE(frame_.muted());
  AudioFrameOperations::SwapStereoChannels(&frame_);
  EXPECT_TRUE(frame_.muted());
}

TEST_F(AudioFrameOperationsTest, SwapStereoChannelsFailsOnMono) {
  frame_.num_channels_ = 1;
  // Set data to "stereo", despite it being a mono frame.
  SetFrameData(0, 1, &frame_);

  AudioFrame orig_frame;
  orig_frame.CopyFrom(frame_);
  AudioFrameOperations::SwapStereoChannels(&frame_);
  // Verify that no swap occurred.
  VerifyFramesAreEqual(orig_frame, frame_);
}

TEST_F(AudioFrameOperationsTest, MuteDisabled) {
  SetFrameData(1000, -1000, &frame_);
  AudioFrameOperations::Mute(&frame_, false, false);

  AudioFrame muted_frame;
  muted_frame.samples_per_channel_ = 320;
  muted_frame.num_channels_ = 2;
  SetFrameData(1000, -1000, &muted_frame);
  VerifyFramesAreEqual(muted_frame, frame_);
}

TEST_F(AudioFrameOperationsTest, MuteEnabled) {
  SetFrameData(1000, -1000, &frame_);
  AudioFrameOperations::Mute(&frame_, true, true);

  AudioFrame muted_frame;
  muted_frame.samples_per_channel_ = frame_.samples_per_channel_;
  muted_frame.num_channels_ = frame_.num_channels_;
  ASSERT_TRUE(muted_frame.muted());
  VerifyFramesAreEqual(muted_frame, frame_);
}

// Verify that *beginning* to mute works for short and long (>128) frames, mono
// and stereo. Beginning mute should yield a ramp down to zero.
TEST_F(AudioFrameOperationsTest, MuteBeginMonoLong) {
  InitFrame(&frame_, 1, 228, 1000, -1000);
  AudioFrameOperations::Mute(&frame_, false, true);
  VerifyFrameDataBounds(frame_, 0, 1000, 0);
  EXPECT_EQ(1000, GetChannelData(frame_, 0, 99));
  EXPECT_EQ(992, GetChannelData(frame_, 0, 100));
  EXPECT_EQ(7, GetChannelData(frame_, 0, 226));
  EXPECT_EQ(0, GetChannelData(frame_, 0, 227));
}

TEST_F(AudioFrameOperationsTest, MuteBeginMonoShort) {
  InitFrame(&frame_, 1, 93, 1000, -1000);
  AudioFrameOperations::Mute(&frame_, false, true);
  VerifyFrameDataBounds(frame_, 0, 1000, 0);
  EXPECT_EQ(989, GetChannelData(frame_, 0, 0));
  EXPECT_EQ(978, GetChannelData(frame_, 0, 1));
  EXPECT_EQ(10, GetChannelData(frame_, 0, 91));
  EXPECT_EQ(0, GetChannelData(frame_, 0, 92));
}

TEST_F(AudioFrameOperationsTest, MuteBeginStereoLong) {
  InitFrame(&frame_, 2, 228, 1000, -1000);
  AudioFrameOperations::Mute(&frame_, false, true);
  VerifyFrameDataBounds(frame_, 0, 1000, 0);
  VerifyFrameDataBounds(frame_, 1, 0, -1000);
  EXPECT_EQ(1000, GetChannelData(frame_, 0, 99));
  EXPECT_EQ(-1000, GetChannelData(frame_, 1, 99));
  EXPECT_EQ(992, GetChannelData(frame_, 0, 100));
  EXPECT_EQ(-992, GetChannelData(frame_, 1, 100));
  EXPECT_EQ(7, GetChannelData(frame_, 0, 226));
  EXPECT_EQ(-7, GetChannelData(frame_, 1, 226));
  EXPECT_EQ(0, GetChannelData(frame_, 0, 227));
  EXPECT_EQ(0, GetChannelData(frame_, 1, 227));
}

TEST_F(AudioFrameOperationsTest, MuteBeginStereoShort) {
  InitFrame(&frame_, 2, 93, 1000, -1000);
  AudioFrameOperations::Mute(&frame_, false, true);
  VerifyFrameDataBounds(frame_, 0, 1000, 0);
  VerifyFrameDataBounds(frame_, 1, 0, -1000);
  EXPECT_EQ(989, GetChannelData(frame_, 0, 0));
  EXPECT_EQ(-989, GetChannelData(frame_, 1, 0));
  EXPECT_EQ(978, GetChannelData(frame_, 0, 1));
  EXPECT_EQ(-978, GetChannelData(frame_, 1, 1));
  EXPECT_EQ(10, GetChannelData(frame_, 0, 91));
  EXPECT_EQ(-10, GetChannelData(frame_, 1, 91));
  EXPECT_EQ(0, GetChannelData(frame_, 0, 92));
  EXPECT_EQ(0, GetChannelData(frame_, 1, 92));
}

// Verify that *ending* to mute works for short and long (>128) frames, mono
// and stereo. Ending mute should yield a ramp up from zero.
TEST_F(AudioFrameOperationsTest, MuteEndMonoLong) {
  InitFrame(&frame_, 1, 228, 1000, -1000);
  AudioFrameOperations::Mute(&frame_, true, false);
  VerifyFrameDataBounds(frame_, 0, 1000, 0);
  EXPECT_EQ(7, GetChannelData(frame_, 0, 0));
  EXPECT_EQ(15, GetChannelData(frame_, 0, 1));
  EXPECT_EQ(1000, GetChannelData(frame_, 0, 127));
  EXPECT_EQ(1000, GetChannelData(frame_, 0, 128));
}

TEST_F(AudioFrameOperationsTest, MuteEndMonoShort) {
  InitFrame(&frame_, 1, 93, 1000, -1000);
  AudioFrameOperations::Mute(&frame_, true, false);
  VerifyFrameDataBounds(frame_, 0, 1000, 0);
  EXPECT_EQ(10, GetChannelData(frame_, 0, 0));
  EXPECT_EQ(21, GetChannelData(frame_, 0, 1));
  EXPECT_EQ(989, GetChannelData(frame_, 0, 91));
  EXPECT_EQ(999, GetChannelData(frame_, 0, 92));
}

TEST_F(AudioFrameOperationsTest, MuteEndStereoLong) {
  InitFrame(&frame_, 2, 228, 1000, -1000);
  AudioFrameOperations::Mute(&frame_, true, false);
  VerifyFrameDataBounds(frame_, 0, 1000, 0);
  VerifyFrameDataBounds(frame_, 1, 0, -1000);
  EXPECT_EQ(7, GetChannelData(frame_, 0, 0));
  EXPECT_EQ(-7, GetChannelData(frame_, 1, 0));
  EXPECT_EQ(15, GetChannelData(frame_, 0, 1));
  EXPECT_EQ(-15, GetChannelData(frame_, 1, 1));
  EXPECT_EQ(1000, GetChannelData(frame_, 0, 127));
  EXPECT_EQ(-1000, GetChannelData(frame_, 1, 127));
  EXPECT_EQ(1000, GetChannelData(frame_, 0, 128));
  EXPECT_EQ(-1000, GetChannelData(frame_, 1, 128));
}

TEST_F(AudioFrameOperationsTest, MuteEndStereoShort) {
  InitFrame(&frame_, 2, 93, 1000, -1000);
  AudioFrameOperations::Mute(&frame_, true, false);
  VerifyFrameDataBounds(frame_, 0, 1000, 0);
  VerifyFrameDataBounds(frame_, 1, 0, -1000);
  EXPECT_EQ(10, GetChannelData(frame_, 0, 0));
  EXPECT_EQ(-10, GetChannelData(frame_, 1, 0));
  EXPECT_EQ(21, GetChannelData(frame_, 0, 1));
  EXPECT_EQ(-21, GetChannelData(frame_, 1, 1));
  EXPECT_EQ(989, GetChannelData(frame_, 0, 91));
  EXPECT_EQ(-989, GetChannelData(frame_, 1, 91));
  EXPECT_EQ(999, GetChannelData(frame_, 0, 92));
  EXPECT_EQ(-999, GetChannelData(frame_, 1, 92));
}

TEST_F(AudioFrameOperationsTest, MuteBeginAlreadyMuted) {
  ASSERT_TRUE(frame_.muted());
  AudioFrameOperations::Mute(&frame_, false, true);
  EXPECT_TRUE(frame_.muted());
}

TEST_F(AudioFrameOperationsTest, MuteEndAlreadyMuted) {
  ASSERT_TRUE(frame_.muted());
  AudioFrameOperations::Mute(&frame_, true, false);
  EXPECT_TRUE(frame_.muted());
}

TEST_F(AudioFrameOperationsTest, ApplyHalfGainSucceeds) {
  SetFrameData(2, &frame_);

  AudioFrame half_gain_frame;
  half_gain_frame.num_channels_ = frame_.num_channels_;
  half_gain_frame.samples_per_channel_ = frame_.samples_per_channel_;
  SetFrameData(1, &half_gain_frame);

  AudioFrameOperations::ApplyHalfGain(&frame_);
  VerifyFramesAreEqual(half_gain_frame, frame_);
}

TEST_F(AudioFrameOperationsTest, ApplyHalfGainMuted) {
  ASSERT_TRUE(frame_.muted());
  AudioFrameOperations::ApplyHalfGain(&frame_);
  EXPECT_TRUE(frame_.muted());
}

// TODO(andrew): should not allow negative scales.
TEST_F(AudioFrameOperationsTest, DISABLED_ScaleFailsWithBadParameters) {
  frame_.num_channels_ = 1;
  EXPECT_EQ(-1, AudioFrameOperations::Scale(1.0, 1.0, &frame_));

  frame_.num_channels_ = 3;
  EXPECT_EQ(-1, AudioFrameOperations::Scale(1.0, 1.0, &frame_));

  frame_.num_channels_ = 2;
  EXPECT_EQ(-1, AudioFrameOperations::Scale(-1.0, 1.0, &frame_));
  EXPECT_EQ(-1, AudioFrameOperations::Scale(1.0, -1.0, &frame_));
}

// TODO(andrew): fix the wraparound bug. We should always saturate.
TEST_F(AudioFrameOperationsTest, DISABLED_ScaleDoesNotWrapAround) {
  SetFrameData(4000, -4000, &frame_);
  EXPECT_EQ(0, AudioFrameOperations::Scale(10.0, 10.0, &frame_));

  AudioFrame clipped_frame;
  clipped_frame.samples_per_channel_ = 320;
  clipped_frame.num_channels_ = 2;
  SetFrameData(32767, -32768, &clipped_frame);
  VerifyFramesAreEqual(clipped_frame, frame_);
}

TEST_F(AudioFrameOperationsTest, ScaleSucceeds) {
  SetFrameData(1, -1, &frame_);
  EXPECT_EQ(0, AudioFrameOperations::Scale(2.0, 3.0, &frame_));

  AudioFrame scaled_frame;
  scaled_frame.samples_per_channel_ = 320;
  scaled_frame.num_channels_ = 2;
  SetFrameData(2, -3, &scaled_frame);
  VerifyFramesAreEqual(scaled_frame, frame_);
}

TEST_F(AudioFrameOperationsTest, ScaleMuted) {
  ASSERT_TRUE(frame_.muted());
  EXPECT_EQ(0, AudioFrameOperations::Scale(2.0, 3.0, &frame_));
  EXPECT_TRUE(frame_.muted());
}

// TODO(andrew): should fail with a negative scale.
TEST_F(AudioFrameOperationsTest, DISABLED_ScaleWithSatFailsWithBadParameters) {
  EXPECT_EQ(-1, AudioFrameOperations::ScaleWithSat(-1.0, &frame_));
}

TEST_F(AudioFrameOperationsTest, ScaleWithSatDoesNotWrapAround) {
  frame_.num_channels_ = 1;
  SetFrameData(4000, &frame_);
  EXPECT_EQ(0, AudioFrameOperations::ScaleWithSat(10.0, &frame_));

  AudioFrame clipped_frame;
  clipped_frame.samples_per_channel_ = 320;
  clipped_frame.num_channels_ = 1;
  SetFrameData(32767, &clipped_frame);
  VerifyFramesAreEqual(clipped_frame, frame_);

  SetFrameData(-4000, &frame_);
  EXPECT_EQ(0, AudioFrameOperations::ScaleWithSat(10.0, &frame_));
  SetFrameData(-32768, &clipped_frame);
  VerifyFramesAreEqual(clipped_frame, frame_);
}

TEST_F(AudioFrameOperationsTest, ScaleWithSatSucceeds) {
  frame_.num_channels_ = 1;
  SetFrameData(1, &frame_);
  EXPECT_EQ(0, AudioFrameOperations::ScaleWithSat(2.0, &frame_));

  AudioFrame scaled_frame;
  scaled_frame.samples_per_channel_ = 320;
  scaled_frame.num_channels_ = 1;
  SetFrameData(2, &scaled_frame);
  VerifyFramesAreEqual(scaled_frame, frame_);
}

TEST_F(AudioFrameOperationsTest, ScaleWithSatMuted) {
  ASSERT_TRUE(frame_.muted());
  EXPECT_EQ(0, AudioFrameOperations::ScaleWithSat(2.0, &frame_));
  EXPECT_TRUE(frame_.muted());
}

TEST_F(AudioFrameOperationsTest, AddingXToEmptyGivesX) {
  // When samples_per_channel_ is 0, the frame counts as empty and zero.
  AudioFrame frame_to_add_to;
  frame_to_add_to.mutable_data();  // Unmute the frame.
  ASSERT_FALSE(frame_to_add_to.muted());
  frame_to_add_to.samples_per_channel_ = 0;
  frame_to_add_to.num_channels_ = frame_.num_channels_;

  SetFrameData(1000, &frame_);
  AudioFrameOperations::Add(frame_, &frame_to_add_to);
  VerifyFramesAreEqual(frame_, frame_to_add_to);
}

TEST_F(AudioFrameOperationsTest, AddingXToMutedGivesX) {
  AudioFrame frame_to_add_to;
  ASSERT_TRUE(frame_to_add_to.muted());
  frame_to_add_to.samples_per_channel_ = frame_.samples_per_channel_;
  frame_to_add_to.num_channels_ = frame_.num_channels_;

  SetFrameData(1000, &frame_);
  AudioFrameOperations::Add(frame_, &frame_to_add_to);
  VerifyFramesAreEqual(frame_, frame_to_add_to);
}

TEST_F(AudioFrameOperationsTest, AddingMutedToXGivesX) {
  AudioFrame frame_to_add_to;
  frame_to_add_to.samples_per_channel_ = frame_.samples_per_channel_;
  frame_to_add_to.num_channels_ = frame_.num_channels_;
  SetFrameData(1000, &frame_to_add_to);

  AudioFrame frame_copy;
  frame_copy.CopyFrom(frame_to_add_to);

  ASSERT_TRUE(frame_.muted());
  AudioFrameOperations::Add(frame_, &frame_to_add_to);
  VerifyFramesAreEqual(frame_copy, frame_to_add_to);
}

TEST_F(AudioFrameOperationsTest, AddingTwoFramesProducesTheirSum) {
  AudioFrame frame_to_add_to;
  frame_to_add_to.samples_per_channel_ = frame_.samples_per_channel_;
  frame_to_add_to.num_channels_ = frame_.num_channels_;
  SetFrameData(1000, &frame_to_add_to);
  SetFrameData(2000, &frame_);

  AudioFrameOperations::Add(frame_, &frame_to_add_to);
  SetFrameData(frame_.data()[0] + 1000, &frame_);
  VerifyFramesAreEqual(frame_, frame_to_add_to);
}

}  // namespace
}  // namespace webrtc