/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
Mirko Bonadei | 92ea95e | 2017-09-15 06:47:31 +0200 | [diff] [blame] | 11 | #include "audio/utility/audio_frame_operations.h" |
Jonas Olsson | a4d8737 | 2019-07-05 19:08:33 +0200 | [diff] [blame] | 12 | |
Mirko Bonadei | 92ea95e | 2017-09-15 06:47:31 +0200 | [diff] [blame] | 13 | #include "rtc_base/checks.h" |
| 14 | #include "test/gtest.h" |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 15 | |
| 16 | namespace webrtc { |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 17 | namespace { |
| 18 | |
| 19 | class AudioFrameOperationsTest : public ::testing::Test { |
| 20 | protected: |
| 21 | AudioFrameOperationsTest() { |
| 22 | // Set typical values. |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 23 | frame_.samples_per_channel_ = 320; |
| 24 | frame_.num_channels_ = 2; |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 25 | } |
| 26 | |
| 27 | AudioFrame frame_; |
| 28 | }; |
| 29 | |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 30 | void SetFrameData(int16_t ch1, |
| 31 | int16_t ch2, |
| 32 | int16_t ch3, |
| 33 | int16_t ch4, |
| 34 | AudioFrame* frame) { |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 35 | int16_t* frame_data = frame->mutable_data(); |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 36 | for (size_t i = 0; i < frame->samples_per_channel_ * 4; i += 4) { |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 37 | frame_data[i] = ch1; |
| 38 | frame_data[i + 1] = ch2; |
| 39 | frame_data[i + 2] = ch3; |
| 40 | frame_data[i + 3] = ch4; |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 41 | } |
| 42 | } |
| 43 | |
| 44 | void SetFrameData(int16_t left, int16_t right, AudioFrame* frame) { |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 45 | int16_t* frame_data = frame->mutable_data(); |
Peter Kasting | dce40cf | 2015-08-24 14:52:23 -0700 | [diff] [blame] | 46 | for (size_t i = 0; i < frame->samples_per_channel_ * 2; i += 2) { |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 47 | frame_data[i] = left; |
| 48 | frame_data[i + 1] = right; |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 49 | } |
| 50 | } |
| 51 | |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 52 | void SetFrameData(int16_t data, AudioFrame* frame) { |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 53 | int16_t* frame_data = frame->mutable_data(); |
Yves Gerey | 665174f | 2018-06-19 15:03:05 +0200 | [diff] [blame] | 54 | for (size_t i = 0; i < frame->samples_per_channel_ * frame->num_channels_; |
| 55 | i++) { |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 56 | frame_data[i] = data; |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 57 | } |
| 58 | } |
| 59 | |
| 60 | void VerifyFramesAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) { |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 61 | EXPECT_EQ(frame1.num_channels_, frame2.num_channels_); |
Yves Gerey | 665174f | 2018-06-19 15:03:05 +0200 | [diff] [blame] | 62 | EXPECT_EQ(frame1.samples_per_channel_, frame2.samples_per_channel_); |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 63 | const int16_t* frame1_data = frame1.data(); |
| 64 | const int16_t* frame2_data = frame2.data(); |
Peter Kasting | dce40cf | 2015-08-24 14:52:23 -0700 | [diff] [blame] | 65 | for (size_t i = 0; i < frame1.samples_per_channel_ * frame1.num_channels_; |
Yves Gerey | 665174f | 2018-06-19 15:03:05 +0200 | [diff] [blame] | 66 | i++) { |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 67 | EXPECT_EQ(frame1_data[i], frame2_data[i]); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 68 | } |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 69 | EXPECT_EQ(frame1.muted(), frame2.muted()); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 70 | } |
| 71 | |
Yves Gerey | 665174f | 2018-06-19 15:03:05 +0200 | [diff] [blame] | 72 | void InitFrame(AudioFrame* frame, |
| 73 | size_t channels, |
| 74 | size_t samples_per_channel, |
| 75 | int16_t left_data, |
| 76 | int16_t right_data) { |
solenberg | 1c2af8e | 2016-03-24 10:36:00 -0700 | [diff] [blame] | 77 | RTC_DCHECK(frame); |
kwiberg | af476c7 | 2016-11-28 15:21:39 -0800 | [diff] [blame] | 78 | RTC_DCHECK_GE(2, channels); |
solenberg | 1c2af8e | 2016-03-24 10:36:00 -0700 | [diff] [blame] | 79 | RTC_DCHECK_GE(AudioFrame::kMaxDataSizeSamples, |
| 80 | samples_per_channel * channels); |
| 81 | frame->samples_per_channel_ = samples_per_channel; |
| 82 | frame->num_channels_ = channels; |
| 83 | if (channels == 2) { |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 84 | SetFrameData(left_data, right_data, frame); |
solenberg | 1c2af8e | 2016-03-24 10:36:00 -0700 | [diff] [blame] | 85 | } else if (channels == 1) { |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 86 | SetFrameData(left_data, frame); |
solenberg | 1c2af8e | 2016-03-24 10:36:00 -0700 | [diff] [blame] | 87 | } |
| 88 | } |
| 89 | |
| 90 | int16_t GetChannelData(const AudioFrame& frame, size_t channel, size_t index) { |
| 91 | RTC_DCHECK_LT(channel, frame.num_channels_); |
| 92 | RTC_DCHECK_LT(index, frame.samples_per_channel_); |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 93 | return frame.data()[index * frame.num_channels_ + channel]; |
solenberg | 1c2af8e | 2016-03-24 10:36:00 -0700 | [diff] [blame] | 94 | } |
| 95 | |
Yves Gerey | 665174f | 2018-06-19 15:03:05 +0200 | [diff] [blame] | 96 | void VerifyFrameDataBounds(const AudioFrame& frame, |
| 97 | size_t channel, |
| 98 | int16_t max, |
solenberg | 1c2af8e | 2016-03-24 10:36:00 -0700 | [diff] [blame] | 99 | int16_t min) { |
| 100 | for (size_t i = 0; i < frame.samples_per_channel_; ++i) { |
| 101 | int16_t s = GetChannelData(frame, channel, i); |
| 102 | EXPECT_LE(min, s); |
| 103 | EXPECT_GE(max, s); |
| 104 | } |
| 105 | } |
| 106 | |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 107 | #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 108 | TEST_F(AudioFrameOperationsTest, MonoToStereoFailsWithBadParameters) { |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 109 | EXPECT_DEATH(AudioFrameOperations::UpmixChannels(2, &frame_), ""); |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 110 | frame_.samples_per_channel_ = AudioFrame::kMaxDataSizeSamples; |
| 111 | frame_.num_channels_ = 1; |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 112 | EXPECT_DEATH(AudioFrameOperations::UpmixChannels(2, &frame_), ""); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 113 | } |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 114 | #endif |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 115 | |
| 116 | TEST_F(AudioFrameOperationsTest, MonoToStereoSucceeds) { |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 117 | frame_.num_channels_ = 1; |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 118 | SetFrameData(1, &frame_); |
| 119 | |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 120 | AudioFrameOperations::UpmixChannels(2, &frame_); |
| 121 | EXPECT_EQ(2u, frame_.num_channels_); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 122 | |
| 123 | AudioFrame stereo_frame; |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 124 | stereo_frame.samples_per_channel_ = 320; |
| 125 | stereo_frame.num_channels_ = 2; |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 126 | SetFrameData(1, 1, &stereo_frame); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 127 | VerifyFramesAreEqual(stereo_frame, frame_); |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 128 | } |
andrew@webrtc.org | 4ecea3e | 2012-06-27 03:25:31 +0000 | [diff] [blame] | 129 | |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 130 | TEST_F(AudioFrameOperationsTest, MonoToStereoMuted) { |
| 131 | frame_.num_channels_ = 1; |
| 132 | ASSERT_TRUE(frame_.muted()); |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 133 | AudioFrameOperations::UpmixChannels(2, &frame_); |
| 134 | EXPECT_EQ(2u, frame_.num_channels_); |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 135 | EXPECT_TRUE(frame_.muted()); |
| 136 | } |
| 137 | |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 138 | #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 139 | TEST_F(AudioFrameOperationsTest, StereoToMonoFailsWithBadParameters) { |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 140 | frame_.num_channels_ = 1; |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 141 | EXPECT_DEATH(AudioFrameOperations::DownmixChannels(1, &frame_), ""); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 142 | } |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 143 | #endif |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 144 | |
| 145 | TEST_F(AudioFrameOperationsTest, StereoToMonoSucceeds) { |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 146 | SetFrameData(4, 2, &frame_); |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 147 | AudioFrameOperations::DownmixChannels(1, &frame_); |
| 148 | EXPECT_EQ(1u, frame_.num_channels_); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 149 | |
| 150 | AudioFrame mono_frame; |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 151 | mono_frame.samples_per_channel_ = 320; |
| 152 | mono_frame.num_channels_ = 1; |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 153 | SetFrameData(3, &mono_frame); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 154 | VerifyFramesAreEqual(mono_frame, frame_); |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 155 | } |
andrew@webrtc.org | 4ecea3e | 2012-06-27 03:25:31 +0000 | [diff] [blame] | 156 | |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 157 | TEST_F(AudioFrameOperationsTest, StereoToMonoMuted) { |
| 158 | ASSERT_TRUE(frame_.muted()); |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 159 | AudioFrameOperations::DownmixChannels(1, &frame_); |
| 160 | EXPECT_EQ(1u, frame_.num_channels_); |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 161 | EXPECT_TRUE(frame_.muted()); |
| 162 | } |
| 163 | |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 164 | TEST_F(AudioFrameOperationsTest, StereoToMonoBufferSucceeds) { |
| 165 | AudioFrame target_frame; |
| 166 | SetFrameData(4, 2, &frame_); |
| 167 | |
| 168 | target_frame.num_channels_ = 1; |
| 169 | target_frame.samples_per_channel_ = frame_.samples_per_channel_; |
| 170 | |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 171 | AudioFrameOperations::DownmixChannels(frame_.data(), 2, |
| 172 | frame_.samples_per_channel_, 1, |
| 173 | target_frame.mutable_data()); |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 174 | |
| 175 | AudioFrame mono_frame; |
| 176 | mono_frame.samples_per_channel_ = 320; |
| 177 | mono_frame.num_channels_ = 1; |
| 178 | SetFrameData(3, &mono_frame); |
| 179 | VerifyFramesAreEqual(mono_frame, target_frame); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 180 | } |
| 181 | |
| 182 | TEST_F(AudioFrameOperationsTest, StereoToMonoDoesNotWrapAround) { |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 183 | SetFrameData(-32768, -32768, &frame_); |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 184 | AudioFrameOperations::DownmixChannels(1, &frame_); |
| 185 | EXPECT_EQ(1u, frame_.num_channels_); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 186 | AudioFrame mono_frame; |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 187 | mono_frame.samples_per_channel_ = 320; |
| 188 | mono_frame.num_channels_ = 1; |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 189 | SetFrameData(-32768, &mono_frame); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 190 | VerifyFramesAreEqual(mono_frame, frame_); |
| 191 | } |
| 192 | |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 193 | TEST_F(AudioFrameOperationsTest, QuadToMonoSucceeds) { |
| 194 | frame_.num_channels_ = 4; |
| 195 | SetFrameData(4, 2, 6, 8, &frame_); |
| 196 | |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 197 | AudioFrameOperations::DownmixChannels(1, &frame_); |
| 198 | EXPECT_EQ(1u, frame_.num_channels_); |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 199 | |
| 200 | AudioFrame mono_frame; |
| 201 | mono_frame.samples_per_channel_ = 320; |
| 202 | mono_frame.num_channels_ = 1; |
| 203 | SetFrameData(5, &mono_frame); |
| 204 | VerifyFramesAreEqual(mono_frame, frame_); |
| 205 | } |
| 206 | |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 207 | TEST_F(AudioFrameOperationsTest, QuadToMonoMuted) { |
| 208 | frame_.num_channels_ = 4; |
| 209 | ASSERT_TRUE(frame_.muted()); |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 210 | AudioFrameOperations::DownmixChannels(1, &frame_); |
| 211 | EXPECT_EQ(1u, frame_.num_channels_); |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 212 | EXPECT_TRUE(frame_.muted()); |
| 213 | } |
| 214 | |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 215 | TEST_F(AudioFrameOperationsTest, QuadToMonoBufferSucceeds) { |
| 216 | AudioFrame target_frame; |
| 217 | frame_.num_channels_ = 4; |
| 218 | SetFrameData(4, 2, 6, 8, &frame_); |
| 219 | |
| 220 | target_frame.num_channels_ = 1; |
| 221 | target_frame.samples_per_channel_ = frame_.samples_per_channel_; |
| 222 | |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 223 | AudioFrameOperations::DownmixChannels(frame_.data(), 4, |
| 224 | frame_.samples_per_channel_, 1, |
| 225 | target_frame.mutable_data()); |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 226 | AudioFrame mono_frame; |
| 227 | mono_frame.samples_per_channel_ = 320; |
| 228 | mono_frame.num_channels_ = 1; |
| 229 | SetFrameData(5, &mono_frame); |
| 230 | VerifyFramesAreEqual(mono_frame, target_frame); |
| 231 | } |
| 232 | |
| 233 | TEST_F(AudioFrameOperationsTest, QuadToMonoDoesNotWrapAround) { |
| 234 | frame_.num_channels_ = 4; |
| 235 | SetFrameData(-32768, -32768, -32768, -32768, &frame_); |
Alex Loiko | b4977de | 2019-01-28 16:38:38 +0100 | [diff] [blame] | 236 | AudioFrameOperations::DownmixChannels(1, &frame_); |
| 237 | EXPECT_EQ(1u, frame_.num_channels_); |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 238 | |
| 239 | AudioFrame mono_frame; |
| 240 | mono_frame.samples_per_channel_ = 320; |
| 241 | mono_frame.num_channels_ = 1; |
| 242 | SetFrameData(-32768, &mono_frame); |
| 243 | VerifyFramesAreEqual(mono_frame, frame_); |
| 244 | } |
| 245 | |
| 246 | TEST_F(AudioFrameOperationsTest, QuadToStereoFailsWithBadParameters) { |
| 247 | frame_.num_channels_ = 1; |
| 248 | EXPECT_EQ(-1, AudioFrameOperations::QuadToStereo(&frame_)); |
| 249 | frame_.num_channels_ = 2; |
| 250 | EXPECT_EQ(-1, AudioFrameOperations::QuadToStereo(&frame_)); |
| 251 | } |
| 252 | |
| 253 | TEST_F(AudioFrameOperationsTest, QuadToStereoSucceeds) { |
| 254 | frame_.num_channels_ = 4; |
| 255 | SetFrameData(4, 2, 6, 8, &frame_); |
| 256 | EXPECT_EQ(0, AudioFrameOperations::QuadToStereo(&frame_)); |
| 257 | |
| 258 | AudioFrame stereo_frame; |
| 259 | stereo_frame.samples_per_channel_ = 320; |
| 260 | stereo_frame.num_channels_ = 2; |
| 261 | SetFrameData(3, 7, &stereo_frame); |
| 262 | VerifyFramesAreEqual(stereo_frame, frame_); |
| 263 | } |
| 264 | |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 265 | TEST_F(AudioFrameOperationsTest, QuadToStereoMuted) { |
| 266 | frame_.num_channels_ = 4; |
| 267 | ASSERT_TRUE(frame_.muted()); |
| 268 | EXPECT_EQ(0, AudioFrameOperations::QuadToStereo(&frame_)); |
| 269 | EXPECT_TRUE(frame_.muted()); |
| 270 | } |
| 271 | |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 272 | TEST_F(AudioFrameOperationsTest, QuadToStereoBufferSucceeds) { |
| 273 | AudioFrame target_frame; |
| 274 | frame_.num_channels_ = 4; |
| 275 | SetFrameData(4, 2, 6, 8, &frame_); |
| 276 | |
| 277 | target_frame.num_channels_ = 2; |
| 278 | target_frame.samples_per_channel_ = frame_.samples_per_channel_; |
| 279 | |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 280 | AudioFrameOperations::QuadToStereo(frame_.data(), frame_.samples_per_channel_, |
| 281 | target_frame.mutable_data()); |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 282 | AudioFrame stereo_frame; |
| 283 | stereo_frame.samples_per_channel_ = 320; |
| 284 | stereo_frame.num_channels_ = 2; |
| 285 | SetFrameData(3, 7, &stereo_frame); |
| 286 | VerifyFramesAreEqual(stereo_frame, target_frame); |
| 287 | } |
| 288 | |
| 289 | TEST_F(AudioFrameOperationsTest, QuadToStereoDoesNotWrapAround) { |
| 290 | frame_.num_channels_ = 4; |
| 291 | SetFrameData(-32768, -32768, -32768, -32768, &frame_); |
| 292 | EXPECT_EQ(0, AudioFrameOperations::QuadToStereo(&frame_)); |
| 293 | |
| 294 | AudioFrame stereo_frame; |
| 295 | stereo_frame.samples_per_channel_ = 320; |
| 296 | stereo_frame.num_channels_ = 2; |
| 297 | SetFrameData(-32768, -32768, &stereo_frame); |
| 298 | VerifyFramesAreEqual(stereo_frame, frame_); |
| 299 | } |
| 300 | |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 301 | TEST_F(AudioFrameOperationsTest, SwapStereoChannelsSucceedsOnStereo) { |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 302 | SetFrameData(0, 1, &frame_); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 303 | |
| 304 | AudioFrame swapped_frame; |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 305 | swapped_frame.samples_per_channel_ = 320; |
| 306 | swapped_frame.num_channels_ = 2; |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 307 | SetFrameData(1, 0, &swapped_frame); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 308 | |
| 309 | AudioFrameOperations::SwapStereoChannels(&frame_); |
| 310 | VerifyFramesAreEqual(swapped_frame, frame_); |
| 311 | } |
| 312 | |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 313 | TEST_F(AudioFrameOperationsTest, SwapStereoChannelsMuted) { |
| 314 | ASSERT_TRUE(frame_.muted()); |
| 315 | AudioFrameOperations::SwapStereoChannels(&frame_); |
| 316 | EXPECT_TRUE(frame_.muted()); |
| 317 | } |
| 318 | |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 319 | TEST_F(AudioFrameOperationsTest, SwapStereoChannelsFailsOnMono) { |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 320 | frame_.num_channels_ = 1; |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 321 | // Set data to "stereo", despite it being a mono frame. |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 322 | SetFrameData(0, 1, &frame_); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 323 | |
andrew@webrtc.org | ae1a58b | 2013-01-22 04:44:30 +0000 | [diff] [blame] | 324 | AudioFrame orig_frame; |
| 325 | orig_frame.CopyFrom(frame_); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 326 | AudioFrameOperations::SwapStereoChannels(&frame_); |
| 327 | // Verify that no swap occurred. |
| 328 | VerifyFramesAreEqual(orig_frame, frame_); |
| 329 | } |
| 330 | |
solenberg | 1c2af8e | 2016-03-24 10:36:00 -0700 | [diff] [blame] | 331 | TEST_F(AudioFrameOperationsTest, MuteDisabled) { |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 332 | SetFrameData(1000, -1000, &frame_); |
solenberg | 1c2af8e | 2016-03-24 10:36:00 -0700 | [diff] [blame] | 333 | AudioFrameOperations::Mute(&frame_, false, false); |
| 334 | |
| 335 | AudioFrame muted_frame; |
| 336 | muted_frame.samples_per_channel_ = 320; |
| 337 | muted_frame.num_channels_ = 2; |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 338 | SetFrameData(1000, -1000, &muted_frame); |
solenberg | 1c2af8e | 2016-03-24 10:36:00 -0700 | [diff] [blame] | 339 | VerifyFramesAreEqual(muted_frame, frame_); |
| 340 | } |
| 341 | |
| 342 | TEST_F(AudioFrameOperationsTest, MuteEnabled) { |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 343 | SetFrameData(1000, -1000, &frame_); |
solenberg | 1c2af8e | 2016-03-24 10:36:00 -0700 | [diff] [blame] | 344 | AudioFrameOperations::Mute(&frame_, true, true); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 345 | |
| 346 | AudioFrame muted_frame; |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 347 | muted_frame.samples_per_channel_ = frame_.samples_per_channel_; |
| 348 | muted_frame.num_channels_ = frame_.num_channels_; |
| 349 | ASSERT_TRUE(muted_frame.muted()); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 350 | VerifyFramesAreEqual(muted_frame, frame_); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 351 | } |
| 352 | |
solenberg | 1c2af8e | 2016-03-24 10:36:00 -0700 | [diff] [blame] | 353 | // Verify that *beginning* to mute works for short and long (>128) frames, mono |
| 354 | // and stereo. Beginning mute should yield a ramp down to zero. |
| 355 | TEST_F(AudioFrameOperationsTest, MuteBeginMonoLong) { |
| 356 | InitFrame(&frame_, 1, 228, 1000, -1000); |
| 357 | AudioFrameOperations::Mute(&frame_, false, true); |
| 358 | VerifyFrameDataBounds(frame_, 0, 1000, 0); |
| 359 | EXPECT_EQ(1000, GetChannelData(frame_, 0, 99)); |
| 360 | EXPECT_EQ(992, GetChannelData(frame_, 0, 100)); |
| 361 | EXPECT_EQ(7, GetChannelData(frame_, 0, 226)); |
| 362 | EXPECT_EQ(0, GetChannelData(frame_, 0, 227)); |
| 363 | } |
| 364 | |
| 365 | TEST_F(AudioFrameOperationsTest, MuteBeginMonoShort) { |
| 366 | InitFrame(&frame_, 1, 93, 1000, -1000); |
| 367 | AudioFrameOperations::Mute(&frame_, false, true); |
| 368 | VerifyFrameDataBounds(frame_, 0, 1000, 0); |
| 369 | EXPECT_EQ(989, GetChannelData(frame_, 0, 0)); |
| 370 | EXPECT_EQ(978, GetChannelData(frame_, 0, 1)); |
| 371 | EXPECT_EQ(10, GetChannelData(frame_, 0, 91)); |
| 372 | EXPECT_EQ(0, GetChannelData(frame_, 0, 92)); |
| 373 | } |
| 374 | |
| 375 | TEST_F(AudioFrameOperationsTest, MuteBeginStereoLong) { |
| 376 | InitFrame(&frame_, 2, 228, 1000, -1000); |
| 377 | AudioFrameOperations::Mute(&frame_, false, true); |
| 378 | VerifyFrameDataBounds(frame_, 0, 1000, 0); |
| 379 | VerifyFrameDataBounds(frame_, 1, 0, -1000); |
| 380 | EXPECT_EQ(1000, GetChannelData(frame_, 0, 99)); |
| 381 | EXPECT_EQ(-1000, GetChannelData(frame_, 1, 99)); |
| 382 | EXPECT_EQ(992, GetChannelData(frame_, 0, 100)); |
| 383 | EXPECT_EQ(-992, GetChannelData(frame_, 1, 100)); |
| 384 | EXPECT_EQ(7, GetChannelData(frame_, 0, 226)); |
| 385 | EXPECT_EQ(-7, GetChannelData(frame_, 1, 226)); |
| 386 | EXPECT_EQ(0, GetChannelData(frame_, 0, 227)); |
| 387 | EXPECT_EQ(0, GetChannelData(frame_, 1, 227)); |
| 388 | } |
| 389 | |
| 390 | TEST_F(AudioFrameOperationsTest, MuteBeginStereoShort) { |
| 391 | InitFrame(&frame_, 2, 93, 1000, -1000); |
| 392 | AudioFrameOperations::Mute(&frame_, false, true); |
| 393 | VerifyFrameDataBounds(frame_, 0, 1000, 0); |
| 394 | VerifyFrameDataBounds(frame_, 1, 0, -1000); |
| 395 | EXPECT_EQ(989, GetChannelData(frame_, 0, 0)); |
| 396 | EXPECT_EQ(-989, GetChannelData(frame_, 1, 0)); |
| 397 | EXPECT_EQ(978, GetChannelData(frame_, 0, 1)); |
| 398 | EXPECT_EQ(-978, GetChannelData(frame_, 1, 1)); |
| 399 | EXPECT_EQ(10, GetChannelData(frame_, 0, 91)); |
| 400 | EXPECT_EQ(-10, GetChannelData(frame_, 1, 91)); |
| 401 | EXPECT_EQ(0, GetChannelData(frame_, 0, 92)); |
| 402 | EXPECT_EQ(0, GetChannelData(frame_, 1, 92)); |
| 403 | } |
| 404 | |
| 405 | // Verify that *ending* to mute works for short and long (>128) frames, mono |
| 406 | // and stereo. Ending mute should yield a ramp up from zero. |
| 407 | TEST_F(AudioFrameOperationsTest, MuteEndMonoLong) { |
| 408 | InitFrame(&frame_, 1, 228, 1000, -1000); |
| 409 | AudioFrameOperations::Mute(&frame_, true, false); |
| 410 | VerifyFrameDataBounds(frame_, 0, 1000, 0); |
| 411 | EXPECT_EQ(7, GetChannelData(frame_, 0, 0)); |
| 412 | EXPECT_EQ(15, GetChannelData(frame_, 0, 1)); |
| 413 | EXPECT_EQ(1000, GetChannelData(frame_, 0, 127)); |
| 414 | EXPECT_EQ(1000, GetChannelData(frame_, 0, 128)); |
| 415 | } |
| 416 | |
| 417 | TEST_F(AudioFrameOperationsTest, MuteEndMonoShort) { |
| 418 | InitFrame(&frame_, 1, 93, 1000, -1000); |
| 419 | AudioFrameOperations::Mute(&frame_, true, false); |
| 420 | VerifyFrameDataBounds(frame_, 0, 1000, 0); |
| 421 | EXPECT_EQ(10, GetChannelData(frame_, 0, 0)); |
| 422 | EXPECT_EQ(21, GetChannelData(frame_, 0, 1)); |
| 423 | EXPECT_EQ(989, GetChannelData(frame_, 0, 91)); |
| 424 | EXPECT_EQ(999, GetChannelData(frame_, 0, 92)); |
| 425 | } |
| 426 | |
| 427 | TEST_F(AudioFrameOperationsTest, MuteEndStereoLong) { |
| 428 | InitFrame(&frame_, 2, 228, 1000, -1000); |
| 429 | AudioFrameOperations::Mute(&frame_, true, false); |
| 430 | VerifyFrameDataBounds(frame_, 0, 1000, 0); |
| 431 | VerifyFrameDataBounds(frame_, 1, 0, -1000); |
| 432 | EXPECT_EQ(7, GetChannelData(frame_, 0, 0)); |
| 433 | EXPECT_EQ(-7, GetChannelData(frame_, 1, 0)); |
| 434 | EXPECT_EQ(15, GetChannelData(frame_, 0, 1)); |
| 435 | EXPECT_EQ(-15, GetChannelData(frame_, 1, 1)); |
| 436 | EXPECT_EQ(1000, GetChannelData(frame_, 0, 127)); |
| 437 | EXPECT_EQ(-1000, GetChannelData(frame_, 1, 127)); |
| 438 | EXPECT_EQ(1000, GetChannelData(frame_, 0, 128)); |
| 439 | EXPECT_EQ(-1000, GetChannelData(frame_, 1, 128)); |
| 440 | } |
| 441 | |
| 442 | TEST_F(AudioFrameOperationsTest, MuteEndStereoShort) { |
| 443 | InitFrame(&frame_, 2, 93, 1000, -1000); |
| 444 | AudioFrameOperations::Mute(&frame_, true, false); |
| 445 | VerifyFrameDataBounds(frame_, 0, 1000, 0); |
| 446 | VerifyFrameDataBounds(frame_, 1, 0, -1000); |
| 447 | EXPECT_EQ(10, GetChannelData(frame_, 0, 0)); |
| 448 | EXPECT_EQ(-10, GetChannelData(frame_, 1, 0)); |
| 449 | EXPECT_EQ(21, GetChannelData(frame_, 0, 1)); |
| 450 | EXPECT_EQ(-21, GetChannelData(frame_, 1, 1)); |
| 451 | EXPECT_EQ(989, GetChannelData(frame_, 0, 91)); |
| 452 | EXPECT_EQ(-989, GetChannelData(frame_, 1, 91)); |
| 453 | EXPECT_EQ(999, GetChannelData(frame_, 0, 92)); |
| 454 | EXPECT_EQ(-999, GetChannelData(frame_, 1, 92)); |
| 455 | } |
| 456 | |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 457 | TEST_F(AudioFrameOperationsTest, MuteBeginAlreadyMuted) { |
| 458 | ASSERT_TRUE(frame_.muted()); |
| 459 | AudioFrameOperations::Mute(&frame_, false, true); |
| 460 | EXPECT_TRUE(frame_.muted()); |
| 461 | } |
| 462 | |
| 463 | TEST_F(AudioFrameOperationsTest, MuteEndAlreadyMuted) { |
| 464 | ASSERT_TRUE(frame_.muted()); |
| 465 | AudioFrameOperations::Mute(&frame_, true, false); |
| 466 | EXPECT_TRUE(frame_.muted()); |
| 467 | } |
| 468 | |
| 469 | TEST_F(AudioFrameOperationsTest, ApplyHalfGainSucceeds) { |
| 470 | SetFrameData(2, &frame_); |
| 471 | |
| 472 | AudioFrame half_gain_frame; |
| 473 | half_gain_frame.num_channels_ = frame_.num_channels_; |
| 474 | half_gain_frame.samples_per_channel_ = frame_.samples_per_channel_; |
| 475 | SetFrameData(1, &half_gain_frame); |
| 476 | |
| 477 | AudioFrameOperations::ApplyHalfGain(&frame_); |
| 478 | VerifyFramesAreEqual(half_gain_frame, frame_); |
| 479 | } |
| 480 | |
| 481 | TEST_F(AudioFrameOperationsTest, ApplyHalfGainMuted) { |
| 482 | ASSERT_TRUE(frame_.muted()); |
| 483 | AudioFrameOperations::ApplyHalfGain(&frame_); |
| 484 | EXPECT_TRUE(frame_.muted()); |
| 485 | } |
| 486 | |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 487 | // TODO(andrew): should not allow negative scales. |
| 488 | TEST_F(AudioFrameOperationsTest, DISABLED_ScaleFailsWithBadParameters) { |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 489 | frame_.num_channels_ = 1; |
oprypin | 67fdb80 | 2017-03-09 06:25:06 -0800 | [diff] [blame] | 490 | EXPECT_EQ(-1, AudioFrameOperations::Scale(1.0, 1.0, &frame_)); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 491 | |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 492 | frame_.num_channels_ = 3; |
oprypin | 67fdb80 | 2017-03-09 06:25:06 -0800 | [diff] [blame] | 493 | EXPECT_EQ(-1, AudioFrameOperations::Scale(1.0, 1.0, &frame_)); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 494 | |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 495 | frame_.num_channels_ = 2; |
oprypin | 67fdb80 | 2017-03-09 06:25:06 -0800 | [diff] [blame] | 496 | EXPECT_EQ(-1, AudioFrameOperations::Scale(-1.0, 1.0, &frame_)); |
| 497 | EXPECT_EQ(-1, AudioFrameOperations::Scale(1.0, -1.0, &frame_)); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 498 | } |
| 499 | |
| 500 | // TODO(andrew): fix the wraparound bug. We should always saturate. |
| 501 | TEST_F(AudioFrameOperationsTest, DISABLED_ScaleDoesNotWrapAround) { |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 502 | SetFrameData(4000, -4000, &frame_); |
oprypin | 67fdb80 | 2017-03-09 06:25:06 -0800 | [diff] [blame] | 503 | EXPECT_EQ(0, AudioFrameOperations::Scale(10.0, 10.0, &frame_)); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 504 | |
| 505 | AudioFrame clipped_frame; |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 506 | clipped_frame.samples_per_channel_ = 320; |
| 507 | clipped_frame.num_channels_ = 2; |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 508 | SetFrameData(32767, -32768, &clipped_frame); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 509 | VerifyFramesAreEqual(clipped_frame, frame_); |
| 510 | } |
| 511 | |
| 512 | TEST_F(AudioFrameOperationsTest, ScaleSucceeds) { |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 513 | SetFrameData(1, -1, &frame_); |
oprypin | 67fdb80 | 2017-03-09 06:25:06 -0800 | [diff] [blame] | 514 | EXPECT_EQ(0, AudioFrameOperations::Scale(2.0, 3.0, &frame_)); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 515 | |
| 516 | AudioFrame scaled_frame; |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 517 | scaled_frame.samples_per_channel_ = 320; |
| 518 | scaled_frame.num_channels_ = 2; |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 519 | SetFrameData(2, -3, &scaled_frame); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 520 | VerifyFramesAreEqual(scaled_frame, frame_); |
| 521 | } |
| 522 | |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 523 | TEST_F(AudioFrameOperationsTest, ScaleMuted) { |
| 524 | ASSERT_TRUE(frame_.muted()); |
| 525 | EXPECT_EQ(0, AudioFrameOperations::Scale(2.0, 3.0, &frame_)); |
| 526 | EXPECT_TRUE(frame_.muted()); |
| 527 | } |
| 528 | |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 529 | // TODO(andrew): should fail with a negative scale. |
| 530 | TEST_F(AudioFrameOperationsTest, DISABLED_ScaleWithSatFailsWithBadParameters) { |
oprypin | 67fdb80 | 2017-03-09 06:25:06 -0800 | [diff] [blame] | 531 | EXPECT_EQ(-1, AudioFrameOperations::ScaleWithSat(-1.0, &frame_)); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 532 | } |
| 533 | |
| 534 | TEST_F(AudioFrameOperationsTest, ScaleWithSatDoesNotWrapAround) { |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 535 | frame_.num_channels_ = 1; |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 536 | SetFrameData(4000, &frame_); |
oprypin | 67fdb80 | 2017-03-09 06:25:06 -0800 | [diff] [blame] | 537 | EXPECT_EQ(0, AudioFrameOperations::ScaleWithSat(10.0, &frame_)); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 538 | |
| 539 | AudioFrame clipped_frame; |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 540 | clipped_frame.samples_per_channel_ = 320; |
| 541 | clipped_frame.num_channels_ = 1; |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 542 | SetFrameData(32767, &clipped_frame); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 543 | VerifyFramesAreEqual(clipped_frame, frame_); |
| 544 | |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 545 | SetFrameData(-4000, &frame_); |
oprypin | 67fdb80 | 2017-03-09 06:25:06 -0800 | [diff] [blame] | 546 | EXPECT_EQ(0, AudioFrameOperations::ScaleWithSat(10.0, &frame_)); |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 547 | SetFrameData(-32768, &clipped_frame); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 548 | VerifyFramesAreEqual(clipped_frame, frame_); |
| 549 | } |
| 550 | |
| 551 | TEST_F(AudioFrameOperationsTest, ScaleWithSatSucceeds) { |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 552 | frame_.num_channels_ = 1; |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 553 | SetFrameData(1, &frame_); |
oprypin | 67fdb80 | 2017-03-09 06:25:06 -0800 | [diff] [blame] | 554 | EXPECT_EQ(0, AudioFrameOperations::ScaleWithSat(2.0, &frame_)); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 555 | |
| 556 | AudioFrame scaled_frame; |
andrew@webrtc.org | 63a5098 | 2012-05-02 23:56:37 +0000 | [diff] [blame] | 557 | scaled_frame.samples_per_channel_ = 320; |
| 558 | scaled_frame.num_channels_ = 1; |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 559 | SetFrameData(2, &scaled_frame); |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 560 | VerifyFramesAreEqual(scaled_frame, frame_); |
| 561 | } |
| 562 | |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 563 | TEST_F(AudioFrameOperationsTest, ScaleWithSatMuted) { |
| 564 | ASSERT_TRUE(frame_.muted()); |
| 565 | EXPECT_EQ(0, AudioFrameOperations::ScaleWithSat(2.0, &frame_)); |
| 566 | EXPECT_TRUE(frame_.muted()); |
| 567 | } |
| 568 | |
aleloi | 6321b49 | 2016-12-05 01:46:09 -0800 | [diff] [blame] | 569 | TEST_F(AudioFrameOperationsTest, AddingXToEmptyGivesX) { |
| 570 | // When samples_per_channel_ is 0, the frame counts as empty and zero. |
| 571 | AudioFrame frame_to_add_to; |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 572 | frame_to_add_to.mutable_data(); // Unmute the frame. |
| 573 | ASSERT_FALSE(frame_to_add_to.muted()); |
aleloi | 6321b49 | 2016-12-05 01:46:09 -0800 | [diff] [blame] | 574 | frame_to_add_to.samples_per_channel_ = 0; |
| 575 | frame_to_add_to.num_channels_ = frame_.num_channels_; |
| 576 | |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 577 | SetFrameData(1000, &frame_); |
aleloi | 6321b49 | 2016-12-05 01:46:09 -0800 | [diff] [blame] | 578 | AudioFrameOperations::Add(frame_, &frame_to_add_to); |
| 579 | VerifyFramesAreEqual(frame_, frame_to_add_to); |
| 580 | } |
| 581 | |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 582 | TEST_F(AudioFrameOperationsTest, AddingXToMutedGivesX) { |
| 583 | AudioFrame frame_to_add_to; |
| 584 | ASSERT_TRUE(frame_to_add_to.muted()); |
| 585 | frame_to_add_to.samples_per_channel_ = frame_.samples_per_channel_; |
| 586 | frame_to_add_to.num_channels_ = frame_.num_channels_; |
| 587 | |
| 588 | SetFrameData(1000, &frame_); |
| 589 | AudioFrameOperations::Add(frame_, &frame_to_add_to); |
| 590 | VerifyFramesAreEqual(frame_, frame_to_add_to); |
| 591 | } |
| 592 | |
| 593 | TEST_F(AudioFrameOperationsTest, AddingMutedToXGivesX) { |
| 594 | AudioFrame frame_to_add_to; |
| 595 | frame_to_add_to.samples_per_channel_ = frame_.samples_per_channel_; |
| 596 | frame_to_add_to.num_channels_ = frame_.num_channels_; |
| 597 | SetFrameData(1000, &frame_to_add_to); |
| 598 | |
| 599 | AudioFrame frame_copy; |
| 600 | frame_copy.CopyFrom(frame_to_add_to); |
| 601 | |
| 602 | ASSERT_TRUE(frame_.muted()); |
| 603 | AudioFrameOperations::Add(frame_, &frame_to_add_to); |
| 604 | VerifyFramesAreEqual(frame_copy, frame_to_add_to); |
| 605 | } |
| 606 | |
aleloi | 6321b49 | 2016-12-05 01:46:09 -0800 | [diff] [blame] | 607 | TEST_F(AudioFrameOperationsTest, AddingTwoFramesProducesTheirSum) { |
| 608 | AudioFrame frame_to_add_to; |
| 609 | frame_to_add_to.samples_per_channel_ = frame_.samples_per_channel_; |
| 610 | frame_to_add_to.num_channels_ = frame_.num_channels_; |
jens.nielsen | 228c268 | 2017-03-01 05:11:22 -0800 | [diff] [blame] | 611 | SetFrameData(1000, &frame_to_add_to); |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 612 | SetFrameData(2000, &frame_); |
aleloi | 6321b49 | 2016-12-05 01:46:09 -0800 | [diff] [blame] | 613 | |
| 614 | AudioFrameOperations::Add(frame_, &frame_to_add_to); |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 615 | SetFrameData(frame_.data()[0] + 1000, &frame_); |
aleloi | 6321b49 | 2016-12-05 01:46:09 -0800 | [diff] [blame] | 616 | VerifyFramesAreEqual(frame_, frame_to_add_to); |
| 617 | } |
yujo | 36b1a5f | 2017-06-12 12:45:32 -0700 | [diff] [blame] | 618 | |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 619 | } // namespace |
andrew@webrtc.org | 9c4f6a5 | 2012-04-26 22:32:03 +0000 | [diff] [blame] | 620 | } // namespace webrtc |