/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <math.h>
#include <stdio.h>
#include <string.h>
#ifdef WEBRTC_ANDROID
#include <sys/stat.h>
#endif

#include <algorithm>

#include "gtest/gtest.h"

#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/system_wrappers/interface/cpu_features_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/perf_test.h"
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h"
#else
#include "webrtc/audio_processing/debug.pb.h"
#endif

using webrtc::AudioFrame;
using webrtc::AudioProcessing;
using webrtc::EchoCancellation;
using webrtc::GainControl;
using webrtc::NoiseSuppression;
using webrtc::scoped_array;
using webrtc::TickInterval;
using webrtc::TickTime;
using webrtc::VoiceDetection;

using webrtc::audioproc::Event;
using webrtc::audioproc::Init;
using webrtc::audioproc::ReverseStream;
using webrtc::audioproc::Stream;

namespace {
// Returns true on success, false on error or end-of-file.
bool ReadMessageFromFile(FILE* file,
                         ::google::protobuf::MessageLite* msg) {
  // The "wire format" for the size is little-endian.
  // Assume process_test is running on a little-endian machine.
  int32_t size = 0;
  if (fread(&size, sizeof(int32_t), 1, file) != 1) {
    return false;
  }
  if (size <= 0) {
    return false;
  }
  const size_t usize = static_cast<size_t>(size);

  scoped_array<char> array(new char[usize]);
  if (fread(array.get(), sizeof(char), usize, file) != usize) {
    return false;
  }

  msg->Clear();
  return msg->ParseFromArray(array.get(), usize);
}

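// Prints an AudioProcessing::Statistic as "average, maximum, minimum",
// matching the "(avg, max, min)" header printed with the echo metrics below.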
void PrintStat(const AudioProcessing::Statistic& stat) {
  printf("%d, %d, %d\n", stat.average,
                         stat.maximum,
                         stat.minimum);
}

void usage() {
  printf(
  "Usage: process_test [options] [-pb PROTOBUF_FILE]\n"
  "  [-ir REVERSE_FILE] [-i PRIMARY_FILE] [-o OUT_FILE]\n");
  printf(
  "process_test is a test application for AudioProcessing.\n\n"
  "When a protobuf debug file is available, specify it with -pb.\n"
  "Alternatively, when -ir or -i is used, the specified files will be\n"
  "processed directly in a simulation mode. Otherwise the full set of\n"
  "legacy test files is expected to be present in the working directory.\n");
  printf("\n");
  printf("Options\n");
  printf("General configuration (only used for the simulation mode):\n");
  printf("  -fs SAMPLE_RATE_HZ\n");
  printf("  -ch CHANNELS_IN CHANNELS_OUT\n");
  printf("  -rch REVERSE_CHANNELS\n");
  printf("\n");
  printf("Component configuration:\n");
  printf(
  "All components are disabled by default. Each block below begins with a\n"
  "flag to enable the component with default settings. The subsequent flags\n"
  "in the block are used to provide configuration settings.\n");
  printf("\n  -aec     Echo cancellation\n");
  printf("  --drift_compensation\n");
  printf("  --no_drift_compensation\n");
  printf("  --no_echo_metrics\n");
  printf("  --no_delay_logging\n");
  printf("\n  -aecm    Echo control mobile\n");
  printf("  --aecm_echo_path_in_file FILE\n");
  printf("  --aecm_echo_path_out_file FILE\n");
  printf("\n  -agc     Gain control\n");
  printf("  --analog\n");
  printf("  --adaptive_digital\n");
  printf("  --fixed_digital\n");
  printf("  --target_level LEVEL\n");
  printf("  --compression_gain GAIN\n");
  printf("  --limiter\n");
  printf("  --no_limiter\n");
  printf("\n  -hpf     High pass filter\n");
  printf("\n  -ns      Noise suppression\n");
  printf("  --ns_low\n");
  printf("  --ns_moderate\n");
  printf("  --ns_high\n");
  printf("  --ns_very_high\n");
  printf("  --ns_prob_file FILE\n");
  printf("\n  -vad     Voice activity detection\n");
  printf("  --vad_out_file FILE\n");
  printf("\n  Level metrics (enabled by default)\n");
  printf("  --no_level_metrics\n");
  printf("\n");
  printf("Modifiers:\n");
  printf("  --noasm            Disable SSE optimization.\n");
  printf("  --delay DELAY      Add DELAY ms to input value.\n");
  printf("  --perf             Measure performance.\n");
  printf("  --quiet            Suppress text output.\n");
  printf("  --no_progress      Suppress progress.\n");
  printf("  --debug_file FILE  Dump a debug recording.\n");
}

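// Maps a simulated mic level in [0, 255] to a linear gain factor: level 127
// corresponds to unity gain (0 dB), with roughly +/-40 dB at the extremes.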
static float MicLevel2Gain(int level) {
  return pow(10.0f, ((level - 127.0f) / 128.0f * 40.0f) / 20.0f);
}

static void SimulateMic(int mic_level, AudioFrame* frame) {
  mic_level = std::min(std::max(mic_level, 0), 255);
  float mic_gain = MicLevel2Gain(mic_level);
  int num_samples = frame->samples_per_channel_ * frame->num_channels_;
  float v;
  for (int n = 0; n < num_samples; n++) {
    v = floor(frame->data_[n] * mic_gain + 0.5);
    v = std::max(std::min(32767.0f, v), -32768.0f);
    frame->data_[n] = static_cast<int16_t>(v);
  }
}

// void function for gtest.
void void_main(int argc, char* argv[]) {
  if (argc > 1 && strcmp(argv[1], "--help") == 0) {
    usage();
    return;
  }

  if (argc < 2) {
    printf("Did you mean to run without arguments?\n");
    printf("Try `process_test --help' for more information.\n\n");
  }

  AudioProcessing* apm = AudioProcessing::Create(0);
  ASSERT_TRUE(apm != NULL);

  const char* pb_filename = NULL;
  const char* far_filename = NULL;
  const char* near_filename = NULL;
  const char* out_filename = NULL;
  const char* vad_out_filename = NULL;
  const char* ns_prob_filename = NULL;
  const char* aecm_echo_path_in_filename = NULL;
  const char* aecm_echo_path_out_filename = NULL;

  int32_t sample_rate_hz = 16000;
  int32_t device_sample_rate_hz = 16000;

  int num_capture_input_channels = 1;
  int num_capture_output_channels = 1;
  int num_render_channels = 1;

  int samples_per_channel = sample_rate_hz / 100;

  bool simulating = false;
  bool perf_testing = false;
  bool verbose = true;
  bool progress = true;
  int extra_delay_ms = 0;
  //bool interleaved = true;

  ASSERT_EQ(apm->kNoError, apm->level_estimator()->Enable(true));
  for (int i = 1; i < argc; i++) {
    if (strcmp(argv[i], "-pb") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify protobuf filename after -pb";
      pb_filename = argv[i];

    } else if (strcmp(argv[i], "-ir") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after -ir";
      far_filename = argv[i];
      simulating = true;

    } else if (strcmp(argv[i], "-i") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after -i";
      near_filename = argv[i];
      simulating = true;

    } else if (strcmp(argv[i], "-o") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after -o";
      out_filename = argv[i];

    } else if (strcmp(argv[i], "-fs") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify sample rate after -fs";
      ASSERT_EQ(1, sscanf(argv[i], "%d", &sample_rate_hz));
      samples_per_channel = sample_rate_hz / 100;

      ASSERT_EQ(apm->kNoError,
                apm->set_sample_rate_hz(sample_rate_hz));

    } else if (strcmp(argv[i], "-ch") == 0) {
      i++;
      ASSERT_LT(i + 1, argc) << "Specify number of channels after -ch";
      ASSERT_EQ(1, sscanf(argv[i], "%d", &num_capture_input_channels));
      i++;
      ASSERT_EQ(1, sscanf(argv[i], "%d", &num_capture_output_channels));

      ASSERT_EQ(apm->kNoError,
                apm->set_num_channels(num_capture_input_channels,
                                      num_capture_output_channels));

    } else if (strcmp(argv[i], "-rch") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify number of channels after -rch";
      ASSERT_EQ(1, sscanf(argv[i], "%d", &num_render_channels));

      ASSERT_EQ(apm->kNoError,
                apm->set_num_reverse_channels(num_render_channels));

    } else if (strcmp(argv[i], "-aec") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_metrics(true));
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_delay_logging(true));

    } else if (strcmp(argv[i], "--drift_compensation") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
      // TODO(ajm): this is enabled in the VQE test app by default. Investigate
      // why it can give better performance despite passing zeros.
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_drift_compensation(true));
    } else if (strcmp(argv[i], "--no_drift_compensation") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_drift_compensation(false));

    } else if (strcmp(argv[i], "--no_echo_metrics") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_metrics(false));

    } else if (strcmp(argv[i], "--no_delay_logging") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_delay_logging(false));

    } else if (strcmp(argv[i], "--no_level_metrics") == 0) {
      ASSERT_EQ(apm->kNoError, apm->level_estimator()->Enable(false));

    } else if (strcmp(argv[i], "-aecm") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_control_mobile()->Enable(true));

    } else if (strcmp(argv[i], "--aecm_echo_path_in_file") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after --aecm_echo_path_in_file";
      aecm_echo_path_in_filename = argv[i];

    } else if (strcmp(argv[i], "--aecm_echo_path_out_file") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after --aecm_echo_path_out_file";
      aecm_echo_path_out_filename = argv[i];

    } else if (strcmp(argv[i], "-agc") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));

    } else if (strcmp(argv[i], "--analog") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->set_mode(GainControl::kAdaptiveAnalog));

    } else if (strcmp(argv[i], "--adaptive_digital") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->set_mode(GainControl::kAdaptiveDigital));

    } else if (strcmp(argv[i], "--fixed_digital") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->set_mode(GainControl::kFixedDigital));

    } else if (strcmp(argv[i], "--target_level") == 0) {
      i++;
      int level;
      ASSERT_EQ(1, sscanf(argv[i], "%d", &level));

      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->set_target_level_dbfs(level));

    } else if (strcmp(argv[i], "--compression_gain") == 0) {
      i++;
      int gain;
      ASSERT_EQ(1, sscanf(argv[i], "%d", &gain));

      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->set_compression_gain_db(gain));

    } else if (strcmp(argv[i], "--limiter") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->enable_limiter(true));

    } else if (strcmp(argv[i], "--no_limiter") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->enable_limiter(false));

    } else if (strcmp(argv[i], "-hpf") == 0) {
      ASSERT_EQ(apm->kNoError, apm->high_pass_filter()->Enable(true));

    } else if (strcmp(argv[i], "-ns") == 0) {
      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));

    } else if (strcmp(argv[i], "--ns_low") == 0) {
      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->noise_suppression()->set_level(NoiseSuppression::kLow));

    } else if (strcmp(argv[i], "--ns_moderate") == 0) {
      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->noise_suppression()->set_level(NoiseSuppression::kModerate));

    } else if (strcmp(argv[i], "--ns_high") == 0) {
      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->noise_suppression()->set_level(NoiseSuppression::kHigh));

    } else if (strcmp(argv[i], "--ns_very_high") == 0) {
      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->noise_suppression()->set_level(NoiseSuppression::kVeryHigh));

    } else if (strcmp(argv[i], "--ns_prob_file") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after --ns_prob_file";
      ns_prob_filename = argv[i];

    } else if (strcmp(argv[i], "-vad") == 0) {
      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));

    } else if (strcmp(argv[i], "--vad_very_low") == 0) {
      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->voice_detection()->set_likelihood(
              VoiceDetection::kVeryLowLikelihood));

    } else if (strcmp(argv[i], "--vad_low") == 0) {
      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->voice_detection()->set_likelihood(
              VoiceDetection::kLowLikelihood));

    } else if (strcmp(argv[i], "--vad_moderate") == 0) {
      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->voice_detection()->set_likelihood(
              VoiceDetection::kModerateLikelihood));

    } else if (strcmp(argv[i], "--vad_high") == 0) {
      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->voice_detection()->set_likelihood(
              VoiceDetection::kHighLikelihood));

    } else if (strcmp(argv[i], "--vad_out_file") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after --vad_out_file";
      vad_out_filename = argv[i];

    } else if (strcmp(argv[i], "--noasm") == 0) {
      WebRtc_GetCPUInfo = WebRtc_GetCPUInfoNoASM;
      // We need to reinitialize here if components have already been enabled.
      ASSERT_EQ(apm->kNoError, apm->Initialize());

    } else if (strcmp(argv[i], "--delay") == 0) {
      i++;
      ASSERT_EQ(1, sscanf(argv[i], "%d", &extra_delay_ms));

    } else if (strcmp(argv[i], "--perf") == 0) {
      perf_testing = true;

    } else if (strcmp(argv[i], "--quiet") == 0) {
      verbose = false;
      progress = false;

    } else if (strcmp(argv[i], "--no_progress") == 0) {
      progress = false;

    } else if (strcmp(argv[i], "--debug_file") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after --debug_file";
      ASSERT_EQ(apm->kNoError, apm->StartDebugRecording(argv[i]));
    } else {
      FAIL() << "Unrecognized argument " << argv[i];
    }
  }
  // If we're reading a protobuf file, ensure a simulation hasn't also
  // been requested (which makes no sense...)
  ASSERT_FALSE(pb_filename && simulating);

  if (verbose) {
    printf("Sample rate: %d Hz\n", sample_rate_hz);
    printf("Primary channels: %d (in), %d (out)\n",
           num_capture_input_channels,
           num_capture_output_channels);
    printf("Reverse channels: %d \n", num_render_channels);
  }

  const std::string out_path = webrtc::test::OutputPath();
  const char far_file_default[] = "apm_far.pcm";
  const char near_file_default[] = "apm_near.pcm";
  const std::string out_file_default = out_path + "out.pcm";
  const char event_filename[] = "apm_event.dat";
  const char delay_filename[] = "apm_delay.dat";
  const char drift_filename[] = "apm_drift.dat";
  const std::string vad_file_default = out_path + "vad_out.dat";
  const std::string ns_prob_file_default = out_path + "ns_prob.dat";

  if (!simulating) {
    far_filename = far_file_default;
    near_filename = near_file_default;
  }

  if (!out_filename) {
    out_filename = out_file_default.c_str();
  }

  if (!vad_out_filename) {
    vad_out_filename = vad_file_default.c_str();
  }

  if (!ns_prob_filename) {
    ns_prob_filename = ns_prob_file_default.c_str();
  }

  FILE* pb_file = NULL;
  FILE* far_file = NULL;
  FILE* near_file = NULL;
  FILE* out_file = NULL;
  FILE* event_file = NULL;
  FILE* delay_file = NULL;
  FILE* drift_file = NULL;
  FILE* vad_out_file = NULL;
  FILE* ns_prob_file = NULL;
  FILE* aecm_echo_path_in_file = NULL;
  FILE* aecm_echo_path_out_file = NULL;

  if (pb_filename) {
    pb_file = fopen(pb_filename, "rb");
    ASSERT_TRUE(NULL != pb_file) << "Unable to open protobuf file "
                                 << pb_filename;
  } else {
    if (far_filename) {
      far_file = fopen(far_filename, "rb");
      ASSERT_TRUE(NULL != far_file) << "Unable to open far-end audio file "
                                    << far_filename;
    }

    near_file = fopen(near_filename, "rb");
    ASSERT_TRUE(NULL != near_file) << "Unable to open near-end audio file "
                                   << near_filename;
    if (!simulating) {
      event_file = fopen(event_filename, "rb");
      ASSERT_TRUE(NULL != event_file) << "Unable to open event file "
                                      << event_filename;

      delay_file = fopen(delay_filename, "rb");
      ASSERT_TRUE(NULL != delay_file) << "Unable to open buffer file "
                                      << delay_filename;

      drift_file = fopen(drift_filename, "rb");
      ASSERT_TRUE(NULL != drift_file) << "Unable to open drift file "
                                      << drift_filename;
    }
  }

  out_file = fopen(out_filename, "wb");
  ASSERT_TRUE(NULL != out_file) << "Unable to open output audio file "
                                << out_filename;

  int near_size_bytes = 0;
  if (pb_file) {
    struct stat st;
    stat(pb_filename, &st);
    // Crude estimate, but should be good enough.
    near_size_bytes = st.st_size / 3;
  } else {
    struct stat st;
    stat(near_filename, &st);
    near_size_bytes = st.st_size;
  }

  if (apm->voice_detection()->is_enabled()) {
    vad_out_file = fopen(vad_out_filename, "wb");
    ASSERT_TRUE(NULL != vad_out_file) << "Unable to open VAD output file "
                                      << vad_out_filename;
  }

  if (apm->noise_suppression()->is_enabled()) {
    ns_prob_file = fopen(ns_prob_filename, "wb");
    ASSERT_TRUE(NULL != ns_prob_file) << "Unable to open NS output file "
                                      << ns_prob_filename;
  }

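  // If an AECM echo path file was supplied, load it and hand it to the
  // component before any processing starts.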
  if (aecm_echo_path_in_filename != NULL) {
    aecm_echo_path_in_file = fopen(aecm_echo_path_in_filename, "rb");
    ASSERT_TRUE(NULL != aecm_echo_path_in_file) << "Unable to open file "
                                                << aecm_echo_path_in_filename;

    const size_t path_size =
        apm->echo_control_mobile()->echo_path_size_bytes();
    scoped_array<char> echo_path(new char[path_size]);
    ASSERT_EQ(path_size, fread(echo_path.get(),
                               sizeof(char),
                               path_size,
                               aecm_echo_path_in_file));
    EXPECT_EQ(apm->kNoError,
              apm->echo_control_mobile()->SetEchoPath(echo_path.get(),
                                                      path_size));
    fclose(aecm_echo_path_in_file);
    aecm_echo_path_in_file = NULL;
  }

  if (aecm_echo_path_out_filename != NULL) {
    aecm_echo_path_out_file = fopen(aecm_echo_path_out_filename, "wb");
    ASSERT_TRUE(NULL != aecm_echo_path_out_file) << "Unable to open file "
                                                 << aecm_echo_path_out_filename;
  }

  size_t read_count = 0;
  int reverse_count = 0;
  int primary_count = 0;
  int near_read_bytes = 0;
  TickInterval acc_ticks;

  AudioFrame far_frame;
  AudioFrame near_frame;

  int delay_ms = 0;
  int drift_samples = 0;
  int capture_level = 127;
  int8_t stream_has_voice = 0;
  float ns_speech_prob = 0.0f;

  TickTime t0 = TickTime::Now();
  TickTime t1 = t0;
  WebRtc_Word64 max_time_us = 0;
  WebRtc_Word64 max_time_reverse_us = 0;
  WebRtc_Word64 min_time_us = 1e6;
  WebRtc_Word64 min_time_reverse_us = 1e6;

  // TODO(ajm): Ideally we would refactor this block into separate functions,
  // but for now we want to share the variables.
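  // When a protobuf debug recording is supplied, replay it: INIT events
  // reconfigure APM, REVERSE_STREAM events feed the far end through
  // AnalyzeReverseStream(), and STREAM events feed the near end through
  // ProcessStream().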
  if (pb_file) {
    Event event_msg;
    while (ReadMessageFromFile(pb_file, &event_msg)) {
      std::ostringstream trace_stream;
      trace_stream << "Processed frames: " << reverse_count << " (reverse), "
                   << primary_count << " (primary)";
      SCOPED_TRACE(trace_stream.str());

      if (event_msg.type() == Event::INIT) {
        ASSERT_TRUE(event_msg.has_init());
        const Init msg = event_msg.init();

        ASSERT_TRUE(msg.has_sample_rate());
        ASSERT_EQ(apm->kNoError,
                  apm->set_sample_rate_hz(msg.sample_rate()));

        ASSERT_TRUE(msg.has_device_sample_rate());
        ASSERT_EQ(apm->kNoError,
                  apm->echo_cancellation()->set_device_sample_rate_hz(
                      msg.device_sample_rate()));

        ASSERT_TRUE(msg.has_num_input_channels());
        ASSERT_TRUE(msg.has_num_output_channels());
        ASSERT_EQ(apm->kNoError,
                  apm->set_num_channels(msg.num_input_channels(),
                                        msg.num_output_channels()));

        ASSERT_TRUE(msg.has_num_reverse_channels());
        ASSERT_EQ(apm->kNoError,
                  apm->set_num_reverse_channels(msg.num_reverse_channels()));

        samples_per_channel = msg.sample_rate() / 100;
        far_frame.sample_rate_hz_ = msg.sample_rate();
        far_frame.samples_per_channel_ = samples_per_channel;
        far_frame.num_channels_ = msg.num_reverse_channels();
        near_frame.sample_rate_hz_ = msg.sample_rate();
        near_frame.samples_per_channel_ = samples_per_channel;
        near_frame.num_channels_ = msg.num_input_channels();

        if (verbose) {
          printf("Init at frame: %d (primary), %d (reverse)\n",
              primary_count, reverse_count);
          printf("  Sample rate: %d Hz\n", msg.sample_rate());
          printf("  Primary channels: %d (in), %d (out)\n",
                 msg.num_input_channels(),
                 msg.num_output_channels());
          printf("  Reverse channels: %d \n", msg.num_reverse_channels());
        }

      } else if (event_msg.type() == Event::REVERSE_STREAM) {
        ASSERT_TRUE(event_msg.has_reverse_stream());
        const ReverseStream msg = event_msg.reverse_stream();
        reverse_count++;

        ASSERT_TRUE(msg.has_data());
        ASSERT_EQ(sizeof(int16_t) * samples_per_channel *
            far_frame.num_channels_, msg.data().size());
        memcpy(far_frame.data_, msg.data().data(), msg.data().size());

        if (perf_testing) {
          t0 = TickTime::Now();
        }

        ASSERT_EQ(apm->kNoError,
                  apm->AnalyzeReverseStream(&far_frame));

        if (perf_testing) {
          t1 = TickTime::Now();
          TickInterval tick_diff = t1 - t0;
          acc_ticks += tick_diff;
          if (tick_diff.Microseconds() > max_time_reverse_us) {
            max_time_reverse_us = tick_diff.Microseconds();
          }
          if (tick_diff.Microseconds() < min_time_reverse_us) {
            min_time_reverse_us = tick_diff.Microseconds();
          }
        }

      } else if (event_msg.type() == Event::STREAM) {
        ASSERT_TRUE(event_msg.has_stream());
        const Stream msg = event_msg.stream();
        primary_count++;

        // ProcessStream could have changed this for the output frame.
        near_frame.num_channels_ = apm->num_input_channels();

        ASSERT_TRUE(msg.has_input_data());
        ASSERT_EQ(sizeof(int16_t) * samples_per_channel *
            near_frame.num_channels_, msg.input_data().size());
        memcpy(near_frame.data_,
               msg.input_data().data(),
               msg.input_data().size());

        near_read_bytes += msg.input_data().size();
        if (progress && primary_count % 100 == 0) {
          printf("%.0f%% complete\r",
              (near_read_bytes * 100.0) / near_size_bytes);
          fflush(stdout);
        }

        if (perf_testing) {
          t0 = TickTime::Now();
        }

        ASSERT_EQ(apm->kNoError,
                  apm->gain_control()->set_stream_analog_level(msg.level()));
        ASSERT_EQ(apm->kNoError,
                  apm->set_stream_delay_ms(msg.delay() + extra_delay_ms));
        ASSERT_EQ(apm->kNoError,
            apm->echo_cancellation()->set_stream_drift_samples(msg.drift()));

        int err = apm->ProcessStream(&near_frame);
        if (err == apm->kBadStreamParameterWarning) {
          printf("Bad parameter warning. %s\n", trace_stream.str().c_str());
        }
        ASSERT_TRUE(err == apm->kNoError ||
                    err == apm->kBadStreamParameterWarning);
        ASSERT_TRUE(near_frame.num_channels_ == apm->num_output_channels());

        stream_has_voice =
            static_cast<int8_t>(apm->voice_detection()->stream_has_voice());
        if (vad_out_file != NULL) {
          ASSERT_EQ(1u, fwrite(&stream_has_voice,
                               sizeof(stream_has_voice),
                               1,
                               vad_out_file));
        }

        if (ns_prob_file != NULL) {
          ns_speech_prob = apm->noise_suppression()->speech_probability();
          ASSERT_EQ(1u, fwrite(&ns_speech_prob,
                               sizeof(ns_speech_prob),
                               1,
                               ns_prob_file));
        }

        if (perf_testing) {
          t1 = TickTime::Now();
          TickInterval tick_diff = t1 - t0;
          acc_ticks += tick_diff;
          if (tick_diff.Microseconds() > max_time_us) {
            max_time_us = tick_diff.Microseconds();
          }
          if (tick_diff.Microseconds() < min_time_us) {
            min_time_us = tick_diff.Microseconds();
          }
        }

        size_t size = samples_per_channel * near_frame.num_channels_;
        ASSERT_EQ(size, fwrite(near_frame.data_,
                               sizeof(int16_t),
                               size,
                               out_file));
      }
    }

    ASSERT_TRUE(feof(pb_file));

  } else {
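    // Legacy/simulation mode: drive processing from the raw far/near PCM
    // files, plus the apm_event/apm_delay/apm_drift .dat files when not
    // simulating.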
    enum Events {
      kInitializeEvent,
      kRenderEvent,
      kCaptureEvent,
      kResetEventDeprecated
    };
    int16_t event = 0;
    while (simulating || feof(event_file) == 0) {
      std::ostringstream trace_stream;
      trace_stream << "Processed frames: " << reverse_count << " (reverse), "
                   << primary_count << " (primary)";
      SCOPED_TRACE(trace_stream.str());

      if (simulating) {
        if (far_file == NULL) {
          event = kCaptureEvent;
        } else {
          if (event == kRenderEvent) {
            event = kCaptureEvent;
          } else {
            event = kRenderEvent;
          }
        }
      } else {
        read_count = fread(&event, sizeof(event), 1, event_file);
        if (read_count != 1) {
          break;
        }
      }

      far_frame.sample_rate_hz_ = sample_rate_hz;
      far_frame.samples_per_channel_ = samples_per_channel;
      far_frame.num_channels_ = num_render_channels;
      near_frame.sample_rate_hz_ = sample_rate_hz;
      near_frame.samples_per_channel_ = samples_per_channel;

      if (event == kInitializeEvent || event == kResetEventDeprecated) {
        ASSERT_EQ(1u,
            fread(&sample_rate_hz, sizeof(sample_rate_hz), 1, event_file));
        samples_per_channel = sample_rate_hz / 100;

        ASSERT_EQ(1u,
            fread(&device_sample_rate_hz,
                  sizeof(device_sample_rate_hz),
                  1,
                  event_file));

        ASSERT_EQ(apm->kNoError,
                  apm->set_sample_rate_hz(sample_rate_hz));

        ASSERT_EQ(apm->kNoError,
                  apm->echo_cancellation()->set_device_sample_rate_hz(
                      device_sample_rate_hz));

        far_frame.sample_rate_hz_ = sample_rate_hz;
        far_frame.samples_per_channel_ = samples_per_channel;
        far_frame.num_channels_ = num_render_channels;
        near_frame.sample_rate_hz_ = sample_rate_hz;
        near_frame.samples_per_channel_ = samples_per_channel;

        if (verbose) {
          printf("Init at frame: %d (primary), %d (reverse)\n",
              primary_count, reverse_count);
          printf("  Sample rate: %d Hz\n", sample_rate_hz);
        }

      } else if (event == kRenderEvent) {
        reverse_count++;

        size_t size = samples_per_channel * num_render_channels;
        read_count = fread(far_frame.data_,
                           sizeof(int16_t),
                           size,
                           far_file);

        if (simulating) {
          if (read_count != size) {
            // Read an equal amount from the near file to avoid errors due to
            // not reaching end-of-file.
            EXPECT_EQ(0, fseek(near_file, read_count * sizeof(int16_t),
                      SEEK_CUR));
            break;  // This is expected.
          }
        } else {
          ASSERT_EQ(size, read_count);
        }

        if (perf_testing) {
          t0 = TickTime::Now();
        }

        ASSERT_EQ(apm->kNoError,
                  apm->AnalyzeReverseStream(&far_frame));

        if (perf_testing) {
          t1 = TickTime::Now();
          TickInterval tick_diff = t1 - t0;
          acc_ticks += tick_diff;
          if (tick_diff.Microseconds() > max_time_reverse_us) {
            max_time_reverse_us = tick_diff.Microseconds();
          }
          if (tick_diff.Microseconds() < min_time_reverse_us) {
            min_time_reverse_us = tick_diff.Microseconds();
          }
        }

      } else if (event == kCaptureEvent) {
        primary_count++;
        near_frame.num_channels_ = num_capture_input_channels;

        size_t size = samples_per_channel * num_capture_input_channels;
        read_count = fread(near_frame.data_,
                           sizeof(int16_t),
                           size,
                           near_file);

        near_read_bytes += read_count * sizeof(int16_t);
        if (progress && primary_count % 100 == 0) {
          printf("%.0f%% complete\r",
              (near_read_bytes * 100.0) / near_size_bytes);
          fflush(stdout);
        }
        if (simulating) {
          if (read_count != size) {
            break;  // This is expected.
          }

          delay_ms = 0;
          drift_samples = 0;
        } else {
          ASSERT_EQ(size, read_count);

          // TODO(ajm): sizeof(delay_ms) for current files?
          ASSERT_EQ(1u,
              fread(&delay_ms, 2, 1, delay_file));
          ASSERT_EQ(1u,
              fread(&drift_samples, sizeof(drift_samples), 1, drift_file));
        }

        if (apm->gain_control()->is_enabled() &&
            apm->gain_control()->mode() == GainControl::kAdaptiveAnalog) {
          SimulateMic(capture_level, &near_frame);
        }

        if (perf_testing) {
          t0 = TickTime::Now();
        }

        const int capture_level_in = capture_level;
        ASSERT_EQ(apm->kNoError,
                  apm->gain_control()->set_stream_analog_level(capture_level));
        ASSERT_EQ(apm->kNoError,
                  apm->set_stream_delay_ms(delay_ms + extra_delay_ms));
        ASSERT_EQ(apm->kNoError,
            apm->echo_cancellation()->set_stream_drift_samples(drift_samples));

        int err = apm->ProcessStream(&near_frame);
        if (err == apm->kBadStreamParameterWarning) {
          printf("Bad parameter warning. %s\n", trace_stream.str().c_str());
        }
        ASSERT_TRUE(err == apm->kNoError ||
                    err == apm->kBadStreamParameterWarning);
        ASSERT_TRUE(near_frame.num_channels_ == apm->num_output_channels());

        capture_level = apm->gain_control()->stream_analog_level();

        stream_has_voice =
            static_cast<int8_t>(apm->voice_detection()->stream_has_voice());
        if (vad_out_file != NULL) {
          ASSERT_EQ(1u, fwrite(&stream_has_voice,
                               sizeof(stream_has_voice),
                               1,
                               vad_out_file));
        }

        if (ns_prob_file != NULL) {
          ns_speech_prob = apm->noise_suppression()->speech_probability();
          ASSERT_EQ(1u, fwrite(&ns_speech_prob,
                               sizeof(ns_speech_prob),
                               1,
                               ns_prob_file));
        }

        if (apm->gain_control()->mode() != GainControl::kAdaptiveAnalog) {
          ASSERT_EQ(capture_level_in, capture_level);
        }

        if (perf_testing) {
          t1 = TickTime::Now();
          TickInterval tick_diff = t1 - t0;
          acc_ticks += tick_diff;
          if (tick_diff.Microseconds() > max_time_us) {
            max_time_us = tick_diff.Microseconds();
          }
          if (tick_diff.Microseconds() < min_time_us) {
            min_time_us = tick_diff.Microseconds();
          }
        }

        size = samples_per_channel * near_frame.num_channels_;
        ASSERT_EQ(size, fwrite(near_frame.data_,
                               sizeof(int16_t),
                               size,
                               out_file));
      } else {
        FAIL() << "Event " << event << " is unrecognized";
      }
    }
  }
  printf("100%% complete\r");

  if (aecm_echo_path_out_file != NULL) {
    const size_t path_size =
        apm->echo_control_mobile()->echo_path_size_bytes();
    scoped_array<char> echo_path(new char[path_size]);
    apm->echo_control_mobile()->GetEchoPath(echo_path.get(), path_size);
    ASSERT_EQ(path_size, fwrite(echo_path.get(),
                                sizeof(char),
                                path_size,
                                aecm_echo_path_out_file));
    fclose(aecm_echo_path_out_file);
    aecm_echo_path_out_file = NULL;
  }

  if (verbose) {
    printf("\nProcessed frames: %d (primary), %d (reverse)\n",
        primary_count, reverse_count);

    if (apm->level_estimator()->is_enabled()) {
      printf("\n--Level metrics--\n");
      printf("RMS: %d dBFS\n", -apm->level_estimator()->RMS());
    }
    if (apm->echo_cancellation()->are_metrics_enabled()) {
      EchoCancellation::Metrics metrics;
      apm->echo_cancellation()->GetMetrics(&metrics);
      printf("\n--Echo metrics--\n");
      printf("(avg, max, min)\n");
      printf("ERL:  ");
      PrintStat(metrics.echo_return_loss);
      printf("ERLE: ");
      PrintStat(metrics.echo_return_loss_enhancement);
      printf("ANLP: ");
      PrintStat(metrics.a_nlp);
    }
    if (apm->echo_cancellation()->is_delay_logging_enabled()) {
      int median = 0;
      int std = 0;
      apm->echo_cancellation()->GetDelayMetrics(&median, &std);
      printf("\n--Delay metrics--\n");
      printf("Median: %3d\n", median);
      printf("Standard deviation: %3d\n", std);
    }
  }

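  // Verify that the input files were consumed completely; leftover bytes
  // indicate a mismatch between the files and the processed frame counts.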
  if (!pb_file) {
    int8_t temp_int8;
    if (far_file) {
      read_count = fread(&temp_int8, sizeof(temp_int8), 1, far_file);
      EXPECT_NE(0, feof(far_file)) << "Far-end file not fully processed";
    }

    read_count = fread(&temp_int8, sizeof(temp_int8), 1, near_file);
    EXPECT_NE(0, feof(near_file)) << "Near-end file not fully processed";

    if (!simulating) {
      read_count = fread(&temp_int8, sizeof(temp_int8), 1, event_file);
      EXPECT_NE(0, feof(event_file)) << "Event file not fully processed";
      read_count = fread(&temp_int8, sizeof(temp_int8), 1, delay_file);
      EXPECT_NE(0, feof(delay_file)) << "Delay file not fully processed";
      read_count = fread(&temp_int8, sizeof(temp_int8), 1, drift_file);
      EXPECT_NE(0, feof(drift_file)) << "Drift file not fully processed";
    }
  }

  if (perf_testing) {
    if (primary_count > 0) {
      WebRtc_Word64 exec_time = acc_ticks.Milliseconds();
      printf("\nTotal time: %.3f s, file time: %.2f s\n",
          exec_time * 0.001, primary_count * 0.01);
      printf("Time per frame: %.3f ms (average), %.3f ms (max),"
             " %.3f ms (min)\n",
          (exec_time * 1.0) / primary_count,
          (max_time_us + max_time_reverse_us) / 1000.0,
          (min_time_us + min_time_reverse_us) / 1000.0);
      // Record the results with Perf test tools.
      webrtc::test::PrintResult("time_per_10ms_frame", "", "audioproc",
          (exec_time * 1000) / primary_count, "us", false);
    } else {
      printf("Warning: no capture frames\n");
    }
  }

  AudioProcessing::Destroy(apm);
  apm = NULL;
}
}  // namespace

int main(int argc, char* argv[])
{
  void_main(argc, argv);

  // Optional, but removes memory leak noise from Valgrind.
  google::protobuf::ShutdownProtobufLibrary();
  return 0;
}