/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <stdio.h>
#include <string.h>
#ifdef WEBRTC_ANDROID
#include <sys/stat.h>
#endif

#include "gtest/gtest.h"

#include "audio_processing.h"
#include "cpu_features_wrapper.h"
#include "module_common_types.h"
#include "scoped_ptr.h"
#include "tick_util.h"
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/src/modules/audio_processing/debug.pb.h"
#else
#include "webrtc/audio_processing/debug.pb.h"
#endif

using webrtc::AudioFrame;
using webrtc::AudioProcessing;
using webrtc::EchoCancellation;
using webrtc::GainControl;
using webrtc::NoiseSuppression;
using webrtc::scoped_array;
using webrtc::TickInterval;
using webrtc::TickTime;
using webrtc::VoiceDetection;

using webrtc::audioproc::Event;
using webrtc::audioproc::Init;
using webrtc::audioproc::ReverseStream;
using webrtc::audioproc::Stream;

namespace {
// Returns true on success, false on error or end-of-file.
bool ReadMessageFromFile(FILE* file,
                         ::google::protobuf::MessageLite* msg) {
  // The "wire format" for the size is little-endian.
  // Assume process_test is running on a little-endian machine.
  int32_t size = 0;
  if (fread(&size, sizeof(int32_t), 1, file) != 1) {
    return false;
  }
  if (size <= 0) {
    return false;
  }
  const size_t usize = static_cast<size_t>(size);

  scoped_array<char> array(new char[usize]);
  if (fread(array.get(), sizeof(char), usize, file) != usize) {
    return false;
  }

  msg->Clear();
  return msg->ParseFromArray(array.get(), usize);
}

void PrintStat(const AudioProcessing::Statistic& stat) {
  printf("%d, %d, %d\n", stat.average,
                         stat.maximum,
                         stat.minimum);
}

void usage() {
  printf(
  "Usage: process_test [options] [-pb PROTOBUF_FILE]\n"
  "  [-ir REVERSE_FILE] [-i PRIMARY_FILE] [-o OUT_FILE]\n");
  printf(
  "process_test is a test application for AudioProcessing.\n\n"
  "When a protobuf debug file is available, specify it with -pb.\n"
82 "Alternately, when -ir or -i is used, the specified files will be\n"
83 "processed directly in a simulation mode. Otherwise the full set of\n"
84 "legacy test files is expected to be present in the working directory.\n");
niklase@google.com470e71d2011-07-07 08:21:25 +000085 printf("\n");
86 printf("Options\n");
ajm@google.com808e0e02011-08-03 21:08:51 +000087 printf("General configuration (only used for the simulation mode):\n");
niklase@google.com470e71d2011-07-07 08:21:25 +000088 printf(" -fs SAMPLE_RATE_HZ\n");
89 printf(" -ch CHANNELS_IN CHANNELS_OUT\n");
90 printf(" -rch REVERSE_CHANNELS\n");
91 printf("\n");
92 printf("Component configuration:\n");
93 printf(
94 "All components are disabled by default. Each block below begins with a\n"
95 "flag to enable the component with default settings. The subsequent flags\n"
96 "in the block are used to provide configuration settings.\n");
97 printf("\n -aec Echo cancellation\n");
98 printf(" --drift_compensation\n");
99 printf(" --no_drift_compensation\n");
andrew@webrtc.org94c74132011-09-19 15:17:57 +0000100 printf(" --no_echo_metrics\n");
bjornv@google.com1ba3dbe2011-10-03 08:18:10 +0000101 printf(" --no_delay_logging\n");
niklase@google.com470e71d2011-07-07 08:21:25 +0000102 printf("\n -aecm Echo control mobile\n");
bjornv@google.com238a0222011-07-15 14:51:52 +0000103 printf(" --aecm_echo_path_in_file FILE\n");
104 printf(" --aecm_echo_path_out_file FILE\n");
niklase@google.com470e71d2011-07-07 08:21:25 +0000105 printf("\n -agc Gain control\n");
106 printf(" --analog\n");
107 printf(" --adaptive_digital\n");
108 printf(" --fixed_digital\n");
109 printf(" --target_level LEVEL\n");
110 printf(" --compression_gain GAIN\n");
111 printf(" --limiter\n");
112 printf(" --no_limiter\n");
113 printf("\n -hpf High pass filter\n");
114 printf("\n -ns Noise suppression\n");
115 printf(" --ns_low\n");
116 printf(" --ns_moderate\n");
117 printf(" --ns_high\n");
118 printf(" --ns_very_high\n");
bjornv@webrtc.org08329f42012-07-12 21:00:43 +0000119 printf(" --ns_prob_file FILE\n");
niklase@google.com470e71d2011-07-07 08:21:25 +0000120 printf("\n -vad Voice activity detection\n");
ajm@google.com808e0e02011-08-03 21:08:51 +0000121 printf(" --vad_out_file FILE\n");
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000122 printf("\n Level metrics (enabled by default)\n");
123 printf(" --no_level_metrics\n");
niklase@google.com470e71d2011-07-07 08:21:25 +0000124 printf("\n");
125 printf("Modifiers:\n");
andrew@webrtc.orgcb181212011-10-26 00:27:17 +0000126 printf(" --noasm Disable SSE optimization.\n");
andrew@webrtc.org4b13fc92011-11-09 19:27:11 +0000127 printf(" --delay DELAY Add DELAY ms to input value.\n");
andrew@webrtc.orgcb181212011-10-26 00:27:17 +0000128 printf(" --perf Measure performance.\n");
129 printf(" --quiet Suppress text output.\n");
130 printf(" --no_progress Suppress progress.\n");
131 printf(" --debug_file FILE Dump a debug recording.\n");
niklase@google.com470e71d2011-07-07 08:21:25 +0000132}
133
134// void function for gtest.
135void void_main(int argc, char* argv[]) {
136 if (argc > 1 && strcmp(argv[1], "--help") == 0) {
137 usage();
138 return;
139 }
140
141 if (argc < 2) {
142 printf("Did you mean to run without arguments?\n");
143 printf("Try `process_test --help' for more information.\n\n");
144 }
145
146 AudioProcessing* apm = AudioProcessing::Create(0);
147 ASSERT_TRUE(apm != NULL);
148
ajm@google.com808e0e02011-08-03 21:08:51 +0000149 const char* pb_filename = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +0000150 const char* far_filename = NULL;
151 const char* near_filename = NULL;
152 const char* out_filename = NULL;
153 const char* vad_out_filename = NULL;
bjornv@webrtc.org08329f42012-07-12 21:00:43 +0000154 const char* ns_prob_filename = NULL;
bjornv@google.comc4b939c2011-07-13 08:09:56 +0000155 const char* aecm_echo_path_in_filename = NULL;
156 const char* aecm_echo_path_out_filename = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +0000157
158 int32_t sample_rate_hz = 16000;
159 int32_t device_sample_rate_hz = 16000;
160
161 int num_capture_input_channels = 1;
162 int num_capture_output_channels = 1;
163 int num_render_channels = 1;
164
165 int samples_per_channel = sample_rate_hz / 100;
166
167 bool simulating = false;
168 bool perf_testing = false;
169 bool verbose = true;
170 bool progress = true;
andrew@webrtc.org4b13fc92011-11-09 19:27:11 +0000171 int extra_delay_ms = 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000172 //bool interleaved = true;
173
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000174 ASSERT_EQ(apm->kNoError, apm->level_estimator()->Enable(true));
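  // Parse the command line. Component flags enable the component and, where
  // applicable, apply the requested configuration immediately.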
  for (int i = 1; i < argc; i++) {
    if (strcmp(argv[i], "-pb") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify protobuf filename after -pb";
      pb_filename = argv[i];

    } else if (strcmp(argv[i], "-ir") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after -ir";
      far_filename = argv[i];
      simulating = true;

    } else if (strcmp(argv[i], "-i") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after -i";
      near_filename = argv[i];
      simulating = true;

    } else if (strcmp(argv[i], "-o") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after -o";
      out_filename = argv[i];

    } else if (strcmp(argv[i], "-fs") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify sample rate after -fs";
      ASSERT_EQ(1, sscanf(argv[i], "%d", &sample_rate_hz));
      samples_per_channel = sample_rate_hz / 100;

      ASSERT_EQ(apm->kNoError,
                apm->set_sample_rate_hz(sample_rate_hz));

    } else if (strcmp(argv[i], "-ch") == 0) {
      i++;
      ASSERT_LT(i + 1, argc) << "Specify number of channels after -ch";
      ASSERT_EQ(1, sscanf(argv[i], "%d", &num_capture_input_channels));
      i++;
      ASSERT_EQ(1, sscanf(argv[i], "%d", &num_capture_output_channels));

      ASSERT_EQ(apm->kNoError,
                apm->set_num_channels(num_capture_input_channels,
                                      num_capture_output_channels));

    } else if (strcmp(argv[i], "-rch") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify number of channels after -rch";
      ASSERT_EQ(1, sscanf(argv[i], "%d", &num_render_channels));

      ASSERT_EQ(apm->kNoError,
                apm->set_num_reverse_channels(num_render_channels));

    } else if (strcmp(argv[i], "-aec") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_metrics(true));
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_delay_logging(true));

    } else if (strcmp(argv[i], "--drift_compensation") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
      // TODO(ajm): this is enabled in the VQE test app by default. Investigate
      //            why it can give better performance despite passing zeros.
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_drift_compensation(true));
    } else if (strcmp(argv[i], "--no_drift_compensation") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_drift_compensation(false));

    } else if (strcmp(argv[i], "--no_echo_metrics") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_metrics(false));

    } else if (strcmp(argv[i], "--no_delay_logging") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_delay_logging(false));

    } else if (strcmp(argv[i], "--no_level_metrics") == 0) {
      ASSERT_EQ(apm->kNoError, apm->level_estimator()->Enable(false));

    } else if (strcmp(argv[i], "-aecm") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_control_mobile()->Enable(true));

    } else if (strcmp(argv[i], "--aecm_echo_path_in_file") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after --aecm_echo_path_in_file";
      aecm_echo_path_in_filename = argv[i];

    } else if (strcmp(argv[i], "--aecm_echo_path_out_file") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after --aecm_echo_path_out_file";
      aecm_echo_path_out_filename = argv[i];

    } else if (strcmp(argv[i], "-agc") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));

    } else if (strcmp(argv[i], "--analog") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->set_mode(GainControl::kAdaptiveAnalog));

    } else if (strcmp(argv[i], "--adaptive_digital") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->set_mode(GainControl::kAdaptiveDigital));

    } else if (strcmp(argv[i], "--fixed_digital") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->set_mode(GainControl::kFixedDigital));

    } else if (strcmp(argv[i], "--target_level") == 0) {
      i++;
      int level;
      ASSERT_EQ(1, sscanf(argv[i], "%d", &level));

      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->set_target_level_dbfs(level));

    } else if (strcmp(argv[i], "--compression_gain") == 0) {
      i++;
      int gain;
      ASSERT_EQ(1, sscanf(argv[i], "%d", &gain));

      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->set_compression_gain_db(gain));

    } else if (strcmp(argv[i], "--limiter") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->enable_limiter(true));

    } else if (strcmp(argv[i], "--no_limiter") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->enable_limiter(false));

    } else if (strcmp(argv[i], "-hpf") == 0) {
      ASSERT_EQ(apm->kNoError, apm->high_pass_filter()->Enable(true));

    } else if (strcmp(argv[i], "-ns") == 0) {
      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));

    } else if (strcmp(argv[i], "--ns_low") == 0) {
      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->noise_suppression()->set_level(NoiseSuppression::kLow));

    } else if (strcmp(argv[i], "--ns_moderate") == 0) {
      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->noise_suppression()->set_level(NoiseSuppression::kModerate));

    } else if (strcmp(argv[i], "--ns_high") == 0) {
      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->noise_suppression()->set_level(NoiseSuppression::kHigh));

    } else if (strcmp(argv[i], "--ns_very_high") == 0) {
      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->noise_suppression()->set_level(NoiseSuppression::kVeryHigh));

    } else if (strcmp(argv[i], "--ns_prob_file") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after --ns_prob_file";
      ns_prob_filename = argv[i];

    } else if (strcmp(argv[i], "-vad") == 0) {
      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));

    } else if (strcmp(argv[i], "--vad_very_low") == 0) {
      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->voice_detection()->set_likelihood(
              VoiceDetection::kVeryLowLikelihood));

    } else if (strcmp(argv[i], "--vad_low") == 0) {
      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->voice_detection()->set_likelihood(
              VoiceDetection::kLowLikelihood));

    } else if (strcmp(argv[i], "--vad_moderate") == 0) {
      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->voice_detection()->set_likelihood(
              VoiceDetection::kModerateLikelihood));

    } else if (strcmp(argv[i], "--vad_high") == 0) {
      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
      ASSERT_EQ(apm->kNoError,
          apm->voice_detection()->set_likelihood(
              VoiceDetection::kHighLikelihood));

    } else if (strcmp(argv[i], "--vad_out_file") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after --vad_out_file";
      vad_out_filename = argv[i];

    } else if (strcmp(argv[i], "--noasm") == 0) {
      WebRtc_GetCPUInfo = WebRtc_GetCPUInfoNoASM;
      // We need to reinitialize here if components have already been enabled.
      ASSERT_EQ(apm->kNoError, apm->Initialize());

    } else if (strcmp(argv[i], "--delay") == 0) {
      i++;
      ASSERT_EQ(1, sscanf(argv[i], "%d", &extra_delay_ms));

    } else if (strcmp(argv[i], "--perf") == 0) {
      perf_testing = true;

    } else if (strcmp(argv[i], "--quiet") == 0) {
      verbose = false;
      progress = false;

    } else if (strcmp(argv[i], "--no_progress") == 0) {
      progress = false;

    } else if (strcmp(argv[i], "--debug_file") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after --debug_file";
      ASSERT_EQ(apm->kNoError, apm->StartDebugRecording(argv[i]));
    } else {
      FAIL() << "Unrecognized argument " << argv[i];
    }
  }
  // If we're reading a protobuf file, ensure a simulation hasn't also
  // been requested (which makes no sense...)
  ASSERT_FALSE(pb_filename && simulating);

  if (verbose) {
    printf("Sample rate: %d Hz\n", sample_rate_hz);
    printf("Primary channels: %d (in), %d (out)\n",
           num_capture_input_channels,
           num_capture_output_channels);
    printf("Reverse channels: %d \n", num_render_channels);
  }

  const char far_file_default[] = "apm_far.pcm";
  const char near_file_default[] = "apm_near.pcm";
  const char out_file_default[] = "out.pcm";
  const char event_filename[] = "apm_event.dat";
  const char delay_filename[] = "apm_delay.dat";
  const char drift_filename[] = "apm_drift.dat";
  const char vad_file_default[] = "vad_out.dat";
  const char ns_prob_file_default[] = "ns_prob.dat";

  if (!simulating) {
    far_filename = far_file_default;
    near_filename = near_file_default;
  }

  if (!out_filename) {
    out_filename = out_file_default;
  }

  if (!vad_out_filename) {
    vad_out_filename = vad_file_default;
  }

  if (!ns_prob_filename) {
    ns_prob_filename = ns_prob_file_default;
  }

  FILE* pb_file = NULL;
  FILE* far_file = NULL;
  FILE* near_file = NULL;
  FILE* out_file = NULL;
  FILE* event_file = NULL;
  FILE* delay_file = NULL;
  FILE* drift_file = NULL;
  FILE* vad_out_file = NULL;
  FILE* ns_prob_file = NULL;
  FILE* aecm_echo_path_in_file = NULL;
  FILE* aecm_echo_path_out_file = NULL;

  if (pb_filename) {
    pb_file = fopen(pb_filename, "rb");
    ASSERT_TRUE(NULL != pb_file) << "Unable to open protobuf file "
                                 << pb_filename;
  } else {
    if (far_filename) {
      far_file = fopen(far_filename, "rb");
      ASSERT_TRUE(NULL != far_file) << "Unable to open far-end audio file "
                                    << far_filename;
    }

    near_file = fopen(near_filename, "rb");
    ASSERT_TRUE(NULL != near_file) << "Unable to open near-end audio file "
                                   << near_filename;
    if (!simulating) {
      event_file = fopen(event_filename, "rb");
      ASSERT_TRUE(NULL != event_file) << "Unable to open event file "
                                      << event_filename;

      delay_file = fopen(delay_filename, "rb");
      ASSERT_TRUE(NULL != delay_file) << "Unable to open buffer file "
                                      << delay_filename;

      drift_file = fopen(drift_filename, "rb");
      ASSERT_TRUE(NULL != drift_file) << "Unable to open drift file "
                                      << drift_filename;
    }
  }

  out_file = fopen(out_filename, "wb");
  ASSERT_TRUE(NULL != out_file) << "Unable to open output audio file "
                                << out_filename;

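  // Total size of the near-end input, used only to report progress.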
  int near_size_bytes = 0;
  if (pb_file) {
    struct stat st;
    stat(pb_filename, &st);
    // Crude estimate, but should be good enough.
    near_size_bytes = st.st_size / 3;
  } else {
    struct stat st;
    stat(near_filename, &st);
    near_size_bytes = st.st_size;
  }

  if (apm->voice_detection()->is_enabled()) {
    vad_out_file = fopen(vad_out_filename, "wb");
    ASSERT_TRUE(NULL != vad_out_file) << "Unable to open VAD output file "
                                      << vad_out_filename;
  }

  if (apm->noise_suppression()->is_enabled()) {
    ns_prob_file = fopen(ns_prob_filename, "wb");
    ASSERT_TRUE(NULL != ns_prob_file) << "Unable to open NS output file "
                                      << ns_prob_filename;
  }

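  // If requested, pre-load a previously saved AECM echo path before processing.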
  if (aecm_echo_path_in_filename != NULL) {
    aecm_echo_path_in_file = fopen(aecm_echo_path_in_filename, "rb");
    ASSERT_TRUE(NULL != aecm_echo_path_in_file) << "Unable to open file "
                                                << aecm_echo_path_in_filename;

    const size_t path_size =
        apm->echo_control_mobile()->echo_path_size_bytes();
    scoped_array<char> echo_path(new char[path_size]);
    ASSERT_EQ(path_size, fread(echo_path.get(),
                               sizeof(char),
                               path_size,
                               aecm_echo_path_in_file));
    EXPECT_EQ(apm->kNoError,
              apm->echo_control_mobile()->SetEchoPath(echo_path.get(),
                                                      path_size));
    fclose(aecm_echo_path_in_file);
    aecm_echo_path_in_file = NULL;
  }

  if (aecm_echo_path_out_filename != NULL) {
    aecm_echo_path_out_file = fopen(aecm_echo_path_out_filename, "wb");
    ASSERT_TRUE(NULL != aecm_echo_path_out_file) << "Unable to open file "
                                                 << aecm_echo_path_out_filename;
  }

  size_t read_count = 0;
  int reverse_count = 0;
  int primary_count = 0;
  int near_read_bytes = 0;
  TickInterval acc_ticks;

  AudioFrame far_frame;
  AudioFrame near_frame;

  int delay_ms = 0;
  int drift_samples = 0;
  int capture_level = 127;
  int8_t stream_has_voice = 0;
  float ns_speech_prob = 0.0f;

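  // Per-frame timing statistics, gathered only when --perf is specified.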
  TickTime t0 = TickTime::Now();
  TickTime t1 = t0;
  WebRtc_Word64 max_time_us = 0;
  WebRtc_Word64 max_time_reverse_us = 0;
  WebRtc_Word64 min_time_us = 1e6;
  WebRtc_Word64 min_time_reverse_us = 1e6;

  // TODO(ajm): Ideally we would refactor this block into separate functions,
  //            but for now we want to share the variables.
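  // Two replay paths: a recorded protobuf debug dump (-pb), or raw PCM files
  // driven either by the legacy event/delay/drift files or by -i/-ir simulation.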
  if (pb_file) {
    Event event_msg;
    while (ReadMessageFromFile(pb_file, &event_msg)) {
      std::ostringstream trace_stream;
      trace_stream << "Processed frames: " << reverse_count << " (reverse), "
                   << primary_count << " (primary)";
      SCOPED_TRACE(trace_stream.str());

      if (event_msg.type() == Event::INIT) {
        ASSERT_TRUE(event_msg.has_init());
        const Init msg = event_msg.init();

        ASSERT_TRUE(msg.has_sample_rate());
        ASSERT_EQ(apm->kNoError,
                  apm->set_sample_rate_hz(msg.sample_rate()));

        ASSERT_TRUE(msg.has_device_sample_rate());
        ASSERT_EQ(apm->kNoError,
                  apm->echo_cancellation()->set_device_sample_rate_hz(
                      msg.device_sample_rate()));

        ASSERT_TRUE(msg.has_num_input_channels());
        ASSERT_TRUE(msg.has_num_output_channels());
        ASSERT_EQ(apm->kNoError,
                  apm->set_num_channels(msg.num_input_channels(),
                                        msg.num_output_channels()));

        ASSERT_TRUE(msg.has_num_reverse_channels());
        ASSERT_EQ(apm->kNoError,
                  apm->set_num_reverse_channels(msg.num_reverse_channels()));

        samples_per_channel = msg.sample_rate() / 100;
        far_frame.sample_rate_hz_ = msg.sample_rate();
        far_frame.samples_per_channel_ = samples_per_channel;
        far_frame.num_channels_ = msg.num_reverse_channels();
        near_frame.sample_rate_hz_ = msg.sample_rate();
        near_frame.samples_per_channel_ = samples_per_channel;

        if (verbose) {
          printf("Init at frame: %d (primary), %d (reverse)\n",
                 primary_count, reverse_count);
          printf("  Sample rate: %d Hz\n", msg.sample_rate());
          printf("  Primary channels: %d (in), %d (out)\n",
                 msg.num_input_channels(),
                 msg.num_output_channels());
          printf("  Reverse channels: %d \n", msg.num_reverse_channels());
        }

      } else if (event_msg.type() == Event::REVERSE_STREAM) {
        ASSERT_TRUE(event_msg.has_reverse_stream());
        const ReverseStream msg = event_msg.reverse_stream();
        reverse_count++;

        ASSERT_TRUE(msg.has_data());
        ASSERT_EQ(sizeof(int16_t) * samples_per_channel *
            far_frame.num_channels_, msg.data().size());
        memcpy(far_frame.data_, msg.data().data(), msg.data().size());

        if (perf_testing) {
          t0 = TickTime::Now();
        }

        ASSERT_EQ(apm->kNoError,
                  apm->AnalyzeReverseStream(&far_frame));

        if (perf_testing) {
          t1 = TickTime::Now();
          TickInterval tick_diff = t1 - t0;
          acc_ticks += tick_diff;
          if (tick_diff.Microseconds() > max_time_reverse_us) {
            max_time_reverse_us = tick_diff.Microseconds();
          }
          if (tick_diff.Microseconds() < min_time_reverse_us) {
            min_time_reverse_us = tick_diff.Microseconds();
          }
        }

      } else if (event_msg.type() == Event::STREAM) {
        ASSERT_TRUE(event_msg.has_stream());
        const Stream msg = event_msg.stream();
        primary_count++;

        // ProcessStream could have changed this for the output frame.
        near_frame.num_channels_ = apm->num_input_channels();

        ASSERT_TRUE(msg.has_input_data());
        ASSERT_EQ(sizeof(int16_t) * samples_per_channel *
            near_frame.num_channels_, msg.input_data().size());
        memcpy(near_frame.data_,
               msg.input_data().data(),
               msg.input_data().size());

        near_read_bytes += msg.input_data().size();
        if (progress && primary_count % 100 == 0) {
          printf("%.0f%% complete\r",
                 (near_read_bytes * 100.0) / near_size_bytes);
          fflush(stdout);
        }

        if (perf_testing) {
          t0 = TickTime::Now();
        }

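        // Feed the analog level, stream delay and drift recorded in the dump
        // back to APM before processing this frame.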
        ASSERT_EQ(apm->kNoError,
                  apm->gain_control()->set_stream_analog_level(msg.level()));
        ASSERT_EQ(apm->kNoError,
                  apm->set_stream_delay_ms(msg.delay() + extra_delay_ms));
        ASSERT_EQ(apm->kNoError,
            apm->echo_cancellation()->set_stream_drift_samples(msg.drift()));

        int err = apm->ProcessStream(&near_frame);
        if (err == apm->kBadStreamParameterWarning) {
          printf("Bad parameter warning. %s\n", trace_stream.str().c_str());
        }
        ASSERT_TRUE(err == apm->kNoError ||
                    err == apm->kBadStreamParameterWarning);
        ASSERT_TRUE(near_frame.num_channels_ == apm->num_output_channels());

        capture_level = apm->gain_control()->stream_analog_level();

        stream_has_voice =
            static_cast<int8_t>(apm->voice_detection()->stream_has_voice());
        if (vad_out_file != NULL) {
          ASSERT_EQ(1u, fwrite(&stream_has_voice,
                               sizeof(stream_has_voice),
                               1,
                               vad_out_file));
        }

        if (ns_prob_file != NULL) {
          ns_speech_prob = apm->noise_suppression()->speech_probability();
          ASSERT_EQ(1u, fwrite(&ns_speech_prob,
                               sizeof(ns_speech_prob),
                               1,
                               ns_prob_file));
        }

        if (apm->gain_control()->mode() != GainControl::kAdaptiveAnalog) {
          ASSERT_EQ(msg.level(), capture_level);
        }

        if (perf_testing) {
          t1 = TickTime::Now();
          TickInterval tick_diff = t1 - t0;
          acc_ticks += tick_diff;
          if (tick_diff.Microseconds() > max_time_us) {
            max_time_us = tick_diff.Microseconds();
          }
          if (tick_diff.Microseconds() < min_time_us) {
            min_time_us = tick_diff.Microseconds();
          }
        }

        size_t size = samples_per_channel * near_frame.num_channels_;
        ASSERT_EQ(size, fwrite(near_frame.data_,
                               sizeof(int16_t),
                               size,
                               out_file));
      }
    }

    ASSERT_TRUE(feof(pb_file));

  } else {
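    // Legacy/simulation path: each iteration handles one 16-bit event code,
    // read from the event file or synthesized when simulating.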
    enum Events {
      kInitializeEvent,
      kRenderEvent,
      kCaptureEvent,
      kResetEventDeprecated
    };
    int16_t event = 0;
    while (simulating || feof(event_file) == 0) {
      std::ostringstream trace_stream;
      trace_stream << "Processed frames: " << reverse_count << " (reverse), "
                   << primary_count << " (primary)";
      SCOPED_TRACE(trace_stream.str());

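      // In simulation mode, alternate render and capture events (capture only
      // when no far-end file was given); otherwise read the next event code.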
      if (simulating) {
        if (far_file == NULL) {
          event = kCaptureEvent;
        } else {
          if (event == kRenderEvent) {
            event = kCaptureEvent;
          } else {
            event = kRenderEvent;
          }
        }
      } else {
        read_count = fread(&event, sizeof(event), 1, event_file);
        if (read_count != 1) {
          break;
        }
      }

      far_frame.sample_rate_hz_ = sample_rate_hz;
      far_frame.samples_per_channel_ = samples_per_channel;
      far_frame.num_channels_ = num_render_channels;
      near_frame.sample_rate_hz_ = sample_rate_hz;
      near_frame.samples_per_channel_ = samples_per_channel;

      if (event == kInitializeEvent || event == kResetEventDeprecated) {
        ASSERT_EQ(1u,
            fread(&sample_rate_hz, sizeof(sample_rate_hz), 1, event_file));
        samples_per_channel = sample_rate_hz / 100;

        ASSERT_EQ(1u,
            fread(&device_sample_rate_hz,
                  sizeof(device_sample_rate_hz),
                  1,
                  event_file));

        ASSERT_EQ(apm->kNoError,
                  apm->set_sample_rate_hz(sample_rate_hz));

        ASSERT_EQ(apm->kNoError,
                  apm->echo_cancellation()->set_device_sample_rate_hz(
                      device_sample_rate_hz));

        far_frame.sample_rate_hz_ = sample_rate_hz;
        far_frame.samples_per_channel_ = samples_per_channel;
        far_frame.num_channels_ = num_render_channels;
        near_frame.sample_rate_hz_ = sample_rate_hz;
        near_frame.samples_per_channel_ = samples_per_channel;

        if (verbose) {
          printf("Init at frame: %d (primary), %d (reverse)\n",
                 primary_count, reverse_count);
          printf("  Sample rate: %d Hz\n", sample_rate_hz);
        }

      } else if (event == kRenderEvent) {
        reverse_count++;

        size_t size = samples_per_channel * num_render_channels;
        read_count = fread(far_frame.data_,
                           sizeof(int16_t),
                           size,
                           far_file);

        if (simulating) {
          if (read_count != size) {
            // Read an equal amount from the near file to avoid errors due to
            // not reaching end-of-file.
            EXPECT_EQ(0, fseek(near_file, read_count * sizeof(int16_t),
                      SEEK_CUR));
            break; // This is expected.
          }
        } else {
          ASSERT_EQ(size, read_count);
        }

        if (perf_testing) {
          t0 = TickTime::Now();
        }

        ASSERT_EQ(apm->kNoError,
                  apm->AnalyzeReverseStream(&far_frame));

        if (perf_testing) {
          t1 = TickTime::Now();
          TickInterval tick_diff = t1 - t0;
          acc_ticks += tick_diff;
          if (tick_diff.Microseconds() > max_time_reverse_us) {
            max_time_reverse_us = tick_diff.Microseconds();
          }
          if (tick_diff.Microseconds() < min_time_reverse_us) {
            min_time_reverse_us = tick_diff.Microseconds();
          }
        }

      } else if (event == kCaptureEvent) {
        primary_count++;
        near_frame.num_channels_ = num_capture_input_channels;

        size_t size = samples_per_channel * num_capture_input_channels;
        read_count = fread(near_frame.data_,
                           sizeof(int16_t),
                           size,
                           near_file);

        near_read_bytes += read_count * sizeof(int16_t);
        if (progress && primary_count % 100 == 0) {
          printf("%.0f%% complete\r",
                 (near_read_bytes * 100.0) / near_size_bytes);
          fflush(stdout);
        }
        if (simulating) {
          if (read_count != size) {
            break; // This is expected.
          }

          delay_ms = 0;
          drift_samples = 0;
        } else {
          ASSERT_EQ(size, read_count);

          // TODO(ajm): sizeof(delay_ms) for current files?
          ASSERT_EQ(1u,
                    fread(&delay_ms, 2, 1, delay_file));
          ASSERT_EQ(1u,
                    fread(&drift_samples, sizeof(drift_samples), 1, drift_file));
        }

        if (perf_testing) {
          t0 = TickTime::Now();
        }

        // TODO(ajm): fake an analog gain while simulating.

        int capture_level_in = capture_level;
        ASSERT_EQ(apm->kNoError,
                  apm->gain_control()->set_stream_analog_level(capture_level));
        ASSERT_EQ(apm->kNoError,
                  apm->set_stream_delay_ms(delay_ms + extra_delay_ms));
        ASSERT_EQ(apm->kNoError,
            apm->echo_cancellation()->set_stream_drift_samples(drift_samples));

        int err = apm->ProcessStream(&near_frame);
        if (err == apm->kBadStreamParameterWarning) {
          printf("Bad parameter warning. %s\n", trace_stream.str().c_str());
        }
        ASSERT_TRUE(err == apm->kNoError ||
                    err == apm->kBadStreamParameterWarning);
        ASSERT_TRUE(near_frame.num_channels_ == apm->num_output_channels());

        capture_level = apm->gain_control()->stream_analog_level();

        stream_has_voice =
            static_cast<int8_t>(apm->voice_detection()->stream_has_voice());
        if (vad_out_file != NULL) {
          ASSERT_EQ(1u, fwrite(&stream_has_voice,
                               sizeof(stream_has_voice),
                               1,
                               vad_out_file));
        }

        if (ns_prob_file != NULL) {
          ns_speech_prob = apm->noise_suppression()->speech_probability();
          ASSERT_EQ(1u, fwrite(&ns_speech_prob,
                               sizeof(ns_speech_prob),
                               1,
                               ns_prob_file));
        }

        if (apm->gain_control()->mode() != GainControl::kAdaptiveAnalog) {
          ASSERT_EQ(capture_level_in, capture_level);
        }

        if (perf_testing) {
          t1 = TickTime::Now();
          TickInterval tick_diff = t1 - t0;
          acc_ticks += tick_diff;
          if (tick_diff.Microseconds() > max_time_us) {
            max_time_us = tick_diff.Microseconds();
          }
          if (tick_diff.Microseconds() < min_time_us) {
            min_time_us = tick_diff.Microseconds();
          }
        }

        size = samples_per_channel * near_frame.num_channels_;
        ASSERT_EQ(size, fwrite(near_frame.data_,
                               sizeof(int16_t),
                               size,
                               out_file));
      } else {
        FAIL() << "Event " << event << " is unrecognized";
      }
    }
  }
  printf("100%% complete\r");

  if (aecm_echo_path_out_file != NULL) {
    const size_t path_size =
        apm->echo_control_mobile()->echo_path_size_bytes();
    scoped_array<char> echo_path(new char[path_size]);
    apm->echo_control_mobile()->GetEchoPath(echo_path.get(), path_size);
    ASSERT_EQ(path_size, fwrite(echo_path.get(),
                                sizeof(char),
                                path_size,
                                aecm_echo_path_out_file));
    fclose(aecm_echo_path_out_file);
    aecm_echo_path_out_file = NULL;
  }

  if (verbose) {
    printf("\nProcessed frames: %d (primary), %d (reverse)\n",
           primary_count, reverse_count);

    if (apm->level_estimator()->is_enabled()) {
      printf("\n--Level metrics--\n");
      printf("RMS: %d dBFS\n", -apm->level_estimator()->RMS());
    }
    if (apm->echo_cancellation()->are_metrics_enabled()) {
      EchoCancellation::Metrics metrics;
      apm->echo_cancellation()->GetMetrics(&metrics);
      printf("\n--Echo metrics--\n");
      printf("(avg, max, min)\n");
      printf("ERL: ");
      PrintStat(metrics.echo_return_loss);
      printf("ERLE: ");
      PrintStat(metrics.echo_return_loss_enhancement);
      printf("ANLP: ");
      PrintStat(metrics.a_nlp);
    }
    if (apm->echo_cancellation()->is_delay_logging_enabled()) {
      int median = 0;
      int std = 0;
      apm->echo_cancellation()->GetDelayMetrics(&median, &std);
      printf("\n--Delay metrics--\n");
      printf("Median: %3d\n", median);
      printf("Standard deviation: %3d\n", std);
    }
  }

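  // Verify that the input files were fully consumed; leftover data means the
  // run stopped early.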
  if (!pb_file) {
    int8_t temp_int8;
    if (far_file) {
      read_count = fread(&temp_int8, sizeof(temp_int8), 1, far_file);
      EXPECT_NE(0, feof(far_file)) << "Far-end file not fully processed";
    }

    read_count = fread(&temp_int8, sizeof(temp_int8), 1, near_file);
    EXPECT_NE(0, feof(near_file)) << "Near-end file not fully processed";

    if (!simulating) {
      read_count = fread(&temp_int8, sizeof(temp_int8), 1, event_file);
      EXPECT_NE(0, feof(event_file)) << "Event file not fully processed";
      read_count = fread(&temp_int8, sizeof(temp_int8), 1, delay_file);
      EXPECT_NE(0, feof(delay_file)) << "Delay file not fully processed";
      read_count = fread(&temp_int8, sizeof(temp_int8), 1, drift_file);
      EXPECT_NE(0, feof(drift_file)) << "Drift file not fully processed";
    }
  }

  if (perf_testing) {
    if (primary_count > 0) {
      WebRtc_Word64 exec_time = acc_ticks.Milliseconds();
      printf("\nTotal time: %.3f s, file time: %.2f s\n",
             exec_time * 0.001, primary_count * 0.01);
      printf("Time per frame: %.3f ms (average), %.3f ms (max),"
             " %.3f ms (min)\n",
             (exec_time * 1.0) / primary_count,
             (max_time_us + max_time_reverse_us) / 1000.0,
             (min_time_us + min_time_reverse_us) / 1000.0);
    } else {
      printf("Warning: no capture frames\n");
    }
  }

  AudioProcessing::Destroy(apm);
  apm = NULL;
}
}  // namespace

int main(int argc, char* argv[]) {
  void_main(argc, argv);

  // Optional, but removes memory leak noise from Valgrind.
  google::protobuf::ShutdownProtobufLibrary();
  return 0;
}