blob: e68d1e9128ea7d2187432b882451bf8be5a287d5 [file] [log] [blame]
/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
10
andrew@webrtc.org81865342012-10-27 00:28:27 +000011#include <math.h>
niklase@google.com470e71d2011-07-07 08:21:25 +000012#include <stdio.h>
13#include <string.h>
14#ifdef WEBRTC_ANDROID
15#include <sys/stat.h>
16#endif
17
andrew@webrtc.org81865342012-10-27 00:28:27 +000018#include <algorithm>
19
niklase@google.com470e71d2011-07-07 08:21:25 +000020#include "gtest/gtest.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000021
andrew@webrtc.orgb43502e2012-11-26 23:57:38 +000022#include "audio_processing.h"
23#include "cpu_features_wrapper.h"
24#include "module_common_types.h"
25#include "scoped_ptr.h"
26#include "tick_util.h"
leozwang@webrtc.orga3736342012-03-16 21:36:00 +000027#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
leozwang@webrtc.org534e4952012-10-22 21:21:52 +000028#include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h"
leozwang@webrtc.orga3736342012-03-16 21:36:00 +000029#else
ajm@google.com808e0e02011-08-03 21:08:51 +000030#include "webrtc/audio_processing/debug.pb.h"
leozwang@webrtc.orga3736342012-03-16 21:36:00 +000031#endif
niklase@google.com470e71d2011-07-07 08:21:25 +000032
33using webrtc::AudioFrame;
niklase@google.com470e71d2011-07-07 08:21:25 +000034using webrtc::AudioProcessing;
andrew@webrtc.org94c74132011-09-19 15:17:57 +000035using webrtc::EchoCancellation;
niklase@google.com470e71d2011-07-07 08:21:25 +000036using webrtc::GainControl;
37using webrtc::NoiseSuppression;
andrew@webrtc.org3119ecf2011-11-01 17:00:18 +000038using webrtc::scoped_array;
ajm@google.com808e0e02011-08-03 21:08:51 +000039using webrtc::TickInterval;
40using webrtc::TickTime;
andrew@webrtc.org89752612012-10-12 16:41:45 +000041using webrtc::VoiceDetection;
andrew@webrtc.org3119ecf2011-11-01 17:00:18 +000042
ajm@google.com808e0e02011-08-03 21:08:51 +000043using webrtc::audioproc::Event;
44using webrtc::audioproc::Init;
45using webrtc::audioproc::ReverseStream;
46using webrtc::audioproc::Stream;
47
48namespace {
49// Returns true on success, false on error or end-of-file.
50bool ReadMessageFromFile(FILE* file,
51 ::google::protobuf::MessageLite* msg) {
52 // The "wire format" for the size is little-endian.
53 // Assume process_test is running on a little-endian machine.
andrew@webrtc.orgcb181212011-10-26 00:27:17 +000054 int32_t size = 0;
ajm@google.com808e0e02011-08-03 21:08:51 +000055 if (fread(&size, sizeof(int32_t), 1, file) != 1) {
56 return false;
57 }
58 if (size <= 0) {
59 return false;
60 }
andrew@webrtc.org3119ecf2011-11-01 17:00:18 +000061 const size_t usize = static_cast<size_t>(size);
ajm@google.com808e0e02011-08-03 21:08:51 +000062
andrew@webrtc.org3119ecf2011-11-01 17:00:18 +000063 scoped_array<char> array(new char[usize]);
64 if (fread(array.get(), sizeof(char), usize, file) != usize) {
ajm@google.com808e0e02011-08-03 21:08:51 +000065 return false;
66 }
67
68 msg->Clear();
andrew@webrtc.org3119ecf2011-11-01 17:00:18 +000069 return msg->ParseFromArray(array.get(), usize);
ajm@google.com808e0e02011-08-03 21:08:51 +000070}
niklase@google.com470e71d2011-07-07 08:21:25 +000071
andrew@webrtc.org94c74132011-09-19 15:17:57 +000072void PrintStat(const AudioProcessing::Statistic& stat) {
73 printf("%d, %d, %d\n", stat.average,
74 stat.maximum,
75 stat.minimum);
76}
77
niklase@google.com470e71d2011-07-07 08:21:25 +000078void usage() {
79 printf(
ajm@google.com808e0e02011-08-03 21:08:51 +000080 "Usage: process_test [options] [-pb PROTOBUF_FILE]\n"
81 " [-ir REVERSE_FILE] [-i PRIMARY_FILE] [-o OUT_FILE]\n");
niklase@google.com470e71d2011-07-07 08:21:25 +000082 printf(
83 "process_test is a test application for AudioProcessing.\n\n"
ajm@google.com808e0e02011-08-03 21:08:51 +000084 "When a protobuf debug file is available, specify it with -pb.\n"
85 "Alternately, when -ir or -i is used, the specified files will be\n"
86 "processed directly in a simulation mode. Otherwise the full set of\n"
87 "legacy test files is expected to be present in the working directory.\n");
niklase@google.com470e71d2011-07-07 08:21:25 +000088 printf("\n");
89 printf("Options\n");
ajm@google.com808e0e02011-08-03 21:08:51 +000090 printf("General configuration (only used for the simulation mode):\n");
niklase@google.com470e71d2011-07-07 08:21:25 +000091 printf(" -fs SAMPLE_RATE_HZ\n");
92 printf(" -ch CHANNELS_IN CHANNELS_OUT\n");
93 printf(" -rch REVERSE_CHANNELS\n");
94 printf("\n");
95 printf("Component configuration:\n");
96 printf(
97 "All components are disabled by default. Each block below begins with a\n"
98 "flag to enable the component with default settings. The subsequent flags\n"
99 "in the block are used to provide configuration settings.\n");
100 printf("\n -aec Echo cancellation\n");
101 printf(" --drift_compensation\n");
102 printf(" --no_drift_compensation\n");
andrew@webrtc.org94c74132011-09-19 15:17:57 +0000103 printf(" --no_echo_metrics\n");
bjornv@google.com1ba3dbe2011-10-03 08:18:10 +0000104 printf(" --no_delay_logging\n");
niklase@google.com470e71d2011-07-07 08:21:25 +0000105 printf("\n -aecm Echo control mobile\n");
bjornv@google.com238a0222011-07-15 14:51:52 +0000106 printf(" --aecm_echo_path_in_file FILE\n");
107 printf(" --aecm_echo_path_out_file FILE\n");
niklase@google.com470e71d2011-07-07 08:21:25 +0000108 printf("\n -agc Gain control\n");
109 printf(" --analog\n");
110 printf(" --adaptive_digital\n");
111 printf(" --fixed_digital\n");
112 printf(" --target_level LEVEL\n");
113 printf(" --compression_gain GAIN\n");
114 printf(" --limiter\n");
115 printf(" --no_limiter\n");
116 printf("\n -hpf High pass filter\n");
117 printf("\n -ns Noise suppression\n");
118 printf(" --ns_low\n");
119 printf(" --ns_moderate\n");
120 printf(" --ns_high\n");
121 printf(" --ns_very_high\n");
bjornv@webrtc.org08329f42012-07-12 21:00:43 +0000122 printf(" --ns_prob_file FILE\n");
niklase@google.com470e71d2011-07-07 08:21:25 +0000123 printf("\n -vad Voice activity detection\n");
ajm@google.com808e0e02011-08-03 21:08:51 +0000124 printf(" --vad_out_file FILE\n");
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000125 printf("\n Level metrics (enabled by default)\n");
126 printf(" --no_level_metrics\n");
niklase@google.com470e71d2011-07-07 08:21:25 +0000127 printf("\n");
128 printf("Modifiers:\n");
andrew@webrtc.orgcb181212011-10-26 00:27:17 +0000129 printf(" --noasm Disable SSE optimization.\n");
andrew@webrtc.org4b13fc92011-11-09 19:27:11 +0000130 printf(" --delay DELAY Add DELAY ms to input value.\n");
andrew@webrtc.orgcb181212011-10-26 00:27:17 +0000131 printf(" --perf Measure performance.\n");
132 printf(" --quiet Suppress text output.\n");
133 printf(" --no_progress Suppress progress.\n");
134 printf(" --debug_file FILE Dump a debug recording.\n");
niklase@google.com470e71d2011-07-07 08:21:25 +0000135}
136
andrew@webrtc.org81865342012-10-27 00:28:27 +0000137static double MicLevel2Gain(int level) {
138 return pow(10.0, ((level - 127.0) / 128.0 * 80.) / 20.);
139}
140
141static void SimulateMic(int mic_level, AudioFrame* frame) {
142 mic_level = std::min(std::max(mic_level, 0), 255);
143 double mic_gain = MicLevel2Gain(mic_level);
144 int num_samples = frame->samples_per_channel_ * frame->num_channels_;
145 double v;
146 for (int n = 0; n < num_samples; n++) {
147 v = floor(frame->data_[n] * mic_gain + 0.5);
148 v = std::max(std::min(32767., v), -32768.);
149 frame->data_[n] = static_cast<int16_t>(v);
150 }
151}
152
niklase@google.com470e71d2011-07-07 08:21:25 +0000153// void function for gtest.
154void void_main(int argc, char* argv[]) {
155 if (argc > 1 && strcmp(argv[1], "--help") == 0) {
156 usage();
157 return;
158 }
159
160 if (argc < 2) {
161 printf("Did you mean to run without arguments?\n");
162 printf("Try `process_test --help' for more information.\n\n");
163 }
164
165 AudioProcessing* apm = AudioProcessing::Create(0);
166 ASSERT_TRUE(apm != NULL);
167
ajm@google.com808e0e02011-08-03 21:08:51 +0000168 const char* pb_filename = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +0000169 const char* far_filename = NULL;
170 const char* near_filename = NULL;
171 const char* out_filename = NULL;
172 const char* vad_out_filename = NULL;
bjornv@webrtc.org08329f42012-07-12 21:00:43 +0000173 const char* ns_prob_filename = NULL;
bjornv@google.comc4b939c2011-07-13 08:09:56 +0000174 const char* aecm_echo_path_in_filename = NULL;
175 const char* aecm_echo_path_out_filename = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +0000176
177 int32_t sample_rate_hz = 16000;
178 int32_t device_sample_rate_hz = 16000;
179
180 int num_capture_input_channels = 1;
181 int num_capture_output_channels = 1;
182 int num_render_channels = 1;
183
184 int samples_per_channel = sample_rate_hz / 100;
185
186 bool simulating = false;
187 bool perf_testing = false;
188 bool verbose = true;
189 bool progress = true;
andrew@webrtc.org4b13fc92011-11-09 19:27:11 +0000190 int extra_delay_ms = 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000191 //bool interleaved = true;
192
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000193 ASSERT_EQ(apm->kNoError, apm->level_estimator()->Enable(true));
niklase@google.com470e71d2011-07-07 08:21:25 +0000194 for (int i = 1; i < argc; i++) {
andrew@webrtc.org94c74132011-09-19 15:17:57 +0000195 if (strcmp(argv[i], "-pb") == 0) {
ajm@google.com808e0e02011-08-03 21:08:51 +0000196 i++;
197 ASSERT_LT(i, argc) << "Specify protobuf filename after -pb";
198 pb_filename = argv[i];
199
200 } else if (strcmp(argv[i], "-ir") == 0) {
niklase@google.com470e71d2011-07-07 08:21:25 +0000201 i++;
202 ASSERT_LT(i, argc) << "Specify filename after -ir";
203 far_filename = argv[i];
204 simulating = true;
205
206 } else if (strcmp(argv[i], "-i") == 0) {
207 i++;
208 ASSERT_LT(i, argc) << "Specify filename after -i";
209 near_filename = argv[i];
210 simulating = true;
211
212 } else if (strcmp(argv[i], "-o") == 0) {
213 i++;
214 ASSERT_LT(i, argc) << "Specify filename after -o";
215 out_filename = argv[i];
216
217 } else if (strcmp(argv[i], "-fs") == 0) {
218 i++;
219 ASSERT_LT(i, argc) << "Specify sample rate after -fs";
220 ASSERT_EQ(1, sscanf(argv[i], "%d", &sample_rate_hz));
221 samples_per_channel = sample_rate_hz / 100;
222
223 ASSERT_EQ(apm->kNoError,
224 apm->set_sample_rate_hz(sample_rate_hz));
225
226 } else if (strcmp(argv[i], "-ch") == 0) {
227 i++;
228 ASSERT_LT(i + 1, argc) << "Specify number of channels after -ch";
229 ASSERT_EQ(1, sscanf(argv[i], "%d", &num_capture_input_channels));
230 i++;
231 ASSERT_EQ(1, sscanf(argv[i], "%d", &num_capture_output_channels));
232
233 ASSERT_EQ(apm->kNoError,
234 apm->set_num_channels(num_capture_input_channels,
235 num_capture_output_channels));
236
237 } else if (strcmp(argv[i], "-rch") == 0) {
238 i++;
239 ASSERT_LT(i, argc) << "Specify number of channels after -rch";
240 ASSERT_EQ(1, sscanf(argv[i], "%d", &num_render_channels));
241
242 ASSERT_EQ(apm->kNoError,
243 apm->set_num_reverse_channels(num_render_channels));
244
245 } else if (strcmp(argv[i], "-aec") == 0) {
246 ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
bjornv@google.com1ba3dbe2011-10-03 08:18:10 +0000247 ASSERT_EQ(apm->kNoError,
248 apm->echo_cancellation()->enable_metrics(true));
249 ASSERT_EQ(apm->kNoError,
250 apm->echo_cancellation()->enable_delay_logging(true));
niklase@google.com470e71d2011-07-07 08:21:25 +0000251
niklase@google.com470e71d2011-07-07 08:21:25 +0000252 } else if (strcmp(argv[i], "--drift_compensation") == 0) {
253 ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
254 // TODO(ajm): this is enabled in the VQE test app by default. Investigate
255 // why it can give better performance despite passing zeros.
256 ASSERT_EQ(apm->kNoError,
257 apm->echo_cancellation()->enable_drift_compensation(true));
258 } else if (strcmp(argv[i], "--no_drift_compensation") == 0) {
259 ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
260 ASSERT_EQ(apm->kNoError,
261 apm->echo_cancellation()->enable_drift_compensation(false));
262
andrew@webrtc.org94c74132011-09-19 15:17:57 +0000263 } else if (strcmp(argv[i], "--no_echo_metrics") == 0) {
264 ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
265 ASSERT_EQ(apm->kNoError,
266 apm->echo_cancellation()->enable_metrics(false));
267
bjornv@google.com1ba3dbe2011-10-03 08:18:10 +0000268 } else if (strcmp(argv[i], "--no_delay_logging") == 0) {
269 ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
270 ASSERT_EQ(apm->kNoError,
271 apm->echo_cancellation()->enable_delay_logging(false));
272
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000273 } else if (strcmp(argv[i], "--no_level_metrics") == 0) {
274 ASSERT_EQ(apm->kNoError, apm->level_estimator()->Enable(false));
275
niklase@google.com470e71d2011-07-07 08:21:25 +0000276 } else if (strcmp(argv[i], "-aecm") == 0) {
277 ASSERT_EQ(apm->kNoError, apm->echo_control_mobile()->Enable(true));
278
bjornv@google.comc4b939c2011-07-13 08:09:56 +0000279 } else if (strcmp(argv[i], "--aecm_echo_path_in_file") == 0) {
280 i++;
281 ASSERT_LT(i, argc) << "Specify filename after --aecm_echo_path_in_file";
282 aecm_echo_path_in_filename = argv[i];
283
284 } else if (strcmp(argv[i], "--aecm_echo_path_out_file") == 0) {
285 i++;
286 ASSERT_LT(i, argc) << "Specify filename after --aecm_echo_path_out_file";
287 aecm_echo_path_out_filename = argv[i];
288
niklase@google.com470e71d2011-07-07 08:21:25 +0000289 } else if (strcmp(argv[i], "-agc") == 0) {
290 ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
291
292 } else if (strcmp(argv[i], "--analog") == 0) {
293 ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
294 ASSERT_EQ(apm->kNoError,
295 apm->gain_control()->set_mode(GainControl::kAdaptiveAnalog));
296
297 } else if (strcmp(argv[i], "--adaptive_digital") == 0) {
298 ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
299 ASSERT_EQ(apm->kNoError,
300 apm->gain_control()->set_mode(GainControl::kAdaptiveDigital));
301
302 } else if (strcmp(argv[i], "--fixed_digital") == 0) {
303 ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
304 ASSERT_EQ(apm->kNoError,
305 apm->gain_control()->set_mode(GainControl::kFixedDigital));
306
307 } else if (strcmp(argv[i], "--target_level") == 0) {
308 i++;
309 int level;
310 ASSERT_EQ(1, sscanf(argv[i], "%d", &level));
311
312 ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
313 ASSERT_EQ(apm->kNoError,
314 apm->gain_control()->set_target_level_dbfs(level));
315
316 } else if (strcmp(argv[i], "--compression_gain") == 0) {
317 i++;
318 int gain;
319 ASSERT_EQ(1, sscanf(argv[i], "%d", &gain));
320
321 ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
322 ASSERT_EQ(apm->kNoError,
323 apm->gain_control()->set_compression_gain_db(gain));
324
325 } else if (strcmp(argv[i], "--limiter") == 0) {
326 ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
327 ASSERT_EQ(apm->kNoError,
328 apm->gain_control()->enable_limiter(true));
329
330 } else if (strcmp(argv[i], "--no_limiter") == 0) {
331 ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
332 ASSERT_EQ(apm->kNoError,
333 apm->gain_control()->enable_limiter(false));
334
335 } else if (strcmp(argv[i], "-hpf") == 0) {
336 ASSERT_EQ(apm->kNoError, apm->high_pass_filter()->Enable(true));
337
338 } else if (strcmp(argv[i], "-ns") == 0) {
339 ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
340
341 } else if (strcmp(argv[i], "--ns_low") == 0) {
342 ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
343 ASSERT_EQ(apm->kNoError,
344 apm->noise_suppression()->set_level(NoiseSuppression::kLow));
345
346 } else if (strcmp(argv[i], "--ns_moderate") == 0) {
347 ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
348 ASSERT_EQ(apm->kNoError,
349 apm->noise_suppression()->set_level(NoiseSuppression::kModerate));
350
351 } else if (strcmp(argv[i], "--ns_high") == 0) {
352 ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
353 ASSERT_EQ(apm->kNoError,
354 apm->noise_suppression()->set_level(NoiseSuppression::kHigh));
355
356 } else if (strcmp(argv[i], "--ns_very_high") == 0) {
357 ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
358 ASSERT_EQ(apm->kNoError,
359 apm->noise_suppression()->set_level(NoiseSuppression::kVeryHigh));
360
bjornv@webrtc.org08329f42012-07-12 21:00:43 +0000361 } else if (strcmp(argv[i], "--ns_prob_file") == 0) {
362 i++;
363 ASSERT_LT(i, argc) << "Specify filename after --ns_prob_file";
364 ns_prob_filename = argv[i];
365
niklase@google.com470e71d2011-07-07 08:21:25 +0000366 } else if (strcmp(argv[i], "-vad") == 0) {
367 ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
368
andrew@webrtc.org89752612012-10-12 16:41:45 +0000369 } else if (strcmp(argv[i], "--vad_very_low") == 0) {
370 ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
371 ASSERT_EQ(apm->kNoError,
372 apm->voice_detection()->set_likelihood(
373 VoiceDetection::kVeryLowLikelihood));
374
375 } else if (strcmp(argv[i], "--vad_low") == 0) {
376 ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
377 ASSERT_EQ(apm->kNoError,
378 apm->voice_detection()->set_likelihood(
379 VoiceDetection::kLowLikelihood));
380
381 } else if (strcmp(argv[i], "--vad_moderate") == 0) {
382 ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
383 ASSERT_EQ(apm->kNoError,
384 apm->voice_detection()->set_likelihood(
385 VoiceDetection::kModerateLikelihood));
386
387 } else if (strcmp(argv[i], "--vad_high") == 0) {
388 ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
389 ASSERT_EQ(apm->kNoError,
390 apm->voice_detection()->set_likelihood(
391 VoiceDetection::kHighLikelihood));
392
niklase@google.com470e71d2011-07-07 08:21:25 +0000393 } else if (strcmp(argv[i], "--vad_out_file") == 0) {
394 i++;
395 ASSERT_LT(i, argc) << "Specify filename after --vad_out_file";
396 vad_out_filename = argv[i];
397
andrew@webrtc.org94c74132011-09-19 15:17:57 +0000398 } else if (strcmp(argv[i], "--noasm") == 0) {
399 WebRtc_GetCPUInfo = WebRtc_GetCPUInfoNoASM;
400 // We need to reinitialize here if components have already been enabled.
401 ASSERT_EQ(apm->kNoError, apm->Initialize());
402
andrew@webrtc.org4b13fc92011-11-09 19:27:11 +0000403 } else if (strcmp(argv[i], "--delay") == 0) {
404 i++;
405 ASSERT_EQ(1, sscanf(argv[i], "%d", &extra_delay_ms));
406
niklase@google.com470e71d2011-07-07 08:21:25 +0000407 } else if (strcmp(argv[i], "--perf") == 0) {
408 perf_testing = true;
409
410 } else if (strcmp(argv[i], "--quiet") == 0) {
411 verbose = false;
412 progress = false;
413
414 } else if (strcmp(argv[i], "--no_progress") == 0) {
415 progress = false;
416
andrew@webrtc.orgcb181212011-10-26 00:27:17 +0000417 } else if (strcmp(argv[i], "--debug_file") == 0) {
ajm@google.com808e0e02011-08-03 21:08:51 +0000418 i++;
andrew@webrtc.orgcb181212011-10-26 00:27:17 +0000419 ASSERT_LT(i, argc) << "Specify filename after --debug_file";
ajm@google.com808e0e02011-08-03 21:08:51 +0000420 ASSERT_EQ(apm->kNoError, apm->StartDebugRecording(argv[i]));
niklase@google.com470e71d2011-07-07 08:21:25 +0000421 } else {
422 FAIL() << "Unrecognized argument " << argv[i];
423 }
424 }
ajm@google.com808e0e02011-08-03 21:08:51 +0000425 // If we're reading a protobuf file, ensure a simulation hasn't also
426 // been requested (which makes no sense...)
427 ASSERT_FALSE(pb_filename && simulating);
niklase@google.com470e71d2011-07-07 08:21:25 +0000428
429 if (verbose) {
430 printf("Sample rate: %d Hz\n", sample_rate_hz);
431 printf("Primary channels: %d (in), %d (out)\n",
432 num_capture_input_channels,
433 num_capture_output_channels);
434 printf("Reverse channels: %d \n", num_render_channels);
435 }
436
437 const char far_file_default[] = "apm_far.pcm";
438 const char near_file_default[] = "apm_near.pcm";
439 const char out_file_default[] = "out.pcm";
440 const char event_filename[] = "apm_event.dat";
441 const char delay_filename[] = "apm_delay.dat";
442 const char drift_filename[] = "apm_drift.dat";
443 const char vad_file_default[] = "vad_out.dat";
bjornv@webrtc.org08329f42012-07-12 21:00:43 +0000444 const char ns_prob_file_default[] = "ns_prob.dat";
niklase@google.com470e71d2011-07-07 08:21:25 +0000445
446 if (!simulating) {
447 far_filename = far_file_default;
448 near_filename = near_file_default;
449 }
450
ajm@google.com808e0e02011-08-03 21:08:51 +0000451 if (!out_filename) {
niklase@google.com470e71d2011-07-07 08:21:25 +0000452 out_filename = out_file_default;
453 }
454
ajm@google.com808e0e02011-08-03 21:08:51 +0000455 if (!vad_out_filename) {
niklase@google.com470e71d2011-07-07 08:21:25 +0000456 vad_out_filename = vad_file_default;
457 }
458
bjornv@webrtc.org08329f42012-07-12 21:00:43 +0000459 if (!ns_prob_filename) {
460 ns_prob_filename = ns_prob_file_default;
461 }
462
ajm@google.com808e0e02011-08-03 21:08:51 +0000463 FILE* pb_file = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +0000464 FILE* far_file = NULL;
465 FILE* near_file = NULL;
466 FILE* out_file = NULL;
467 FILE* event_file = NULL;
468 FILE* delay_file = NULL;
469 FILE* drift_file = NULL;
470 FILE* vad_out_file = NULL;
bjornv@webrtc.org08329f42012-07-12 21:00:43 +0000471 FILE* ns_prob_file = NULL;
bjornv@google.comc4b939c2011-07-13 08:09:56 +0000472 FILE* aecm_echo_path_in_file = NULL;
473 FILE* aecm_echo_path_out_file = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +0000474
ajm@google.com808e0e02011-08-03 21:08:51 +0000475 if (pb_filename) {
476 pb_file = fopen(pb_filename, "rb");
477 ASSERT_TRUE(NULL != pb_file) << "Unable to open protobuf file "
478 << pb_filename;
479 } else {
480 if (far_filename) {
481 far_file = fopen(far_filename, "rb");
482 ASSERT_TRUE(NULL != far_file) << "Unable to open far-end audio file "
483 << far_filename;
484 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000485
ajm@google.com808e0e02011-08-03 21:08:51 +0000486 near_file = fopen(near_filename, "rb");
487 ASSERT_TRUE(NULL != near_file) << "Unable to open near-end audio file "
488 << near_filename;
489 if (!simulating) {
490 event_file = fopen(event_filename, "rb");
491 ASSERT_TRUE(NULL != event_file) << "Unable to open event file "
492 << event_filename;
493
494 delay_file = fopen(delay_filename, "rb");
495 ASSERT_TRUE(NULL != delay_file) << "Unable to open buffer file "
496 << delay_filename;
497
498 drift_file = fopen(drift_filename, "rb");
499 ASSERT_TRUE(NULL != drift_file) << "Unable to open drift file "
500 << drift_filename;
501 }
502 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000503
504 out_file = fopen(out_filename, "wb");
505 ASSERT_TRUE(NULL != out_file) << "Unable to open output audio file "
506 << out_filename;
507
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000508 int near_size_bytes = 0;
ajm@google.com808e0e02011-08-03 21:08:51 +0000509 if (pb_file) {
510 struct stat st;
511 stat(pb_filename, &st);
512 // Crude estimate, but should be good enough.
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000513 near_size_bytes = st.st_size / 3;
ajm@google.com808e0e02011-08-03 21:08:51 +0000514 } else {
515 struct stat st;
516 stat(near_filename, &st);
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000517 near_size_bytes = st.st_size;
niklase@google.com470e71d2011-07-07 08:21:25 +0000518 }
519
520 if (apm->voice_detection()->is_enabled()) {
521 vad_out_file = fopen(vad_out_filename, "wb");
522 ASSERT_TRUE(NULL != vad_out_file) << "Unable to open VAD output file "
523 << vad_out_file;
524 }
525
bjornv@webrtc.org08329f42012-07-12 21:00:43 +0000526 if (apm->noise_suppression()->is_enabled()) {
527 ns_prob_file = fopen(ns_prob_filename, "wb");
528 ASSERT_TRUE(NULL != ns_prob_file) << "Unable to open NS output file "
529 << ns_prob_file;
530 }
531
bjornv@google.comc4b939c2011-07-13 08:09:56 +0000532 if (aecm_echo_path_in_filename != NULL) {
533 aecm_echo_path_in_file = fopen(aecm_echo_path_in_filename, "rb");
534 ASSERT_TRUE(NULL != aecm_echo_path_in_file) << "Unable to open file "
535 << aecm_echo_path_in_filename;
536
ajm@google.com22e65152011-07-18 18:03:01 +0000537 const size_t path_size =
538 apm->echo_control_mobile()->echo_path_size_bytes();
andrew@webrtc.org3119ecf2011-11-01 17:00:18 +0000539 scoped_array<char> echo_path(new char[path_size]);
540 ASSERT_EQ(path_size, fread(echo_path.get(),
541 sizeof(char),
bjornv@google.comc4b939c2011-07-13 08:09:56 +0000542 path_size,
543 aecm_echo_path_in_file));
544 EXPECT_EQ(apm->kNoError,
andrew@webrtc.org3119ecf2011-11-01 17:00:18 +0000545 apm->echo_control_mobile()->SetEchoPath(echo_path.get(),
546 path_size));
bjornv@google.comc4b939c2011-07-13 08:09:56 +0000547 fclose(aecm_echo_path_in_file);
548 aecm_echo_path_in_file = NULL;
549 }
550
551 if (aecm_echo_path_out_filename != NULL) {
552 aecm_echo_path_out_file = fopen(aecm_echo_path_out_filename, "wb");
553 ASSERT_TRUE(NULL != aecm_echo_path_out_file) << "Unable to open file "
554 << aecm_echo_path_out_filename;
bjornv@google.comc4b939c2011-07-13 08:09:56 +0000555 }
556
niklase@google.com470e71d2011-07-07 08:21:25 +0000557 size_t read_count = 0;
558 int reverse_count = 0;
559 int primary_count = 0;
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000560 int near_read_bytes = 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000561 TickInterval acc_ticks;
562
563 AudioFrame far_frame;
niklase@google.com470e71d2011-07-07 08:21:25 +0000564 AudioFrame near_frame;
niklase@google.com470e71d2011-07-07 08:21:25 +0000565
566 int delay_ms = 0;
567 int drift_samples = 0;
568 int capture_level = 127;
569 int8_t stream_has_voice = 0;
bjornv@webrtc.org08329f42012-07-12 21:00:43 +0000570 float ns_speech_prob = 0.0f;
niklase@google.com470e71d2011-07-07 08:21:25 +0000571
572 TickTime t0 = TickTime::Now();
573 TickTime t1 = t0;
574 WebRtc_Word64 max_time_us = 0;
575 WebRtc_Word64 max_time_reverse_us = 0;
576 WebRtc_Word64 min_time_us = 1e6;
577 WebRtc_Word64 min_time_reverse_us = 1e6;
578
ajm@google.com808e0e02011-08-03 21:08:51 +0000579 // TODO(ajm): Ideally we would refactor this block into separate functions,
580 // but for now we want to share the variables.
581 if (pb_file) {
582 Event event_msg;
583 while (ReadMessageFromFile(pb_file, &event_msg)) {
584 std::ostringstream trace_stream;
585 trace_stream << "Processed frames: " << reverse_count << " (reverse), "
586 << primary_count << " (primary)";
587 SCOPED_TRACE(trace_stream.str());
niklase@google.com470e71d2011-07-07 08:21:25 +0000588
ajm@google.com808e0e02011-08-03 21:08:51 +0000589 if (event_msg.type() == Event::INIT) {
590 ASSERT_TRUE(event_msg.has_init());
591 const Init msg = event_msg.init();
niklase@google.com470e71d2011-07-07 08:21:25 +0000592
ajm@google.com808e0e02011-08-03 21:08:51 +0000593 ASSERT_TRUE(msg.has_sample_rate());
594 ASSERT_EQ(apm->kNoError,
595 apm->set_sample_rate_hz(msg.sample_rate()));
596
597 ASSERT_TRUE(msg.has_device_sample_rate());
598 ASSERT_EQ(apm->kNoError,
599 apm->echo_cancellation()->set_device_sample_rate_hz(
600 msg.device_sample_rate()));
601
602 ASSERT_TRUE(msg.has_num_input_channels());
603 ASSERT_TRUE(msg.has_num_output_channels());
604 ASSERT_EQ(apm->kNoError,
605 apm->set_num_channels(msg.num_input_channels(),
606 msg.num_output_channels()));
607
608 ASSERT_TRUE(msg.has_num_reverse_channels());
609 ASSERT_EQ(apm->kNoError,
610 apm->set_num_reverse_channels(msg.num_reverse_channels()));
611
612 samples_per_channel = msg.sample_rate() / 100;
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000613 far_frame.sample_rate_hz_ = msg.sample_rate();
614 far_frame.samples_per_channel_ = samples_per_channel;
615 far_frame.num_channels_ = msg.num_reverse_channels();
616 near_frame.sample_rate_hz_ = msg.sample_rate();
617 near_frame.samples_per_channel_ = samples_per_channel;
ajm@google.com808e0e02011-08-03 21:08:51 +0000618
619 if (verbose) {
620 printf("Init at frame: %d (primary), %d (reverse)\n",
621 primary_count, reverse_count);
andrew@webrtc.orgba028a32011-11-23 20:37:12 +0000622 printf(" Sample rate: %d Hz\n", msg.sample_rate());
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000623 printf(" Primary channels: %d (in), %d (out)\n",
624 msg.num_input_channels(),
625 msg.num_output_channels());
626 printf(" Reverse channels: %d \n", msg.num_reverse_channels());
ajm@google.com808e0e02011-08-03 21:08:51 +0000627 }
628
629 } else if (event_msg.type() == Event::REVERSE_STREAM) {
630 ASSERT_TRUE(event_msg.has_reverse_stream());
631 const ReverseStream msg = event_msg.reverse_stream();
632 reverse_count++;
633
634 ASSERT_TRUE(msg.has_data());
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000635 ASSERT_EQ(sizeof(int16_t) * samples_per_channel *
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000636 far_frame.num_channels_, msg.data().size());
637 memcpy(far_frame.data_, msg.data().data(), msg.data().size());
ajm@google.com808e0e02011-08-03 21:08:51 +0000638
639 if (perf_testing) {
640 t0 = TickTime::Now();
641 }
642
643 ASSERT_EQ(apm->kNoError,
644 apm->AnalyzeReverseStream(&far_frame));
645
646 if (perf_testing) {
647 t1 = TickTime::Now();
648 TickInterval tick_diff = t1 - t0;
649 acc_ticks += tick_diff;
650 if (tick_diff.Microseconds() > max_time_reverse_us) {
651 max_time_reverse_us = tick_diff.Microseconds();
652 }
653 if (tick_diff.Microseconds() < min_time_reverse_us) {
654 min_time_reverse_us = tick_diff.Microseconds();
655 }
656 }
657
658 } else if (event_msg.type() == Event::STREAM) {
659 ASSERT_TRUE(event_msg.has_stream());
660 const Stream msg = event_msg.stream();
661 primary_count++;
662
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000663 // ProcessStream could have changed this for the output frame.
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000664 near_frame.num_channels_ = apm->num_input_channels();
ajm@google.com808e0e02011-08-03 21:08:51 +0000665
666 ASSERT_TRUE(msg.has_input_data());
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000667 ASSERT_EQ(sizeof(int16_t) * samples_per_channel *
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000668 near_frame.num_channels_, msg.input_data().size());
669 memcpy(near_frame.data_,
ajm@google.com808e0e02011-08-03 21:08:51 +0000670 msg.input_data().data(),
671 msg.input_data().size());
672
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000673 near_read_bytes += msg.input_data().size();
ajm@google.com808e0e02011-08-03 21:08:51 +0000674 if (progress && primary_count % 100 == 0) {
675 printf("%.0f%% complete\r",
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000676 (near_read_bytes * 100.0) / near_size_bytes);
ajm@google.com808e0e02011-08-03 21:08:51 +0000677 fflush(stdout);
678 }
679
andrew@webrtc.org81865342012-10-27 00:28:27 +0000680 if (apm->gain_control()->mode() == GainControl::kAdaptiveAnalog) {
681 SimulateMic(capture_level, &near_frame);
682 }
683
ajm@google.com808e0e02011-08-03 21:08:51 +0000684 if (perf_testing) {
685 t0 = TickTime::Now();
686 }
687
688 ASSERT_EQ(apm->kNoError,
689 apm->gain_control()->set_stream_analog_level(msg.level()));
690 ASSERT_EQ(apm->kNoError,
andrew@webrtc.org4b13fc92011-11-09 19:27:11 +0000691 apm->set_stream_delay_ms(msg.delay() + extra_delay_ms));
ajm@google.com808e0e02011-08-03 21:08:51 +0000692 ASSERT_EQ(apm->kNoError,
693 apm->echo_cancellation()->set_stream_drift_samples(msg.drift()));
694
695 int err = apm->ProcessStream(&near_frame);
696 if (err == apm->kBadStreamParameterWarning) {
697 printf("Bad parameter warning. %s\n", trace_stream.str().c_str());
698 }
699 ASSERT_TRUE(err == apm->kNoError ||
700 err == apm->kBadStreamParameterWarning);
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000701 ASSERT_TRUE(near_frame.num_channels_ == apm->num_output_channels());
ajm@google.com808e0e02011-08-03 21:08:51 +0000702
703 capture_level = apm->gain_control()->stream_analog_level();
704
705 stream_has_voice =
706 static_cast<int8_t>(apm->voice_detection()->stream_has_voice());
707 if (vad_out_file != NULL) {
708 ASSERT_EQ(1u, fwrite(&stream_has_voice,
709 sizeof(stream_has_voice),
710 1,
711 vad_out_file));
712 }
713
bjornv@webrtc.org08329f42012-07-12 21:00:43 +0000714 if (ns_prob_file != NULL) {
715 ns_speech_prob = apm->noise_suppression()->speech_probability();
716 ASSERT_EQ(1u, fwrite(&ns_speech_prob,
717 sizeof(ns_speech_prob),
718 1,
719 ns_prob_file));
720 }
721
ajm@google.com808e0e02011-08-03 21:08:51 +0000722 if (apm->gain_control()->mode() != GainControl::kAdaptiveAnalog) {
723 ASSERT_EQ(msg.level(), capture_level);
724 }
725
726 if (perf_testing) {
727 t1 = TickTime::Now();
728 TickInterval tick_diff = t1 - t0;
729 acc_ticks += tick_diff;
730 if (tick_diff.Microseconds() > max_time_us) {
731 max_time_us = tick_diff.Microseconds();
732 }
733 if (tick_diff.Microseconds() < min_time_us) {
734 min_time_us = tick_diff.Microseconds();
735 }
736 }
737
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000738 size_t size = samples_per_channel * near_frame.num_channels_;
739 ASSERT_EQ(size, fwrite(near_frame.data_,
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000740 sizeof(int16_t),
741 size,
742 out_file));
ajm@google.com808e0e02011-08-03 21:08:51 +0000743 }
744 }
745
746 ASSERT_TRUE(feof(pb_file));
ajm@google.com808e0e02011-08-03 21:08:51 +0000747
748 } else {
bjornv@google.coma2c6ea02011-09-27 08:04:45 +0000749 enum Events {
750 kInitializeEvent,
751 kRenderEvent,
752 kCaptureEvent,
753 kResetEventDeprecated
754 };
755 int16_t event = 0;
ajm@google.com808e0e02011-08-03 21:08:51 +0000756 while (simulating || feof(event_file) == 0) {
757 std::ostringstream trace_stream;
758 trace_stream << "Processed frames: " << reverse_count << " (reverse), "
759 << primary_count << " (primary)";
760 SCOPED_TRACE(trace_stream.str());
761
762 if (simulating) {
763 if (far_file == NULL) {
niklase@google.com470e71d2011-07-07 08:21:25 +0000764 event = kCaptureEvent;
765 } else {
ajm@google.com808e0e02011-08-03 21:08:51 +0000766 if (event == kRenderEvent) {
767 event = kCaptureEvent;
768 } else {
769 event = kRenderEvent;
770 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000771 }
772 } else {
ajm@google.com808e0e02011-08-03 21:08:51 +0000773 read_count = fread(&event, sizeof(event), 1, event_file);
774 if (read_count != 1) {
775 break;
niklase@google.com470e71d2011-07-07 08:21:25 +0000776 }
777 }
778
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000779 far_frame.sample_rate_hz_ = sample_rate_hz;
780 far_frame.samples_per_channel_ = samples_per_channel;
781 far_frame.num_channels_ = num_render_channels;
782 near_frame.sample_rate_hz_ = sample_rate_hz;
783 near_frame.samples_per_channel_ = samples_per_channel;
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000784
ajm@google.com808e0e02011-08-03 21:08:51 +0000785 if (event == kInitializeEvent || event == kResetEventDeprecated) {
786 ASSERT_EQ(1u,
787 fread(&sample_rate_hz, sizeof(sample_rate_hz), 1, event_file));
788 samples_per_channel = sample_rate_hz / 100;
niklase@google.com470e71d2011-07-07 08:21:25 +0000789
ajm@google.com808e0e02011-08-03 21:08:51 +0000790 ASSERT_EQ(1u,
791 fread(&device_sample_rate_hz,
792 sizeof(device_sample_rate_hz),
793 1,
794 event_file));
795
796 ASSERT_EQ(apm->kNoError,
797 apm->set_sample_rate_hz(sample_rate_hz));
798
799 ASSERT_EQ(apm->kNoError,
800 apm->echo_cancellation()->set_device_sample_rate_hz(
801 device_sample_rate_hz));
802
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000803 far_frame.sample_rate_hz_ = sample_rate_hz;
804 far_frame.samples_per_channel_ = samples_per_channel;
805 far_frame.num_channels_ = num_render_channels;
806 near_frame.sample_rate_hz_ = sample_rate_hz;
807 near_frame.samples_per_channel_ = samples_per_channel;
ajm@google.com808e0e02011-08-03 21:08:51 +0000808
809 if (verbose) {
810 printf("Init at frame: %d (primary), %d (reverse)\n",
811 primary_count, reverse_count);
812 printf(" Sample rate: %d Hz\n", sample_rate_hz);
813 }
814
815 } else if (event == kRenderEvent) {
816 reverse_count++;
ajm@google.com808e0e02011-08-03 21:08:51 +0000817
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000818 size_t size = samples_per_channel * num_render_channels;
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000819 read_count = fread(far_frame.data_,
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000820 sizeof(int16_t),
821 size,
ajm@google.com808e0e02011-08-03 21:08:51 +0000822 far_file);
823
824 if (simulating) {
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000825 if (read_count != size) {
andrew@webrtc.org94c74132011-09-19 15:17:57 +0000826 // Read an equal amount from the near file to avoid errors due to
827 // not reaching end-of-file.
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000828 EXPECT_EQ(0, fseek(near_file, read_count * sizeof(int16_t),
andrew@webrtc.org94c74132011-09-19 15:17:57 +0000829 SEEK_CUR));
ajm@google.com808e0e02011-08-03 21:08:51 +0000830 break; // This is expected.
831 }
832 } else {
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000833 ASSERT_EQ(size, read_count);
ajm@google.com808e0e02011-08-03 21:08:51 +0000834 }
835
836 if (perf_testing) {
837 t0 = TickTime::Now();
838 }
839
840 ASSERT_EQ(apm->kNoError,
841 apm->AnalyzeReverseStream(&far_frame));
842
843 if (perf_testing) {
844 t1 = TickTime::Now();
845 TickInterval tick_diff = t1 - t0;
846 acc_ticks += tick_diff;
847 if (tick_diff.Microseconds() > max_time_reverse_us) {
848 max_time_reverse_us = tick_diff.Microseconds();
849 }
850 if (tick_diff.Microseconds() < min_time_reverse_us) {
851 min_time_reverse_us = tick_diff.Microseconds();
852 }
853 }
854
855 } else if (event == kCaptureEvent) {
856 primary_count++;
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000857 near_frame.num_channels_ = num_capture_input_channels;
ajm@google.com808e0e02011-08-03 21:08:51 +0000858
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000859 size_t size = samples_per_channel * num_capture_input_channels;
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000860 read_count = fread(near_frame.data_,
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000861 sizeof(int16_t),
862 size,
ajm@google.com808e0e02011-08-03 21:08:51 +0000863 near_file);
864
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000865 near_read_bytes += read_count * sizeof(int16_t);
ajm@google.com808e0e02011-08-03 21:08:51 +0000866 if (progress && primary_count % 100 == 0) {
867 printf("%.0f%% complete\r",
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000868 (near_read_bytes * 100.0) / near_size_bytes);
ajm@google.com808e0e02011-08-03 21:08:51 +0000869 fflush(stdout);
870 }
871 if (simulating) {
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000872 if (read_count != size) {
ajm@google.com808e0e02011-08-03 21:08:51 +0000873 break; // This is expected.
874 }
875
876 delay_ms = 0;
877 drift_samples = 0;
878 } else {
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000879 ASSERT_EQ(size, read_count);
ajm@google.com808e0e02011-08-03 21:08:51 +0000880
881 // TODO(ajm): sizeof(delay_ms) for current files?
882 ASSERT_EQ(1u,
883 fread(&delay_ms, 2, 1, delay_file));
884 ASSERT_EQ(1u,
885 fread(&drift_samples, sizeof(drift_samples), 1, drift_file));
886 }
887
andrew@webrtc.org81865342012-10-27 00:28:27 +0000888 if (apm->gain_control()->mode() == GainControl::kAdaptiveAnalog) {
889 SimulateMic(capture_level, &near_frame);
890 }
891
ajm@google.com808e0e02011-08-03 21:08:51 +0000892 if (perf_testing) {
893 t0 = TickTime::Now();
894 }
895
896 // TODO(ajm): fake an analog gain while simulating.
897
898 int capture_level_in = capture_level;
899 ASSERT_EQ(apm->kNoError,
900 apm->gain_control()->set_stream_analog_level(capture_level));
901 ASSERT_EQ(apm->kNoError,
andrew@webrtc.org4b13fc92011-11-09 19:27:11 +0000902 apm->set_stream_delay_ms(delay_ms + extra_delay_ms));
ajm@google.com808e0e02011-08-03 21:08:51 +0000903 ASSERT_EQ(apm->kNoError,
904 apm->echo_cancellation()->set_stream_drift_samples(drift_samples));
905
906 int err = apm->ProcessStream(&near_frame);
907 if (err == apm->kBadStreamParameterWarning) {
908 printf("Bad parameter warning. %s\n", trace_stream.str().c_str());
909 }
910 ASSERT_TRUE(err == apm->kNoError ||
911 err == apm->kBadStreamParameterWarning);
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000912 ASSERT_TRUE(near_frame.num_channels_ == apm->num_output_channels());
ajm@google.com808e0e02011-08-03 21:08:51 +0000913
914 capture_level = apm->gain_control()->stream_analog_level();
915
916 stream_has_voice =
917 static_cast<int8_t>(apm->voice_detection()->stream_has_voice());
918 if (vad_out_file != NULL) {
919 ASSERT_EQ(1u, fwrite(&stream_has_voice,
920 sizeof(stream_has_voice),
921 1,
922 vad_out_file));
923 }
924
bjornv@webrtc.org08329f42012-07-12 21:00:43 +0000925 if (ns_prob_file != NULL) {
926 ns_speech_prob = apm->noise_suppression()->speech_probability();
927 ASSERT_EQ(1u, fwrite(&ns_speech_prob,
928 sizeof(ns_speech_prob),
929 1,
930 ns_prob_file));
931 }
932
ajm@google.com808e0e02011-08-03 21:08:51 +0000933 if (apm->gain_control()->mode() != GainControl::kAdaptiveAnalog) {
934 ASSERT_EQ(capture_level_in, capture_level);
935 }
936
937 if (perf_testing) {
938 t1 = TickTime::Now();
939 TickInterval tick_diff = t1 - t0;
940 acc_ticks += tick_diff;
941 if (tick_diff.Microseconds() > max_time_us) {
942 max_time_us = tick_diff.Microseconds();
943 }
944 if (tick_diff.Microseconds() < min_time_us) {
945 min_time_us = tick_diff.Microseconds();
946 }
947 }
948
andrew@webrtc.org63a50982012-05-02 23:56:37 +0000949 size = samples_per_channel * near_frame.num_channels_;
950 ASSERT_EQ(size, fwrite(near_frame.data_,
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000951 sizeof(int16_t),
952 size,
953 out_file));
niklase@google.com470e71d2011-07-07 08:21:25 +0000954 }
ajm@google.com808e0e02011-08-03 21:08:51 +0000955 else {
956 FAIL() << "Event " << event << " is unrecognized";
niklase@google.com470e71d2011-07-07 08:21:25 +0000957 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000958 }
959 }
andrew@webrtc.org94c74132011-09-19 15:17:57 +0000960 printf("100%% complete\r");
niklase@google.com470e71d2011-07-07 08:21:25 +0000961
bjornv@google.comc4b939c2011-07-13 08:09:56 +0000962 if (aecm_echo_path_out_file != NULL) {
ajm@google.com22e65152011-07-18 18:03:01 +0000963 const size_t path_size =
964 apm->echo_control_mobile()->echo_path_size_bytes();
andrew@webrtc.org3119ecf2011-11-01 17:00:18 +0000965 scoped_array<char> echo_path(new char[path_size]);
966 apm->echo_control_mobile()->GetEchoPath(echo_path.get(), path_size);
967 ASSERT_EQ(path_size, fwrite(echo_path.get(),
968 sizeof(char),
bjornv@google.comc4b939c2011-07-13 08:09:56 +0000969 path_size,
970 aecm_echo_path_out_file));
971 fclose(aecm_echo_path_out_file);
972 aecm_echo_path_out_file = NULL;
973 }
974
niklase@google.com470e71d2011-07-07 08:21:25 +0000975 if (verbose) {
976 printf("\nProcessed frames: %d (primary), %d (reverse)\n",
977 primary_count, reverse_count);
andrew@webrtc.org94c74132011-09-19 15:17:57 +0000978
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000979 if (apm->level_estimator()->is_enabled()) {
980 printf("\n--Level metrics--\n");
981 printf("RMS: %d dBFS\n", -apm->level_estimator()->RMS());
982 }
andrew@webrtc.org94c74132011-09-19 15:17:57 +0000983 if (apm->echo_cancellation()->are_metrics_enabled()) {
984 EchoCancellation::Metrics metrics;
985 apm->echo_cancellation()->GetMetrics(&metrics);
986 printf("\n--Echo metrics--\n");
987 printf("(avg, max, min)\n");
988 printf("ERL: ");
989 PrintStat(metrics.echo_return_loss);
990 printf("ERLE: ");
991 PrintStat(metrics.echo_return_loss_enhancement);
992 printf("ANLP: ");
993 PrintStat(metrics.a_nlp);
994 }
bjornv@google.com1ba3dbe2011-10-03 08:18:10 +0000995 if (apm->echo_cancellation()->is_delay_logging_enabled()) {
996 int median = 0;
997 int std = 0;
998 apm->echo_cancellation()->GetDelayMetrics(&median, &std);
999 printf("\n--Delay metrics--\n");
1000 printf("Median: %3d\n", median);
1001 printf("Standard deviation: %3d\n", std);
1002 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001003 }
1004
ajm@google.com808e0e02011-08-03 21:08:51 +00001005 if (!pb_file) {
1006 int8_t temp_int8;
1007 if (far_file) {
1008 read_count = fread(&temp_int8, sizeof(temp_int8), 1, far_file);
1009 EXPECT_NE(0, feof(far_file)) << "Far-end file not fully processed";
1010 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001011
ajm@google.com808e0e02011-08-03 21:08:51 +00001012 read_count = fread(&temp_int8, sizeof(temp_int8), 1, near_file);
1013 EXPECT_NE(0, feof(near_file)) << "Near-end file not fully processed";
1014
1015 if (!simulating) {
1016 read_count = fread(&temp_int8, sizeof(temp_int8), 1, event_file);
1017 EXPECT_NE(0, feof(event_file)) << "Event file not fully processed";
1018 read_count = fread(&temp_int8, sizeof(temp_int8), 1, delay_file);
1019 EXPECT_NE(0, feof(delay_file)) << "Delay file not fully processed";
1020 read_count = fread(&temp_int8, sizeof(temp_int8), 1, drift_file);
1021 EXPECT_NE(0, feof(drift_file)) << "Drift file not fully processed";
1022 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001023 }
1024
1025 if (perf_testing) {
1026 if (primary_count > 0) {
1027 WebRtc_Word64 exec_time = acc_ticks.Milliseconds();
1028 printf("\nTotal time: %.3f s, file time: %.2f s\n",
1029 exec_time * 0.001, primary_count * 0.01);
1030 printf("Time per frame: %.3f ms (average), %.3f ms (max),"
1031 " %.3f ms (min)\n",
1032 (exec_time * 1.0) / primary_count,
1033 (max_time_us + max_time_reverse_us) / 1000.0,
1034 (min_time_us + min_time_reverse_us) / 1000.0);
1035 } else {
1036 printf("Warning: no capture frames\n");
1037 }
1038 }
1039
1040 AudioProcessing::Destroy(apm);
1041 apm = NULL;
1042}
ajm@google.com808e0e02011-08-03 21:08:51 +00001043} // namespace
niklase@google.com470e71d2011-07-07 08:21:25 +00001044
1045int main(int argc, char* argv[])
1046{
1047 void_main(argc, argv);
1048
andrew@webrtc.org64235092011-08-19 21:22:08 +00001049 // Optional, but removes memory leak noise from Valgrind.
1050 google::protobuf::ShutdownProtobufLibrary();
niklase@google.com470e71d2011-07-07 08:21:25 +00001051 return 0;
1052}