/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <math.h>
#include <stdio.h>
#include <string.h>
#ifdef WEBRTC_ANDROID
#include <sys/stat.h>
#endif

#include <algorithm>

#include "gtest/gtest.h"

#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/system_wrappers/interface/cpu_features_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/perf_test.h"
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h"
#else
#include "webrtc/audio_processing/debug.pb.h"
#endif

using webrtc::AudioFrame;
using webrtc::AudioProcessing;
using webrtc::EchoCancellation;
using webrtc::GainControl;
using webrtc::NoiseSuppression;
using webrtc::scoped_array;
using webrtc::TickInterval;
using webrtc::TickTime;
using webrtc::VoiceDetection;

using webrtc::audioproc::Event;
using webrtc::audioproc::Init;
using webrtc::audioproc::ReverseStream;
using webrtc::audioproc::Stream;

namespace {
// Returns true on success, false on error or end-of-file.
bool ReadMessageFromFile(FILE* file,
                         ::google::protobuf::MessageLite* msg) {
  // The "wire format" for the size is little-endian.
  // Assume process_test is running on a little-endian machine.
  int32_t size = 0;
  if (fread(&size, sizeof(int32_t), 1, file) != 1) {
    return false;
  }
  if (size <= 0) {
    return false;
  }
  const size_t usize = static_cast<size_t>(size);

  scoped_array<char> array(new char[usize]);
  if (fread(array.get(), sizeof(char), usize, file) != usize) {
    return false;
  }

  msg->Clear();
  return msg->ParseFromArray(array.get(), usize);
}

void PrintStat(const AudioProcessing::Statistic& stat) {
  printf("%d, %d, %d\n", stat.average,
                         stat.maximum,
                         stat.minimum);
}

void usage() {
  printf(
  "Usage: process_test [options] [-pb PROTOBUF_FILE]\n"
  "  [-ir REVERSE_FILE] [-i PRIMARY_FILE] [-o OUT_FILE]\n");
  printf(
  "process_test is a test application for AudioProcessing.\n\n"
  "When a protobuf debug file is available, specify it with -pb.\n"
87 "Alternately, when -ir or -i is used, the specified files will be\n"
88 "processed directly in a simulation mode. Otherwise the full set of\n"
89 "legacy test files is expected to be present in the working directory.\n");
niklase@google.com470e71d2011-07-07 08:21:25 +000090 printf("\n");
91 printf("Options\n");
ajm@google.com808e0e02011-08-03 21:08:51 +000092 printf("General configuration (only used for the simulation mode):\n");
niklase@google.com470e71d2011-07-07 08:21:25 +000093 printf(" -fs SAMPLE_RATE_HZ\n");
94 printf(" -ch CHANNELS_IN CHANNELS_OUT\n");
95 printf(" -rch REVERSE_CHANNELS\n");
96 printf("\n");
97 printf("Component configuration:\n");
98 printf(
99 "All components are disabled by default. Each block below begins with a\n"
100 "flag to enable the component with default settings. The subsequent flags\n"
101 "in the block are used to provide configuration settings.\n");
102 printf("\n -aec Echo cancellation\n");
103 printf(" --drift_compensation\n");
104 printf(" --no_drift_compensation\n");
andrew@webrtc.org94c74132011-09-19 15:17:57 +0000105 printf(" --no_echo_metrics\n");
bjornv@google.com1ba3dbe2011-10-03 08:18:10 +0000106 printf(" --no_delay_logging\n");
niklase@google.com470e71d2011-07-07 08:21:25 +0000107 printf("\n -aecm Echo control mobile\n");
bjornv@google.com238a0222011-07-15 14:51:52 +0000108 printf(" --aecm_echo_path_in_file FILE\n");
109 printf(" --aecm_echo_path_out_file FILE\n");
niklase@google.com470e71d2011-07-07 08:21:25 +0000110 printf("\n -agc Gain control\n");
111 printf(" --analog\n");
112 printf(" --adaptive_digital\n");
113 printf(" --fixed_digital\n");
114 printf(" --target_level LEVEL\n");
115 printf(" --compression_gain GAIN\n");
116 printf(" --limiter\n");
117 printf(" --no_limiter\n");
118 printf("\n -hpf High pass filter\n");
119 printf("\n -ns Noise suppression\n");
120 printf(" --ns_low\n");
121 printf(" --ns_moderate\n");
122 printf(" --ns_high\n");
123 printf(" --ns_very_high\n");
bjornv@webrtc.org08329f42012-07-12 21:00:43 +0000124 printf(" --ns_prob_file FILE\n");
niklase@google.com470e71d2011-07-07 08:21:25 +0000125 printf("\n -vad Voice activity detection\n");
ajm@google.com808e0e02011-08-03 21:08:51 +0000126 printf(" --vad_out_file FILE\n");
andrew@webrtc.org755b04a2011-11-15 16:57:56 +0000127 printf("\n Level metrics (enabled by default)\n");
128 printf(" --no_level_metrics\n");
niklase@google.com470e71d2011-07-07 08:21:25 +0000129 printf("\n");
130 printf("Modifiers:\n");
andrew@webrtc.orgcb181212011-10-26 00:27:17 +0000131 printf(" --noasm Disable SSE optimization.\n");
andrew@webrtc.org4b13fc92011-11-09 19:27:11 +0000132 printf(" --delay DELAY Add DELAY ms to input value.\n");
andrew@webrtc.orgcb181212011-10-26 00:27:17 +0000133 printf(" --perf Measure performance.\n");
134 printf(" --quiet Suppress text output.\n");
135 printf(" --no_progress Suppress progress.\n");
136 printf(" --debug_file FILE Dump a debug recording.\n");
niklase@google.com470e71d2011-07-07 08:21:25 +0000137}
138
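// Maps a simulated analog mic level in [0, 255] to a linear gain factor.
// Level 127 gives unity gain (0 dB); the mapping below spans roughly
// -79 dB to +80 dB, i.e. ((level - 127) / 128) * 80 dB.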
static double MicLevel2Gain(int level) {
  return pow(10.0, ((level - 127.0) / 128.0 * 80.) / 20.);
}

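// Scales the samples of |frame| by the gain corresponding to |mic_level|,
// rounding to nearest and saturating to the int16 range. This approximates
// the analog mic gain a real capture device would have applied.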
static void SimulateMic(int mic_level, AudioFrame* frame) {
  mic_level = std::min(std::max(mic_level, 0), 255);
  double mic_gain = MicLevel2Gain(mic_level);
  int num_samples = frame->samples_per_channel_ * frame->num_channels_;
  double v;
  for (int n = 0; n < num_samples; n++) {
    v = floor(frame->data_[n] * mic_gain + 0.5);
    v = std::max(std::min(32767., v), -32768.);
    frame->data_[n] = static_cast<int16_t>(v);
  }
}

// void function for gtest.
void void_main(int argc, char* argv[]) {
  if (argc > 1 && strcmp(argv[1], "--help") == 0) {
    usage();
    return;
  }

  if (argc < 2) {
    printf("Did you mean to run without arguments?\n");
    printf("Try `process_test --help' for more information.\n\n");
  }

  AudioProcessing* apm = AudioProcessing::Create(0);
  ASSERT_TRUE(apm != NULL);

  const char* pb_filename = NULL;
  const char* far_filename = NULL;
  const char* near_filename = NULL;
  const char* out_filename = NULL;
  const char* vad_out_filename = NULL;
  const char* ns_prob_filename = NULL;
  const char* aecm_echo_path_in_filename = NULL;
  const char* aecm_echo_path_out_filename = NULL;

  int32_t sample_rate_hz = 16000;
  int32_t device_sample_rate_hz = 16000;

  int num_capture_input_channels = 1;
  int num_capture_output_channels = 1;
  int num_render_channels = 1;

  int samples_per_channel = sample_rate_hz / 100;

  bool simulating = false;
  bool perf_testing = false;
  bool verbose = true;
  bool progress = true;
  int extra_delay_ms = 0;
  //bool interleaved = true;

  ASSERT_EQ(apm->kNoError, apm->level_estimator()->Enable(true));
  for (int i = 1; i < argc; i++) {
    if (strcmp(argv[i], "-pb") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify protobuf filename after -pb";
      pb_filename = argv[i];

    } else if (strcmp(argv[i], "-ir") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after -ir";
      far_filename = argv[i];
      simulating = true;

    } else if (strcmp(argv[i], "-i") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after -i";
      near_filename = argv[i];
      simulating = true;

    } else if (strcmp(argv[i], "-o") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after -o";
      out_filename = argv[i];

    } else if (strcmp(argv[i], "-fs") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify sample rate after -fs";
      ASSERT_EQ(1, sscanf(argv[i], "%d", &sample_rate_hz));
      samples_per_channel = sample_rate_hz / 100;

      ASSERT_EQ(apm->kNoError,
                apm->set_sample_rate_hz(sample_rate_hz));

    } else if (strcmp(argv[i], "-ch") == 0) {
      i++;
      ASSERT_LT(i + 1, argc) << "Specify number of channels after -ch";
      ASSERT_EQ(1, sscanf(argv[i], "%d", &num_capture_input_channels));
      i++;
      ASSERT_EQ(1, sscanf(argv[i], "%d", &num_capture_output_channels));

      ASSERT_EQ(apm->kNoError,
                apm->set_num_channels(num_capture_input_channels,
                                      num_capture_output_channels));

    } else if (strcmp(argv[i], "-rch") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify number of channels after -rch";
      ASSERT_EQ(1, sscanf(argv[i], "%d", &num_render_channels));

      ASSERT_EQ(apm->kNoError,
                apm->set_num_reverse_channels(num_render_channels));

    } else if (strcmp(argv[i], "-aec") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_metrics(true));
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_delay_logging(true));

    } else if (strcmp(argv[i], "--drift_compensation") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
      // TODO(ajm): this is enabled in the VQE test app by default. Investigate
      // why it can give better performance despite passing zeros.
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_drift_compensation(true));
    } else if (strcmp(argv[i], "--no_drift_compensation") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_drift_compensation(false));

    } else if (strcmp(argv[i], "--no_echo_metrics") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_metrics(false));

    } else if (strcmp(argv[i], "--no_delay_logging") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->echo_cancellation()->enable_delay_logging(false));

    } else if (strcmp(argv[i], "--no_level_metrics") == 0) {
      ASSERT_EQ(apm->kNoError, apm->level_estimator()->Enable(false));

    } else if (strcmp(argv[i], "-aecm") == 0) {
      ASSERT_EQ(apm->kNoError, apm->echo_control_mobile()->Enable(true));

    } else if (strcmp(argv[i], "--aecm_echo_path_in_file") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after --aecm_echo_path_in_file";
      aecm_echo_path_in_filename = argv[i];

    } else if (strcmp(argv[i], "--aecm_echo_path_out_file") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after --aecm_echo_path_out_file";
      aecm_echo_path_out_filename = argv[i];

    } else if (strcmp(argv[i], "-agc") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));

    } else if (strcmp(argv[i], "--analog") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->set_mode(GainControl::kAdaptiveAnalog));

    } else if (strcmp(argv[i], "--adaptive_digital") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->set_mode(GainControl::kAdaptiveDigital));

    } else if (strcmp(argv[i], "--fixed_digital") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->set_mode(GainControl::kFixedDigital));

    } else if (strcmp(argv[i], "--target_level") == 0) {
      i++;
      int level;
      ASSERT_EQ(1, sscanf(argv[i], "%d", &level));

      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->set_target_level_dbfs(level));

    } else if (strcmp(argv[i], "--compression_gain") == 0) {
      i++;
      int gain;
      ASSERT_EQ(1, sscanf(argv[i], "%d", &gain));

      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->set_compression_gain_db(gain));

    } else if (strcmp(argv[i], "--limiter") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->enable_limiter(true));

    } else if (strcmp(argv[i], "--no_limiter") == 0) {
      ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->gain_control()->enable_limiter(false));

    } else if (strcmp(argv[i], "-hpf") == 0) {
      ASSERT_EQ(apm->kNoError, apm->high_pass_filter()->Enable(true));

    } else if (strcmp(argv[i], "-ns") == 0) {
      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));

    } else if (strcmp(argv[i], "--ns_low") == 0) {
      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->noise_suppression()->set_level(NoiseSuppression::kLow));

    } else if (strcmp(argv[i], "--ns_moderate") == 0) {
      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->noise_suppression()->set_level(NoiseSuppression::kModerate));

    } else if (strcmp(argv[i], "--ns_high") == 0) {
      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->noise_suppression()->set_level(NoiseSuppression::kHigh));

    } else if (strcmp(argv[i], "--ns_very_high") == 0) {
      ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->noise_suppression()->set_level(NoiseSuppression::kVeryHigh));

    } else if (strcmp(argv[i], "--ns_prob_file") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after --ns_prob_file";
      ns_prob_filename = argv[i];

    } else if (strcmp(argv[i], "-vad") == 0) {
      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));

    } else if (strcmp(argv[i], "--vad_very_low") == 0) {
      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->voice_detection()->set_likelihood(
                    VoiceDetection::kVeryLowLikelihood));

    } else if (strcmp(argv[i], "--vad_low") == 0) {
      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->voice_detection()->set_likelihood(
                    VoiceDetection::kLowLikelihood));

    } else if (strcmp(argv[i], "--vad_moderate") == 0) {
      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->voice_detection()->set_likelihood(
                    VoiceDetection::kModerateLikelihood));

    } else if (strcmp(argv[i], "--vad_high") == 0) {
      ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
      ASSERT_EQ(apm->kNoError,
                apm->voice_detection()->set_likelihood(
                    VoiceDetection::kHighLikelihood));

    } else if (strcmp(argv[i], "--vad_out_file") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after --vad_out_file";
      vad_out_filename = argv[i];

    } else if (strcmp(argv[i], "--noasm") == 0) {
      WebRtc_GetCPUInfo = WebRtc_GetCPUInfoNoASM;
      // We need to reinitialize here if components have already been enabled.
      ASSERT_EQ(apm->kNoError, apm->Initialize());

    } else if (strcmp(argv[i], "--delay") == 0) {
      i++;
      ASSERT_EQ(1, sscanf(argv[i], "%d", &extra_delay_ms));

    } else if (strcmp(argv[i], "--perf") == 0) {
      perf_testing = true;

    } else if (strcmp(argv[i], "--quiet") == 0) {
      verbose = false;
      progress = false;

    } else if (strcmp(argv[i], "--no_progress") == 0) {
      progress = false;

    } else if (strcmp(argv[i], "--debug_file") == 0) {
      i++;
      ASSERT_LT(i, argc) << "Specify filename after --debug_file";
      ASSERT_EQ(apm->kNoError, apm->StartDebugRecording(argv[i]));
    } else {
      FAIL() << "Unrecognized argument " << argv[i];
    }
  }
  // If we're reading a protobuf file, ensure a simulation hasn't also
  // been requested (which makes no sense...)
  ASSERT_FALSE(pb_filename && simulating);

  if (verbose) {
    printf("Sample rate: %d Hz\n", sample_rate_hz);
    printf("Primary channels: %d (in), %d (out)\n",
           num_capture_input_channels,
           num_capture_output_channels);
    printf("Reverse channels: %d \n", num_render_channels);
  }

  const std::string out_path = webrtc::test::OutputPath();
  const char far_file_default[] = "apm_far.pcm";
  const char near_file_default[] = "apm_near.pcm";
  const std::string out_file_default = out_path + "out.pcm";
  const char event_filename[] = "apm_event.dat";
  const char delay_filename[] = "apm_delay.dat";
  const char drift_filename[] = "apm_drift.dat";
  const std::string vad_file_default = out_path + "vad_out.dat";
  const std::string ns_prob_file_default = out_path + "ns_prob.dat";

  if (!simulating) {
    far_filename = far_file_default;
    near_filename = near_file_default;
  }

  if (!out_filename) {
    out_filename = out_file_default.c_str();
  }

  if (!vad_out_filename) {
    vad_out_filename = vad_file_default.c_str();
  }

  if (!ns_prob_filename) {
    ns_prob_filename = ns_prob_file_default.c_str();
  }

  FILE* pb_file = NULL;
  FILE* far_file = NULL;
  FILE* near_file = NULL;
  FILE* out_file = NULL;
  FILE* event_file = NULL;
  FILE* delay_file = NULL;
  FILE* drift_file = NULL;
  FILE* vad_out_file = NULL;
  FILE* ns_prob_file = NULL;
  FILE* aecm_echo_path_in_file = NULL;
  FILE* aecm_echo_path_out_file = NULL;

  if (pb_filename) {
    pb_file = fopen(pb_filename, "rb");
    ASSERT_TRUE(NULL != pb_file) << "Unable to open protobuf file "
                                 << pb_filename;
  } else {
    if (far_filename) {
      far_file = fopen(far_filename, "rb");
      ASSERT_TRUE(NULL != far_file) << "Unable to open far-end audio file "
                                    << far_filename;
    }

    near_file = fopen(near_filename, "rb");
    ASSERT_TRUE(NULL != near_file) << "Unable to open near-end audio file "
                                   << near_filename;
    if (!simulating) {
      event_file = fopen(event_filename, "rb");
      ASSERT_TRUE(NULL != event_file) << "Unable to open event file "
                                      << event_filename;

      delay_file = fopen(delay_filename, "rb");
      ASSERT_TRUE(NULL != delay_file) << "Unable to open buffer file "
                                      << delay_filename;

      drift_file = fopen(drift_filename, "rb");
      ASSERT_TRUE(NULL != drift_file) << "Unable to open drift file "
                                      << drift_filename;
    }
  }

  out_file = fopen(out_filename, "wb");
  ASSERT_TRUE(NULL != out_file) << "Unable to open output audio file "
                                << out_filename;

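  // Determine the near-end input size up front; it is only used to drive the
  // progress indicator printed during processing.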
  int near_size_bytes = 0;
  if (pb_file) {
    struct stat st;
    stat(pb_filename, &st);
    // Crude estimate, but should be good enough.
    near_size_bytes = st.st_size / 3;
  } else {
    struct stat st;
    stat(near_filename, &st);
    near_size_bytes = st.st_size;
  }

  if (apm->voice_detection()->is_enabled()) {
    vad_out_file = fopen(vad_out_filename, "wb");
    ASSERT_TRUE(NULL != vad_out_file) << "Unable to open VAD output file "
                                      << vad_out_filename;
  }

  if (apm->noise_suppression()->is_enabled()) {
    ns_prob_file = fopen(ns_prob_filename, "wb");
    ASSERT_TRUE(NULL != ns_prob_file) << "Unable to open NS output file "
                                      << ns_prob_filename;
  }

  if (aecm_echo_path_in_filename != NULL) {
    aecm_echo_path_in_file = fopen(aecm_echo_path_in_filename, "rb");
    ASSERT_TRUE(NULL != aecm_echo_path_in_file) << "Unable to open file "
                                                << aecm_echo_path_in_filename;

    const size_t path_size =
        apm->echo_control_mobile()->echo_path_size_bytes();
    scoped_array<char> echo_path(new char[path_size]);
    ASSERT_EQ(path_size, fread(echo_path.get(),
                               sizeof(char),
                               path_size,
                               aecm_echo_path_in_file));
    EXPECT_EQ(apm->kNoError,
              apm->echo_control_mobile()->SetEchoPath(echo_path.get(),
                                                      path_size));
    fclose(aecm_echo_path_in_file);
    aecm_echo_path_in_file = NULL;
  }

  if (aecm_echo_path_out_filename != NULL) {
    aecm_echo_path_out_file = fopen(aecm_echo_path_out_filename, "wb");
    ASSERT_TRUE(NULL != aecm_echo_path_out_file) << "Unable to open file "
                                                 << aecm_echo_path_out_filename;
  }

  size_t read_count = 0;
  int reverse_count = 0;
  int primary_count = 0;
  int near_read_bytes = 0;
  TickInterval acc_ticks;

  AudioFrame far_frame;
  AudioFrame near_frame;

  int delay_ms = 0;
  int drift_samples = 0;
  int capture_level = 127;
  int8_t stream_has_voice = 0;
  float ns_speech_prob = 0.0f;

  TickTime t0 = TickTime::Now();
  TickTime t1 = t0;
  WebRtc_Word64 max_time_us = 0;
  WebRtc_Word64 max_time_reverse_us = 0;
  WebRtc_Word64 min_time_us = 1e6;
  WebRtc_Word64 min_time_reverse_us = 1e6;

  // TODO(ajm): Ideally we would refactor this block into separate functions,
  // but for now we want to share the variables.
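  // A debug (protobuf) file is a sequence of length-prefixed Event messages:
  // INIT events carry the stream configuration, REVERSE_STREAM events carry
  // far-end audio, and STREAM events carry near-end audio together with the
  // analog level, delay and drift needed to reproduce the original
  // ProcessStream() call.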
  if (pb_file) {
    Event event_msg;
    while (ReadMessageFromFile(pb_file, &event_msg)) {
      std::ostringstream trace_stream;
      trace_stream << "Processed frames: " << reverse_count << " (reverse), "
                   << primary_count << " (primary)";
      SCOPED_TRACE(trace_stream.str());

      if (event_msg.type() == Event::INIT) {
        ASSERT_TRUE(event_msg.has_init());
        const Init msg = event_msg.init();

        ASSERT_TRUE(msg.has_sample_rate());
        ASSERT_EQ(apm->kNoError,
                  apm->set_sample_rate_hz(msg.sample_rate()));

        ASSERT_TRUE(msg.has_device_sample_rate());
        ASSERT_EQ(apm->kNoError,
                  apm->echo_cancellation()->set_device_sample_rate_hz(
                      msg.device_sample_rate()));

        ASSERT_TRUE(msg.has_num_input_channels());
        ASSERT_TRUE(msg.has_num_output_channels());
        ASSERT_EQ(apm->kNoError,
                  apm->set_num_channels(msg.num_input_channels(),
                                        msg.num_output_channels()));

        ASSERT_TRUE(msg.has_num_reverse_channels());
        ASSERT_EQ(apm->kNoError,
                  apm->set_num_reverse_channels(msg.num_reverse_channels()));

        samples_per_channel = msg.sample_rate() / 100;
        far_frame.sample_rate_hz_ = msg.sample_rate();
        far_frame.samples_per_channel_ = samples_per_channel;
        far_frame.num_channels_ = msg.num_reverse_channels();
        near_frame.sample_rate_hz_ = msg.sample_rate();
        near_frame.samples_per_channel_ = samples_per_channel;

        if (verbose) {
          printf("Init at frame: %d (primary), %d (reverse)\n",
                 primary_count, reverse_count);
          printf("  Sample rate: %d Hz\n", msg.sample_rate());
          printf("  Primary channels: %d (in), %d (out)\n",
                 msg.num_input_channels(),
                 msg.num_output_channels());
          printf("  Reverse channels: %d \n", msg.num_reverse_channels());
        }

      } else if (event_msg.type() == Event::REVERSE_STREAM) {
        ASSERT_TRUE(event_msg.has_reverse_stream());
        const ReverseStream msg = event_msg.reverse_stream();
        reverse_count++;

        ASSERT_TRUE(msg.has_data());
        ASSERT_EQ(sizeof(int16_t) * samples_per_channel *
            far_frame.num_channels_, msg.data().size());
        memcpy(far_frame.data_, msg.data().data(), msg.data().size());

        if (perf_testing) {
          t0 = TickTime::Now();
        }

        ASSERT_EQ(apm->kNoError,
                  apm->AnalyzeReverseStream(&far_frame));

        if (perf_testing) {
          t1 = TickTime::Now();
          TickInterval tick_diff = t1 - t0;
          acc_ticks += tick_diff;
          if (tick_diff.Microseconds() > max_time_reverse_us) {
            max_time_reverse_us = tick_diff.Microseconds();
          }
          if (tick_diff.Microseconds() < min_time_reverse_us) {
            min_time_reverse_us = tick_diff.Microseconds();
          }
        }

      } else if (event_msg.type() == Event::STREAM) {
        ASSERT_TRUE(event_msg.has_stream());
        const Stream msg = event_msg.stream();
        primary_count++;

        // ProcessStream could have changed this for the output frame.
        near_frame.num_channels_ = apm->num_input_channels();

        ASSERT_TRUE(msg.has_input_data());
        ASSERT_EQ(sizeof(int16_t) * samples_per_channel *
            near_frame.num_channels_, msg.input_data().size());
        memcpy(near_frame.data_,
               msg.input_data().data(),
               msg.input_data().size());

        near_read_bytes += msg.input_data().size();
        if (progress && primary_count % 100 == 0) {
          printf("%.0f%% complete\r",
                 (near_read_bytes * 100.0) / near_size_bytes);
          fflush(stdout);
        }

        if (apm->gain_control()->mode() == GainControl::kAdaptiveAnalog) {
          SimulateMic(capture_level, &near_frame);
        }

        if (perf_testing) {
          t0 = TickTime::Now();
        }

        ASSERT_EQ(apm->kNoError,
                  apm->gain_control()->set_stream_analog_level(msg.level()));
        ASSERT_EQ(apm->kNoError,
                  apm->set_stream_delay_ms(msg.delay() + extra_delay_ms));
        ASSERT_EQ(apm->kNoError,
                  apm->echo_cancellation()->set_stream_drift_samples(msg.drift()));

        int err = apm->ProcessStream(&near_frame);
        if (err == apm->kBadStreamParameterWarning) {
          printf("Bad parameter warning. %s\n", trace_stream.str().c_str());
        }
        ASSERT_TRUE(err == apm->kNoError ||
                    err == apm->kBadStreamParameterWarning);
        ASSERT_TRUE(near_frame.num_channels_ == apm->num_output_channels());

        capture_level = apm->gain_control()->stream_analog_level();

        stream_has_voice =
            static_cast<int8_t>(apm->voice_detection()->stream_has_voice());
        if (vad_out_file != NULL) {
          ASSERT_EQ(1u, fwrite(&stream_has_voice,
                               sizeof(stream_has_voice),
                               1,
                               vad_out_file));
        }

        if (ns_prob_file != NULL) {
          ns_speech_prob = apm->noise_suppression()->speech_probability();
          ASSERT_EQ(1u, fwrite(&ns_speech_prob,
                               sizeof(ns_speech_prob),
                               1,
                               ns_prob_file));
        }

        if (apm->gain_control()->mode() != GainControl::kAdaptiveAnalog) {
          ASSERT_EQ(msg.level(), capture_level);
        }

        if (perf_testing) {
          t1 = TickTime::Now();
          TickInterval tick_diff = t1 - t0;
          acc_ticks += tick_diff;
          if (tick_diff.Microseconds() > max_time_us) {
            max_time_us = tick_diff.Microseconds();
          }
          if (tick_diff.Microseconds() < min_time_us) {
            min_time_us = tick_diff.Microseconds();
          }
        }

        size_t size = samples_per_channel * near_frame.num_channels_;
        ASSERT_EQ(size, fwrite(near_frame.data_,
                               sizeof(int16_t),
                               size,
                               out_file));
      }
    }

    ASSERT_TRUE(feof(pb_file));

  } else {
    enum Events {
      kInitializeEvent,
      kRenderEvent,
      kCaptureEvent,
      kResetEventDeprecated
    };
    int16_t event = 0;
    while (simulating || feof(event_file) == 0) {
      std::ostringstream trace_stream;
      trace_stream << "Processed frames: " << reverse_count << " (reverse), "
                   << primary_count << " (primary)";
      SCOPED_TRACE(trace_stream.str());

      if (simulating) {
        if (far_file == NULL) {
          event = kCaptureEvent;
        } else {
          if (event == kRenderEvent) {
            event = kCaptureEvent;
          } else {
            event = kRenderEvent;
          }
        }
      } else {
        read_count = fread(&event, sizeof(event), 1, event_file);
        if (read_count != 1) {
          break;
        }
      }

      far_frame.sample_rate_hz_ = sample_rate_hz;
      far_frame.samples_per_channel_ = samples_per_channel;
      far_frame.num_channels_ = num_render_channels;
      near_frame.sample_rate_hz_ = sample_rate_hz;
      near_frame.samples_per_channel_ = samples_per_channel;

      if (event == kInitializeEvent || event == kResetEventDeprecated) {
        ASSERT_EQ(1u,
            fread(&sample_rate_hz, sizeof(sample_rate_hz), 1, event_file));
        samples_per_channel = sample_rate_hz / 100;

        ASSERT_EQ(1u,
            fread(&device_sample_rate_hz,
                  sizeof(device_sample_rate_hz),
                  1,
                  event_file));

        ASSERT_EQ(apm->kNoError,
                  apm->set_sample_rate_hz(sample_rate_hz));

        ASSERT_EQ(apm->kNoError,
                  apm->echo_cancellation()->set_device_sample_rate_hz(
                      device_sample_rate_hz));

        far_frame.sample_rate_hz_ = sample_rate_hz;
        far_frame.samples_per_channel_ = samples_per_channel;
        far_frame.num_channels_ = num_render_channels;
        near_frame.sample_rate_hz_ = sample_rate_hz;
        near_frame.samples_per_channel_ = samples_per_channel;

        if (verbose) {
          printf("Init at frame: %d (primary), %d (reverse)\n",
                 primary_count, reverse_count);
          printf("  Sample rate: %d Hz\n", sample_rate_hz);
        }

      } else if (event == kRenderEvent) {
        reverse_count++;

        size_t size = samples_per_channel * num_render_channels;
        read_count = fread(far_frame.data_,
                           sizeof(int16_t),
                           size,
                           far_file);

        if (simulating) {
          if (read_count != size) {
            // Read an equal amount from the near file to avoid errors due to
            // not reaching end-of-file.
            EXPECT_EQ(0, fseek(near_file, read_count * sizeof(int16_t),
                               SEEK_CUR));
            break;  // This is expected.
          }
        } else {
          ASSERT_EQ(size, read_count);
        }

        if (perf_testing) {
          t0 = TickTime::Now();
        }

        ASSERT_EQ(apm->kNoError,
                  apm->AnalyzeReverseStream(&far_frame));

        if (perf_testing) {
          t1 = TickTime::Now();
          TickInterval tick_diff = t1 - t0;
          acc_ticks += tick_diff;
          if (tick_diff.Microseconds() > max_time_reverse_us) {
            max_time_reverse_us = tick_diff.Microseconds();
          }
          if (tick_diff.Microseconds() < min_time_reverse_us) {
            min_time_reverse_us = tick_diff.Microseconds();
          }
        }

      } else if (event == kCaptureEvent) {
        primary_count++;
        near_frame.num_channels_ = num_capture_input_channels;

        size_t size = samples_per_channel * num_capture_input_channels;
        read_count = fread(near_frame.data_,
                           sizeof(int16_t),
                           size,
                           near_file);

        near_read_bytes += read_count * sizeof(int16_t);
        if (progress && primary_count % 100 == 0) {
          printf("%.0f%% complete\r",
                 (near_read_bytes * 100.0) / near_size_bytes);
          fflush(stdout);
        }
        if (simulating) {
          if (read_count != size) {
            break;  // This is expected.
          }

          delay_ms = 0;
          drift_samples = 0;
        } else {
          ASSERT_EQ(size, read_count);

          // TODO(ajm): sizeof(delay_ms) for current files?
          ASSERT_EQ(1u,
              fread(&delay_ms, 2, 1, delay_file));
          ASSERT_EQ(1u,
              fread(&drift_samples, sizeof(drift_samples), 1, drift_file));
        }

        if (apm->gain_control()->mode() == GainControl::kAdaptiveAnalog) {
          SimulateMic(capture_level, &near_frame);
        }

        if (perf_testing) {
          t0 = TickTime::Now();
        }

        // TODO(ajm): fake an analog gain while simulating.

        int capture_level_in = capture_level;
        ASSERT_EQ(apm->kNoError,
                  apm->gain_control()->set_stream_analog_level(capture_level));
        ASSERT_EQ(apm->kNoError,
                  apm->set_stream_delay_ms(delay_ms + extra_delay_ms));
        ASSERT_EQ(apm->kNoError,
                  apm->echo_cancellation()->set_stream_drift_samples(drift_samples));

        int err = apm->ProcessStream(&near_frame);
        if (err == apm->kBadStreamParameterWarning) {
          printf("Bad parameter warning. %s\n", trace_stream.str().c_str());
        }
        ASSERT_TRUE(err == apm->kNoError ||
                    err == apm->kBadStreamParameterWarning);
        ASSERT_TRUE(near_frame.num_channels_ == apm->num_output_channels());

        capture_level = apm->gain_control()->stream_analog_level();

        stream_has_voice =
            static_cast<int8_t>(apm->voice_detection()->stream_has_voice());
        if (vad_out_file != NULL) {
          ASSERT_EQ(1u, fwrite(&stream_has_voice,
                               sizeof(stream_has_voice),
                               1,
                               vad_out_file));
        }

        if (ns_prob_file != NULL) {
          ns_speech_prob = apm->noise_suppression()->speech_probability();
          ASSERT_EQ(1u, fwrite(&ns_speech_prob,
                               sizeof(ns_speech_prob),
                               1,
                               ns_prob_file));
        }

        if (apm->gain_control()->mode() != GainControl::kAdaptiveAnalog) {
          ASSERT_EQ(capture_level_in, capture_level);
        }

        if (perf_testing) {
          t1 = TickTime::Now();
          TickInterval tick_diff = t1 - t0;
          acc_ticks += tick_diff;
          if (tick_diff.Microseconds() > max_time_us) {
            max_time_us = tick_diff.Microseconds();
          }
          if (tick_diff.Microseconds() < min_time_us) {
            min_time_us = tick_diff.Microseconds();
          }
        }

        size = samples_per_channel * near_frame.num_channels_;
        ASSERT_EQ(size, fwrite(near_frame.data_,
                               sizeof(int16_t),
                               size,
                               out_file));
      } else {
        FAIL() << "Event " << event << " is unrecognized";
      }
    }
  }
  printf("100%% complete\r");

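  // If requested, save the AECM echo path estimated during this run so it can
  // be fed back in on a later run via --aecm_echo_path_in_file.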
  if (aecm_echo_path_out_file != NULL) {
    const size_t path_size =
        apm->echo_control_mobile()->echo_path_size_bytes();
    scoped_array<char> echo_path(new char[path_size]);
    apm->echo_control_mobile()->GetEchoPath(echo_path.get(), path_size);
    ASSERT_EQ(path_size, fwrite(echo_path.get(),
                                sizeof(char),
                                path_size,
                                aecm_echo_path_out_file));
    fclose(aecm_echo_path_out_file);
    aecm_echo_path_out_file = NULL;
  }

  if (verbose) {
    printf("\nProcessed frames: %d (primary), %d (reverse)\n",
           primary_count, reverse_count);

    if (apm->level_estimator()->is_enabled()) {
      printf("\n--Level metrics--\n");
      printf("RMS: %d dBFS\n", -apm->level_estimator()->RMS());
    }
    if (apm->echo_cancellation()->are_metrics_enabled()) {
      EchoCancellation::Metrics metrics;
      apm->echo_cancellation()->GetMetrics(&metrics);
      printf("\n--Echo metrics--\n");
      printf("(avg, max, min)\n");
      printf("ERL: ");
      PrintStat(metrics.echo_return_loss);
      printf("ERLE: ");
      PrintStat(metrics.echo_return_loss_enhancement);
      printf("ANLP: ");
      PrintStat(metrics.a_nlp);
    }
    if (apm->echo_cancellation()->is_delay_logging_enabled()) {
      int median = 0;
      int std = 0;
      apm->echo_cancellation()->GetDelayMetrics(&median, &std);
      printf("\n--Delay metrics--\n");
      printf("Median: %3d\n", median);
      printf("Standard deviation: %3d\n", std);
    }
  }

  if (!pb_file) {
    int8_t temp_int8;
    if (far_file) {
      read_count = fread(&temp_int8, sizeof(temp_int8), 1, far_file);
      EXPECT_NE(0, feof(far_file)) << "Far-end file not fully processed";
    }

    read_count = fread(&temp_int8, sizeof(temp_int8), 1, near_file);
    EXPECT_NE(0, feof(near_file)) << "Near-end file not fully processed";

    if (!simulating) {
      read_count = fread(&temp_int8, sizeof(temp_int8), 1, event_file);
      EXPECT_NE(0, feof(event_file)) << "Event file not fully processed";
      read_count = fread(&temp_int8, sizeof(temp_int8), 1, delay_file);
      EXPECT_NE(0, feof(delay_file)) << "Delay file not fully processed";
      read_count = fread(&temp_int8, sizeof(temp_int8), 1, drift_file);
      EXPECT_NE(0, feof(drift_file)) << "Drift file not fully processed";
    }
  }

  if (perf_testing) {
    if (primary_count > 0) {
      WebRtc_Word64 exec_time = acc_ticks.Milliseconds();
      printf("\nTotal time: %.3f s, file time: %.2f s\n",
             exec_time * 0.001, primary_count * 0.01);
      printf("Time per frame: %.3f ms (average), %.3f ms (max),"
             " %.3f ms (min)\n",
             (exec_time * 1.0) / primary_count,
             (max_time_us + max_time_reverse_us) / 1000.0,
             (min_time_us + min_time_reverse_us) / 1000.0);
      // Record the results with Perf test tools.
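      // exec_time is in milliseconds; multiplying by 1000 before dividing by
      // the frame count yields the average microseconds per 10 ms primary
      // frame.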
      webrtc::test::PrintResult("time_per_10ms_frame", "", "audioproc",
          (exec_time * 1000) / primary_count, "us", false);
    } else {
      printf("Warning: no capture frames\n");
    }
  }

  AudioProcessing::Destroy(apm);
  apm = NULL;
}
}  // namespace

int main(int argc, char* argv[])
{
  void_main(argc, argv);

  // Optional, but removes memory leak noise from Valgrind.
  google::protobuf::ShutdownProtobufLibrary();
  return 0;
}