/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

/* analog_agc.c
 *
 * Using a feedback system, determines an appropriate analog volume level
 * given an input signal and current volume level. Targets a conservative
 * signal level and is intended for use with a digital AGC to apply
 * additional gain.
 *
 */

#include <assert.h>
#include <stdlib.h>
#ifdef WEBRTC_AGC_DEBUG_DUMP
#include <stdio.h>
#endif
#include "webrtc/modules/audio_processing/agc/analog_agc.h"

/* The slope of the curve in Q13 */
static const int16_t kSlope1[8] = {21793, 12517, 7189, 4129, 2372, 1362, 472, 78};

/* The offset in Q14 */
static const int16_t kOffset1[8] = {25395, 23911, 22206, 20737, 19612, 18805, 17951,
        17367};

/* The slope of the curve in Q13 */
static const int16_t kSlope2[8] = {2063, 1731, 1452, 1218, 1021, 857, 597, 337};

/* The offset in Q14 */
static const int16_t kOffset2[8] = {18432, 18379, 18290, 18177, 18052, 17920, 17670,
        17286};

static const int16_t kMuteGuardTimeMs = 8000;
static const int16_t kInitCheck = 42;

/* Default settings if config is not used */
#define AGC_DEFAULT_TARGET_LEVEL 3
#define AGC_DEFAULT_COMP_GAIN 9
/* This is the target level for the analog part in ENV scale. To convert to RMS scale you
 * have to add OFFSET_ENV_TO_RMS.
 */
#define ANALOG_TARGET_LEVEL 11
#define ANALOG_TARGET_LEVEL_2 5 // ANALOG_TARGET_LEVEL / 2
/* Offset between RMS scale (analog part) and ENV scale (digital part). This value actually
 * varies with the FIXED_ANALOG_TARGET_LEVEL, hence we should in the future replace it with
 * a table.
 */
#define OFFSET_ENV_TO_RMS 9
/* The reference input level at which the digital part gives an output of targetLevelDbfs
 * (desired level) if we have no compression gain. This level should be set high enough not
 * to compress the peaks due to the dynamics.
 */
#define DIGITAL_REF_AT_0_COMP_GAIN 4
/* Speed of reference level decrease.
 */
#define DIFF_REF_TO_ANALOG 5

#ifdef MIC_LEVEL_FEEDBACK
#define NUM_BLOCKS_IN_SAT_BEFORE_CHANGE_TARGET 7
#endif
/* Size of analog gain table */
#define GAIN_TBL_LEN 32
/* Matlab code:
 * fprintf(1, '\t%i, %i, %i, %i,\n', round(10.^(linspace(0,10,32)/20) * 2^12));
 */
/* Q12 */
static const uint16_t kGainTableAnalog[GAIN_TBL_LEN] = {4096, 4251, 4412, 4579, 4752,
        4932, 5118, 5312, 5513, 5722, 5938, 6163, 6396, 6638, 6889, 7150, 7420, 7701, 7992,
        8295, 8609, 8934, 9273, 9623, 9987, 10365, 10758, 11165, 11587, 12025, 12480, 12953};

/* Gain/Suppression tables for virtual Mic (in Q10) */
static const uint16_t kGainTableVirtualMic[128] = {1052, 1081, 1110, 1141, 1172, 1204,
        1237, 1271, 1305, 1341, 1378, 1416, 1454, 1494, 1535, 1577, 1620, 1664, 1710, 1757,
        1805, 1854, 1905, 1957, 2010, 2065, 2122, 2180, 2239, 2301, 2364, 2428, 2495, 2563,
        2633, 2705, 2779, 2855, 2933, 3013, 3096, 3180, 3267, 3357, 3449, 3543, 3640, 3739,
        3842, 3947, 4055, 4166, 4280, 4397, 4517, 4640, 4767, 4898, 5032, 5169, 5311, 5456,
        5605, 5758, 5916, 6078, 6244, 6415, 6590, 6770, 6956, 7146, 7341, 7542, 7748, 7960,
        8178, 8402, 8631, 8867, 9110, 9359, 9615, 9878, 10148, 10426, 10711, 11004, 11305,
        11614, 11932, 12258, 12593, 12938, 13292, 13655, 14029, 14412, 14807, 15212, 15628,
        16055, 16494, 16945, 17409, 17885, 18374, 18877, 19393, 19923, 20468, 21028, 21603,
        22194, 22801, 23425, 24065, 24724, 25400, 26095, 26808, 27541, 28295, 29069, 29864,
        30681, 31520, 32382};
static const uint16_t kSuppressionTableVirtualMic[128] = {1024, 1006, 988, 970, 952,
        935, 918, 902, 886, 870, 854, 839, 824, 809, 794, 780, 766, 752, 739, 726, 713, 700,
        687, 675, 663, 651, 639, 628, 616, 605, 594, 584, 573, 563, 553, 543, 533, 524, 514,
        505, 496, 487, 478, 470, 461, 453, 445, 437, 429, 421, 414, 406, 399, 392, 385, 378,
        371, 364, 358, 351, 345, 339, 333, 327, 321, 315, 309, 304, 298, 293, 288, 283, 278,
        273, 268, 263, 258, 254, 249, 244, 240, 236, 232, 227, 223, 219, 215, 211, 208, 204,
        200, 197, 193, 190, 186, 183, 180, 176, 173, 170, 167, 164, 161, 158, 155, 153, 150,
        147, 145, 142, 139, 137, 134, 132, 130, 127, 125, 123, 121, 118, 116, 114, 112, 110,
        108, 106, 104, 102};

/* Table for target energy levels. Values in Q(-7)
 * Matlab code
 * targetLevelTable = fprintf('%d,\t%d,\t%d,\t%d,\n', round((32767*10.^(-(0:63)'/20)).^2*16/2^7) */

static const int32_t kTargetLevelTable[64] = {134209536, 106606424, 84680493, 67264106,
        53429779, 42440782, 33711911, 26778323, 21270778, 16895980, 13420954, 10660642,
        8468049, 6726411, 5342978, 4244078, 3371191, 2677832, 2127078, 1689598, 1342095,
        1066064, 846805, 672641, 534298, 424408, 337119, 267783, 212708, 168960, 134210,
        106606, 84680, 67264, 53430, 42441, 33712, 26778, 21271, 16896, 13421, 10661, 8468,
        6726, 5343, 4244, 3371, 2678, 2127, 1690, 1342, 1066, 847, 673, 534, 424, 337, 268,
        213, 169, 134, 107, 85, 67};

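/* Illustrative sketch (not part of the original file): regenerating the
 * kTargetLevelTable entries above in C rather than Matlab. Index i is the
 * target level in -dBOv; each entry is the corresponding squared amplitude
 * scaled by 16 / 2^7, i.e. the Q(-7) energy of a 16-sample block at that
 * level. The helper name and <math.h> usage are assumptions for the example.
 */
#if 0
#include <math.h>
static void RegenerateTargetLevelTable(int32_t table[64])
{
    int i;
    for (i = 0; i < 64; i++)
    {
        double amplitude = 32767.0 * pow(10.0, -i / 20.0); /* -i dBOv peak. */
        table[i] = (int32_t)floor(amplitude * amplitude * 16.0 / 128.0 + 0.5);
    }
}
#endif
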
int WebRtcAgc_AddMic(void *state, int16_t *in_mic, int16_t *in_mic_H,
                     int16_t samples)
{
    int32_t nrg, max_nrg, sample, tmp32;
    int32_t *ptr;
    uint16_t targetGainIdx, gain;
    int16_t i, n, L, M, subFrames, tmp16, tmp_speech[16];
    Agc_t *stt;
    stt = (Agc_t *)state;

    //default/initial values corresponding to 10ms for wb and swb
    M = 10;
    L = 16;
    subFrames = 160;

    if (stt->fs == 8000)
    {
        if (samples == 80)
        {
            subFrames = 80;
            M = 10;
            L = 8;
        } else if (samples == 160)
        {
            subFrames = 80;
            M = 20;
            L = 8;
        } else
        {
#ifdef WEBRTC_AGC_DEBUG_DUMP
            fprintf(stt->fpt,
                    "AGC->add_mic, frame %d: Invalid number of samples\n\n",
                    stt->fcount + 1);
#endif
            return -1;
        }
    } else if (stt->fs == 16000)
    {
        if (samples == 160)
        {
            subFrames = 160;
            M = 10;
            L = 16;
        } else if (samples == 320)
        {
            subFrames = 160;
            M = 20;
            L = 16;
        } else
        {
#ifdef WEBRTC_AGC_DEBUG_DUMP
            fprintf(stt->fpt,
                    "AGC->add_mic, frame %d: Invalid number of samples\n\n",
                    stt->fcount + 1);
#endif
            return -1;
        }
    } else if (stt->fs == 32000)
    {
        /* SWB is processed as 160 sample for L and H bands */
        if (samples == 160)
        {
            subFrames = 160;
            M = 10;
            L = 16;
        } else
        {
#ifdef WEBRTC_AGC_DEBUG_DUMP
            fprintf(stt->fpt,
                    "AGC->add_mic, frame %d: Invalid sample rate\n\n",
                    stt->fcount + 1);
#endif
            return -1;
        }
    }

    /* Check for valid pointers based on sampling rate */
    if ((stt->fs == 32000) && (in_mic_H == NULL))
    {
        return -1;
    }
    /* Check for valid pointer for low band */
    if (in_mic == NULL)
    {
        return -1;
    }

    /* apply slowly varying digital gain */
    if (stt->micVol > stt->maxAnalog)
    {
        /* |maxLevel| is strictly >= |micVol|, so this condition should be
         * satisfied here, ensuring there is no divide-by-zero. */
        assert(stt->maxLevel > stt->maxAnalog);

        /* Q1 */
        tmp16 = (int16_t)(stt->micVol - stt->maxAnalog);
        tmp32 = WEBRTC_SPL_MUL_16_16(GAIN_TBL_LEN - 1, tmp16);
        tmp16 = (int16_t)(stt->maxLevel - stt->maxAnalog);
        targetGainIdx = tmp32 / tmp16;
        assert(targetGainIdx < GAIN_TBL_LEN);

        /* Increment through the table towards the target gain.
         * If micVol drops below maxAnalog, we allow the gain
         * to be dropped immediately. */
        if (stt->gainTableIdx < targetGainIdx)
        {
            stt->gainTableIdx++;
        } else if (stt->gainTableIdx > targetGainIdx)
        {
            stt->gainTableIdx--;
        }

        /* Q12 */
        gain = kGainTableAnalog[stt->gainTableIdx];

        for (i = 0; i < samples; i++)
        {
            // For lower band
            tmp32 = WEBRTC_SPL_MUL_16_U16(in_mic[i], gain);
            sample = WEBRTC_SPL_RSHIFT_W32(tmp32, 12);
            if (sample > 32767)
            {
                in_mic[i] = 32767;
            } else if (sample < -32768)
            {
                in_mic[i] = -32768;
            } else
            {
                in_mic[i] = (int16_t)sample;
            }

            // For higher band
            if (stt->fs == 32000)
            {
                tmp32 = WEBRTC_SPL_MUL_16_U16(in_mic_H[i], gain);
                sample = WEBRTC_SPL_RSHIFT_W32(tmp32, 12);
                if (sample > 32767)
                {
                    in_mic_H[i] = 32767;
                } else if (sample < -32768)
                {
                    in_mic_H[i] = -32768;
                } else
                {
                    in_mic_H[i] = (int16_t)sample;
                }
            }
        }
    } else
    {
        stt->gainTableIdx = 0;
    }

    /* compute envelope */
    if ((M == 10) && (stt->inQueue > 0))
    {
        ptr = stt->env[1];
    } else
    {
        ptr = stt->env[0];
    }

    for (i = 0; i < M; i++)
    {
        /* iterate over samples */
        max_nrg = 0;
        for (n = 0; n < L; n++)
        {
            nrg = WEBRTC_SPL_MUL_16_16(in_mic[i * L + n], in_mic[i * L + n]);
            if (nrg > max_nrg)
            {
                max_nrg = nrg;
            }
        }
        ptr[i] = max_nrg;
    }

    /* compute energy */
    if ((M == 10) && (stt->inQueue > 0))
    {
        ptr = stt->Rxx16w32_array[1];
    } else
    {
        ptr = stt->Rxx16w32_array[0];
    }

    for (i = 0; i < WEBRTC_SPL_RSHIFT_W16(M, 1); i++)
    {
        if (stt->fs == 16000)
        {
            WebRtcSpl_DownsampleBy2(&in_mic[i * 32], 32, tmp_speech, stt->filterState);
        } else
        {
            memcpy(tmp_speech, &in_mic[i * 16], 16 * sizeof(short));
        }
        /* Compute energy in blocks of 16 samples */
        ptr[i] = WebRtcSpl_DotProductWithScale(tmp_speech, tmp_speech, 16, 4);
    }

    /* update queue information */
    if ((stt->inQueue == 0) && (M == 10))
    {
        stt->inQueue = 1;
    } else
    {
        stt->inQueue = 2;
    }

    /* call VAD (use low band only) */
    for (i = 0; i < samples; i += subFrames)
    {
        WebRtcAgc_ProcessVad(&stt->vadMic, &in_mic[i], subFrames);
    }

    return 0;
}

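/* Usage sketch (not part of the original file): feeding one 10 ms capture
 * frame at 16 kHz into the analog AGC before WebRtcAgc_Process() is called.
 * The buffer name and frame size below are assumptions for the example;
 * |agc| is a handle obtained from WebRtcAgc_Create()/WebRtcAgc_Init().
 */
#if 0
{
    int16_t near_frame[160];  /* 10 ms of 16 kHz mono capture audio. */
    /* ... fill near_frame from the capture device ... */
    if (WebRtcAgc_AddMic(agc, near_frame, NULL, 160) != 0)
    {
        /* Invalid frame size or NULL pointer for the configured rate. */
    }
}
#endif
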
int WebRtcAgc_AddFarend(void *state, const int16_t *in_far, int16_t samples)
{
    int32_t errHandle = 0;
    int16_t i, subFrames;
    Agc_t *stt;
    stt = (Agc_t *)state;

    if (stt == NULL)
    {
        return -1;
    }

    if (stt->fs == 8000)
    {
        if ((samples != 80) && (samples != 160))
        {
#ifdef WEBRTC_AGC_DEBUG_DUMP
            fprintf(stt->fpt,
                    "AGC->add_far_end, frame %d: Invalid number of samples\n\n",
                    stt->fcount);
#endif
            return -1;
        }
        subFrames = 80;
    } else if (stt->fs == 16000)
    {
        if ((samples != 160) && (samples != 320))
        {
#ifdef WEBRTC_AGC_DEBUG_DUMP
            fprintf(stt->fpt,
                    "AGC->add_far_end, frame %d: Invalid number of samples\n\n",
                    stt->fcount);
#endif
            return -1;
        }
        subFrames = 160;
    } else if (stt->fs == 32000)
    {
        if ((samples != 160) && (samples != 320))
        {
#ifdef WEBRTC_AGC_DEBUG_DUMP
            fprintf(stt->fpt,
                    "AGC->add_far_end, frame %d: Invalid number of samples\n\n",
                    stt->fcount);
#endif
            return -1;
        }
        subFrames = 160;
    } else
    {
#ifdef WEBRTC_AGC_DEBUG_DUMP
        fprintf(stt->fpt,
                "AGC->add_far_end, frame %d: Invalid sample rate\n\n",
                stt->fcount + 1);
#endif
        return -1;
    }

    for (i = 0; i < samples; i += subFrames)
    {
        errHandle += WebRtcAgc_AddFarendToDigital(&stt->digitalAgc, &in_far[i], subFrames);
    }

    return errHandle;
}

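/* Usage sketch (not part of the original file): the far-end (render) signal
 * is handed to the digital AGC via WebRtcAgc_AddFarendToDigital() above. A
 * typical caller passes each 10 ms playout frame before processing the
 * corresponding capture frame; |far_frame| and the 16 kHz frame size are
 * assumptions for the example.
 */
#if 0
{
    int16_t far_frame[160];  /* 10 ms of 16 kHz playout audio. */
    /* ... fill far_frame with the audio about to be rendered ... */
    WebRtcAgc_AddFarend(agc, far_frame, 160);
}
#endif
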
int WebRtcAgc_VirtualMic(void *agcInst, int16_t *in_near, int16_t *in_near_H,
                         int16_t samples, int32_t micLevelIn,
                         int32_t *micLevelOut)
{
    int32_t tmpFlt, micLevelTmp, gainIdx;
    uint16_t gain;
    int16_t ii;
    Agc_t *stt;

    uint32_t nrg;
    int16_t sampleCntr;
    uint32_t frameNrg = 0;
    uint32_t frameNrgLimit = 5500;
    int16_t numZeroCrossing = 0;
    const int16_t kZeroCrossingLowLim = 15;
    const int16_t kZeroCrossingHighLim = 20;

    stt = (Agc_t *)agcInst;

    /*
     * Before applying gain decide if this is a low-level signal.
     * The idea is that digital AGC will not adapt to low-level
     * signals.
     */
    if (stt->fs != 8000)
    {
        frameNrgLimit = frameNrgLimit << 1;
    }

    frameNrg = WEBRTC_SPL_MUL_16_16(in_near[0], in_near[0]);
    for (sampleCntr = 1; sampleCntr < samples; sampleCntr++)
    {

        // increment frame energy if it is less than the limit
        // the correct value of the energy is not important
        if (frameNrg < frameNrgLimit)
        {
            nrg = WEBRTC_SPL_MUL_16_16(in_near[sampleCntr], in_near[sampleCntr]);
            frameNrg += nrg;
        }

        // Count the zero crossings
        numZeroCrossing += ((in_near[sampleCntr] ^ in_near[sampleCntr - 1]) < 0);
    }

    if ((frameNrg < 500) || (numZeroCrossing <= 5))
    {
        stt->lowLevelSignal = 1;
    } else if (numZeroCrossing <= kZeroCrossingLowLim)
    {
        stt->lowLevelSignal = 0;
    } else if (frameNrg <= frameNrgLimit)
    {
        stt->lowLevelSignal = 1;
    } else if (numZeroCrossing >= kZeroCrossingHighLim)
    {
        stt->lowLevelSignal = 1;
    } else
    {
        stt->lowLevelSignal = 0;
    }

    micLevelTmp = WEBRTC_SPL_LSHIFT_W32(micLevelIn, stt->scale);
    /* Set desired level */
    gainIdx = stt->micVol;
    if (stt->micVol > stt->maxAnalog)
    {
        gainIdx = stt->maxAnalog;
    }
    if (micLevelTmp != stt->micRef)
    {
        /* Something has happened with the physical level, restart. */
        stt->micRef = micLevelTmp;
        stt->micVol = 127;
        *micLevelOut = 127;
        stt->micGainIdx = 127;
        gainIdx = 127;
    }
    /* Pre-process the signal to emulate the microphone level. */
    /* Take one step at a time in the gain table. */
    if (gainIdx > 127)
    {
        gain = kGainTableVirtualMic[gainIdx - 128];
    } else
    {
        gain = kSuppressionTableVirtualMic[127 - gainIdx];
    }
    for (ii = 0; ii < samples; ii++)
    {
        tmpFlt = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_U16(in_near[ii], gain), 10);
        if (tmpFlt > 32767)
        {
            tmpFlt = 32767;
            gainIdx--;
            if (gainIdx >= 127)
            {
                gain = kGainTableVirtualMic[gainIdx - 127];
            } else
            {
                gain = kSuppressionTableVirtualMic[127 - gainIdx];
            }
        }
        if (tmpFlt < -32768)
        {
            tmpFlt = -32768;
            gainIdx--;
            if (gainIdx >= 127)
            {
                gain = kGainTableVirtualMic[gainIdx - 127];
            } else
            {
                gain = kSuppressionTableVirtualMic[127 - gainIdx];
            }
        }
        in_near[ii] = (int16_t)tmpFlt;
        if (stt->fs == 32000)
        {
            tmpFlt = WEBRTC_SPL_MUL_16_U16(in_near_H[ii], gain);
            tmpFlt = WEBRTC_SPL_RSHIFT_W32(tmpFlt, 10);
            if (tmpFlt > 32767)
            {
                tmpFlt = 32767;
            }
            if (tmpFlt < -32768)
            {
                tmpFlt = -32768;
            }
            in_near_H[ii] = (int16_t)tmpFlt;
        }
    }
    /* Set the level we (finally) used */
    stt->micGainIdx = gainIdx;
//    *micLevelOut = stt->micGainIdx;
    *micLevelOut = WEBRTC_SPL_RSHIFT_W32(stt->micGainIdx, stt->scale);
    /* Add to Mic as if it was the output from a true microphone */
    if (WebRtcAgc_AddMic(agcInst, in_near, in_near_H, samples) != 0)
    {
        return -1;
    }
    return 0;
}

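/* Usage sketch (not part of the original file): when the platform offers no
 * adjustable analog microphone level, WebRtcAgc_VirtualMic() can be used
 * instead of WebRtcAgc_AddMic() to emulate one in software. The caller keeps
 * feeding back the level the function returns; the names and 0..255 level
 * range below are assumptions for the example.
 */
#if 0
{
    static int32_t virtualMicLevel = 127;  /* Mid-point of the level range. */
    int16_t near_frame[160];               /* 10 ms of 16 kHz capture audio. */
    int32_t newLevel = 0;

    /* ... fill near_frame from the capture device ... */
    if (WebRtcAgc_VirtualMic(agc, near_frame, NULL, 160, virtualMicLevel,
                             &newLevel) == 0)
    {
        virtualMicLevel = newLevel;  /* Reused as micLevelIn next frame. */
    }
}
#endif
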
void WebRtcAgc_UpdateAgcThresholds(Agc_t *stt)
{

    int16_t tmp16;
#ifdef MIC_LEVEL_FEEDBACK
    int zeros;

    if (stt->micLvlSat)
    {
        /* Lower the analog target level since we have reached its maximum */
        zeros = WebRtcSpl_NormW32(stt->Rxx160_LPw32);
        stt->targetIdxOffset = WEBRTC_SPL_RSHIFT_W16((3 * zeros) - stt->targetIdx - 2, 2);
    }
#endif

    /* Set analog target level in envelope dBOv scale */
    tmp16 = (DIFF_REF_TO_ANALOG * stt->compressionGaindB) + ANALOG_TARGET_LEVEL_2;
    tmp16 = WebRtcSpl_DivW32W16ResW16((int32_t)tmp16, ANALOG_TARGET_LEVEL);
    stt->analogTarget = DIGITAL_REF_AT_0_COMP_GAIN + tmp16;
    if (stt->analogTarget < DIGITAL_REF_AT_0_COMP_GAIN)
    {
        stt->analogTarget = DIGITAL_REF_AT_0_COMP_GAIN;
    }
    if (stt->agcMode == kAgcModeFixedDigital)
    {
        /* Adjust for different parameter interpretation in FixedDigital mode */
        stt->analogTarget = stt->compressionGaindB;
    }
#ifdef MIC_LEVEL_FEEDBACK
    stt->analogTarget += stt->targetIdxOffset;
#endif
    /* Since the offset between RMS and ENV is not constant, we should make this into a
     * table, but for now, we'll stick with a constant, tuned for the chosen analog
     * target level.
     */
    stt->targetIdx = ANALOG_TARGET_LEVEL + OFFSET_ENV_TO_RMS;
#ifdef MIC_LEVEL_FEEDBACK
    stt->targetIdx += stt->targetIdxOffset;
#endif
    /* Analog adaptation limits */
    /* analogTargetLevel = round((32767*10^(-targetIdx/20))^2*16/2^7) */
    stt->analogTargetLevel = RXX_BUFFER_LEN * kTargetLevelTable[stt->targetIdx]; /* ex. -20 dBov */
    stt->startUpperLimit = RXX_BUFFER_LEN * kTargetLevelTable[stt->targetIdx - 1]; /* -19 dBov */
    stt->startLowerLimit = RXX_BUFFER_LEN * kTargetLevelTable[stt->targetIdx + 1]; /* -21 dBov */
    stt->upperPrimaryLimit = RXX_BUFFER_LEN * kTargetLevelTable[stt->targetIdx - 2]; /* -18 dBov */
    stt->lowerPrimaryLimit = RXX_BUFFER_LEN * kTargetLevelTable[stt->targetIdx + 2]; /* -22 dBov */
    stt->upperSecondaryLimit = RXX_BUFFER_LEN * kTargetLevelTable[stt->targetIdx - 5]; /* -15 dBov */
    stt->lowerSecondaryLimit = RXX_BUFFER_LEN * kTargetLevelTable[stt->targetIdx + 5]; /* -25 dBov */
    stt->upperLimit = stt->startUpperLimit;
    stt->lowerLimit = stt->startLowerLimit;
}

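/* Worked example (added for clarity, not in the original): with the default
 * configuration installed by WebRtcAgc_Init() (targetLevelDbfs = 3,
 * compressionGaindB = 9, adaptive mode) and assuming
 * WebRtcSpl_DivW32W16ResW16() truncates like plain C integer division:
 *   tmp16        = (5 * 9 + 5) / 11 = 4
 *   analogTarget = 4 + 4           = 8   (envelope dBOv scale)
 *   targetIdx    = 11 + 9          = 20  (-20 dBOv entry of kTargetLevelTable)
 * so the start limits straddle -19/-21 dBOv and the secondary limits
 * -15/-25 dBOv, exactly as annotated above.
 */
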
void WebRtcAgc_SaturationCtrl(Agc_t *stt, uint8_t *saturated, int32_t *env)
{
    int16_t i, tmpW16;

    /* Check if the signal is saturated */
    for (i = 0; i < 10; i++)
    {
        tmpW16 = (int16_t)WEBRTC_SPL_RSHIFT_W32(env[i], 20);
        if (tmpW16 > 875)
        {
            stt->envSum += tmpW16;
        }
    }

    if (stt->envSum > 25000)
    {
        *saturated = 1;
        stt->envSum = 0;
    }

    /* stt->envSum *= 0.99; */
    stt->envSum = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(stt->envSum,
            (int16_t)32440, 15);
}

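/* Note added for clarity (not in the original): env[] holds the maximum
 * squared sample of each 16-sample block, so env[i] >> 20 > 875 corresponds
 * to a peak sample magnitude of roughly sqrt(875 * 2^20) ~= 30300, i.e.
 * about 92% of full scale. The final line decays envSum by 32440/32768
 * ~= 0.99 each time the function runs, matching the commented-out
 * floating-point expression.
 */
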
void WebRtcAgc_ZeroCtrl(Agc_t *stt, int32_t *inMicLevel, int32_t *env)
{
    int16_t i;
    int32_t tmp32 = 0;
    int32_t midVal;

    /* Is the input signal zero? */
    for (i = 0; i < 10; i++)
    {
        tmp32 += env[i];
    }

    /* Each block is allowed to have a few non-zero
     * samples.
     */
    if (tmp32 < 500)
    {
        stt->msZero += 10;
    } else
    {
        stt->msZero = 0;
    }

    if (stt->muteGuardMs > 0)
    {
        stt->muteGuardMs -= 10;
    }

    if (stt->msZero > 500)
    {
        stt->msZero = 0;

        /* Increase microphone level only if it's less than 50% */
        midVal = WEBRTC_SPL_RSHIFT_W32(stt->maxAnalog + stt->minLevel + 1, 1);
        if (*inMicLevel < midVal)
        {
            /* *inMicLevel *= 1.1; */
            tmp32 = WEBRTC_SPL_MUL(1126, *inMicLevel);
            *inMicLevel = WEBRTC_SPL_RSHIFT_W32(tmp32, 10);
            /* Reduces risk of a muted mic repeatedly triggering excessive levels due
             * to zero signal detection. */
            *inMicLevel = WEBRTC_SPL_MIN(*inMicLevel, stt->zeroCtrlMax);
            stt->micVol = *inMicLevel;
        }

#ifdef WEBRTC_AGC_DEBUG_DUMP
        fprintf(stt->fpt,
                "\t\tAGC->zeroCntrl, frame %d: 500 ms under threshold,"
                " micVol: %d\n",
                stt->fcount,
                stt->micVol);
#endif

        stt->activeSpeech = 0;
        stt->Rxx16_LPw32Max = 0;

        /* The AGC has a tendency (due to problems with the VAD parameters), to
         * vastly increase the volume after a muting event. This timer prevents
         * upwards adaptation for a short period. */
        stt->muteGuardMs = kMuteGuardTimeMs;
    }
}

void WebRtcAgc_SpeakerInactiveCtrl(Agc_t *stt)
{
    /* Check if the near end speaker is inactive.
     * If that is the case the VAD threshold is
     * increased since the VAD speech model gets
     * more sensitive to any sound after a long
     * silence.
     */

    int32_t tmp32;
    int16_t vadThresh;

    if (stt->vadMic.stdLongTerm < 2500)
    {
        stt->vadThreshold = 1500;
    } else
    {
        vadThresh = kNormalVadThreshold;
        if (stt->vadMic.stdLongTerm < 4500)
        {
            /* Scale between min and max threshold */
            vadThresh += WEBRTC_SPL_RSHIFT_W16(4500 - stt->vadMic.stdLongTerm, 1);
        }

        /* stt->vadThreshold = (31 * stt->vadThreshold + vadThresh) / 32; */
        tmp32 = (int32_t)vadThresh;
        tmp32 += WEBRTC_SPL_MUL_16_16((int16_t)31, stt->vadThreshold);
        stt->vadThreshold = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32, 5);
    }
}

void WebRtcAgc_ExpCurve(int16_t volume, int16_t *index)
{
    // volume in Q14
    // index in [0-7]
    /* 8 different curves */
    if (volume > 5243)
    {
        if (volume > 7864)
        {
            if (volume > 12124)
            {
                *index = 7;
            } else
            {
                *index = 6;
            }
        } else
        {
            if (volume > 6554)
            {
                *index = 5;
            } else
            {
                *index = 4;
            }
        }
    } else
    {
        if (volume > 2621)
        {
            if (volume > 3932)
            {
                *index = 3;
            } else
            {
                *index = 2;
            }
        } else
        {
            if (volume > 1311)
            {
                *index = 1;
            } else
            {
                *index = 0;
            }
        }
    }
}

int32_t WebRtcAgc_ProcessAnalog(void *state, int32_t inMicLevel,
                                int32_t *outMicLevel,
                                int16_t vadLogRatio,
                                int16_t echo, uint8_t *saturationWarning)
{
    uint32_t tmpU32;
    int32_t Rxx16w32, tmp32;
    int32_t inMicLevelTmp, lastMicVol;
    int16_t i;
    uint8_t saturated = 0;
    Agc_t *stt;

    stt = (Agc_t *)state;
    inMicLevelTmp = WEBRTC_SPL_LSHIFT_W32(inMicLevel, stt->scale);

    if (inMicLevelTmp > stt->maxAnalog)
    {
#ifdef WEBRTC_AGC_DEBUG_DUMP
        fprintf(stt->fpt,
                "\tAGC->ProcessAnalog, frame %d: micLvl > maxAnalog\n",
                stt->fcount);
#endif
        return -1;
    } else if (inMicLevelTmp < stt->minLevel)
    {
#ifdef WEBRTC_AGC_DEBUG_DUMP
        fprintf(stt->fpt,
                "\tAGC->ProcessAnalog, frame %d: micLvl < minLevel\n",
                stt->fcount);
#endif
        return -1;
    }

    if (stt->firstCall == 0)
    {
        int32_t tmpVol;
        stt->firstCall = 1;
        tmp32 = WEBRTC_SPL_RSHIFT_W32((stt->maxLevel - stt->minLevel) * (int32_t)51, 9);
        tmpVol = (stt->minLevel + tmp32);

        /* If the mic level is very low at start, increase it! */
        if ((inMicLevelTmp < tmpVol) && (stt->agcMode == kAgcModeAdaptiveAnalog))
        {
            inMicLevelTmp = tmpVol;
        }
        stt->micVol = inMicLevelTmp;
    }

    /* Set the mic level to the previous output value if there is digital input gain */
    if ((inMicLevelTmp == stt->maxAnalog) && (stt->micVol > stt->maxAnalog))
    {
        inMicLevelTmp = stt->micVol;
    }

    /* If the mic level was manually changed to a very low value raise it! */
    if ((inMicLevelTmp != stt->micVol) && (inMicLevelTmp < stt->minOutput))
    {
        tmp32 = WEBRTC_SPL_RSHIFT_W32((stt->maxLevel - stt->minLevel) * (int32_t)51, 9);
        inMicLevelTmp = (stt->minLevel + tmp32);
        stt->micVol = inMicLevelTmp;
#ifdef MIC_LEVEL_FEEDBACK
        //stt->numBlocksMicLvlSat = 0;
#endif
#ifdef WEBRTC_AGC_DEBUG_DUMP
        fprintf(stt->fpt,
                "\tAGC->ProcessAnalog, frame %d: micLvl < minLevel by manual"
                " decrease, raise vol\n",
                stt->fcount);
#endif
    }

    if (inMicLevelTmp != stt->micVol)
    {
        if (inMicLevel == stt->lastInMicLevel) {
            // We requested a volume adjustment, but it didn't occur. This is
            // probably due to a coarse quantization of the volume slider.
            // Restore the requested value to prevent getting stuck.
            inMicLevelTmp = stt->micVol;
        }
        else {
            // As long as the value changed, update to match.
            stt->micVol = inMicLevelTmp;
        }
    }

    if (inMicLevelTmp > stt->maxLevel)
    {
        // Always allow the user to raise the volume above the maxLevel.
        stt->maxLevel = inMicLevelTmp;
    }

    // Store last value here, after we've taken care of manual updates etc.
    stt->lastInMicLevel = inMicLevel;
    lastMicVol = stt->micVol;

    /* Checks if the signal is saturated. Also a check if individual samples
     * are larger than 12000 is done. If they are the counter for increasing
     * the volume level is set to -100ms
     */
    WebRtcAgc_SaturationCtrl(stt, &saturated, stt->env[0]);

    /* The AGC is always allowed to lower the level if the signal is saturated */
    if (saturated == 1)
    {
        /* Lower the recording level
         * Rxx160_LP is adjusted down because it is so slow it could
         * cause the AGC to make wrong decisions. */
        /* stt->Rxx160_LPw32 *= 0.875; */
        stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(WEBRTC_SPL_RSHIFT_W32(stt->Rxx160_LPw32, 3), 7);

        stt->zeroCtrlMax = stt->micVol;

        /* stt->micVol *= 0.903; */
        tmp32 = inMicLevelTmp - stt->minLevel;
        tmpU32 = WEBRTC_SPL_UMUL(29591, (uint32_t)(tmp32));
        stt->micVol = (int32_t)WEBRTC_SPL_RSHIFT_U32(tmpU32, 15) + stt->minLevel;
        if (stt->micVol > lastMicVol - 2)
        {
            stt->micVol = lastMicVol - 2;
        }
        inMicLevelTmp = stt->micVol;

#ifdef WEBRTC_AGC_DEBUG_DUMP
        fprintf(stt->fpt,
                "\tAGC->ProcessAnalog, frame %d: saturated, micVol = %d\n",
                stt->fcount,
                stt->micVol);
#endif

        if (stt->micVol < stt->minOutput)
        {
            *saturationWarning = 1;
        }

        /* Reset counter for decrease of volume level to avoid
         * decreasing too much. The saturation control can still
         * lower the level if needed. */
        stt->msTooHigh = -100;

        /* Enable the control mechanism to ensure that our measure,
         * Rxx160_LP, is in the correct range. This must be done since
         * the measure is very slow. */
        stt->activeSpeech = 0;
        stt->Rxx16_LPw32Max = 0;

        /* Reset to initial values */
        stt->msecSpeechInnerChange = kMsecSpeechInner;
        stt->msecSpeechOuterChange = kMsecSpeechOuter;
        stt->changeToSlowMode = 0;

        stt->muteGuardMs = 0;

        stt->upperLimit = stt->startUpperLimit;
        stt->lowerLimit = stt->startLowerLimit;
#ifdef MIC_LEVEL_FEEDBACK
        //stt->numBlocksMicLvlSat = 0;
#endif
    }

    /* Check if the input speech is zero. If so the mic volume
     * is increased. On some computers the input is zero up as high
     * level as 17% */
    WebRtcAgc_ZeroCtrl(stt, &inMicLevelTmp, stt->env[0]);

    /* Check if the near end speaker is inactive.
     * If that is the case the VAD threshold is
     * increased since the VAD speech model gets
     * more sensitive to any sound after a long
     * silence.
     */
    WebRtcAgc_SpeakerInactiveCtrl(stt);

    for (i = 0; i < 5; i++)
    {
        /* Computed on blocks of 16 samples */

        Rxx16w32 = stt->Rxx16w32_array[0][i];

        /* Rxx160w32 in Q(-7) */
        tmp32 = WEBRTC_SPL_RSHIFT_W32(Rxx16w32 - stt->Rxx16_vectorw32[stt->Rxx16pos], 3);
        stt->Rxx160w32 = stt->Rxx160w32 + tmp32;
        stt->Rxx16_vectorw32[stt->Rxx16pos] = Rxx16w32;

        /* Circular buffer */
        stt->Rxx16pos++;
        if (stt->Rxx16pos == RXX_BUFFER_LEN)
        {
            stt->Rxx16pos = 0;
        }

        /* Rxx16_LPw32 in Q(-4) */
        tmp32 = WEBRTC_SPL_RSHIFT_W32(Rxx16w32 - stt->Rxx16_LPw32, kAlphaShortTerm);
        stt->Rxx16_LPw32 = (stt->Rxx16_LPw32) + tmp32;

        if (vadLogRatio > stt->vadThreshold)
        {
            /* Speech detected! */

            /* Check if Rxx160_LP is in the correct range. If
             * it is too high/low then we set it to the maximum of
             * Rxx16_LPw32 during the first 200ms of speech.
             */
            if (stt->activeSpeech < 250)
            {
                stt->activeSpeech += 2;

                if (stt->Rxx16_LPw32 > stt->Rxx16_LPw32Max)
                {
                    stt->Rxx16_LPw32Max = stt->Rxx16_LPw32;
                }
            } else if (stt->activeSpeech == 250)
            {
                stt->activeSpeech += 2;
                tmp32 = WEBRTC_SPL_RSHIFT_W32(stt->Rxx16_LPw32Max, 3);
                stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(tmp32, RXX_BUFFER_LEN);
            }

            tmp32 = WEBRTC_SPL_RSHIFT_W32(stt->Rxx160w32 - stt->Rxx160_LPw32, kAlphaLongTerm);
            stt->Rxx160_LPw32 = stt->Rxx160_LPw32 + tmp32;

            if (stt->Rxx160_LPw32 > stt->upperSecondaryLimit)
            {
                stt->msTooHigh += 2;
                stt->msTooLow = 0;
                stt->changeToSlowMode = 0;

                if (stt->msTooHigh > stt->msecSpeechOuterChange)
                {
                    stt->msTooHigh = 0;

                    /* Lower the recording level */
                    /* Multiply by 0.828125 which corresponds to decreasing ~0.8dB */
                    tmp32 = WEBRTC_SPL_RSHIFT_W32(stt->Rxx160_LPw32, 6);
                    stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(tmp32, 53);

                    /* Reduce the max gain to avoid excessive oscillation
                     * (but never drop below the maximum analog level).
                     * stt->maxLevel = (15 * stt->maxLevel + stt->micVol) / 16;
                     */
                    tmp32 = (15 * stt->maxLevel) + stt->micVol;
                    stt->maxLevel = WEBRTC_SPL_RSHIFT_W32(tmp32, 4);
                    stt->maxLevel = WEBRTC_SPL_MAX(stt->maxLevel, stt->maxAnalog);

                    stt->zeroCtrlMax = stt->micVol;

                    /* 0.95 in Q15 */
                    tmp32 = inMicLevelTmp - stt->minLevel;
                    tmpU32 = WEBRTC_SPL_UMUL(31130, (uint32_t)(tmp32));
                    stt->micVol = (int32_t)WEBRTC_SPL_RSHIFT_U32(tmpU32, 15) + stt->minLevel;
                    if (stt->micVol > lastMicVol - 1)
                    {
                        stt->micVol = lastMicVol - 1;
                    }
                    inMicLevelTmp = stt->micVol;

                    /* Enable the control mechanism to ensure that our measure,
                     * Rxx160_LP, is in the correct range.
                     */
                    stt->activeSpeech = 0;
                    stt->Rxx16_LPw32Max = 0;
#ifdef MIC_LEVEL_FEEDBACK
                    //stt->numBlocksMicLvlSat = 0;
#endif
#ifdef WEBRTC_AGC_DEBUG_DUMP
                    fprintf(stt->fpt,
                            "\tAGC->ProcessAnalog, frame %d: measure >"
                            " 2ndUpperLim, micVol = %d, maxLevel = %d\n",
                            stt->fcount,
                            stt->micVol,
                            stt->maxLevel);
#endif
                }
            } else if (stt->Rxx160_LPw32 > stt->upperLimit)
            {
                stt->msTooHigh += 2;
                stt->msTooLow = 0;
                stt->changeToSlowMode = 0;

                if (stt->msTooHigh > stt->msecSpeechInnerChange)
                {
                    /* Lower the recording level */
                    stt->msTooHigh = 0;
                    /* Multiply by 0.828125 which corresponds to decreasing ~0.8dB */
                    tmp32 = WEBRTC_SPL_RSHIFT_W32(stt->Rxx160_LPw32, 6);
                    stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(tmp32, 53);

                    /* Reduce the max gain to avoid excessive oscillation
                     * (but never drop below the maximum analog level).
                     * stt->maxLevel = (15 * stt->maxLevel + stt->micVol) / 16;
                     */
                    tmp32 = (15 * stt->maxLevel) + stt->micVol;
                    stt->maxLevel = WEBRTC_SPL_RSHIFT_W32(tmp32, 4);
                    stt->maxLevel = WEBRTC_SPL_MAX(stt->maxLevel, stt->maxAnalog);

                    stt->zeroCtrlMax = stt->micVol;

                    /* 0.965 in Q15 */
                    tmp32 = inMicLevelTmp - stt->minLevel;
                    tmpU32 = WEBRTC_SPL_UMUL(31621, (uint32_t)(inMicLevelTmp - stt->minLevel));
                    stt->micVol = (int32_t)WEBRTC_SPL_RSHIFT_U32(tmpU32, 15) + stt->minLevel;
                    if (stt->micVol > lastMicVol - 1)
                    {
                        stt->micVol = lastMicVol - 1;
                    }
                    inMicLevelTmp = stt->micVol;

#ifdef MIC_LEVEL_FEEDBACK
                    //stt->numBlocksMicLvlSat = 0;
#endif
#ifdef WEBRTC_AGC_DEBUG_DUMP
                    fprintf(stt->fpt,
                            "\tAGC->ProcessAnalog, frame %d: measure >"
                            " UpperLim, micVol = %d, maxLevel = %d\n",
                            stt->fcount,
                            stt->micVol,
                            stt->maxLevel);
#endif
                }
            } else if (stt->Rxx160_LPw32 < stt->lowerSecondaryLimit)
            {
                stt->msTooHigh = 0;
                stt->changeToSlowMode = 0;
                stt->msTooLow += 2;

                if (stt->msTooLow > stt->msecSpeechOuterChange)
                {
                    /* Raise the recording level */
                    int16_t index, weightFIX;
                    int16_t volNormFIX = 16384; // =1 in Q14.

                    stt->msTooLow = 0;

                    /* Normalize the volume level */
                    tmp32 = WEBRTC_SPL_LSHIFT_W32(inMicLevelTmp - stt->minLevel, 14);
                    if (stt->maxInit != stt->minLevel)
                    {
                        volNormFIX = tmp32 / (stt->maxInit - stt->minLevel);
                    }

                    /* Find correct curve */
                    WebRtcAgc_ExpCurve(volNormFIX, &index);

                    /* Compute weighting factor for the volume increase, 32^(-2*X)/2+1.05 */
                    weightFIX = kOffset1[index]
                            - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(kSlope1[index],
                                                                 volNormFIX, 13);

                    /* stt->Rxx160_LPw32 *= 1.047 [~0.2 dB]; */
                    tmp32 = WEBRTC_SPL_RSHIFT_W32(stt->Rxx160_LPw32, 6);
                    stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(tmp32, 67);

                    tmp32 = inMicLevelTmp - stt->minLevel;
                    tmpU32 = ((uint32_t)weightFIX * (uint32_t)(inMicLevelTmp - stt->minLevel));
                    stt->micVol = (int32_t)WEBRTC_SPL_RSHIFT_U32(tmpU32, 14) + stt->minLevel;
                    if (stt->micVol < lastMicVol + 2)
                    {
                        stt->micVol = lastMicVol + 2;
                    }

                    inMicLevelTmp = stt->micVol;

#ifdef MIC_LEVEL_FEEDBACK
                    /* Count ms in level saturation */
                    //if (stt->micVol > stt->maxAnalog) {
                    if (stt->micVol > 150)
                    {
                        /* mic level is saturated */
                        stt->numBlocksMicLvlSat++;
                        fprintf(stderr, "Sat mic Level: %d\n", stt->numBlocksMicLvlSat);
                    }
#endif
#ifdef WEBRTC_AGC_DEBUG_DUMP
                    fprintf(stt->fpt,
                            "\tAGC->ProcessAnalog, frame %d: measure <"
                            " 2ndLowerLim, micVol = %d\n",
                            stt->fcount,
                            stt->micVol);
#endif
                }
            } else if (stt->Rxx160_LPw32 < stt->lowerLimit)
            {
                stt->msTooHigh = 0;
                stt->changeToSlowMode = 0;
                stt->msTooLow += 2;

                if (stt->msTooLow > stt->msecSpeechInnerChange)
                {
                    /* Raise the recording level */
                    int16_t index, weightFIX;
                    int16_t volNormFIX = 16384; // =1 in Q14.

                    stt->msTooLow = 0;

                    /* Normalize the volume level */
                    tmp32 = WEBRTC_SPL_LSHIFT_W32(inMicLevelTmp - stt->minLevel, 14);
                    if (stt->maxInit != stt->minLevel)
                    {
                        volNormFIX = tmp32 / (stt->maxInit - stt->minLevel);
                    }

                    /* Find correct curve */
                    WebRtcAgc_ExpCurve(volNormFIX, &index);

                    /* Compute weighting factor for the volume increase, (3.^(-2.*X))/8+1 */
                    weightFIX = kOffset2[index]
                            - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(kSlope2[index],
                                                                 volNormFIX, 13);

                    /* stt->Rxx160_LPw32 *= 1.047 [~0.2 dB]; */
                    tmp32 = WEBRTC_SPL_RSHIFT_W32(stt->Rxx160_LPw32, 6);
                    stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(tmp32, 67);

                    tmp32 = inMicLevelTmp - stt->minLevel;
                    tmpU32 = ((uint32_t)weightFIX * (uint32_t)(inMicLevelTmp - stt->minLevel));
                    stt->micVol = (int32_t)WEBRTC_SPL_RSHIFT_U32(tmpU32, 14) + stt->minLevel;
                    if (stt->micVol < lastMicVol + 1)
                    {
                        stt->micVol = lastMicVol + 1;
                    }

                    inMicLevelTmp = stt->micVol;

#ifdef MIC_LEVEL_FEEDBACK
                    /* Count ms in level saturation */
                    //if (stt->micVol > stt->maxAnalog) {
                    if (stt->micVol > 150)
                    {
                        /* mic level is saturated */
                        stt->numBlocksMicLvlSat++;
                        fprintf(stderr, "Sat mic Level: %d\n", stt->numBlocksMicLvlSat);
                    }
#endif
#ifdef WEBRTC_AGC_DEBUG_DUMP
                    fprintf(stt->fpt,
                            "\tAGC->ProcessAnalog, frame %d: measure < LowerLim, micVol = %d\n",
                            stt->fcount,
                            stt->micVol);
#endif

                }
            } else
            {
                /* The signal is inside the desired range which is:
                 * lowerLimit < Rxx160_LP/640 < upperLimit
                 */
                if (stt->changeToSlowMode > 4000)
                {
                    stt->msecSpeechInnerChange = 1000;
                    stt->msecSpeechOuterChange = 500;
                    stt->upperLimit = stt->upperPrimaryLimit;
                    stt->lowerLimit = stt->lowerPrimaryLimit;
                } else
                {
                    stt->changeToSlowMode += 2; // in milliseconds
                }
                stt->msTooLow = 0;
                stt->msTooHigh = 0;

                stt->micVol = inMicLevelTmp;

            }
#ifdef MIC_LEVEL_FEEDBACK
            if (stt->numBlocksMicLvlSat > NUM_BLOCKS_IN_SAT_BEFORE_CHANGE_TARGET)
            {
                stt->micLvlSat = 1;
                fprintf(stderr, "target before = %d (%d)\n", stt->analogTargetLevel, stt->targetIdx);
                WebRtcAgc_UpdateAgcThresholds(stt);
                WebRtcAgc_CalculateGainTable(&(stt->digitalAgc.gainTable[0]),
                        stt->compressionGaindB, stt->targetLevelDbfs, stt->limiterEnable,
                        stt->analogTarget);
                stt->numBlocksMicLvlSat = 0;
                stt->micLvlSat = 0;
                fprintf(stderr, "target offset = %d\n", stt->targetIdxOffset);
                fprintf(stderr, "target after = %d (%d)\n", stt->analogTargetLevel, stt->targetIdx);
            }
#endif
        }
    }

    /* Ensure gain is not increased in presence of echo or after a mute event
     * (but allow the zeroCtrl() increase on the frame of a mute detection).
     */
    if (echo == 1 || (stt->muteGuardMs > 0 && stt->muteGuardMs < kMuteGuardTimeMs))
    {
        if (stt->micVol > lastMicVol)
        {
            stt->micVol = lastMicVol;
        }
    }

    /* limit the gain */
    if (stt->micVol > stt->maxLevel)
    {
        stt->micVol = stt->maxLevel;
    } else if (stt->micVol < stt->minOutput)
    {
        stt->micVol = stt->minOutput;
    }

    *outMicLevel = WEBRTC_SPL_RSHIFT_W32(stt->micVol, stt->scale);
    if (*outMicLevel > WEBRTC_SPL_RSHIFT_W32(stt->maxAnalog, stt->scale))
    {
        *outMicLevel = WEBRTC_SPL_RSHIFT_W32(stt->maxAnalog, stt->scale);
    }

    return 0;
}

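/* Note added for clarity (not in the original): the "magic" constants above
 * are fixed-point encodings of the factors given in the comments:
 * 29591/32768 ~= 0.903, 31130/32768 ~= 0.95 and 31621/32768 ~= 0.965 scale
 * the mic volume down in Q15, while 53/64 = 0.828125 (~ -0.8 dB) and
 * 67/64 ~= 1.047 (~ +0.2 dB) rescale the long-term energy measure
 * Rxx160_LPw32.
 */
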
int WebRtcAgc_Process(void *agcInst, const int16_t *in_near,
                      const int16_t *in_near_H, int16_t samples,
                      int16_t *out, int16_t *out_H, int32_t inMicLevel,
                      int32_t *outMicLevel, int16_t echo,
                      uint8_t *saturationWarning)
{
    Agc_t *stt;
    int32_t inMicLevelTmp;
    int16_t subFrames, i;
    uint8_t satWarningTmp = 0;

    stt = (Agc_t *)agcInst;

    if (stt == NULL)
    {
        return -1;
    }

    if (stt->fs == 8000)
    {
        if ((samples != 80) && (samples != 160))
        {
#ifdef WEBRTC_AGC_DEBUG_DUMP
            fprintf(stt->fpt,
                    "AGC->Process, frame %d: Invalid number of samples\n\n",
                    stt->fcount);
#endif
            return -1;
        }
        subFrames = 80;
    } else if (stt->fs == 16000)
    {
        if ((samples != 160) && (samples != 320))
        {
#ifdef WEBRTC_AGC_DEBUG_DUMP
            fprintf(stt->fpt,
                    "AGC->Process, frame %d: Invalid number of samples\n\n",
                    stt->fcount);
#endif
            return -1;
        }
        subFrames = 160;
    } else if (stt->fs == 32000)
    {
        if ((samples != 160) && (samples != 320))
        {
#ifdef WEBRTC_AGC_DEBUG_DUMP
            fprintf(stt->fpt,
                    "AGC->Process, frame %d: Invalid number of samples\n\n",
                    stt->fcount);
#endif
            return -1;
        }
        subFrames = 160;
    } else
    {
#ifdef WEBRTC_AGC_DEBUG_DUMP
        fprintf(stt->fpt,
                "AGC->Process, frame %d: Invalid sample rate\n\n",
                stt->fcount);
#endif
        return -1;
    }

    /* Check for valid pointers based on sampling rate */
    if (stt->fs == 32000 && in_near_H == NULL)
    {
        return -1;
    }
    /* Check for valid pointers for low band */
    if (in_near == NULL)
    {
        return -1;
    }

    *saturationWarning = 0;
    //TODO: PUT IN RANGE CHECKING FOR INPUT LEVELS
    *outMicLevel = inMicLevel;
    inMicLevelTmp = inMicLevel;

    // TODO(andrew): clearly we don't need input and output pointers...
    //   Change the interface to take a shared input/output.
    if (in_near != out)
    {
        // Only needed if they don't already point to the same place.
        memcpy(out, in_near, samples * sizeof(int16_t));
    }
    if (stt->fs == 32000)
    {
        if (in_near_H != out_H)
        {
            memcpy(out_H, in_near_H, samples * sizeof(int16_t));
        }
    }

#ifdef WEBRTC_AGC_DEBUG_DUMP
    stt->fcount++;
#endif

    for (i = 0; i < samples; i += subFrames)
    {
        if (WebRtcAgc_ProcessDigital(&stt->digitalAgc, &in_near[i], &in_near_H[i], &out[i], &out_H[i],
                                     stt->fs, stt->lowLevelSignal) == -1)
        {
#ifdef WEBRTC_AGC_DEBUG_DUMP
            fprintf(stt->fpt,
                    "AGC->Process, frame %d: Error from DigAGC\n\n",
                    stt->fcount);
#endif
            return -1;
        }
        if ((stt->agcMode < kAgcModeFixedDigital) && ((stt->lowLevelSignal == 0)
                || (stt->agcMode != kAgcModeAdaptiveDigital)))
        {
            if (WebRtcAgc_ProcessAnalog(agcInst, inMicLevelTmp, outMicLevel,
                                        stt->vadMic.logRatio, echo, saturationWarning) == -1)
            {
                return -1;
            }
        }
#ifdef WEBRTC_AGC_DEBUG_DUMP
        fprintf(stt->agcLog,
                "%5d\t%d\t%d\t%d\t%d\n",
                stt->fcount,
                inMicLevelTmp,
                *outMicLevel,
                stt->maxLevel,
                stt->micVol);
#endif

        /* update queue */
        if (stt->inQueue > 1)
        {
            memcpy(stt->env[0], stt->env[1], 10 * sizeof(int32_t));
            memcpy(stt->Rxx16w32_array[0], stt->Rxx16w32_array[1], 5 * sizeof(int32_t));
        }

        if (stt->inQueue > 0)
        {
            stt->inQueue--;
        }

        /* If 20ms frames are used the input mic level must be updated so that
         * the analog AGC does not think that there has been a manual volume
         * change. */
        inMicLevelTmp = *outMicLevel;

        /* Store a positive saturation warning. */
        if (*saturationWarning == 1)
        {
            satWarningTmp = 1;
        }
    }

    /* Trigger the saturation warning if displayed by any of the frames. */
    *saturationWarning = satWarningTmp;

    return 0;
}

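/* End-to-end usage sketch (illustrative only, not part of the original file):
 * running the AGC in adaptive analog mode on 10 ms frames of 16 kHz mono
 * audio. ReadCaptureFrame(), ReadRenderFrame() and SetDeviceMicVolume() are
 * hypothetical helpers standing in for platform audio I/O; the 0..255 volume
 * range and the minimal error handling are assumptions for the example.
 */
#if 0
void RunAgcExample(void)
{
    void *agc = NULL;
    WebRtcAgc_config_t config;
    int16_t near[160], far[160], out[160];
    int32_t micLevel = 128, outMicLevel = 0;
    uint8_t saturationWarning = 0;

    if (WebRtcAgc_Create(&agc) != 0 ||
        WebRtcAgc_Init(agc, 0, 255, kAgcModeAdaptiveAnalog, 16000) != 0)
    {
        return;
    }

    config.targetLevelDbfs = AGC_DEFAULT_TARGET_LEVEL;
    config.compressionGaindB = AGC_DEFAULT_COMP_GAIN;
    config.limiterEnable = kAgcTrue;
    WebRtcAgc_set_config(agc, config);

    while (ReadCaptureFrame(near, 160) && ReadRenderFrame(far, 160))
    {
        WebRtcAgc_AddFarend(agc, far, 160);
        WebRtcAgc_AddMic(agc, near, NULL, 160);
        if (WebRtcAgc_Process(agc, near, NULL, 160, out, NULL, micLevel,
                              &outMicLevel, 0, &saturationWarning) == 0)
        {
            SetDeviceMicVolume(outMicLevel);  /* Apply the suggested level. */
            micLevel = outMicLevel;
        }
    }
    WebRtcAgc_Free(agc);
}
#endif
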
int WebRtcAgc_set_config(void *agcInst, WebRtcAgc_config_t agcConfig)
{
    Agc_t *stt;
    stt = (Agc_t *)agcInst;

    if (stt == NULL)
    {
        return -1;
    }

    if (stt->initFlag != kInitCheck)
    {
        stt->lastError = AGC_UNINITIALIZED_ERROR;
        return -1;
    }

    if (agcConfig.limiterEnable != kAgcFalse && agcConfig.limiterEnable != kAgcTrue)
    {
        stt->lastError = AGC_BAD_PARAMETER_ERROR;
        return -1;
    }
    stt->limiterEnable = agcConfig.limiterEnable;
    stt->compressionGaindB = agcConfig.compressionGaindB;
    if ((agcConfig.targetLevelDbfs < 0) || (agcConfig.targetLevelDbfs > 31))
    {
        stt->lastError = AGC_BAD_PARAMETER_ERROR;
        return -1;
    }
    stt->targetLevelDbfs = agcConfig.targetLevelDbfs;

    if (stt->agcMode == kAgcModeFixedDigital)
    {
        /* Adjust for different parameter interpretation in FixedDigital mode */
        stt->compressionGaindB += agcConfig.targetLevelDbfs;
    }

    /* Update threshold levels for analog adaptation */
    WebRtcAgc_UpdateAgcThresholds(stt);

    /* Recalculate gain table */
    if (WebRtcAgc_CalculateGainTable(&(stt->digitalAgc.gainTable[0]), stt->compressionGaindB,
                                     stt->targetLevelDbfs, stt->limiterEnable, stt->analogTarget) == -1)
    {
#ifdef WEBRTC_AGC_DEBUG_DUMP
        fprintf(stt->fpt,
                "AGC->set_config, frame %d: Error from calcGainTable\n\n",
                stt->fcount);
#endif
        return -1;
    }
    /* Store the config in a WebRtcAgc_config_t */
    stt->usedConfig.compressionGaindB = agcConfig.compressionGaindB;
    stt->usedConfig.limiterEnable = agcConfig.limiterEnable;
    stt->usedConfig.targetLevelDbfs = agcConfig.targetLevelDbfs;

    return 0;
}

int WebRtcAgc_get_config(void *agcInst, WebRtcAgc_config_t *config)
{
    Agc_t *stt;
    stt = (Agc_t *)agcInst;

    if (stt == NULL)
    {
        return -1;
    }

    if (config == NULL)
    {
        stt->lastError = AGC_NULL_POINTER_ERROR;
        return -1;
    }

    if (stt->initFlag != kInitCheck)
    {
        stt->lastError = AGC_UNINITIALIZED_ERROR;
        return -1;
    }

    config->limiterEnable = stt->usedConfig.limiterEnable;
    config->targetLevelDbfs = stt->usedConfig.targetLevelDbfs;
    config->compressionGaindB = stt->usedConfig.compressionGaindB;

    return 0;
}

int WebRtcAgc_Create(void **agcInst)
{
    Agc_t *stt;
    if (agcInst == NULL)
    {
        return -1;
    }
    stt = (Agc_t *)malloc(sizeof(Agc_t));

    *agcInst = stt;
    if (stt == NULL)
    {
        return -1;
    }

#ifdef WEBRTC_AGC_DEBUG_DUMP
    stt->fpt = fopen("./agc_test_log.txt", "wt");
    stt->agcLog = fopen("./agc_debug_log.txt", "wt");
    stt->digitalAgc.logFile = fopen("./agc_log.txt", "wt");
#endif

    stt->initFlag = 0;
    stt->lastError = 0;

    return 0;
}

int WebRtcAgc_Free(void *state)
{
    Agc_t *stt;

    stt = (Agc_t *)state;
#ifdef WEBRTC_AGC_DEBUG_DUMP
    fclose(stt->fpt);
    fclose(stt->agcLog);
    fclose(stt->digitalAgc.logFile);
#endif
    free(stt);

    return 0;
}

/* minLevel - Minimum volume level
 * maxLevel - Maximum volume level
 */
int WebRtcAgc_Init(void *agcInst, int32_t minLevel, int32_t maxLevel,
                   int16_t agcMode, uint32_t fs)
{
    int32_t max_add, tmp32;
    int16_t i;
    int tmpNorm;
    Agc_t *stt;

    /* typecast state pointer */
    stt = (Agc_t *)agcInst;

    if (WebRtcAgc_InitDigital(&stt->digitalAgc, agcMode) != 0)
    {
        stt->lastError = AGC_UNINITIALIZED_ERROR;
        return -1;
    }

    /* Analog AGC variables */
    stt->envSum = 0;

    /* mode = 0 - Only saturation protection
     *        1 - Analog Automatic Gain Control [-targetLevelDbfs (default -3 dBOv)]
     *        2 - Digital Automatic Gain Control [-targetLevelDbfs (default -3 dBOv)]
     *        3 - Fixed Digital Gain [compressionGaindB (default 8 dB)]
     */
#ifdef WEBRTC_AGC_DEBUG_DUMP
    stt->fcount = 0;
    fprintf(stt->fpt, "AGC->Init\n");
#endif
    if (agcMode < kAgcModeUnchanged || agcMode > kAgcModeFixedDigital)
    {
#ifdef WEBRTC_AGC_DEBUG_DUMP
        fprintf(stt->fpt, "AGC->Init: error, incorrect mode\n\n");
#endif
        return -1;
    }
    stt->agcMode = agcMode;
    stt->fs = fs;

    /* initialize input VAD */
    WebRtcAgc_InitVad(&stt->vadMic);

    /* If the volume range is smaller than 0-256 then
     * the levels are shifted up to Q8-domain */
    tmpNorm = WebRtcSpl_NormU32((uint32_t)maxLevel);
    stt->scale = tmpNorm - 23;
    if (stt->scale < 0)
    {
        stt->scale = 0;
    }
    // TODO(bjornv): Investigate if we really need to scale up a small range now when we have
    // a guard against zero-increments. For now, we do not support scale up (scale = 0).
    stt->scale = 0;
    maxLevel = WEBRTC_SPL_LSHIFT_W32(maxLevel, stt->scale);
    minLevel = WEBRTC_SPL_LSHIFT_W32(minLevel, stt->scale);

    /* Make minLevel and maxLevel static in AdaptiveDigital */
    if (stt->agcMode == kAgcModeAdaptiveDigital)
    {
        minLevel = 0;
        maxLevel = 255;
        stt->scale = 0;
    }
    /* The maximum supplemental volume range is based on a vague idea
     * of how much lower the gain will be than the real analog gain. */
    max_add = WEBRTC_SPL_RSHIFT_W32(maxLevel - minLevel, 2);

    /* Minimum/maximum volume level that can be set */
    stt->minLevel = minLevel;
    stt->maxAnalog = maxLevel;
    stt->maxLevel = maxLevel + max_add;
    stt->maxInit = stt->maxLevel;

    stt->zeroCtrlMax = stt->maxAnalog;
    stt->lastInMicLevel = 0;

    /* Initialize micVol parameter */
    stt->micVol = stt->maxAnalog;
    if (stt->agcMode == kAgcModeAdaptiveDigital)
    {
        stt->micVol = 127; /* Mid-point of mic level */
    }
    stt->micRef = stt->micVol;
    stt->micGainIdx = 127;
#ifdef MIC_LEVEL_FEEDBACK
    stt->numBlocksMicLvlSat = 0;
    stt->micLvlSat = 0;
#endif
#ifdef WEBRTC_AGC_DEBUG_DUMP
    fprintf(stt->fpt,
            "AGC->Init: minLevel = %d, maxAnalog = %d, maxLevel = %d\n",
            stt->minLevel,
            stt->maxAnalog,
            stt->maxLevel);
#endif

    /* Minimum output volume is 4% higher than the available lowest volume level */
    tmp32 = WEBRTC_SPL_RSHIFT_W32((stt->maxLevel - stt->minLevel) * (int32_t)10, 8);
    stt->minOutput = (stt->minLevel + tmp32);

    stt->msTooLow = 0;
    stt->msTooHigh = 0;
    stt->changeToSlowMode = 0;
    stt->firstCall = 0;
    stt->msZero = 0;
    stt->muteGuardMs = 0;
    stt->gainTableIdx = 0;

    stt->msecSpeechInnerChange = kMsecSpeechInner;
    stt->msecSpeechOuterChange = kMsecSpeechOuter;

    stt->activeSpeech = 0;
    stt->Rxx16_LPw32Max = 0;

    stt->vadThreshold = kNormalVadThreshold;
    stt->inActive = 0;

    for (i = 0; i < RXX_BUFFER_LEN; i++)
    {
        stt->Rxx16_vectorw32[i] = (int32_t)1000; /* -54dBm0 */
    }
    stt->Rxx160w32 = 125 * RXX_BUFFER_LEN; /* (stt->Rxx16_vectorw32[0]>>3) = 125 */

    stt->Rxx16pos = 0;
    stt->Rxx16_LPw32 = (int32_t)16284; /* Q(-4) */

    for (i = 0; i < 5; i++)
    {
        stt->Rxx16w32_array[0][i] = 0;
    }
    for (i = 0; i < 10; i++)
    {
        stt->env[0][i] = 0;
        stt->env[1][i] = 0;
    }
    stt->inQueue = 0;

#ifdef MIC_LEVEL_FEEDBACK
    stt->targetIdxOffset = 0;
#endif

    WebRtcSpl_MemSetW32(stt->filterState, 0, 8);

    stt->initFlag = kInitCheck;
    // Default config settings.
    stt->defaultConfig.limiterEnable = kAgcTrue;
    stt->defaultConfig.targetLevelDbfs = AGC_DEFAULT_TARGET_LEVEL;
    stt->defaultConfig.compressionGaindB = AGC_DEFAULT_COMP_GAIN;

    if (WebRtcAgc_set_config(stt, stt->defaultConfig) == -1)
    {
        stt->lastError = AGC_UNSPECIFIED_ERROR;
        return -1;
    }
    stt->Rxx160_LPw32 = stt->analogTargetLevel; // Initialize rms value

    stt->lowLevelSignal = 0;

    /* Only positive values are allowed that are not too large */
    if ((minLevel >= maxLevel) || (maxLevel & 0xFC000000))
    {
#ifdef WEBRTC_AGC_DEBUG_DUMP
        fprintf(stt->fpt, "minLevel, maxLevel value(s) are invalid\n\n");
#endif
        return -1;
    } else
    {
#ifdef WEBRTC_AGC_DEBUG_DUMP
        fprintf(stt->fpt, "\n");
#endif
        return 0;
    }
}