blob: 63abbd1ef2dd09ce0c19bba62415eda4e3d47ca1 [file] [log] [blame]
/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

/*
 * Implementation of RecOut function, which is the main function for the audio output
 * process. This function must be called (through the NetEQ API) once every 10 ms.
 */
15
16#include "dsp.h"
17
18#include <assert.h>
19#include <string.h> /* to define NULL */
20
21#include "signal_processing_library.h"
22
23#include "dsp_helpfunctions.h"
24#include "neteq_error_codes.h"
25#include "neteq_defines.h"
26#include "mcu_dsp_common.h"
27
28/* Audio types */
29#define TYPE_SPEECH 1
30#define TYPE_CNG 2
31
32#ifdef NETEQ_DELAY_LOGGING
33#include "delay_logging.h"
34#include <stdio.h>
35#pragma message("*******************************************************************")
36#pragma message("You have specified to use NETEQ_DELAY_LOGGING in the NetEQ library.")
37#pragma message("Make sure that your test application supports this.")
38#pragma message("*******************************************************************")
39#endif
40
/* Scratch usage:

 Type           Name                            size            startpos        endpos
 int16_t        pw16_NetEqAlgorithm_buffer      1080*fs/8000    0               1080*fs/8000-1
 struct         dspInfo                         6               1080*fs/8000    1085*fs/8000

 func           WebRtcNetEQ_Normal              40+495*fs/8000  0               39+495*fs/8000
 func           WebRtcNetEQ_Merge               40+496*fs/8000  0               39+496*fs/8000
 func           WebRtcNetEQ_Expand              40+370*fs/8000  126*fs/800      39+496*fs/8000
 func           WebRtcNetEQ_Accelerate          210             240*fs/8000     209+240*fs/8000
 func           WebRtcNetEQ_BGNUpdate           69              480*fs/8000     68+480*fs/8000

 Total:  1086*fs/8000
 */
55
56#define SCRATCH_ALGORITHM_BUFFER 0
57#define SCRATCH_NETEQ_NORMAL 0
58#define SCRATCH_NETEQ_MERGE 0
59
60#if (defined(NETEQ_48KHZ_WIDEBAND))
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +000061#define SCRATCH_DSP_INFO 6480
niklase@google.com470e71d2011-07-07 08:21:25 +000062#define SCRATCH_NETEQ_ACCELERATE 1440
63#define SCRATCH_NETEQ_BGN_UPDATE 2880
64#define SCRATCH_NETEQ_EXPAND 756
65#elif (defined(NETEQ_32KHZ_WIDEBAND))
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +000066#define SCRATCH_DSP_INFO 4320
niklase@google.com470e71d2011-07-07 08:21:25 +000067#define SCRATCH_NETEQ_ACCELERATE 960
68#define SCRATCH_NETEQ_BGN_UPDATE 1920
69#define SCRATCH_NETEQ_EXPAND 504
70#elif (defined(NETEQ_WIDEBAND))
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +000071#define SCRATCH_DSP_INFO 2160
niklase@google.com470e71d2011-07-07 08:21:25 +000072#define SCRATCH_NETEQ_ACCELERATE 480
73#define SCRATCH_NETEQ_BGN_UPDATE 960
74#define SCRATCH_NETEQ_EXPAND 252
75#else /* NB */
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +000076#define SCRATCH_DSP_INFO 1080
niklase@google.com470e71d2011-07-07 08:21:25 +000077#define SCRATCH_NETEQ_ACCELERATE 240
78#define SCRATCH_NETEQ_BGN_UPDATE 480
79#define SCRATCH_NETEQ_EXPAND 126
80#endif
81
82#if (defined(NETEQ_48KHZ_WIDEBAND))
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +000083#define SIZE_SCRATCH_BUFFER 6516
niklase@google.com470e71d2011-07-07 08:21:25 +000084#elif (defined(NETEQ_32KHZ_WIDEBAND))
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +000085#define SIZE_SCRATCH_BUFFER 4344
niklase@google.com470e71d2011-07-07 08:21:25 +000086#elif (defined(NETEQ_WIDEBAND))
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +000087#define SIZE_SCRATCH_BUFFER 2172
niklase@google.com470e71d2011-07-07 08:21:25 +000088#else /* NB */
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +000089#define SIZE_SCRATCH_BUFFER 1086
niklase@google.com470e71d2011-07-07 08:21:25 +000090#endif
91
92#ifdef NETEQ_DELAY_LOGGING
93extern FILE *delay_fid2; /* file pointer to delay log file */
pbos@webrtc.org0946a562013-04-09 00:28:06 +000094extern uint32_t tot_received_packets;
niklase@google.com470e71d2011-07-07 08:21:25 +000095#endif
96
97
pbos@webrtc.org0946a562013-04-09 00:28:06 +000098int WebRtcNetEQ_RecOutInternal(DSPInst_t *inst, int16_t *pw16_outData,
99 int16_t *pw16_len, int16_t BGNonly)
niklase@google.com470e71d2011-07-07 08:21:25 +0000100{
101
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000102 int16_t blockLen, payloadLen, len = 0, pos;
103 int16_t w16_tmp1, w16_tmp2, w16_tmp3, DataEnough;
104 int16_t *blockPtr;
105 int16_t MD = 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000106
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000107 int16_t speechType = TYPE_SPEECH;
108 uint16_t instr;
109 uint16_t uw16_tmp;
niklase@google.com470e71d2011-07-07 08:21:25 +0000110#ifdef SCRATCH
111 char pw8_ScratchBuffer[((SIZE_SCRATCH_BUFFER + 1) * 2)];
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000112 int16_t *pw16_scratchPtr = (int16_t*) pw8_ScratchBuffer;
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +0000113 /* pad with 240*fs_mult to match the overflow guard below */
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000114 int16_t pw16_decoded_buffer[NETEQ_MAX_FRAME_SIZE+240*6];
115 int16_t *pw16_NetEqAlgorithm_buffer = pw16_scratchPtr
niklase@google.com470e71d2011-07-07 08:21:25 +0000116 + SCRATCH_ALGORITHM_BUFFER;
117 DSP2MCU_info_t *dspInfo = (DSP2MCU_info_t*) (pw16_scratchPtr + SCRATCH_DSP_INFO);
118#else
tina.legrand@webrtc.orga7d83872012-10-18 10:00:52 +0000119 /* pad with 240*fs_mult to match the overflow guard below */
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000120 int16_t pw16_decoded_buffer[NETEQ_MAX_FRAME_SIZE+240*6];
121 int16_t pw16_NetEqAlgorithm_buffer[NETEQ_MAX_OUTPUT_SIZE+240*6];
niklase@google.com470e71d2011-07-07 08:21:25 +0000122 DSP2MCU_info_t dspInfoStruct;
123 DSP2MCU_info_t *dspInfo = &dspInfoStruct;
124#endif
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000125 int16_t fs_mult;
niklase@google.com470e71d2011-07-07 08:21:25 +0000126 int borrowedSamples;
127 int oldBorrowedSamples;
128 int return_value = 0;
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000129 int16_t lastModeBGNonly = (inst->w16_mode & MODE_BGN_ONLY) != 0; /* check BGN flag */
niklase@google.com470e71d2011-07-07 08:21:25 +0000130 void *mainInstBackup = inst->main_inst;
131
132#ifdef NETEQ_DELAY_LOGGING
henrik.lundin@webrtc.orgd7989532012-01-20 13:42:16 +0000133 int temp_var;
niklase@google.com470e71d2011-07-07 08:21:25 +0000134#endif
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000135 int16_t dtmfValue = -1;
136 int16_t dtmfVolume = -1;
niklase@google.com470e71d2011-07-07 08:21:25 +0000137 int playDtmf = 0;
henrik.lundin@webrtc.org44ef3772011-12-07 10:43:25 +0000138#ifdef NETEQ_ATEVENT_DECODE
niklase@google.com470e71d2011-07-07 08:21:25 +0000139 int dtmfSwitch = 0;
henrik.lundin@webrtc.org44ef3772011-12-07 10:43:25 +0000140#endif
niklase@google.com470e71d2011-07-07 08:21:25 +0000141#ifdef NETEQ_STEREO
142 MasterSlaveInfo *msInfo = inst->msInfo;
143#endif
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000144 int16_t *sharedMem = pw16_NetEqAlgorithm_buffer; /* Reuse memory SHARED_MEM_SIZE size */
niklase@google.com470e71d2011-07-07 08:21:25 +0000145 inst->pw16_readAddress = sharedMem;
146 inst->pw16_writeAddress = sharedMem;
147
148 /* Get information about if there is one descriptor left */
149 if (inst->codec_ptr_inst.funcGetMDinfo != NULL)
150 {
151 MD = inst->codec_ptr_inst.funcGetMDinfo(inst->codec_ptr_inst.codec_state);
152 if (MD > 0)
153 MD = 1;
154 else
155 MD = 0;
156 }
157
158#ifdef NETEQ_STEREO
159 if ((msInfo->msMode == NETEQ_SLAVE) && (inst->codec_ptr_inst.funcDecode != NULL))
160 {
161 /*
162 * Valid function pointers indicate that we have decoded something,
163 * and that the timestamp information is correct.
164 */
165
166 /* Get the information from master to correct synchronization */
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000167 uint32_t currentMasterTimestamp;
168 uint32_t currentSlaveTimestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +0000169
170 currentMasterTimestamp = msInfo->endTimestamp - msInfo->samplesLeftWithOverlap;
171 currentSlaveTimestamp = inst->endTimestamp - (inst->endPosition - inst->curPosition);
172
andrew@webrtc.org4e423b32012-04-23 18:59:00 +0000173 /* Partition the uint32_t space in three: [0 0.25) [0.25 0.75] (0.75 1]
174 * We consider a wrap to have occurred if the timestamps are in
175 * different edge partitions.
176 */
177 if (currentSlaveTimestamp < 0x40000000 &&
178 currentMasterTimestamp > 0xc0000000) {
179 // Slave has wrapped.
180 currentSlaveTimestamp += (0xffffffff - currentMasterTimestamp) + 1;
181 currentMasterTimestamp = 0;
182 } else if (currentMasterTimestamp < 0x40000000 &&
183 currentSlaveTimestamp > 0xc0000000) {
184 // Master has wrapped.
185 currentMasterTimestamp += (0xffffffff - currentSlaveTimestamp) + 1;
186 currentSlaveTimestamp = 0;
187 }
188
niklase@google.com470e71d2011-07-07 08:21:25 +0000189 if (currentSlaveTimestamp < currentMasterTimestamp)
190 {
191 /* brute-force discard a number of samples to catch up */
192 inst->curPosition += currentMasterTimestamp - currentSlaveTimestamp;
193
niklase@google.com470e71d2011-07-07 08:21:25 +0000194 }
195 else if (currentSlaveTimestamp > currentMasterTimestamp)
196 {
197 /* back off current position to slow down */
198 inst->curPosition -= currentSlaveTimestamp - currentMasterTimestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +0000199 }
andrew@webrtc.org4e423b32012-04-23 18:59:00 +0000200
201 /* make sure we have at least "overlap" samples left */
202 inst->curPosition = WEBRTC_SPL_MIN(inst->curPosition,
203 inst->endPosition - inst->ExpandInst.w16_overlap);
204
205 /* make sure we do not end up outside the speech history */
206 inst->curPosition = WEBRTC_SPL_MAX(inst->curPosition, 0);
niklase@google.com470e71d2011-07-07 08:21:25 +0000207 }
208#endif
209
210 /* Write status data to shared memory */
211 dspInfo->playedOutTS = inst->endTimestamp;
212 dspInfo->samplesLeft = inst->endPosition - inst->curPosition
213 - inst->ExpandInst.w16_overlap;
214 dspInfo->MD = MD;
215 dspInfo->lastMode = inst->w16_mode;
216 dspInfo->frameLen = inst->w16_frameLen;
217
218 /* Force update of codec if codec function is NULL */
219 if (inst->codec_ptr_inst.funcDecode == NULL)
220 {
221 dspInfo->lastMode |= MODE_AWAITING_CODEC_PTR;
222 }
223
224#ifdef NETEQ_STEREO
225 if (msInfo->msMode == NETEQ_SLAVE && (msInfo->extraInfo == DTMF_OVERDUB
226 || msInfo->extraInfo == DTMF_ONLY))
227 {
228 /* Signal that the master instance generated DTMF tones */
229 dspInfo->lastMode |= MODE_MASTER_DTMF_SIGNAL;
230 }
231
232 if (msInfo->msMode != NETEQ_MONO)
233 {
234 /* We are using stereo mode; signal this to MCU side */
235 dspInfo->lastMode |= MODE_USING_STEREO;
236 }
237#endif
238
239 WEBRTC_SPL_MEMCPY_W8(inst->pw16_writeAddress,dspInfo,sizeof(DSP2MCU_info_t));
240
241 /* Signal MCU with "interrupt" call to main inst*/
242#ifdef NETEQ_STEREO
243 assert(msInfo != NULL);
244 if (msInfo->msMode == NETEQ_MASTER)
245 {
246 /* clear info to slave */
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000247 WebRtcSpl_MemSetW16((int16_t *) msInfo, 0,
248 sizeof(MasterSlaveInfo) / sizeof(int16_t));
niklase@google.com470e71d2011-07-07 08:21:25 +0000249 /* re-set mode */
250 msInfo->msMode = NETEQ_MASTER;
251
252 /* Store some information to slave */
253 msInfo->endTimestamp = inst->endTimestamp;
254 msInfo->samplesLeftWithOverlap = inst->endPosition - inst->curPosition;
255 }
256#endif
257
258 /*
259 * This call will trigger the MCU side to make a decision based on buffer contents and
260 * decision history. Instructions, encoded data and function pointers will be written
261 * to the shared memory.
262 */
263 return_value = WebRtcNetEQ_DSP2MCUinterrupt((MainInst_t *) inst->main_inst, sharedMem);
264
265 /* Read MCU data and instructions */
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000266 instr = (uint16_t) (inst->pw16_readAddress[0] & 0xf000);
niklase@google.com470e71d2011-07-07 08:21:25 +0000267
268#ifdef NETEQ_STEREO
269 if (msInfo->msMode == NETEQ_MASTER)
270 {
271 msInfo->instruction = instr;
272 }
273 else if (msInfo->msMode == NETEQ_SLAVE)
274 {
275 /* Nothing to do */
276 }
277#endif
278
279 /* check for error returned from MCU side, if so, return error */
280 if (return_value < 0)
281 {
282 inst->w16_mode = MODE_ERROR;
283 dspInfo->lastMode = MODE_ERROR;
284 return return_value;
285 }
286
287 blockPtr = &((inst->pw16_readAddress)[3]);
288
289 /* Check for DTMF payload flag */
290 if ((inst->pw16_readAddress[0] & DSP_DTMF_PAYLOAD) != 0)
291 {
292 playDtmf = 1;
293 dtmfValue = blockPtr[1];
294 dtmfVolume = blockPtr[2];
295 blockPtr += 3;
296
297#ifdef NETEQ_STEREO
298 if (msInfo->msMode == NETEQ_MASTER)
299 {
300 /* signal to slave that master is using DTMF */
301 msInfo->extraInfo = DTMF_OVERDUB;
302 }
303#endif
304 }
305
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000306 blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of int16_t */
niklase@google.com470e71d2011-07-07 08:21:25 +0000307 payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
308 blockPtr++;
309
310 /* Do we have to change our decoder? */
311 if ((inst->pw16_readAddress[0] & 0x0f00) == DSP_CODEC_NEW_CODEC)
312 {
313 WEBRTC_SPL_MEMCPY_W16(&inst->codec_ptr_inst,blockPtr,(payloadLen+1)>>1);
314 if (inst->codec_ptr_inst.codec_fs != 0)
315 {
316 return_value = WebRtcNetEQ_DSPInit(inst, inst->codec_ptr_inst.codec_fs);
317 if (return_value != 0)
318 { /* error returned */
319 instr = DSP_INSTR_FADE_TO_BGN; /* emergency instruction */
320 }
321#ifdef NETEQ_DELAY_LOGGING
322 temp_var = NETEQ_DELAY_LOGGING_SIGNAL_CHANGE_FS;
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +0000323 if ((fwrite(&temp_var, sizeof(int),
324 1, delay_fid2) != 1) ||
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000325 (fwrite(&inst->fs, sizeof(uint16_t),
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +0000326 1, delay_fid2) != 1)) {
327 return -1;
328 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000329#endif
330 }
331
332 /* Copy it again since the init destroys this part */
333
334 WEBRTC_SPL_MEMCPY_W16(&inst->codec_ptr_inst,blockPtr,(payloadLen+1)>>1);
335 inst->endTimestamp = inst->codec_ptr_inst.timeStamp;
336 inst->videoSyncTimestamp = inst->codec_ptr_inst.timeStamp;
337 blockPtr += blockLen;
338 blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1;
339 payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
340 blockPtr++;
341 if (inst->codec_ptr_inst.funcDecodeInit != NULL)
342 {
343 inst->codec_ptr_inst.funcDecodeInit(inst->codec_ptr_inst.codec_state);
344 }
345
346#ifdef NETEQ_CNG_CODEC
347
348 /* Also update the CNG state as this might be uninitialized */
349
350 WEBRTC_SPL_MEMCPY_W16(&inst->CNG_Codec_inst,blockPtr,(payloadLen+1)>>1);
351 blockPtr += blockLen;
352 blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1;
353 payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
354 blockPtr++;
355 if (inst->CNG_Codec_inst != NULL)
356 {
357 WebRtcCng_InitDec(inst->CNG_Codec_inst);
358 }
359#endif
360 }
361 else if ((inst->pw16_readAddress[0] & 0x0f00) == DSP_CODEC_RESET)
362 {
363 /* Reset the current codec (but not DSP struct) */
364 if (inst->codec_ptr_inst.funcDecodeInit != NULL)
365 {
366 inst->codec_ptr_inst.funcDecodeInit(inst->codec_ptr_inst.codec_state);
367 }
368
369#ifdef NETEQ_CNG_CODEC
370 /* And reset CNG */
371 if (inst->CNG_Codec_inst != NULL)
372 {
373 WebRtcCng_InitDec(inst->CNG_Codec_inst);
374 }
375#endif /*NETEQ_CNG_CODEC*/
376 }
377
378 fs_mult = WebRtcNetEQ_CalcFsMult(inst->fs);
379
380 /* Add late packet? */
381 if ((inst->pw16_readAddress[0] & 0x0f00) == DSP_CODEC_ADD_LATE_PKT)
382 {
383 if (inst->codec_ptr_inst.funcAddLatePkt != NULL)
384 {
385 /* Only do this if the codec has support for Add Late Pkt */
386 inst->codec_ptr_inst.funcAddLatePkt(inst->codec_ptr_inst.codec_state, blockPtr,
387 payloadLen);
388 }
389 blockPtr += blockLen;
390 blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of Word16 */
391 payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
392 blockPtr++;
393 }
394
395 /* Do we have to decode data? */
396 if ((instr == DSP_INSTR_NORMAL) || (instr == DSP_INSTR_ACCELERATE) || (instr
397 == DSP_INSTR_MERGE) || (instr == DSP_INSTR_PREEMPTIVE_EXPAND))
398 {
399 /* Do we need to update codec-internal PLC state? */
400 if ((instr == DSP_INSTR_MERGE) && (inst->codec_ptr_inst.funcDecodePLC != NULL))
401 {
402 len = 0;
403 len = inst->codec_ptr_inst.funcDecodePLC(inst->codec_ptr_inst.codec_state,
404 &pw16_decoded_buffer[len], 1);
405 }
406 len = 0;
407
408 /* Do decoding */
409 while ((blockLen > 0) && (len < (240 * fs_mult))) /* Guard somewhat against overflow */
410 {
411 if (inst->codec_ptr_inst.funcDecode != NULL)
412 {
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000413 int16_t dec_Len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000414 if (!BGNonly)
415 {
416 /* Do decoding as normal
417 *
418 * blockPtr is pointing to payload, at this point,
419 * the most significant bit of *(blockPtr - 1) is a flag if set to 1
420 * indicates that the following payload is the redundant payload.
421 */
422 if (((*(blockPtr - 1) & DSP_CODEC_RED_FLAG) != 0)
423 && (inst->codec_ptr_inst.funcDecodeRCU != NULL))
424 {
425 dec_Len = inst->codec_ptr_inst.funcDecodeRCU(
426 inst->codec_ptr_inst.codec_state, blockPtr, payloadLen,
427 &pw16_decoded_buffer[len], &speechType);
428 }
429 else
430 {
431 dec_Len = inst->codec_ptr_inst.funcDecode(
432 inst->codec_ptr_inst.codec_state, blockPtr, payloadLen,
433 &pw16_decoded_buffer[len], &speechType);
434 }
435 }
436 else
437 {
438 /*
439 * Background noise mode: don't decode, just produce the same length BGN.
440 * Don't call Expand for BGN here, since Expand uses the memory where the
441 * bitstreams are stored (sharemem).
442 */
443 dec_Len = inst->w16_frameLen;
444 }
445
446 if (dec_Len > 0)
447 {
448 len += dec_Len;
449 /* Update frameLen */
450 inst->w16_frameLen = dec_Len;
451 }
452 else if (dec_Len < 0)
453 {
454 /* Error */
455 len = -1;
456 break;
457 }
458 /*
459 * Sanity check (although we might still write outside memory when this
460 * happens...)
461 */
462 if (len > NETEQ_MAX_FRAME_SIZE)
463 {
464 WebRtcSpl_MemSetW16(pw16_outData, 0, inst->timestampsPerCall);
465 *pw16_len = inst->timestampsPerCall;
466 inst->w16_mode = MODE_ERROR;
467 dspInfo->lastMode = MODE_ERROR;
468 return RECOUT_ERROR_DECODED_TOO_MUCH;
469 }
470
471 /* Verify that instance was not corrupted by decoder */
472 if (mainInstBackup != inst->main_inst)
473 {
474 /* Instance is corrupt */
475 return CORRUPT_INSTANCE;
476 }
477
478 }
479 blockPtr += blockLen;
480 blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of Word16 */
481 payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
482 blockPtr++;
483 }
484
485 if (len < 0)
486 {
487 len = 0;
488 inst->endTimestamp += inst->w16_frameLen; /* advance one frame */
489 if (inst->codec_ptr_inst.funcGetErrorCode != NULL)
490 {
491 return_value = -inst->codec_ptr_inst.funcGetErrorCode(
492 inst->codec_ptr_inst.codec_state);
493 }
494 else
495 {
496 return_value = RECOUT_ERROR_DECODING;
497 }
498 instr = DSP_INSTR_FADE_TO_BGN;
499 }
500 if (speechType != TYPE_CNG)
501 {
502 /*
503 * Don't increment timestamp if codec returned CNG speech type
504 * since in this case, the MCU side will increment the CNGplayedTS counter.
505 */
506 inst->endTimestamp += len;
507 }
508 }
509 else if (instr == DSP_INSTR_NORMAL_ONE_DESC)
510 {
511 if (inst->codec_ptr_inst.funcDecode != NULL)
512 {
513 len = inst->codec_ptr_inst.funcDecode(inst->codec_ptr_inst.codec_state, NULL, 0,
514 pw16_decoded_buffer, &speechType);
515#ifdef NETEQ_DELAY_LOGGING
516 temp_var = NETEQ_DELAY_LOGGING_SIGNAL_DECODE_ONE_DESC;
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +0000517 if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
518 return -1;
519 }
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000520 if (fwrite(&inst->endTimestamp, sizeof(uint32_t),
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +0000521 1, delay_fid2) != 1) {
522 return -1;
523 }
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000524 if (fwrite(&dspInfo->samplesLeft, sizeof(uint16_t),
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +0000525 1, delay_fid2) != 1) {
526 return -1;
527 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000528 tot_received_packets++;
529#endif
530 }
531 if (speechType != TYPE_CNG)
532 {
533 /*
534 * Don't increment timestamp if codec returned CNG speech type
535 * since in this case, the MCU side will increment the CNGplayedTS counter.
536 */
537 inst->endTimestamp += len;
538 }
539
540 /* Verify that instance was not corrupted by decoder */
541 if (mainInstBackup != inst->main_inst)
542 {
543 /* Instance is corrupt */
544 return CORRUPT_INSTANCE;
545 }
546
547 if (len <= 0)
548 {
549 len = 0;
550 if (inst->codec_ptr_inst.funcGetErrorCode != NULL)
551 {
552 return_value = -inst->codec_ptr_inst.funcGetErrorCode(
553 inst->codec_ptr_inst.codec_state);
554 }
555 else
556 {
557 return_value = RECOUT_ERROR_DECODING;
558 }
559 if ((inst->codec_ptr_inst.funcDecodeInit != NULL)
560 && (inst->codec_ptr_inst.codec_state != NULL))
561 {
562 /* Reinitialize codec state as something is obviously wrong */
563 inst->codec_ptr_inst.funcDecodeInit(inst->codec_ptr_inst.codec_state);
564 }
565 inst->endTimestamp += inst->w16_frameLen; /* advance one frame */
566 instr = DSP_INSTR_FADE_TO_BGN;
567 }
568 }
569
570 if (len == 0 && lastModeBGNonly) /* no new data */
571 {
572 BGNonly = 1; /* force BGN this time too */
573 }
574
575#ifdef NETEQ_VAD
576 if ((speechType == TYPE_CNG) /* decoder responded with codec-internal CNG */
577 || ((instr == DSP_INSTR_DO_RFC3389CNG) && (blockLen > 0)) /* ... or, SID frame */
578 || (inst->fs > 16000)) /* ... or, if not NB or WB */
579 {
580 /* disable post-decode VAD upon first sign of send-side DTX/VAD active, or if SWB */
581 inst->VADInst.VADEnabled = 0;
582 inst->VADInst.VADDecision = 1; /* set to always active, just to be on the safe side */
583 inst->VADInst.SIDintervalCounter = 0; /* reset SID interval counter */
584 }
585 else if (!inst->VADInst.VADEnabled) /* VAD disabled and no SID/CNG data observed this time */
586 {
587 inst->VADInst.SIDintervalCounter++; /* increase counter */
588 }
589
590 /* check for re-enabling the VAD */
591 if (inst->VADInst.SIDintervalCounter >= POST_DECODE_VAD_AUTO_ENABLE)
592 {
593 /*
594 * It's been a while since the last CNG/SID frame was observed => re-enable VAD.
595 * (Do not care to look for a VAD instance, since this is done inside the init
596 * function)
597 */
598 WebRtcNetEQ_InitVAD(&inst->VADInst, inst->fs);
599 }
600
601 if (len > 0 /* if we decoded any data */
602 && inst->VADInst.VADEnabled /* and VAD enabled */
603 && inst->fs <= 16000) /* can only do VAD for NB and WB */
604 {
605 int VADframeSize; /* VAD frame size in ms */
606 int VADSamplePtr = 0;
607
608 inst->VADInst.VADDecision = 0;
609
610 if (inst->VADInst.VADFunction != NULL) /* make sure that VAD function is provided */
611 {
612 /* divide the data into groups, as large as possible */
613 for (VADframeSize = 30; VADframeSize >= 10; VADframeSize -= 10)
614 {
615 /* loop through 30, 20, 10 */
616
617 while (inst->VADInst.VADDecision == 0
618 && len - VADSamplePtr >= VADframeSize * fs_mult * 8)
619 {
620 /*
621 * Only continue until first active speech found, and as long as there is
622 * one VADframeSize left.
623 */
624
625 /* call VAD with new decoded data */
626 inst->VADInst.VADDecision |= inst->VADInst.VADFunction(
bjornv@webrtc.orgb38fca12012-06-19 11:03:32 +0000627 inst->VADInst.VADState, (int) inst->fs,
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000628 (int16_t *) &pw16_decoded_buffer[VADSamplePtr],
bjornv@webrtc.orgb38fca12012-06-19 11:03:32 +0000629 (VADframeSize * fs_mult * 8));
niklase@google.com470e71d2011-07-07 08:21:25 +0000630
631 VADSamplePtr += VADframeSize * fs_mult * 8; /* increment sample counter */
632 }
633 }
634 }
635 else
636 { /* VAD function is NULL */
637 inst->VADInst.VADDecision = 1; /* set decision to active */
638 inst->VADInst.VADEnabled = 0; /* disable VAD since we have no VAD function */
639 }
640
641 }
642#endif /* NETEQ_VAD */
643
644 /* Adjust timestamp if needed */
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000645 uw16_tmp = (uint16_t) inst->pw16_readAddress[1];
646 inst->endTimestamp += (((uint32_t) uw16_tmp) << 16);
647 uw16_tmp = (uint16_t) inst->pw16_readAddress[2];
niklase@google.com470e71d2011-07-07 08:21:25 +0000648 inst->endTimestamp += uw16_tmp;
649
650 if (BGNonly && len > 0)
651 {
652 /*
653 * If BGN mode, we did not produce any data at decoding.
654 * Do it now instead.
655 */
656
657 WebRtcNetEQ_GenerateBGN(inst,
658#ifdef SCRATCH
659 pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
660#endif
661 pw16_decoded_buffer, len);
662 }
663
664 /* Switch on the instruction received from the MCU side. */
665 switch (instr)
666 {
667 case DSP_INSTR_NORMAL:
668
669 /* Allow for signal processing to apply gain-back etc */
670 WebRtcNetEQ_Normal(inst,
671#ifdef SCRATCH
672 pw16_scratchPtr + SCRATCH_NETEQ_NORMAL,
673#endif
674 pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
675
676 /* If last packet was decoded as a inband CNG set mode to CNG instead */
677 if ((speechType == TYPE_CNG) || ((inst->w16_mode == MODE_CODEC_INTERNAL_CNG)
678 && (len == 0)))
679 {
680 inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
681 }
682
683#ifdef NETEQ_ATEVENT_DECODE
684 if (playDtmf == 0)
685 {
686 inst->DTMFInst.reinit = 1;
687 }
688#endif
689 break;
690 case DSP_INSTR_NORMAL_ONE_DESC:
691
692 /* Allow for signal processing to apply gain-back etc */
693 WebRtcNetEQ_Normal(inst,
694#ifdef SCRATCH
695 pw16_scratchPtr + SCRATCH_NETEQ_NORMAL,
696#endif
697 pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
698#ifdef NETEQ_ATEVENT_DECODE
699 if (playDtmf == 0)
700 {
701 inst->DTMFInst.reinit = 1;
702 }
703#endif
704 inst->w16_mode = MODE_ONE_DESCRIPTOR;
705 break;
706 case DSP_INSTR_MERGE:
707#ifdef NETEQ_DELAY_LOGGING
708 temp_var = NETEQ_DELAY_LOGGING_SIGNAL_MERGE_INFO;
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +0000709 if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
710 return -1;
711 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000712 temp_var = -len;
713#endif
714 /* Call Merge with history*/
715 return_value = WebRtcNetEQ_Merge(inst,
716#ifdef SCRATCH
717 pw16_scratchPtr + SCRATCH_NETEQ_MERGE,
718#endif
719 pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
720
721 if (return_value < 0)
722 {
723 /* error */
724 return return_value;
725 }
726
727#ifdef NETEQ_DELAY_LOGGING
728 temp_var += len;
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +0000729 if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
730 return -1;
731 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000732#endif
733 /* If last packet was decoded as a inband CNG set mode to CNG instead */
734 if (speechType == TYPE_CNG) inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
735#ifdef NETEQ_ATEVENT_DECODE
736 if (playDtmf == 0)
737 {
738 inst->DTMFInst.reinit = 1;
739 }
740#endif
741 break;
742
743 case DSP_INSTR_EXPAND:
744 len = 0;
745 pos = 0;
746 while ((inst->endPosition - inst->curPosition - inst->ExpandInst.w16_overlap + pos)
747 < (inst->timestampsPerCall))
748 {
749 return_value = WebRtcNetEQ_Expand(inst,
750#ifdef SCRATCH
751 pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
752#endif
753 pw16_NetEqAlgorithm_buffer, &len, BGNonly);
754 if (return_value < 0)
755 {
756 /* error */
757 return return_value;
758 }
759
760 /*
761 * Update buffer, but only end part (otherwise expand state is destroyed
762 * since it reuses speechBuffer[] memory
763 */
764
765 WEBRTC_SPL_MEMMOVE_W16(inst->pw16_speechHistory,
766 inst->pw16_speechHistory + len,
767 (inst->w16_speechHistoryLen-len));
768 WEBRTC_SPL_MEMCPY_W16(&inst->pw16_speechHistory[inst->w16_speechHistoryLen-len],
769 pw16_NetEqAlgorithm_buffer, len);
770
771 inst->curPosition -= len;
772
773 /* Update variables for VQmon */
774 inst->w16_concealedTS += len;
775#ifdef NETEQ_DELAY_LOGGING
776 temp_var = NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO;
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +0000777 if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
778 return -1;
779 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000780 temp_var = len;
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +0000781 if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
782 return -1;
783 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000784#endif
785 len = 0; /* already written the data, so do not write it again further down. */
786 }
787#ifdef NETEQ_ATEVENT_DECODE
788 if (playDtmf == 0)
789 {
790 inst->DTMFInst.reinit = 1;
791 }
792#endif
793 break;
794
795 case DSP_INSTR_ACCELERATE:
796 if (len < 3 * 80 * fs_mult)
797 {
798 /* We need to move data from the speechBuffer[] in order to get 30 ms */
799 borrowedSamples = 3 * 80 * fs_mult - len;
800
801 WEBRTC_SPL_MEMMOVE_W16(&pw16_decoded_buffer[borrowedSamples],
802 pw16_decoded_buffer, len);
803 WEBRTC_SPL_MEMCPY_W16(pw16_decoded_buffer,
804 &(inst->speechBuffer[inst->endPosition-borrowedSamples]),
805 borrowedSamples);
806
807 return_value = WebRtcNetEQ_Accelerate(inst,
808#ifdef SCRATCH
809 pw16_scratchPtr + SCRATCH_NETEQ_ACCELERATE,
810#endif
811 pw16_decoded_buffer, 3 * inst->timestampsPerCall,
812 pw16_NetEqAlgorithm_buffer, &len, BGNonly);
813
814 if (return_value < 0)
815 {
816 /* error */
817 return return_value;
818 }
819
820 /* Copy back samples to the buffer */
821 if (len < borrowedSamples)
822 {
823 /*
824 * This destroys the beginning of the buffer, but will not cause any
825 * problems
826 */
827
828 WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->endPosition-borrowedSamples],
829 pw16_NetEqAlgorithm_buffer, len);
830 WEBRTC_SPL_MEMMOVE_W16(&inst->speechBuffer[borrowedSamples-len],
831 inst->speechBuffer,
832 (inst->endPosition-(borrowedSamples-len)));
833
834 inst->curPosition += (borrowedSamples - len);
835#ifdef NETEQ_DELAY_LOGGING
836 temp_var = NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO;
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +0000837 if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
838 return -1;
839 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000840 temp_var = 3 * inst->timestampsPerCall - len;
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +0000841 if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
842 return -1;
843 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000844#endif
845 len = 0;
846 }
847 else
848 {
849 WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->endPosition-borrowedSamples],
850 pw16_NetEqAlgorithm_buffer, borrowedSamples);
851 WEBRTC_SPL_MEMMOVE_W16(pw16_NetEqAlgorithm_buffer,
852 &pw16_NetEqAlgorithm_buffer[borrowedSamples],
853 (len-borrowedSamples));
854#ifdef NETEQ_DELAY_LOGGING
855 temp_var = NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO;
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +0000856 if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
857 return -1;
858 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000859 temp_var = 3 * inst->timestampsPerCall - len;
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +0000860 if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
861 return -1;
862 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000863#endif
864 len = len - borrowedSamples;
865 }
866
867 }
868 else
869 {
870#ifdef NETEQ_DELAY_LOGGING
871 temp_var = NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO;
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +0000872 if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
873 return -1;
874 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000875 temp_var = len;
876#endif
877 return_value = WebRtcNetEQ_Accelerate(inst,
878#ifdef SCRATCH
879 pw16_scratchPtr + SCRATCH_NETEQ_ACCELERATE,
880#endif
881 pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len, BGNonly);
882
883 if (return_value < 0)
884 {
885 /* error */
886 return return_value;
887 }
888
889#ifdef NETEQ_DELAY_LOGGING
890 temp_var -= len;
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +0000891 if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
892 return -1;
893 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000894#endif
895 }
896 /* If last packet was decoded as a inband CNG set mode to CNG instead */
897 if (speechType == TYPE_CNG) inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
898#ifdef NETEQ_ATEVENT_DECODE
899 if (playDtmf == 0)
900 {
901 inst->DTMFInst.reinit = 1;
902 }
903#endif
904 break;
905
906 case DSP_INSTR_DO_RFC3389CNG:
907#ifdef NETEQ_CNG_CODEC
908 if (blockLen > 0)
909 {
pbos@webrtc.org0946a562013-04-09 00:28:06 +0000910 if (WebRtcCng_UpdateSid(inst->CNG_Codec_inst, (uint8_t*) blockPtr,
niklase@google.com470e71d2011-07-07 08:21:25 +0000911 payloadLen) < 0)
912 {
913 /* error returned from CNG function */
914 return_value = -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
915 len = inst->timestampsPerCall;
916 WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
917 break;
918 }
919 }
920
921 if (BGNonly)
922 {
923 /* Get data from BGN function instead of CNG */
924 len = WebRtcNetEQ_GenerateBGN(inst,
925#ifdef SCRATCH
926 pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
927#endif
928 pw16_NetEqAlgorithm_buffer, inst->timestampsPerCall);
929 if (len != inst->timestampsPerCall)
930 {
931 /* this is not good, treat this as an error */
932 return_value = -1;
933 }
934 }
935 else
936 {
937 return_value = WebRtcNetEQ_Cng(inst, pw16_NetEqAlgorithm_buffer,
938 inst->timestampsPerCall);
939 }
940 len = inst->timestampsPerCall;
941 inst->ExpandInst.w16_consecExp = 0;
942 inst->w16_mode = MODE_RFC3389CNG;
943#ifdef NETEQ_ATEVENT_DECODE
944 if (playDtmf == 0)
945 {
946 inst->DTMFInst.reinit = 1;
947 }
948#endif
949
950 if (return_value < 0)
951 {
952 /* error returned */
953 WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
954 }
955
956 break;
957#else
958 return FAULTY_INSTRUCTION;
959#endif
960 case DSP_INSTR_DO_CODEC_INTERNAL_CNG:
961 /*
962 * This represents the case when there is no transmission and the decoder should
963 * do internal CNG.
964 */
965 len = 0;
966 if (inst->codec_ptr_inst.funcDecode != NULL && !BGNonly)
967 {
968 len = inst->codec_ptr_inst.funcDecode(inst->codec_ptr_inst.codec_state,
969 blockPtr, 0, pw16_decoded_buffer, &speechType);
970 }
971 else
972 {
973 /* get BGN data */
974 len = WebRtcNetEQ_GenerateBGN(inst,
975#ifdef SCRATCH
976 pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
977#endif
978 pw16_decoded_buffer, inst->timestampsPerCall);
979 }
980 WebRtcNetEQ_Normal(inst,
981#ifdef SCRATCH
982 pw16_scratchPtr + SCRATCH_NETEQ_NORMAL,
983#endif
984 pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
985 inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
986 inst->ExpandInst.w16_consecExp = 0;
987 break;
988
989 case DSP_INSTR_DTMF_GENERATE:
990#ifdef NETEQ_ATEVENT_DECODE
991 dtmfSwitch = 0;
992 if ((inst->w16_mode != MODE_DTMF) && (inst->DTMFInst.reinit == 0))
993 {
994 /* Special case; see below.
995 * We must catch this before calling DTMFGenerate,
996 * since reinit is set to 0 in that call.
997 */
998 dtmfSwitch = 1;
999 }
1000
1001 len = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
1002 pw16_NetEqAlgorithm_buffer, inst->fs, -1);
1003 if (len < 0)
1004 {
1005 /* error occurred */
1006 return_value = len;
1007 len = inst->timestampsPerCall;
1008 WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
1009 }
1010
1011 if (dtmfSwitch == 1)
1012 {
1013 /*
1014 * This is the special case where the previous operation was DTMF overdub.
1015 * but the current instruction is "regular" DTMF. We must make sure that the
1016 * DTMF does not have any discontinuities. The first DTMF sample that we
1017 * generate now must be played out immediately, wherefore it must be copied to
1018 * the speech buffer.
1019 */
1020
1021 /*
1022 * Generate extra DTMF data to fill the space between
1023 * curPosition and endPosition
1024 */
pbos@webrtc.org0946a562013-04-09 00:28:06 +00001025 int16_t tempLen;
niklase@google.com470e71d2011-07-07 08:21:25 +00001026
1027 tempLen = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
1028 &pw16_NetEqAlgorithm_buffer[len], inst->fs,
1029 inst->endPosition - inst->curPosition);
1030 if (tempLen < 0)
1031 {
1032 /* error occurred */
1033 return_value = tempLen;
1034 len = inst->endPosition - inst->curPosition;
1035 WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0,
1036 inst->endPosition - inst->curPosition);
1037 }
1038
1039 /* Add to total length */
1040 len += tempLen;
1041
1042 /* Overwrite the "future" part of the speech buffer with the new DTMF data */
1043
1044 WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->curPosition],
1045 pw16_NetEqAlgorithm_buffer,
1046 inst->endPosition - inst->curPosition);
1047
1048 /* Shuffle the remaining data to the beginning of algorithm buffer */
1049 len -= (inst->endPosition - inst->curPosition);
1050 WEBRTC_SPL_MEMMOVE_W16(pw16_NetEqAlgorithm_buffer,
1051 &pw16_NetEqAlgorithm_buffer[inst->endPosition - inst->curPosition],
1052 len);
1053 }
1054
1055 inst->endTimestamp += inst->timestampsPerCall;
1056 inst->DTMFInst.reinit = 0;
1057 inst->ExpandInst.w16_consecExp = 0;
1058 inst->w16_mode = MODE_DTMF;
1059 BGNonly = 0; /* override BGN only and let DTMF through */
1060
1061 playDtmf = 0; /* set to zero because the DTMF is already in the Algorithm buffer */
1062 /*
1063 * If playDtmf is 1, an extra DTMF vector will be generated and overdubbed
1064 * on the output.
1065 */
1066
1067#ifdef NETEQ_STEREO
1068 if (msInfo->msMode == NETEQ_MASTER)
1069 {
1070 /* signal to slave that master is using DTMF only */
1071 msInfo->extraInfo = DTMF_ONLY;
1072 }
1073#endif
1074
1075 break;
1076#else
1077 inst->w16_mode = MODE_ERROR;
1078 dspInfo->lastMode = MODE_ERROR;
1079 return FAULTY_INSTRUCTION;
1080#endif
1081
1082 case DSP_INSTR_DO_ALTERNATIVE_PLC:
1083 if (inst->codec_ptr_inst.funcDecodePLC != 0)
1084 {
1085 len = inst->codec_ptr_inst.funcDecodePLC(inst->codec_ptr_inst.codec_state,
1086 pw16_NetEqAlgorithm_buffer, 1);
1087 }
1088 else
1089 {
1090 len = inst->timestampsPerCall;
1091 /* ZeroStuffing... */
1092 WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
roosa@google.comb8ba4d82012-12-14 00:06:18 +00001093 /* By not advancing the timestamp, NetEq inserts samples. */
1094 inst->statInst.addedSamples += len;
niklase@google.com470e71d2011-07-07 08:21:25 +00001095 }
1096 inst->ExpandInst.w16_consecExp = 0;
1097 break;
1098 case DSP_INSTR_DO_ALTERNATIVE_PLC_INC_TS:
1099 if (inst->codec_ptr_inst.funcDecodePLC != 0)
1100 {
1101 len = inst->codec_ptr_inst.funcDecodePLC(inst->codec_ptr_inst.codec_state,
1102 pw16_NetEqAlgorithm_buffer, 1);
1103 }
1104 else
1105 {
1106 len = inst->timestampsPerCall;
1107 /* ZeroStuffing... */
1108 WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
1109 }
1110 inst->ExpandInst.w16_consecExp = 0;
1111 inst->endTimestamp += len;
1112 break;
1113 case DSP_INSTR_DO_AUDIO_REPETITION:
1114 len = inst->timestampsPerCall;
1115 /* copy->paste... */
1116 WEBRTC_SPL_MEMCPY_W16(pw16_NetEqAlgorithm_buffer,
1117 &inst->speechBuffer[inst->endPosition-len], len);
1118 inst->ExpandInst.w16_consecExp = 0;
1119 break;
1120 case DSP_INSTR_DO_AUDIO_REPETITION_INC_TS:
1121 len = inst->timestampsPerCall;
1122 /* copy->paste... */
1123 WEBRTC_SPL_MEMCPY_W16(pw16_NetEqAlgorithm_buffer,
1124 &inst->speechBuffer[inst->endPosition-len], len);
1125 inst->ExpandInst.w16_consecExp = 0;
1126 inst->endTimestamp += len;
1127 break;
1128
1129 case DSP_INSTR_PREEMPTIVE_EXPAND:
1130 if (len < 3 * inst->timestampsPerCall)
1131 {
1132 /* borrow samples from sync buffer if necessary */
1133 borrowedSamples = 3 * inst->timestampsPerCall - len; /* borrow this many samples */
1134 /* calculate how many of these are already played out */
1135 oldBorrowedSamples = WEBRTC_SPL_MAX(0,
1136 borrowedSamples - (inst->endPosition - inst->curPosition));
1137 WEBRTC_SPL_MEMMOVE_W16(&pw16_decoded_buffer[borrowedSamples],
1138 pw16_decoded_buffer, len);
1139 WEBRTC_SPL_MEMCPY_W16(pw16_decoded_buffer,
1140 &(inst->speechBuffer[inst->endPosition-borrowedSamples]),
1141 borrowedSamples);
1142 }
1143 else
1144 {
1145 borrowedSamples = 0;
1146 oldBorrowedSamples = 0;
1147 }
1148
1149#ifdef NETEQ_DELAY_LOGGING
1150 w16_tmp1 = len;
1151#endif
1152 /* do the expand */
1153 return_value = WebRtcNetEQ_PreEmptiveExpand(inst,
1154#ifdef SCRATCH
1155 /* use same scratch memory as Accelerate */
1156 pw16_scratchPtr + SCRATCH_NETEQ_ACCELERATE,
1157#endif
1158 pw16_decoded_buffer, len + borrowedSamples, oldBorrowedSamples,
1159 pw16_NetEqAlgorithm_buffer, &len, BGNonly);
1160
1161 if (return_value < 0)
1162 {
1163 /* error */
1164 return return_value;
1165 }
1166
1167 if (borrowedSamples > 0)
1168 {
1169 /* return borrowed samples */
1170
1171 /* Copy back to last part of speechBuffer from beginning of output buffer */
1172 WEBRTC_SPL_MEMCPY_W16( &(inst->speechBuffer[inst->endPosition-borrowedSamples]),
1173 pw16_NetEqAlgorithm_buffer,
1174 borrowedSamples);
1175
1176 len -= borrowedSamples; /* remove the borrowed samples from new total length */
1177
1178 /* Move to beginning of output buffer from end of output buffer */
1179 WEBRTC_SPL_MEMMOVE_W16( pw16_NetEqAlgorithm_buffer,
1180 &pw16_NetEqAlgorithm_buffer[borrowedSamples],
1181 len);
1182 }
1183
1184#ifdef NETEQ_DELAY_LOGGING
1185 temp_var = NETEQ_DELAY_LOGGING_SIGNAL_PREEMPTIVE_INFO;
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +00001186 if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
1187 return -1;
1188 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001189 temp_var = len - w16_tmp1; /* number of samples added */
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +00001190 if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
1191 return -1;
1192 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001193#endif
1194 /* If last packet was decoded as inband CNG, set mode to CNG instead */
1195 if (speechType == TYPE_CNG) inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
1196#ifdef NETEQ_ATEVENT_DECODE
1197 if (playDtmf == 0)
1198 {
1199 inst->DTMFInst.reinit = 1;
1200 }
1201#endif
1202 break;
1203
1204 case DSP_INSTR_FADE_TO_BGN:
1205 {
1206 int tempReturnValue;
1207 /* do not overwrite return_value, since it likely contains an error code */
1208
1209 /* calculate interpolation length */
1210 w16_tmp3 = WEBRTC_SPL_MIN(inst->endPosition - inst->curPosition,
1211 inst->timestampsPerCall);
1212 /* check that it will fit in pw16_NetEqAlgorithm_buffer */
1213 if (w16_tmp3 + inst->w16_frameLen > NETEQ_MAX_OUTPUT_SIZE)
1214 {
1215 w16_tmp3 = NETEQ_MAX_OUTPUT_SIZE - inst->w16_frameLen;
1216 }
1217
1218 /* call Expand */
1219 len = inst->timestampsPerCall + inst->ExpandInst.w16_overlap;
1220 pos = 0;
1221
1222 tempReturnValue = WebRtcNetEQ_Expand(inst,
1223#ifdef SCRATCH
1224 pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
1225#endif
1226 pw16_NetEqAlgorithm_buffer, &len, 1);
1227
1228 if (tempReturnValue < 0)
1229 {
1230 /* error */
1231 /* this error value will override return_value */
1232 return tempReturnValue;
1233 }
1234
1235 pos += len; /* got len samples from expand */
1236
1237 /* copy to fill the demand */
1238 while (pos + len <= inst->w16_frameLen + w16_tmp3)
1239 {
1240 WEBRTC_SPL_MEMCPY_W16(&pw16_NetEqAlgorithm_buffer[pos],
1241 pw16_NetEqAlgorithm_buffer, len);
1242 pos += len;
1243 }
1244
1245 /* fill with fraction of the expand vector if needed */
1246 if (pos < inst->w16_frameLen + w16_tmp3)
1247 {
1248 WEBRTC_SPL_MEMCPY_W16(&pw16_NetEqAlgorithm_buffer[pos], pw16_NetEqAlgorithm_buffer,
1249 inst->w16_frameLen + w16_tmp3 - pos);
1250 }
1251
1252 len = inst->w16_frameLen + w16_tmp3; /* truncate any surplus samples since we don't want these */
1253
1254 /*
1255 * Mix with contents in sync buffer. Find largest power of two that is less than
1256 * interpolate length divide 16384 with this number; result is in w16_tmp2.
1257 */
1258 w16_tmp1 = 2;
1259 w16_tmp2 = 16384;
1260 while (w16_tmp1 <= w16_tmp3)
1261 {
1262 w16_tmp2 >>= 1; /* divide with 2 */
1263 w16_tmp1 <<= 1; /* increase with a factor of 2 */
1264 }
1265
1266 w16_tmp1 = 0;
1267 pos = 0;
1268 while (w16_tmp1 < 16384)
1269 {
1270 inst->speechBuffer[inst->curPosition + pos]
1271 =
pbos@webrtc.org0946a562013-04-09 00:28:06 +00001272 (int16_t) WEBRTC_SPL_RSHIFT_W32(
niklase@google.com470e71d2011-07-07 08:21:25 +00001273 WEBRTC_SPL_MUL_16_16( inst->speechBuffer[inst->endPosition - w16_tmp3 + pos],
1274 16384-w16_tmp1 ) +
1275 WEBRTC_SPL_MUL_16_16( pw16_NetEqAlgorithm_buffer[pos], w16_tmp1 ),
1276 14 );
1277 w16_tmp1 += w16_tmp2;
1278 pos++;
1279 }
1280
1281 /* overwrite remainder of speech buffer */
1282
1283 WEBRTC_SPL_MEMCPY_W16( &inst->speechBuffer[inst->endPosition - w16_tmp3 + pos],
1284 &pw16_NetEqAlgorithm_buffer[pos], w16_tmp3 - pos);
1285
1286 len -= w16_tmp3;
1287 /* shift algorithm buffer */
1288
1289 WEBRTC_SPL_MEMMOVE_W16( pw16_NetEqAlgorithm_buffer,
1290 &pw16_NetEqAlgorithm_buffer[w16_tmp3],
1291 len );
1292
1293 /* Update variables for VQmon */
1294 inst->w16_concealedTS += len;
1295
1296 inst->w16_mode = MODE_FADE_TO_BGN;
1297#ifdef NETEQ_DELAY_LOGGING
1298 temp_var = NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO;
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +00001299 if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
1300 return -1;
1301 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001302 temp_var = len;
leozwang@webrtc.org354b0ed2012-06-01 17:46:21 +00001303 if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
1304 return -1;
1305 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001306#endif
1307
1308 break;
1309 }
1310
1311 default:
1312 inst->w16_mode = MODE_ERROR;
1313 dspInfo->lastMode = MODE_ERROR;
1314 return FAULTY_INSTRUCTION;
1315 } /* end of grand switch */
1316
1317 /* Copy data directly to output buffer */
1318
1319 w16_tmp2 = 0;
1320 if ((inst->endPosition + len - inst->curPosition - inst->ExpandInst.w16_overlap)
1321 >= inst->timestampsPerCall)
1322 {
1323 w16_tmp2 = inst->endPosition - inst->curPosition;
1324 w16_tmp2 = WEBRTC_SPL_MAX(w16_tmp2, 0); /* Additional error protection, just in case */
1325 w16_tmp1 = WEBRTC_SPL_MIN(w16_tmp2, inst->timestampsPerCall);
1326 w16_tmp2 = inst->timestampsPerCall - w16_tmp1;
1327 WEBRTC_SPL_MEMCPY_W16(pw16_outData, &inst->speechBuffer[inst->curPosition], w16_tmp1);
1328 WEBRTC_SPL_MEMCPY_W16(&pw16_outData[w16_tmp1], pw16_NetEqAlgorithm_buffer, w16_tmp2);
1329 DataEnough = 1;
1330 }
1331 else
1332 {
1333 DataEnough = 0;
1334 }
1335
1336 if (playDtmf != 0)
1337 {
henrik.lundin@webrtc.org44ef3772011-12-07 10:43:25 +00001338#ifdef NETEQ_ATEVENT_DECODE
pbos@webrtc.org0946a562013-04-09 00:28:06 +00001339 int16_t outDataIndex = 0;
1340 int16_t overdubLen = -1; /* default len */
1341 int16_t dtmfLen;
niklase@google.com470e71d2011-07-07 08:21:25 +00001342
1343 /*
1344 * Overdub the output with DTMF. Note that this is not executed if the
1345 * DSP_INSTR_DTMF_GENERATE operation is performed above.
1346 */
niklase@google.com470e71d2011-07-07 08:21:25 +00001347 if (inst->DTMFInst.lastDtmfSample - inst->curPosition > 0)
1348 {
1349 /* special operation for transition from "DTMF only" to "DTMF overdub" */
1350 outDataIndex
1351 = WEBRTC_SPL_MIN(inst->DTMFInst.lastDtmfSample - inst->curPosition,
1352 inst->timestampsPerCall);
1353 overdubLen = inst->timestampsPerCall - outDataIndex;
1354 }
1355
1356 dtmfLen = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
1357 &pw16_outData[outDataIndex], inst->fs, overdubLen);
1358 if (dtmfLen < 0)
1359 {
1360 /* error occurred */
1361 return_value = dtmfLen;
1362 }
1363 inst->DTMFInst.reinit = 0;
1364#else
1365 inst->w16_mode = MODE_ERROR;
1366 dspInfo->lastMode = MODE_ERROR;
1367 return FAULTY_INSTRUCTION;
1368#endif
1369 }
1370
1371 /*
1372 * Shuffle speech buffer to allow more data. Move data from pw16_NetEqAlgorithm_buffer
1373 * to speechBuffer.
1374 */
1375 if (instr != DSP_INSTR_EXPAND)
1376 {
1377 w16_tmp1 = WEBRTC_SPL_MIN(inst->endPosition, len);
1378 WEBRTC_SPL_MEMMOVE_W16(inst->speechBuffer, inst->speechBuffer + w16_tmp1,
1379 (inst->endPosition-w16_tmp1));
1380 WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->endPosition-w16_tmp1],
1381 &pw16_NetEqAlgorithm_buffer[len-w16_tmp1], w16_tmp1);
1382#ifdef NETEQ_ATEVENT_DECODE
1383 /* Update index to end of DTMF data in speech buffer */
1384 if (instr == DSP_INSTR_DTMF_GENERATE)
1385 {
1386 /* We have written DTMF data to the end of speech buffer */
1387 inst->DTMFInst.lastDtmfSample = inst->endPosition;
1388 }
1389 else if (inst->DTMFInst.lastDtmfSample > 0)
1390 {
1391 /* The end of DTMF data in speech buffer has been shuffled */
1392 inst->DTMFInst.lastDtmfSample -= w16_tmp1;
1393 }
1394#endif
1395 /*
1396 * Update the BGN history if last operation was not expand (nor Merge, Accelerate
1397 * or Pre-emptive expand, to save complexity).
1398 */
1399 if ((inst->w16_mode != MODE_EXPAND) && (inst->w16_mode != MODE_MERGE)
1400 && (inst->w16_mode != MODE_SUCCESS_ACCELERATE) && (inst->w16_mode
1401 != MODE_LOWEN_ACCELERATE) && (inst->w16_mode != MODE_SUCCESS_PREEMPTIVE)
1402 && (inst->w16_mode != MODE_LOWEN_PREEMPTIVE) && (inst->w16_mode
1403 != MODE_FADE_TO_BGN) && (inst->w16_mode != MODE_DTMF) && (!BGNonly))
1404 {
1405 WebRtcNetEQ_BGNUpdate(inst
1406#ifdef SCRATCH
1407 , pw16_scratchPtr + SCRATCH_NETEQ_BGN_UPDATE
1408#endif
1409 );
1410 }
1411 }
1412 else /* instr == DSP_INSTR_EXPAND */
1413 {
1414 /* Nothing should be done since data is already copied to output. */
1415 }
1416
1417 inst->curPosition -= len;
1418
1419 /*
1420 * Extra protection in case something should go totally wrong in terms of sizes...
1421 * If everything is ok this should NEVER happen.
1422 */
1423 if (inst->curPosition < -inst->timestampsPerCall)
1424 {
1425 inst->curPosition = -inst->timestampsPerCall;
1426 }
1427
1428 if ((instr != DSP_INSTR_EXPAND) && (instr != DSP_INSTR_MERGE) && (instr
1429 != DSP_INSTR_FADE_TO_BGN))
1430 {
1431 /* Reset concealed TS parameter if it does not seem to have been flushed */
1432 if (inst->w16_concealedTS > inst->timestampsPerCall)
1433 {
1434 inst->w16_concealedTS = 0;
1435 }
1436 }
1437
1438 /*
1439 * Double-check that we actually have 10 ms to play. If we haven't, there has been a
1440 * serious error.The decoder might have returned way too few samples
1441 */
1442 if (!DataEnough)
1443 {
1444 /* This should not happen. Set outdata to zeros, and return error. */
1445 WebRtcSpl_MemSetW16(pw16_outData, 0, inst->timestampsPerCall);
1446 *pw16_len = inst->timestampsPerCall;
1447 inst->w16_mode = MODE_ERROR;
1448 dspInfo->lastMode = MODE_ERROR;
1449 return RECOUT_ERROR_SAMPLEUNDERRUN;
1450 }
1451
1452 /*
1453 * Update Videosync timestamp (this special timestamp is needed since the endTimestamp
1454 * stops during CNG and Expand periods.
1455 */
1456 if ((inst->w16_mode != MODE_EXPAND) && (inst->w16_mode != MODE_RFC3389CNG))
1457 {
pbos@webrtc.org0946a562013-04-09 00:28:06 +00001458 uint32_t uw32_tmpTS;
niklase@google.com470e71d2011-07-07 08:21:25 +00001459 uw32_tmpTS = inst->endTimestamp - (inst->endPosition - inst->curPosition);
pbos@webrtc.org0946a562013-04-09 00:28:06 +00001460 if ((int32_t) (uw32_tmpTS - inst->videoSyncTimestamp) > 0)
niklase@google.com470e71d2011-07-07 08:21:25 +00001461 {
1462 inst->videoSyncTimestamp = uw32_tmpTS;
1463 }
1464 }
1465 else
1466 {
1467 inst->videoSyncTimestamp += inst->timestampsPerCall;
1468 }
1469
1470 /* After this, regardless of what has happened, deliver 10 ms of future data */
1471 inst->curPosition += inst->timestampsPerCall;
1472 *pw16_len = inst->timestampsPerCall;
1473
1474 /* Remember if BGNonly was used */
1475 if (BGNonly)
1476 {
1477 inst->w16_mode |= MODE_BGN_ONLY;
1478 }
1479
1480 return return_value;
1481}
1482
1483#undef SCRATCH_ALGORITHM_BUFFER
1484#undef SCRATCH_NETEQ_NORMAL
1485#undef SCRATCH_NETEQ_MERGE
1486#undef SCRATCH_NETEQ_BGN_UPDATE
1487#undef SCRATCH_NETEQ_EXPAND
1488#undef SCRATCH_DSP_INFO
1489#undef SCRATCH_NETEQ_ACCELERATE
1490#undef SIZE_SCRATCH_BUFFER