/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_coding/neteq/neteq_impl.h"

#include <assert.h>
#include <memory.h>  // memset

#include <algorithm>
#include <vector>

#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/safe_conversions.h"
#include "webrtc/base/sanitizer.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
#include "webrtc/modules/audio_coding/neteq/accelerate.h"
#include "webrtc/modules/audio_coding/neteq/background_noise.h"
#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
#include "webrtc/modules/audio_coding/neteq/comfort_noise.h"
#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
#include "webrtc/modules/audio_coding/neteq/defines.h"
#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
#include "webrtc/modules/audio_coding/neteq/dtmf_buffer.h"
#include "webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h"
#include "webrtc/modules/audio_coding/neteq/expand.h"
#include "webrtc/modules/audio_coding/neteq/merge.h"
#include "webrtc/modules/audio_coding/neteq/nack_tracker.h"
#include "webrtc/modules/audio_coding/neteq/normal.h"
#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
#include "webrtc/modules/audio_coding/neteq/packet.h"
#include "webrtc/modules/audio_coding/neteq/payload_splitter.h"
#include "webrtc/modules/audio_coding/neteq/post_decode_vad.h"
#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
#include "webrtc/modules/include/module_common_types.h"

namespace webrtc {

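// The Dependencies struct bundles the sub-components that NetEqImpl uses,
// primarily so that tests can construct one and substitute individual members
// (e.g., with mocks) before handing it to the NetEqImpl constructor. By
// default, each member is created here with its production implementation.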
NetEqImpl::Dependencies::Dependencies(
    const NetEq::Config& config,
    const rtc::scoped_refptr<AudioDecoderFactory>& decoder_factory)
    : tick_timer(new TickTimer),
      buffer_level_filter(new BufferLevelFilter),
      decoder_database(new DecoderDatabase(decoder_factory)),
      delay_peak_detector(new DelayPeakDetector(tick_timer.get())),
      delay_manager(new DelayManager(config.max_packets_in_buffer,
                                     delay_peak_detector.get(),
                                     tick_timer.get())),
      dtmf_buffer(new DtmfBuffer(config.sample_rate_hz)),
      dtmf_tone_generator(new DtmfToneGenerator),
      packet_buffer(
          new PacketBuffer(config.max_packets_in_buffer, tick_timer.get())),
      payload_splitter(new PayloadSplitter),
      timestamp_scaler(new TimestampScaler(*decoder_database)),
      accelerate_factory(new AccelerateFactory),
      expand_factory(new ExpandFactory),
      preemptive_expand_factory(new PreemptiveExpandFactory) {}

NetEqImpl::Dependencies::~Dependencies() = default;

NetEqImpl::NetEqImpl(const NetEq::Config& config,
                     Dependencies&& deps,
                     bool create_components)
    : tick_timer_(std::move(deps.tick_timer)),
      buffer_level_filter_(std::move(deps.buffer_level_filter)),
      decoder_database_(std::move(deps.decoder_database)),
      delay_manager_(std::move(deps.delay_manager)),
      delay_peak_detector_(std::move(deps.delay_peak_detector)),
      dtmf_buffer_(std::move(deps.dtmf_buffer)),
      dtmf_tone_generator_(std::move(deps.dtmf_tone_generator)),
      packet_buffer_(std::move(deps.packet_buffer)),
      payload_splitter_(std::move(deps.payload_splitter)),
      timestamp_scaler_(std::move(deps.timestamp_scaler)),
      vad_(new PostDecodeVad()),
      expand_factory_(std::move(deps.expand_factory)),
      accelerate_factory_(std::move(deps.accelerate_factory)),
      preemptive_expand_factory_(std::move(deps.preemptive_expand_factory)),
      last_mode_(kModeNormal),
      decoded_buffer_length_(kMaxFrameSize),
      decoded_buffer_(new int16_t[decoded_buffer_length_]),
      playout_timestamp_(0),
      new_codec_(false),
      timestamp_(0),
      reset_decoder_(false),
      ssrc_(0),
      first_packet_(true),
      error_code_(0),
      decoder_error_code_(0),
      background_noise_mode_(config.background_noise_mode),
      playout_mode_(config.playout_mode),
      enable_fast_accelerate_(config.enable_fast_accelerate),
      nack_enabled_(false),
      enable_muted_state_(config.enable_muted_state) {
  LOG(LS_INFO) << "NetEq config: " << config.ToString();
  int fs = config.sample_rate_hz;
  if (fs != 8000 && fs != 16000 && fs != 32000 && fs != 48000) {
    LOG(LS_ERROR) << "Sample rate " << fs << " Hz not supported. " <<
        "Changing to 8000 Hz.";
    fs = 8000;
  }
  delay_manager_->SetMaximumDelay(config.max_delay_ms);
  fs_hz_ = fs;
  fs_mult_ = fs / 8000;
  last_output_sample_rate_hz_ = fs;
  output_size_samples_ = static_cast<size_t>(kOutputSizeMs * 8 * fs_mult_);
  decoder_frame_length_ = 3 * output_size_samples_;
  WebRtcSpl_Init();
  if (create_components) {
    SetSampleRateAndChannels(fs, 1);  // Default is 1 channel.
  }
  RTC_DCHECK(!vad_->enabled());
  if (config.enable_post_decode_vad) {
    vad_->Enable();
  }
}

NetEqImpl::~NetEqImpl() = default;

int NetEqImpl::InsertPacket(const WebRtcRTPHeader& rtp_header,
                            rtc::ArrayView<const uint8_t> payload,
                            uint32_t receive_timestamp) {
  rtc::MsanCheckInitialized(payload);
  TRACE_EVENT0("webrtc", "NetEqImpl::InsertPacket");
  rtc::CritScope lock(&crit_sect_);
  int error =
      InsertPacketInternal(rtp_header, payload, receive_timestamp);
  if (error != 0) {
    error_code_ = error;
    return kFail;
  }
  return kOK;
}

namespace {
void SetAudioFrameActivityAndType(bool vad_enabled,
                                  NetEqImpl::OutputType type,
                                  AudioFrame::VADActivity last_vad_activity,
                                  AudioFrame* audio_frame) {
  switch (type) {
    case NetEqImpl::OutputType::kNormalSpeech: {
      audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
      audio_frame->vad_activity_ = AudioFrame::kVadActive;
      break;
    }
    case NetEqImpl::OutputType::kVadPassive: {
      // This should only be reached if the VAD is enabled.
      RTC_DCHECK(vad_enabled);
      audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
      audio_frame->vad_activity_ = AudioFrame::kVadPassive;
      break;
    }
    case NetEqImpl::OutputType::kCNG: {
      audio_frame->speech_type_ = AudioFrame::kCNG;
      audio_frame->vad_activity_ = AudioFrame::kVadPassive;
      break;
    }
    case NetEqImpl::OutputType::kPLC: {
      audio_frame->speech_type_ = AudioFrame::kPLC;
      audio_frame->vad_activity_ = last_vad_activity;
      break;
    }
    case NetEqImpl::OutputType::kPLCCNG: {
      audio_frame->speech_type_ = AudioFrame::kPLCCNG;
      audio_frame->vad_activity_ = AudioFrame::kVadPassive;
      break;
    }
    default:
      RTC_NOTREACHED();
  }
  if (!vad_enabled) {
    // Always set kVadUnknown when receive VAD is inactive.
    audio_frame->vad_activity_ = AudioFrame::kVadUnknown;
  }
}
}  // namespace

int NetEqImpl::GetAudio(AudioFrame* audio_frame, bool* muted) {
  TRACE_EVENT0("webrtc", "NetEqImpl::GetAudio");
  rtc::CritScope lock(&crit_sect_);
  int error = GetAudioInternal(audio_frame, muted);
  if (error != 0) {
    error_code_ = error;
    return kFail;
  }
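  // Each GetAudio() call produces 10 ms of output, so samples_per_channel_
  // times 100 must equal the sample rate in Hz; the DCHECK below verifies
  // this.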
  RTC_DCHECK_EQ(
      audio_frame->sample_rate_hz_,
      rtc::checked_cast<int>(audio_frame->samples_per_channel_ * 100));
  SetAudioFrameActivityAndType(vad_->enabled(), LastOutputType(),
                               last_vad_activity_, audio_frame);
  last_vad_activity_ = audio_frame->vad_activity_;
  last_output_sample_rate_hz_ = audio_frame->sample_rate_hz_;
  RTC_DCHECK(last_output_sample_rate_hz_ == 8000 ||
             last_output_sample_rate_hz_ == 16000 ||
             last_output_sample_rate_hz_ == 32000 ||
             last_output_sample_rate_hz_ == 48000)
      << "Unexpected sample rate " << last_output_sample_rate_hz_;
  return kOK;
}

int NetEqImpl::RegisterPayloadType(NetEqDecoder codec,
                                   const std::string& name,
                                   uint8_t rtp_payload_type) {
  rtc::CritScope lock(&crit_sect_);
  LOG(LS_VERBOSE) << "RegisterPayloadType "
                  << static_cast<int>(rtp_payload_type) << " "
                  << static_cast<int>(codec);
  int ret = decoder_database_->RegisterPayload(rtp_payload_type, codec, name);
  if (ret != DecoderDatabase::kOK) {
    switch (ret) {
      case DecoderDatabase::kInvalidRtpPayloadType:
        error_code_ = kInvalidRtpPayloadType;
        break;
      case DecoderDatabase::kCodecNotSupported:
        error_code_ = kCodecNotSupported;
        break;
      case DecoderDatabase::kDecoderExists:
        error_code_ = kDecoderExists;
        break;
      default:
        error_code_ = kOtherError;
    }
    return kFail;
  }
  return kOK;
}

int NetEqImpl::RegisterExternalDecoder(AudioDecoder* decoder,
                                       NetEqDecoder codec,
                                       const std::string& codec_name,
                                       uint8_t rtp_payload_type) {
  rtc::CritScope lock(&crit_sect_);
  LOG(LS_VERBOSE) << "RegisterExternalDecoder "
                  << static_cast<int>(rtp_payload_type) << " "
                  << static_cast<int>(codec);
  if (!decoder) {
    LOG(LS_ERROR) << "Cannot register external decoder with NULL pointer";
    assert(false);
    return kFail;
  }
  int ret = decoder_database_->InsertExternal(rtp_payload_type, codec,
                                              codec_name, decoder);
  if (ret != DecoderDatabase::kOK) {
    switch (ret) {
      case DecoderDatabase::kInvalidRtpPayloadType:
        error_code_ = kInvalidRtpPayloadType;
        break;
      case DecoderDatabase::kCodecNotSupported:
        error_code_ = kCodecNotSupported;
        break;
      case DecoderDatabase::kDecoderExists:
        error_code_ = kDecoderExists;
        break;
      case DecoderDatabase::kInvalidSampleRate:
        error_code_ = kInvalidSampleRate;
        break;
      case DecoderDatabase::kInvalidPointer:
        error_code_ = kInvalidPointer;
        break;
      default:
        error_code_ = kOtherError;
    }
    return kFail;
  }
  return kOK;
}

int NetEqImpl::RemovePayloadType(uint8_t rtp_payload_type) {
  rtc::CritScope lock(&crit_sect_);
  int ret = decoder_database_->Remove(rtp_payload_type);
  if (ret == DecoderDatabase::kOK) {
    return kOK;
  } else if (ret == DecoderDatabase::kDecoderNotFound) {
    error_code_ = kDecoderNotFound;
  } else {
    error_code_ = kOtherError;
  }
  return kFail;
}

void NetEqImpl::RemoveAllPayloadTypes() {
  rtc::CritScope lock(&crit_sect_);
  decoder_database_->RemoveAll();
}

bool NetEqImpl::SetMinimumDelay(int delay_ms) {
  rtc::CritScope lock(&crit_sect_);
  if (delay_ms >= 0 && delay_ms < 10000) {
    assert(delay_manager_.get());
    return delay_manager_->SetMinimumDelay(delay_ms);
  }
  return false;
}

bool NetEqImpl::SetMaximumDelay(int delay_ms) {
  rtc::CritScope lock(&crit_sect_);
  if (delay_ms >= 0 && delay_ms < 10000) {
    assert(delay_manager_.get());
    return delay_manager_->SetMaximumDelay(delay_ms);
  }
  return false;
}

int NetEqImpl::LeastRequiredDelayMs() const {
  rtc::CritScope lock(&crit_sect_);
  assert(delay_manager_.get());
  return delay_manager_->least_required_delay_ms();
}

int NetEqImpl::SetTargetDelay() {
  return kNotImplemented;
}

int NetEqImpl::TargetDelay() {
  return kNotImplemented;
}

int NetEqImpl::CurrentDelayMs() const {
  rtc::CritScope lock(&crit_sect_);
  if (fs_hz_ == 0)
    return 0;
  // Sum up the samples in the packet buffer with the future length of the sync
  // buffer, and divide the sum by the sample rate.
  const size_t delay_samples =
      packet_buffer_->NumSamplesInBuffer(decoder_database_.get(),
                                         decoder_frame_length_) +
      sync_buffer_->FutureLength();
  // The division below will truncate.
  const int delay_ms =
      static_cast<int>(delay_samples) / rtc::CheckedDivExact(fs_hz_, 1000);
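  // Example: 960 buffered samples at 48000 Hz give 960 / 48 = 20 ms.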
  return delay_ms;
}

int NetEqImpl::FilteredCurrentDelayMs() const {
  rtc::CritScope lock(&crit_sect_);
  // Calculate the filtered packet buffer level in samples. The value from
  // |buffer_level_filter_| is in number of packets, represented in Q8.
  const size_t packet_buffer_samples =
      (buffer_level_filter_->filtered_current_level() *
       decoder_frame_length_) >>
      8;
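  // The shift by 8 removes the Q8 scaling; e.g., a filtered level of 512 in
  // Q8 corresponds to 2.0 packets' worth of samples.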
  // Sum up the filtered packet buffer level with the future length of the sync
  // buffer, and divide the sum by the sample rate.
  const size_t delay_samples =
      packet_buffer_samples + sync_buffer_->FutureLength();
  // The division below will truncate. The return value is in ms.
  return static_cast<int>(delay_samples) / rtc::CheckedDivExact(fs_hz_, 1000);
}

// Deprecated.
// TODO(henrik.lundin) Delete.
void NetEqImpl::SetPlayoutMode(NetEqPlayoutMode mode) {
  rtc::CritScope lock(&crit_sect_);
  if (mode != playout_mode_) {
    playout_mode_ = mode;
    CreateDecisionLogic();
  }
}

// Deprecated.
// TODO(henrik.lundin) Delete.
NetEqPlayoutMode NetEqImpl::PlayoutMode() const {
  rtc::CritScope lock(&crit_sect_);
  return playout_mode_;
}

int NetEqImpl::NetworkStatistics(NetEqNetworkStatistics* stats) {
  rtc::CritScope lock(&crit_sect_);
  assert(decoder_database_.get());
  const size_t total_samples_in_buffers =
      packet_buffer_->NumSamplesInBuffer(decoder_database_.get(),
                                         decoder_frame_length_) +
      sync_buffer_->FutureLength();
  assert(delay_manager_.get());
  assert(decision_logic_.get());
  stats_.GetNetworkStatistics(fs_hz_, total_samples_in_buffers,
                              decoder_frame_length_, *delay_manager_.get(),
                              *decision_logic_.get(), stats);
  return 0;
}

void NetEqImpl::GetRtcpStatistics(RtcpStatistics* stats) {
  rtc::CritScope lock(&crit_sect_);
  if (stats) {
    rtcp_.GetStatistics(false, stats);
  }
}

void NetEqImpl::GetRtcpStatisticsNoReset(RtcpStatistics* stats) {
  rtc::CritScope lock(&crit_sect_);
  if (stats) {
    rtcp_.GetStatistics(true, stats);
  }
}

void NetEqImpl::EnableVad() {
  rtc::CritScope lock(&crit_sect_);
  assert(vad_.get());
  vad_->Enable();
}

void NetEqImpl::DisableVad() {
  rtc::CritScope lock(&crit_sect_);
  assert(vad_.get());
  vad_->Disable();
}

rtc::Optional<uint32_t> NetEqImpl::GetPlayoutTimestamp() const {
  rtc::CritScope lock(&crit_sect_);
  if (first_packet_ || last_mode_ == kModeRfc3389Cng ||
      last_mode_ == kModeCodecInternalCng) {
    // We don't have a valid RTP timestamp until we have decoded our first
    // RTP packet. Also, the RTP timestamp is not accurate while playing CNG,
    // which is indicated by returning an empty value.
    return rtc::Optional<uint32_t>();
  }
  return rtc::Optional<uint32_t>(
      timestamp_scaler_->ToExternal(playout_timestamp_));
}

int NetEqImpl::last_output_sample_rate_hz() const {
  rtc::CritScope lock(&crit_sect_);
  return last_output_sample_rate_hz_;
}

rtc::Optional<CodecInst> NetEqImpl::GetDecoder(int payload_type) const {
  rtc::CritScope lock(&crit_sect_);
  const DecoderDatabase::DecoderInfo* di =
      decoder_database_->GetDecoderInfo(payload_type);
  if (!di) {
    return rtc::Optional<CodecInst>();
  }

  // Create a CodecInst with some fields set. The remaining fields are zeroed,
  // but we tell MSan to consider them uninitialized.
  CodecInst ci = {0};
  rtc::MsanMarkUninitialized(rtc::MakeArrayView(&ci, 1));
  ci.pltype = payload_type;
  std::strncpy(ci.plname, di->name.c_str(), sizeof(ci.plname));
  ci.plname[sizeof(ci.plname) - 1] = '\0';
  ci.plfreq = di->IsRed() || di->IsDtmf() ? 8000 : di->SampleRateHz();
  AudioDecoder* const decoder = di->GetDecoder();
  ci.channels = decoder ? decoder->Channels() : 1;
  return rtc::Optional<CodecInst>(ci);
}

int NetEqImpl::SetTargetNumberOfChannels() {
  return kNotImplemented;
}

int NetEqImpl::SetTargetSampleRate() {
  return kNotImplemented;
}

int NetEqImpl::LastError() const {
  rtc::CritScope lock(&crit_sect_);
  return error_code_;
}

int NetEqImpl::LastDecoderError() {
  rtc::CritScope lock(&crit_sect_);
  return decoder_error_code_;
}

void NetEqImpl::FlushBuffers() {
  rtc::CritScope lock(&crit_sect_);
  LOG(LS_VERBOSE) << "FlushBuffers";
  packet_buffer_->Flush();
  assert(sync_buffer_.get());
  assert(expand_.get());
  sync_buffer_->Flush();
  sync_buffer_->set_next_index(sync_buffer_->next_index() -
                               expand_->overlap_length());
  // Set to wait for new codec.
  first_packet_ = true;
}

void NetEqImpl::PacketBufferStatistics(int* current_num_packets,
                                       int* max_num_packets) const {
  rtc::CritScope lock(&crit_sect_);
  packet_buffer_->BufferStat(current_num_packets, max_num_packets);
}

void NetEqImpl::EnableNack(size_t max_nack_list_size) {
  rtc::CritScope lock(&crit_sect_);
  if (!nack_enabled_) {
    const int kNackThresholdPackets = 2;
    nack_.reset(NackTracker::Create(kNackThresholdPackets));
    nack_enabled_ = true;
    nack_->UpdateSampleRate(fs_hz_);
  }
  nack_->SetMaxNackListSize(max_nack_list_size);
}

void NetEqImpl::DisableNack() {
  rtc::CritScope lock(&crit_sect_);
  nack_.reset();
  nack_enabled_ = false;
}

std::vector<uint16_t> NetEqImpl::GetNackList(int64_t round_trip_time_ms) const {
  rtc::CritScope lock(&crit_sect_);
  if (!nack_enabled_) {
    return std::vector<uint16_t>();
  }
  RTC_DCHECK(nack_.get());
  return nack_->GetNackList(round_trip_time_ms);
}

const SyncBuffer* NetEqImpl::sync_buffer_for_test() const {
  rtc::CritScope lock(&crit_sect_);
  return sync_buffer_.get();
}

Operations NetEqImpl::last_operation_for_test() const {
  rtc::CritScope lock(&crit_sect_);
  return last_operation_;
}

// Methods below this line are private.

int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
                                    rtc::ArrayView<const uint8_t> payload,
                                    uint32_t receive_timestamp) {
  if (payload.empty()) {
    LOG_F(LS_ERROR) << "payload is empty";
    return kInvalidPointer;
  }

  PacketList packet_list;
  RTPHeader main_header;
  {
    // Convert to Packet.
    // Create |packet| within this separate scope, since it should not be used
    // directly once it's been inserted in the packet list. This way, |packet|
    // is not defined outside of this block.
    Packet* packet = new Packet;
    packet->header.markerBit = false;
    packet->header.payloadType = rtp_header.header.payloadType;
    packet->header.sequenceNumber = rtp_header.header.sequenceNumber;
    packet->header.timestamp = rtp_header.header.timestamp;
    packet->header.ssrc = rtp_header.header.ssrc;
    packet->header.numCSRCs = 0;
    packet->payload.SetData(payload.data(), payload.size());
    packet->primary = true;
    // Waiting time will be set upon inserting the packet in the buffer.
    RTC_DCHECK(!packet->waiting_time);
    // Insert packet in a packet list.
    packet_list.push_back(packet);
    // Save main payloads header for later.
    memcpy(&main_header, &packet->header, sizeof(main_header));
  }

  bool update_sample_rate_and_channels = false;
  // Reinitialize NetEq if it's needed (changed SSRC or first call).
  if ((main_header.ssrc != ssrc_) || first_packet_) {
    // Note: |first_packet_| will be cleared further down in this method, once
    // the packet has been successfully inserted into the packet buffer.

    rtcp_.Init(main_header.sequenceNumber);

    // Flush the packet buffer and DTMF buffer.
    packet_buffer_->Flush();
    dtmf_buffer_->Flush();

    // Store new SSRC.
    ssrc_ = main_header.ssrc;

    // Update audio buffer timestamp.
    sync_buffer_->IncreaseEndTimestamp(main_header.timestamp - timestamp_);

    // Update codecs.
    timestamp_ = main_header.timestamp;

    // Reset timestamp scaling.
    timestamp_scaler_->Reset();

    // Trigger an update of sampling rate and the number of channels.
    update_sample_rate_and_channels = true;
  }

  // Update RTCP statistics, only for regular packets.
  rtcp_.Update(main_header, receive_timestamp);

  // Check for RED payload type, and separate payloads into several packets.
  if (decoder_database_->IsRed(main_header.payloadType)) {
    if (payload_splitter_->SplitRed(&packet_list) != PayloadSplitter::kOK) {
      PacketBuffer::DeleteAllPackets(&packet_list);
      return kRedundancySplitError;
    }
    // Only accept a few RED payloads of the same type as the main data,
    // DTMF events and CNG.
    payload_splitter_->CheckRedPayloads(&packet_list, *decoder_database_);
    // Update the stored main payload header since the main payload has now
    // changed.
    memcpy(&main_header, &packet_list.front()->header, sizeof(main_header));
  }

  // Check payload types.
  if (decoder_database_->CheckPayloadTypes(packet_list) ==
      DecoderDatabase::kDecoderNotFound) {
    PacketBuffer::DeleteAllPackets(&packet_list);
    return kUnknownRtpPayloadType;
  }

  // Scale timestamp to internal domain (only for some codecs).
  timestamp_scaler_->ToInternal(&packet_list);

  // Process DTMF payloads. Cycle through the list of packets, and pick out any
  // DTMF payloads found.
  PacketList::iterator it = packet_list.begin();
  while (it != packet_list.end()) {
    Packet* current_packet = (*it);
    assert(current_packet);
    assert(!current_packet->payload.empty());
    if (decoder_database_->IsDtmf(current_packet->header.payloadType)) {
      DtmfEvent event;
      int ret = DtmfBuffer::ParseEvent(current_packet->header.timestamp,
                                       current_packet->payload.data(),
                                       current_packet->payload.size(), &event);
      if (ret != DtmfBuffer::kOK) {
        PacketBuffer::DeleteAllPackets(&packet_list);
        return kDtmfParsingError;
      }
      if (dtmf_buffer_->InsertEvent(event) != DtmfBuffer::kOK) {
        PacketBuffer::DeleteAllPackets(&packet_list);
        return kDtmfInsertError;
      }
      delete current_packet;
      it = packet_list.erase(it);
    } else {
      ++it;
    }
  }

  // Check for FEC in packets, and separate payloads into several packets.
  int ret = payload_splitter_->SplitFec(&packet_list, decoder_database_.get());
  if (ret != PayloadSplitter::kOK) {
    PacketBuffer::DeleteAllPackets(&packet_list);
    switch (ret) {
      case PayloadSplitter::kUnknownPayloadType:
        return kUnknownRtpPayloadType;
      default:
        return kOtherError;
    }
  }

  // Split payloads into smaller chunks. This also verifies that all payloads
  // are of a known payload type.
  ret = payload_splitter_->SplitAudio(&packet_list, *decoder_database_);
  if (ret != PayloadSplitter::kOK) {
    PacketBuffer::DeleteAllPackets(&packet_list);
    switch (ret) {
      case PayloadSplitter::kUnknownPayloadType:
        return kUnknownRtpPayloadType;
      case PayloadSplitter::kFrameSplitError:
        return kFrameSplitError;
      default:
        return kOtherError;
    }
  }

  // Update bandwidth estimate, if the packet is not comfort noise.
  if (!packet_list.empty() &&
      !decoder_database_->IsComfortNoise(main_header.payloadType)) {
    // The list can be empty here if we got nothing but DTMF payloads.
    AudioDecoder* decoder =
        decoder_database_->GetDecoder(main_header.payloadType);
    assert(decoder);  // Should always get a valid object, since we have
                      // already checked that the payload types are known.
    decoder->IncomingPacket(packet_list.front()->payload.data(),
                            packet_list.front()->payload.size(),
                            packet_list.front()->header.sequenceNumber,
                            packet_list.front()->header.timestamp,
                            receive_timestamp);
  }

  if (nack_enabled_) {
    RTC_DCHECK(nack_);
    if (update_sample_rate_and_channels) {
      nack_->Reset();
    }
    nack_->UpdateLastReceivedPacket(packet_list.front()->header.sequenceNumber,
                                    packet_list.front()->header.timestamp);
  }

  // Insert packets in buffer.
  const size_t buffer_length_before_insert =
      packet_buffer_->NumPacketsInBuffer();
  ret = packet_buffer_->InsertPacketList(
      &packet_list,
      *decoder_database_,
      &current_rtp_payload_type_,
      &current_cng_rtp_payload_type_);
  if (ret == PacketBuffer::kFlushed) {
    // Reset DSP timestamp etc. if packet buffer flushed.
    new_codec_ = true;
    update_sample_rate_and_channels = true;
  } else if (ret != PacketBuffer::kOK) {
    PacketBuffer::DeleteAllPackets(&packet_list);
    return kOtherError;
  }

  if (first_packet_) {
    first_packet_ = false;
    // Update the codec on the next GetAudio call.
    new_codec_ = true;
  }

  if (current_rtp_payload_type_) {
    RTC_DCHECK(decoder_database_->GetDecoderInfo(*current_rtp_payload_type_))
        << "Payload type " << static_cast<int>(*current_rtp_payload_type_)
        << " is unknown where it shouldn't be";
  }

  if (update_sample_rate_and_channels && !packet_buffer_->Empty()) {
    // We do not use |current_rtp_payload_type_| to set |payload_type|, but
    // get the next RTP header from |packet_buffer_| to obtain the payload
    // type. The reason is the following corner case: if NetEq receives a CNG
    // packet with a sample rate different from the current CNG, it flushes
    // its buffer, assuming the send codec must have changed. However, the
    // payload type of the hypothetically new send codec is not known.
    const RTPHeader* rtp_header = packet_buffer_->NextRtpHeader();
    assert(rtp_header);
    int payload_type = rtp_header->payloadType;
    size_t channels = 1;
    if (!decoder_database_->IsComfortNoise(payload_type)) {
      AudioDecoder* decoder = decoder_database_->GetDecoder(payload_type);
      assert(decoder);  // Payloads are already checked to be valid.
      channels = decoder->Channels();
    }
    const DecoderDatabase::DecoderInfo* decoder_info =
        decoder_database_->GetDecoderInfo(payload_type);
    assert(decoder_info);
    if (decoder_info->SampleRateHz() != fs_hz_ ||
        channels != algorithm_buffer_->Channels()) {
      SetSampleRateAndChannels(decoder_info->SampleRateHz(),
                               channels);
    }
    if (nack_enabled_) {
      RTC_DCHECK(nack_);
      // Update the sample rate even if the rate is not new, because of Reset().
      nack_->UpdateSampleRate(fs_hz_);
    }
  }

  // TODO(hlundin): Move this code to DelayManager class.
  const DecoderDatabase::DecoderInfo* dec_info =
      decoder_database_->GetDecoderInfo(main_header.payloadType);
  assert(dec_info);  // Already checked that the payload type is known.
  delay_manager_->LastDecoderType(dec_info->codec_type);
  if (delay_manager_->last_pack_cng_or_dtmf() == 0) {
    // Calculate the total speech length carried in each packet.
    const size_t buffer_length_after_insert =
        packet_buffer_->NumPacketsInBuffer();

    if (buffer_length_after_insert > buffer_length_before_insert) {
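      // Each packet added by this insert is assumed to carry
      // decoder_frame_length_ samples; the resulting per-packet audio length
      // is reported to the delay manager in ms (1000 * samples / fs_hz_).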
      const size_t packet_length_samples =
          (buffer_length_after_insert - buffer_length_before_insert) *
          decoder_frame_length_;
      if (packet_length_samples != decision_logic_->packet_length_samples()) {
        decision_logic_->set_packet_length_samples(packet_length_samples);
        delay_manager_->SetPacketAudioLength(
            rtc::checked_cast<int>((1000 * packet_length_samples) / fs_hz_));
      }
    }

    // Update statistics.
    if ((int32_t) (main_header.timestamp - timestamp_) >= 0 &&
        !new_codec_) {
      // Only update statistics if incoming packet is not older than last played
      // out packet, and if new codec flag is not set.
      delay_manager_->Update(main_header.sequenceNumber, main_header.timestamp,
                             fs_hz_);
    }
  } else if (delay_manager_->last_pack_cng_or_dtmf() == -1) {
    // This is first "normal" packet after CNG or DTMF.
    // Reset packet time counter and measure time until next packet,
    // but don't update statistics.
    delay_manager_->set_last_pack_cng_or_dtmf(0);
    delay_manager_->ResetPacketIatCount();
  }
  return 0;
}

int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame, bool* muted) {
  PacketList packet_list;
  DtmfEvent dtmf_event;
  Operations operation;
  bool play_dtmf;
  *muted = false;
  tick_timer_->Increment();
  stats_.IncreaseCounter(output_size_samples_, fs_hz_);

  // Check for muted state.
  if (enable_muted_state_ && expand_->Muted() && packet_buffer_->Empty()) {
    RTC_DCHECK_EQ(last_mode_, kModeExpand);
    playout_timestamp_ += static_cast<uint32_t>(output_size_samples_);
    audio_frame->sample_rate_hz_ = fs_hz_;
    audio_frame->samples_per_channel_ = output_size_samples_;
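    // Same convention as in the normal path below: the frame timestamp is 0
    // before the first packet has been inserted; otherwise it is the timestamp
    // of the first sample in the frame (playout_timestamp_ is the last + 1).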
    audio_frame->timestamp_ =
        first_packet_
            ? 0
            : timestamp_scaler_->ToExternal(playout_timestamp_) -
                  static_cast<uint32_t>(audio_frame->samples_per_channel_);
    audio_frame->num_channels_ = sync_buffer_->Channels();
    stats_.ExpandedNoiseSamples(output_size_samples_);
    *muted = true;
    return 0;
  }

  int return_value = GetDecision(&operation, &packet_list, &dtmf_event,
                                 &play_dtmf);
  if (return_value != 0) {
    last_mode_ = kModeError;
    return return_value;
  }

  AudioDecoder::SpeechType speech_type;
  int length = 0;
  int decode_return_value = Decode(&packet_list, &operation,
                                   &length, &speech_type);

  assert(vad_.get());
  bool sid_frame_available =
      (operation == kRfc3389Cng && !packet_list.empty());
  vad_->Update(decoded_buffer_.get(), static_cast<size_t>(length), speech_type,
               sid_frame_available, fs_hz_);

  if (sid_frame_available || speech_type == AudioDecoder::kComfortNoise) {
    // Start a new stopwatch since we are decoding a new CNG packet.
    generated_noise_stopwatch_ = tick_timer_->GetNewStopwatch();
  }

  algorithm_buffer_->Clear();
  switch (operation) {
    case kNormal: {
      DoNormal(decoded_buffer_.get(), length, speech_type, play_dtmf);
      break;
    }
    case kMerge: {
      DoMerge(decoded_buffer_.get(), length, speech_type, play_dtmf);
      break;
    }
    case kExpand: {
      return_value = DoExpand(play_dtmf);
      break;
    }
    case kAccelerate:
    case kFastAccelerate: {
      const bool fast_accelerate =
          enable_fast_accelerate_ && (operation == kFastAccelerate);
      return_value = DoAccelerate(decoded_buffer_.get(), length, speech_type,
                                  play_dtmf, fast_accelerate);
      break;
    }
    case kPreemptiveExpand: {
      return_value = DoPreemptiveExpand(decoded_buffer_.get(), length,
                                        speech_type, play_dtmf);
      break;
    }
    case kRfc3389Cng:
    case kRfc3389CngNoPacket: {
      return_value = DoRfc3389Cng(&packet_list, play_dtmf);
      break;
    }
    case kCodecInternalCng: {
      // This handles the case when there is no transmission and the decoder
      // should produce internal comfort noise.
      // TODO(hlundin): Write test for codec-internal CNG.
      DoCodecInternalCng(decoded_buffer_.get(), length);
      break;
    }
    case kDtmf: {
      // TODO(hlundin): Write test for this.
      return_value = DoDtmf(dtmf_event, &play_dtmf);
      break;
    }
    case kAlternativePlc: {
      // TODO(hlundin): Write test for this.
      DoAlternativePlc(false);
      break;
    }
    case kAlternativePlcIncreaseTimestamp: {
      // TODO(hlundin): Write test for this.
      DoAlternativePlc(true);
      break;
    }
    case kAudioRepetitionIncreaseTimestamp: {
      // TODO(hlundin): Write test for this.
      sync_buffer_->IncreaseEndTimestamp(
          static_cast<uint32_t>(output_size_samples_));
      // Skipping break on purpose. Execution should move on into the
      // next case.
      FALLTHROUGH();
    }
    case kAudioRepetition: {
      // TODO(hlundin): Write test for this.
      // Copy last |output_size_samples_| from |sync_buffer_| to
      // |algorithm_buffer|.
      algorithm_buffer_->PushBackFromIndex(
          *sync_buffer_, sync_buffer_->Size() - output_size_samples_);
      expand_->Reset();
      break;
    }
    case kUndefined: {
      LOG(LS_ERROR) << "Invalid operation kUndefined.";
      assert(false);  // This should not happen.
      last_mode_ = kModeError;
      return kInvalidOperation;
    }
  }  // End of switch.
  last_operation_ = operation;
  if (return_value < 0) {
    return return_value;
  }

  if (last_mode_ != kModeRfc3389Cng) {
    comfort_noise_->Reset();
  }

  // Copy from |algorithm_buffer| to |sync_buffer_|.
  sync_buffer_->PushBack(*algorithm_buffer_);

  // Extract data from |sync_buffer_| to |output|.
  size_t num_output_samples_per_channel = output_size_samples_;
  size_t num_output_samples = output_size_samples_ * sync_buffer_->Channels();
  if (num_output_samples > AudioFrame::kMaxDataSizeSamples) {
    LOG(LS_WARNING) << "Output array is too short. "
                    << AudioFrame::kMaxDataSizeSamples << " < "
                    << output_size_samples_ << " * "
                    << sync_buffer_->Channels();
    num_output_samples = AudioFrame::kMaxDataSizeSamples;
    num_output_samples_per_channel =
        AudioFrame::kMaxDataSizeSamples / sync_buffer_->Channels();
  }
  sync_buffer_->GetNextAudioInterleaved(num_output_samples_per_channel,
                                        audio_frame);
  audio_frame->sample_rate_hz_ = fs_hz_;
  if (sync_buffer_->FutureLength() < expand_->overlap_length()) {
    // The sync buffer should always contain |overlap_length| samples, but now
    // too many samples have been extracted. Reinstall the |overlap_length|
    // lookahead by moving the index.
    const size_t missing_lookahead_samples =
        expand_->overlap_length() - sync_buffer_->FutureLength();
    RTC_DCHECK_GE(sync_buffer_->next_index(), missing_lookahead_samples);
    sync_buffer_->set_next_index(sync_buffer_->next_index() -
                                 missing_lookahead_samples);
  }
  if (audio_frame->samples_per_channel_ != output_size_samples_) {
    LOG(LS_ERROR) << "audio_frame->samples_per_channel_ ("
                  << audio_frame->samples_per_channel_
                  << ") != output_size_samples_ (" << output_size_samples_
                  << ")";
    // TODO(minyue): treatment of under-run, filling zeros
    memset(audio_frame->data_, 0, num_output_samples * sizeof(int16_t));
    return kSampleUnderrun;
  }

  // Should always have overlap samples left in the |sync_buffer_|.
  RTC_DCHECK_GE(sync_buffer_->FutureLength(), expand_->overlap_length());

  if (play_dtmf) {
    return_value =
        DtmfOverdub(dtmf_event, sync_buffer_->Channels(), audio_frame->data_);
  }

  // Update the background noise parameters if last operation wrote data
  // straight from the decoder to the |sync_buffer_|. That is, none of the
  // operations that modify the signal can be followed by a parameter update.
  if ((last_mode_ == kModeNormal) ||
      (last_mode_ == kModeAccelerateFail) ||
      (last_mode_ == kModePreemptiveExpandFail) ||
      (last_mode_ == kModeRfc3389Cng) ||
      (last_mode_ == kModeCodecInternalCng)) {
    background_noise_->Update(*sync_buffer_, *vad_.get());
  }

  if (operation == kDtmf) {
    // DTMF data was written to the end of |sync_buffer_|.
    // Update index to end of DTMF data in |sync_buffer_|.
    sync_buffer_->set_dtmf_index(sync_buffer_->Size());
  }

  if (last_mode_ != kModeExpand) {
    // If last operation was not expand, calculate the |playout_timestamp_| from
    // the |sync_buffer_|. However, do not update the |playout_timestamp_| if it
    // would be moved "backwards".
    uint32_t temp_timestamp = sync_buffer_->end_timestamp() -
        static_cast<uint32_t>(sync_buffer_->FutureLength());
    if (static_cast<int32_t>(temp_timestamp - playout_timestamp_) > 0) {
      playout_timestamp_ = temp_timestamp;
    }
  } else {
    // Use dead reckoning to estimate the |playout_timestamp_|.
    playout_timestamp_ += static_cast<uint32_t>(output_size_samples_);
  }
  // Set the timestamp in the audio frame to zero before the first packet has
  // been inserted. Otherwise, subtract the frame size in samples to get the
  // timestamp of the first sample in the frame (playout_timestamp_ is the
  // last + 1).
  audio_frame->timestamp_ =
      first_packet_
          ? 0
          : timestamp_scaler_->ToExternal(playout_timestamp_) -
                static_cast<uint32_t>(audio_frame->samples_per_channel_);

  if (!(last_mode_ == kModeRfc3389Cng ||
      last_mode_ == kModeCodecInternalCng ||
      last_mode_ == kModeExpand)) {
    generated_noise_stopwatch_.reset();
  }

  if (decode_return_value) return decode_return_value;
  return return_value;
}

int NetEqImpl::GetDecision(Operations* operation,
                           PacketList* packet_list,
                           DtmfEvent* dtmf_event,
                           bool* play_dtmf) {
  // Initialize output variables.
  *play_dtmf = false;
  *operation = kUndefined;

  assert(sync_buffer_.get());
  uint32_t end_timestamp = sync_buffer_->end_timestamp();
  if (!new_codec_) {
    const uint32_t five_seconds_samples = 5 * fs_hz_;
    packet_buffer_->DiscardOldPackets(end_timestamp, five_seconds_samples);
  }
  const RTPHeader* header = packet_buffer_->NextRtpHeader();

  RTC_DCHECK(!generated_noise_stopwatch_ ||
             generated_noise_stopwatch_->ElapsedTicks() >= 1);
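  // The tick timer advances once per GetAudio() call, so each elapsed tick of
  // the stopwatch corresponds to one output block (output_size_samples_
  // samples) of generated noise.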
1049 uint64_t generated_noise_samples =
1050 generated_noise_stopwatch_
1051 ? (generated_noise_stopwatch_->ElapsedTicks() - 1) *
1052 output_size_samples_ +
1053 decision_logic_->noise_fast_forward()
1054 : 0;
1055
henrik.lundin@webrtc.orgca8cb952014-03-12 10:26:52 +00001056 if (decision_logic_->CngRfc3389On() || last_mode_ == kModeRfc3389Cng) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001057 // Because of timestamp peculiarities, we have to "manually" disallow using
1058 // a CNG packet with the same timestamp as the one that was last played.
1059 // This can happen when using redundancy and will cause the timing to shift.
henrik.lundin@webrtc.org24779fe2014-03-14 12:40:05 +00001060 while (header && decoder_database_->IsComfortNoise(header->payloadType) &&
1061 (end_timestamp >= header->timestamp ||
henrik.lundinb1fb72b2016-05-03 08:18:47 -07001062 end_timestamp + generated_noise_samples > header->timestamp)) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001063 // Don't use this packet, discard it.
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001064 if (packet_buffer_->DiscardNextPacket() != PacketBuffer::kOK) {
1065 assert(false); // Must be ok by design.
1066 }
1067 // Check buffer again.
1068 if (!new_codec_) {
henrik.lundin@webrtc.org52b42cb2014-11-04 14:03:58 +00001069 packet_buffer_->DiscardOldPackets(end_timestamp, 5 * fs_hz_);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001070 }
1071 header = packet_buffer_->NextRtpHeader();
1072 }
1073 }
1074
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00001075 assert(expand_.get());
turaj@webrtc.org362a55e2013-09-20 16:25:28 +00001076 const int samples_left = static_cast<int>(sync_buffer_->FutureLength() -
1077 expand_->overlap_length());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001078 if (last_mode_ == kModeAccelerateSuccess ||
1079 last_mode_ == kModeAccelerateLowEnergy ||
1080 last_mode_ == kModePreemptiveExpandSuccess ||
1081 last_mode_ == kModePreemptiveExpandLowEnergy) {
1082 // Subtract (samples_left + output_size_samples_) from sampleMemory.
Peter Kastingdce40cf2015-08-24 14:52:23 -07001083 decision_logic_->AddSampleMemory(
1084 -(samples_left + rtc::checked_cast<int>(output_size_samples_)));
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001085 }
1086
1087 // Check if it is time to play a DTMF event.
Peter Kastingb7e50542015-06-11 12:55:50 -07001088 if (dtmf_buffer_->GetEvent(
1089 static_cast<uint32_t>(
henrik.lundinb1fb72b2016-05-03 08:18:47 -07001090 end_timestamp + generated_noise_samples),
Peter Kastingb7e50542015-06-11 12:55:50 -07001091 dtmf_event)) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001092 *play_dtmf = true;
1093 }
1094
1095 // Get instruction.
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00001096 assert(sync_buffer_.get());
1097 assert(expand_.get());
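  // Recompute the generated noise estimate, this time counting all elapsed
  // ticks, before asking the decision logic for the next operation.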
henrik.lundinb1fb72b2016-05-03 08:18:47 -07001098 generated_noise_samples =
1099 generated_noise_stopwatch_
1100 ? generated_noise_stopwatch_->ElapsedTicks() * output_size_samples_ +
1101 decision_logic_->noise_fast_forward()
1102 : 0;
1103 *operation = decision_logic_->GetDecision(
1104 *sync_buffer_, *expand_, decoder_frame_length_, header, last_mode_,
1105 *play_dtmf, generated_noise_samples, &reset_decoder_);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001106
1107 // Check if we already have enough samples in the |sync_buffer_|. If so,
1108 // change decision to normal, unless the decision was merge, accelerate, or
1109 // preemptive expand.
Peter Kastingdce40cf2015-08-24 14:52:23 -07001110 if (samples_left >= rtc::checked_cast<int>(output_size_samples_) &&
1111 *operation != kMerge &&
1112 *operation != kAccelerate &&
1113 *operation != kFastAccelerate &&
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001114 *operation != kPreemptiveExpand) {
1115 *operation = kNormal;
1116 return 0;
1117 }
1118
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00001119 decision_logic_->ExpandDecision(*operation);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001120
1121 // Check conditions for reset.
1122 if (new_codec_ || *operation == kUndefined) {
1123 // The only valid reason to get kUndefined is that new_codec_ is set.
1124 assert(new_codec_);
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001125 if (*play_dtmf && !header) {
1126 timestamp_ = dtmf_event->timestamp;
1127 } else {
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001128 if (!header) {
Henrik Lundind67a2192015-08-03 12:54:37 +02001129 LOG(LS_ERROR) << "Packet missing where it shouldn't.";
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001130 return -1;
1131 }
1132 timestamp_ = header->timestamp;
ossu108ecec2016-07-08 08:45:18 -07001133 if (*operation == kRfc3389CngNoPacket &&
1134 decoder_database_->IsComfortNoise(header->payloadType)) {
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001135 // Change decision to CNG packet, since we do have a CNG packet, but it
1136 // was considered too early to use. Now, use it anyway.
1137 *operation = kRfc3389Cng;
1138 } else if (*operation != kRfc3389Cng) {
1139 *operation = kNormal;
1140 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001141 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001142 // Adjust |sync_buffer_| timestamp before setting |end_timestamp| to the
1143 // new value.
1144 sync_buffer_->IncreaseEndTimestamp(timestamp_ - end_timestamp);
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001145 end_timestamp = timestamp_;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001146 new_codec_ = false;
1147 decision_logic_->SoftReset();
1148 buffer_level_filter_->Reset();
1149 delay_manager_->Reset();
1150 stats_.ResetMcu();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001151 }
1152
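  // Sample counts for 10, 20, and 30 ms at the current sample rate;
  // fs_mult_ is fs_hz_ / 8000, so 80 * fs_mult_ samples equal 10 ms of audio.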
Peter Kastingdce40cf2015-08-24 14:52:23 -07001153 size_t required_samples = output_size_samples_;
1154 const size_t samples_10_ms = static_cast<size_t>(80 * fs_mult_);
1155 const size_t samples_20_ms = 2 * samples_10_ms;
1156 const size_t samples_30_ms = 3 * samples_10_ms;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001157
1158 switch (*operation) {
1159 case kExpand: {
1160 timestamp_ = end_timestamp;
1161 return 0;
1162 }
1163 case kRfc3389CngNoPacket:
1164 case kCodecInternalCng: {
1165 return 0;
1166 }
1167 case kDtmf: {
1168 // TODO(hlundin): Write test for this.
1169 // Update timestamp.
1170 timestamp_ = end_timestamp;
henrik.lundinb1fb72b2016-05-03 08:18:47 -07001171 const uint64_t generated_noise_samples =
1172 generated_noise_stopwatch_
1173 ? generated_noise_stopwatch_->ElapsedTicks() *
1174 output_size_samples_ +
1175 decision_logic_->noise_fast_forward()
1176 : 0;
1177 if (generated_noise_samples > 0 && last_mode_ != kModeDtmf) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001178 // Make a jump in timestamp due to the recently played comfort noise.
Peter Kastingb7e50542015-06-11 12:55:50 -07001179 uint32_t timestamp_jump =
henrik.lundinb1fb72b2016-05-03 08:18:47 -07001180 static_cast<uint32_t>(generated_noise_samples);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001181 sync_buffer_->IncreaseEndTimestamp(timestamp_jump);
1182 timestamp_ += timestamp_jump;
1183 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001184 return 0;
1185 }
Henrik Lundincf808d22015-05-27 14:33:29 +02001186 case kAccelerate:
1187 case kFastAccelerate: {
1188 // In order to do an accelerate we need at least 30 ms of audio data.
Peter Kastingdce40cf2015-08-24 14:52:23 -07001189 if (samples_left >= static_cast<int>(samples_30_ms)) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001190 // Already have enough data, so we do not need to extract any more.
1191 decision_logic_->set_sample_memory(samples_left);
1192 decision_logic_->set_prev_time_scale(true);
1193 return 0;
Peter Kastingdce40cf2015-08-24 14:52:23 -07001194 } else if (samples_left >= static_cast<int>(samples_10_ms) &&
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001195 decoder_frame_length_ >= samples_30_ms) {
1196 // Avoid decoding more data as it might overflow the playout buffer.
1197 *operation = kNormal;
1198 return 0;
Peter Kastingdce40cf2015-08-24 14:52:23 -07001199 } else if (samples_left < static_cast<int>(samples_20_ms) &&
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001200 decoder_frame_length_ < samples_30_ms) {
1201 // Build up decoded data by decoding at least 20 ms of audio data. Do
1202 // not perform accelerate yet, but wait until we only need to do one
1203 // decoding.
1204 required_samples = 2 * output_size_samples_;
1205 *operation = kNormal;
1206 }
1207 // If none of the above is true, we have one of two possible situations:
1208 // (1) 20 ms <= samples_left < 30 ms and decoder_frame_length_ < 30 ms; or
1209 // (2) samples_left < 10 ms and decoder_frame_length_ >= 30 ms.
1210 // In either case, we move on with the accelerate decision, and decode one
1211 // frame now.
1212 break;
1213 }
1214 case kPreemptiveExpand: {
1215 // In order to do a preemptive expand we need at least 30 ms of decoded
1216 // audio data.
Peter Kastingdce40cf2015-08-24 14:52:23 -07001217 if ((samples_left >= static_cast<int>(samples_30_ms)) ||
1218 (samples_left >= static_cast<int>(samples_10_ms) &&
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001219 decoder_frame_length_ >= samples_30_ms)) {
1220 // Already have enough data, so we do not need to extract any more.
1221 // Or, avoid decoding more data as it might overflow the playout buffer.
1222 // Still try preemptive expand, though.
1223 decision_logic_->set_sample_memory(samples_left);
1224 decision_logic_->set_prev_time_scale(true);
1225 return 0;
1226 }
Peter Kastingdce40cf2015-08-24 14:52:23 -07001227 if (samples_left < static_cast<int>(samples_20_ms) &&
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001228 decoder_frame_length_ < samples_30_ms) {
1229 // Build up decoded data by decoding at least 20 ms of audio data.
1230 // Still try to perform preemptive expand.
1231 required_samples = 2 * output_size_samples_;
1232 }
1233 // Move on with the preemptive expand decision.
1234 break;
1235 }
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00001236 case kMerge: {
1237 required_samples =
1238 std::max(merge_->RequiredFutureSamples(), required_samples);
1239 break;
1240 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001241 default: {
1242 // Do nothing.
1243 }
1244 }
1245
1246 // Get packets from buffer.
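  // Packets are extracted only if one is available and the chosen operation
  // actually consumes new packet data; the alternative-PLC and
  // audio-repetition operations do not.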
1247 int extracted_samples = 0;
1248 if (header &&
1249 *operation != kAlternativePlc &&
1250 *operation != kAlternativePlcIncreaseTimestamp &&
1251 *operation != kAudioRepetition &&
1252 *operation != kAudioRepetitionIncreaseTimestamp) {
1253 sync_buffer_->IncreaseEndTimestamp(header->timestamp - end_timestamp);
1254 if (decision_logic_->CngOff()) {
1255 // Adjustment of timestamp only corresponds to an actual packet loss
1256 // if comfort noise is not played. If comfort noise was just played,
1257 // this adjustment of timestamp is only done to get back in sync with the
1258 // stream timestamp; no loss to report.
1259 stats_.LostSamples(header->timestamp - end_timestamp);
1260 }
1261
1262 if (*operation != kRfc3389Cng) {
1263 // We are about to decode and use a non-CNG packet.
1264 decision_logic_->SetCngOff();
1265 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001266
1267 extracted_samples = ExtractPackets(required_samples, packet_list);
1268 if (extracted_samples < 0) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001269 return kPacketBufferCorruption;
1270 }
1271 }
1272
Henrik Lundincf808d22015-05-27 14:33:29 +02001273 if (*operation == kAccelerate || *operation == kFastAccelerate ||
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001274 *operation == kPreemptiveExpand) {
1275 decision_logic_->set_sample_memory(samples_left + extracted_samples);
1276 decision_logic_->set_prev_time_scale(true);
1277 }
1278
Henrik Lundincf808d22015-05-27 14:33:29 +02001279 if (*operation == kAccelerate || *operation == kFastAccelerate) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001280 // Check that we have enough data (30ms) to do accelerate.
Peter Kastingdce40cf2015-08-24 14:52:23 -07001281 if (extracted_samples + samples_left < static_cast<int>(samples_30_ms)) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001282 // TODO(hlundin): Write test for this.
1283 // Not enough, do normal operation instead.
1284 *operation = kNormal;
1285 }
1286 }
1287
1288 timestamp_ = end_timestamp;
1289 return 0;
1290}
1291
1292int NetEqImpl::Decode(PacketList* packet_list, Operations* operation,
1293 int* decoded_length,
1294 AudioDecoder::SpeechType* speech_type) {
1295 *speech_type = AudioDecoder::kSpeech;
minyuel6d92bf52015-09-23 15:20:39 +02001296
1297 // When packet_list is empty, we may be in kCodecInternalCng mode, and for
1298 // that we use the current active decoder.
1299 AudioDecoder* decoder = decoder_database_->GetActiveDecoder();
1300
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001301 if (!packet_list->empty()) {
1302 const Packet* packet = packet_list->front();
pkasting@chromium.org0e81fdf2015-02-02 23:54:03 +00001303 uint8_t payload_type = packet->header.payloadType;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001304 if (!decoder_database_->IsComfortNoise(payload_type)) {
1305 decoder = decoder_database_->GetDecoder(payload_type);
1306 assert(decoder);
1307 if (!decoder) {
Henrik Lundind67a2192015-08-03 12:54:37 +02001308 LOG(LS_WARNING) << "Unknown payload type "
1309 << static_cast<int>(payload_type);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001310 PacketBuffer::DeleteAllPackets(packet_list);
1311 return kDecoderNotFound;
1312 }
1313 bool decoder_changed;
1314 decoder_database_->SetActiveDecoder(payload_type, &decoder_changed);
1315 if (decoder_changed) {
1316 // We have a new decoder. Re-init some values.
1317 const DecoderDatabase::DecoderInfo* decoder_info = decoder_database_
1318 ->GetDecoderInfo(payload_type);
1319 assert(decoder_info);
1320 if (!decoder_info) {
Henrik Lundind67a2192015-08-03 12:54:37 +02001321 LOG(LS_WARNING) << "Unknown payload type "
1322 << static_cast<int>(payload_type);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001323 PacketBuffer::DeleteAllPackets(packet_list);
1324 return kDecoderNotFound;
1325 }
tina.legrand@webrtc.orgba5a6c32014-03-23 09:58:48 +00001326 // If the sampling rate or number of channels has changed, we need to do
1327 // a reset.
kwibergc0f2dcf2016-05-31 06:28:03 -07001328 if (decoder_info->SampleRateHz() != fs_hz_ ||
henrik.lundin@webrtc.org6dba1eb2015-03-18 09:47:08 +00001329 decoder->Channels() != algorithm_buffer_->Channels()) {
tina.legrand@webrtc.orgba5a6c32014-03-23 09:58:48 +00001330 // TODO(tlegrand): Add unittest to cover this event.
kwibergc0f2dcf2016-05-31 06:28:03 -07001331 SetSampleRateAndChannels(decoder_info->SampleRateHz(),
1332 decoder->Channels());
turaj@webrtc.orga6101d72013-10-01 22:01:09 +00001333 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001334 sync_buffer_->set_end_timestamp(timestamp_);
1335 playout_timestamp_ = timestamp_;
1336 }
1337 }
1338 }
1339
1340 if (reset_decoder_) {
1341 // TODO(hlundin): Write test for this.
Karl Wiberg43766482015-08-27 15:22:11 +02001342 if (decoder)
1343 decoder->Reset();
1344
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001345 // Reset comfort noise decoder.
ossu97ba30e2016-04-25 07:55:58 -07001346 ComfortNoiseDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
Karl Wiberg43766482015-08-27 15:22:11 +02001347 if (cng_decoder)
1348 cng_decoder->Reset();
1349
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001350 reset_decoder_ = false;
1351 }
1352
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001353 *decoded_length = 0;
1354 // Update codec-internal PLC state.
1355 if ((*operation == kMerge) && decoder && decoder->HasDecodePlc()) {
1356 decoder->DecodePlc(1, &decoded_buffer_[*decoded_length]);
1357 }
1358
minyuel6d92bf52015-09-23 15:20:39 +02001359 int return_value;
1360 if (*operation == kCodecInternalCng) {
1361 RTC_DCHECK(packet_list->empty());
1362 return_value = DecodeCng(decoder, decoded_length, speech_type);
1363 } else {
1364 return_value = DecodeLoop(packet_list, *operation, decoder,
1365 decoded_length, speech_type);
1366 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001367
1368 if (*decoded_length < 0) {
1369 // Error returned from the decoder.
1370 *decoded_length = 0;
Peter Kastingb7e50542015-06-11 12:55:50 -07001371 sync_buffer_->IncreaseEndTimestamp(
1372 static_cast<uint32_t>(decoder_frame_length_));
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001373 int error_code = 0;
1374 if (decoder)
1375 error_code = decoder->ErrorCode();
1376 if (error_code != 0) {
1377 // Got some error code from the decoder.
1378 decoder_error_code_ = error_code;
1379 return_value = kDecoderErrorCode;
Henrik Lundind67a2192015-08-03 12:54:37 +02001380 LOG(LS_WARNING) << "Decoder returned error code: " << error_code;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001381 } else {
1382 // Decoder does not implement error codes. Return generic error.
1383 return_value = kOtherDecoderError;
Henrik Lundind67a2192015-08-03 12:54:37 +02001384 LOG(LS_WARNING) << "Decoder error (no error code)";
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001385 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001386 *operation = kExpand; // Do expansion to get data instead.
1387 }
1388 if (*speech_type != AudioDecoder::kComfortNoise) {
1389 // Don't increment timestamp if codec returned CNG speech type
1390 // since in this case, we will increment the CNGplayedTS counter.
1391 // Increase with the number of samples per channel.
1392 assert(*decoded_length == 0 ||
henrik.lundin@webrtc.org6dba1eb2015-03-18 09:47:08 +00001393 (decoder && decoder->Channels() == sync_buffer_->Channels()));
turaj@webrtc.org362a55e2013-09-20 16:25:28 +00001394 sync_buffer_->IncreaseEndTimestamp(
1395 *decoded_length / static_cast<int>(sync_buffer_->Channels()));
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001396 }
1397 return return_value;
1398}
1399
minyuel6d92bf52015-09-23 15:20:39 +02001400int NetEqImpl::DecodeCng(AudioDecoder* decoder, int* decoded_length,
1401 AudioDecoder::SpeechType* speech_type) {
1402 if (!decoder) {
1403 // This happens when active decoder is not defined.
1404 *decoded_length = -1;
1405 return 0;
1406 }
1407
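  // Generate comfort noise in chunks until at least one full output block
  // (output_size_samples_) is available, or until the decoder reports an
  // error.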
1408 while (*decoded_length < rtc::checked_cast<int>(output_size_samples_)) {
1409 const int length = decoder->Decode(
1410 nullptr, 0, fs_hz_,
1411 (decoded_buffer_length_ - *decoded_length) * sizeof(int16_t),
1412 &decoded_buffer_[*decoded_length], speech_type);
1413 if (length > 0) {
1414 *decoded_length += length;
minyuel6d92bf52015-09-23 15:20:39 +02001415 } else {
1416 // Error.
1417 LOG(LS_WARNING) << "Failed to decode CNG";
1418 *decoded_length = -1;
1419 break;
1420 }
1421 if (*decoded_length > static_cast<int>(decoded_buffer_length_)) {
1422 // Guard against overflow.
1423 LOG(LS_WARNING) << "Decoded too much CNG.";
1424 return kDecodedTooMuch;
1425 }
1426 }
1427 return 0;
1428}
1429
1430int NetEqImpl::DecodeLoop(PacketList* packet_list, const Operations& operation,
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001431 AudioDecoder* decoder, int* decoded_length,
1432 AudioDecoder::SpeechType* speech_type) {
1433 Packet* packet = NULL;
1434 if (!packet_list->empty()) {
1435 packet = packet_list->front();
1436 }
minyuel6d92bf52015-09-23 15:20:39 +02001437
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001438 // Do decoding.
1439 while (packet &&
1440 !decoder_database_->IsComfortNoise(packet->header.payloadType)) {
1441 assert(decoder); // At this point, we must have a decoder object.
1442 // The number of channels in the |sync_buffer_| should be the same as the
1443 // number of decoder channels.
henrik.lundin@webrtc.org6dba1eb2015-03-18 09:47:08 +00001444 assert(sync_buffer_->Channels() == decoder->Channels());
1445 assert(decoded_buffer_length_ >= kMaxFrameSize * decoder->Channels());
minyuel6d92bf52015-09-23 15:20:39 +02001446 assert(operation == kNormal || operation == kAccelerate ||
1447 operation == kFastAccelerate || operation == kMerge ||
1448 operation == kPreemptiveExpand);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001449 packet_list->pop_front();
ossudc431ce2016-08-31 08:51:13 -07001450 const size_t payload_length = packet->payload.size();
Peter Kasting36b7cc32015-06-11 19:57:18 -07001451 int decode_length;
ossu17e3fa12016-09-08 04:52:55 -07001452 if (!packet->primary) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001453 // This is a redundant payload; call the special decoder method.
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001454 decode_length = decoder->DecodeRedundant(
ossudc431ce2016-08-31 08:51:13 -07001455 packet->payload.data(), packet->payload.size(), fs_hz_,
minyue@webrtc.org7f7d7e32015-03-16 12:30:37 +00001456 (decoded_buffer_length_ - *decoded_length) * sizeof(int16_t),
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001457 &decoded_buffer_[*decoded_length], speech_type);
1458 } else {
ossudc431ce2016-08-31 08:51:13 -07001459 decode_length = decoder->Decode(
1460 packet->payload.data(), packet->payload.size(), fs_hz_,
1461 (decoded_buffer_length_ - *decoded_length) * sizeof(int16_t),
1462 &decoded_buffer_[*decoded_length], speech_type);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001463 }
1464
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001465 delete packet;
turaj@webrtc.org58cd3162013-10-31 15:15:55 +00001466 packet = NULL;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001467 if (decode_length > 0) {
1468 *decoded_length += decode_length;
1469 // Update |decoder_frame_length_| with number of samples per channel.
henrik.lundin@webrtc.org6dba1eb2015-03-18 09:47:08 +00001470 decoder_frame_length_ =
Peter Kastingdce40cf2015-08-24 14:52:23 -07001471 static_cast<size_t>(decode_length) / decoder->Channels();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001472 } else if (decode_length < 0) {
1473 // Error.
Henrik Lundind67a2192015-08-03 12:54:37 +02001474 LOG(LS_WARNING) << "Decode " << decode_length << " " << payload_length;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001475 *decoded_length = -1;
1476 PacketBuffer::DeleteAllPackets(packet_list);
1477 break;
1478 }
1479 if (*decoded_length > static_cast<int>(decoded_buffer_length_)) {
1480 // Guard against overflow.
Henrik Lundind67a2192015-08-03 12:54:37 +02001481 LOG(LS_WARNING) << "Decoded too much.";
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001482 PacketBuffer::DeleteAllPackets(packet_list);
1483 return kDecodedTooMuch;
1484 }
1485 if (!packet_list->empty()) {
1486 packet = packet_list->front();
1487 } else {
1488 packet = NULL;
1489 }
1490 } // End of decode loop.
1491
turaj@webrtc.org58cd3162013-10-31 15:15:55 +00001492 // If the list is not empty at this point, either a decoding error terminated
1493 // the while-loop, or the list must hold exactly one CNG packet.
1494 assert(packet_list->empty() || *decoded_length < 0 ||
1495 (packet_list->size() == 1 && packet &&
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001496 decoder_database_->IsComfortNoise(packet->header.payloadType)));
1497 return 0;
1498}
1499
1500void NetEqImpl::DoNormal(const int16_t* decoded_buffer, size_t decoded_length,
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001501 AudioDecoder::SpeechType speech_type, bool play_dtmf) {
henrik.lundin@webrtc.org40d3fc62013-09-18 12:19:50 +00001502 assert(normal_.get());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001503 assert(mute_factor_array_.get());
henrik.lundin@webrtc.org40d3fc62013-09-18 12:19:50 +00001504 normal_->Process(decoded_buffer, decoded_length, last_mode_,
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00001505 mute_factor_array_.get(), algorithm_buffer_.get());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001506 if (decoded_length != 0) {
1507 last_mode_ = kModeNormal;
1508 }
1509
1510 // If last packet was decoded as an inband CNG, set mode to CNG instead.
1511 if ((speech_type == AudioDecoder::kComfortNoise)
1512 || ((last_mode_ == kModeCodecInternalCng)
1513 && (decoded_length == 0))) {
1514 // TODO(hlundin): Remove second part of || statement above.
1515 last_mode_ = kModeCodecInternalCng;
1516 }
1517
1518 if (!play_dtmf) {
1519 dtmf_tone_generator_->Reset();
1520 }
1521}
1522
1523void NetEqImpl::DoMerge(int16_t* decoded_buffer, size_t decoded_length,
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001524 AudioDecoder::SpeechType speech_type, bool play_dtmf) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001525 assert(mute_factor_array_.get());
henrik.lundin@webrtc.org40d3fc62013-09-18 12:19:50 +00001526 assert(merge_.get());
Peter Kastingdce40cf2015-08-24 14:52:23 -07001527 size_t new_length = merge_->Process(decoded_buffer, decoded_length,
1528 mute_factor_array_.get(),
1529 algorithm_buffer_.get());
1530 size_t expand_length_correction = new_length -
1531 decoded_length / algorithm_buffer_->Channels();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001532
1533 // Update in-call and post-call statistics.
1534 if (expand_->MuteFactor(0) == 0) {
1535 // Expand generates only noise.
minyue@webrtc.orgc11348b2015-02-10 08:35:38 +00001536 stats_.ExpandedNoiseSamples(expand_length_correction);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001537 } else {
1538 // Expansion generates more than only noise.
minyue@webrtc.orgc11348b2015-02-10 08:35:38 +00001539 stats_.ExpandedVoiceSamples(expand_length_correction);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001540 }
1541
1542 last_mode_ = kModeMerge;
1543 // If last packet was decoded as an inband CNG, set mode to CNG instead.
1544 if (speech_type == AudioDecoder::kComfortNoise) {
1545 last_mode_ = kModeCodecInternalCng;
1546 }
1547 expand_->Reset();
1548 if (!play_dtmf) {
1549 dtmf_tone_generator_->Reset();
1550 }
1551}
1552
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001553int NetEqImpl::DoExpand(bool play_dtmf) {
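  // Keep generating expand data until the sync buffer holds at least one
  // full output block of future samples beyond the expand overlap region.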
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001554 while ((sync_buffer_->FutureLength() - expand_->overlap_length()) <
Peter Kastingdce40cf2015-08-24 14:52:23 -07001555 output_size_samples_) {
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001556 algorithm_buffer_->Clear();
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00001557 int return_value = expand_->Process(algorithm_buffer_.get());
Peter Kastingdce40cf2015-08-24 14:52:23 -07001558 size_t length = algorithm_buffer_->Size();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001559
1560 // Update in-call and post-call statistics.
1561 if (expand_->MuteFactor(0) == 0) {
1562 // Expand operation generates only noise.
1563 stats_.ExpandedNoiseSamples(length);
1564 } else {
1565 // Expand operation generates more than only noise.
1566 stats_.ExpandedVoiceSamples(length);
1567 }
1568
1569 last_mode_ = kModeExpand;
1570
1571 if (return_value < 0) {
1572 return return_value;
1573 }
1574
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001575 sync_buffer_->PushBack(*algorithm_buffer_);
1576 algorithm_buffer_->Clear();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001577 }
1578 if (!play_dtmf) {
1579 dtmf_tone_generator_->Reset();
1580 }
henrik.lundinb1fb72b2016-05-03 08:18:47 -07001581
1582 if (!generated_noise_stopwatch_) {
1583 // Start a new stopwatch since we may be covering for a lost CNG packet.
1584 generated_noise_stopwatch_ = tick_timer_->GetNewStopwatch();
1585 }
1586
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001587 return 0;
1588}
1589
Henrik Lundincf808d22015-05-27 14:33:29 +02001590int NetEqImpl::DoAccelerate(int16_t* decoded_buffer,
1591 size_t decoded_length,
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001592 AudioDecoder::SpeechType speech_type,
Henrik Lundincf808d22015-05-27 14:33:29 +02001593 bool play_dtmf,
1594 bool fast_accelerate) {
Peter Kastingdce40cf2015-08-24 14:52:23 -07001595 const size_t required_samples =
1596 static_cast<size_t>(240 * fs_mult_); // Must have 30 ms.
turaj@webrtc.org362a55e2013-09-20 16:25:28 +00001597 size_t borrowed_samples_per_channel = 0;
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001598 size_t num_channels = algorithm_buffer_->Channels();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001599 size_t decoded_length_per_channel = decoded_length / num_channels;
1600 if (decoded_length_per_channel < required_samples) {
1601 // Must move data from the |sync_buffer_| in order to get 30 ms.
turaj@webrtc.org362a55e2013-09-20 16:25:28 +00001602 borrowed_samples_per_channel = static_cast<int>(required_samples -
1603 decoded_length_per_channel);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001604 memmove(&decoded_buffer[borrowed_samples_per_channel * num_channels],
1605 decoded_buffer,
1606 sizeof(int16_t) * decoded_length);
1607 sync_buffer_->ReadInterleavedFromEnd(borrowed_samples_per_channel,
1608 decoded_buffer);
1609 decoded_length = required_samples * num_channels;
1610 }
1611
Peter Kastingdce40cf2015-08-24 14:52:23 -07001612 size_t samples_removed;
Henrik Lundincf808d22015-05-27 14:33:29 +02001613 Accelerate::ReturnCodes return_code =
1614 accelerate_->Process(decoded_buffer, decoded_length, fast_accelerate,
1615 algorithm_buffer_.get(), &samples_removed);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001616 stats_.AcceleratedSamples(samples_removed);
1617 switch (return_code) {
1618 case Accelerate::kSuccess:
1619 last_mode_ = kModeAccelerateSuccess;
1620 break;
1621 case Accelerate::kSuccessLowEnergy:
1622 last_mode_ = kModeAccelerateLowEnergy;
1623 break;
1624 case Accelerate::kNoStretch:
1625 last_mode_ = kModeAccelerateFail;
1626 break;
1627 case Accelerate::kError:
1628 // TODO(hlundin): Map to kModeError instead?
1629 last_mode_ = kModeAccelerateFail;
1630 return kAccelerateError;
1631 }
1632
1633 if (borrowed_samples_per_channel > 0) {
1634 // Copy borrowed samples back to the |sync_buffer_|.
turaj@webrtc.org362a55e2013-09-20 16:25:28 +00001635 size_t length = algorithm_buffer_->Size();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001636 if (length < borrowed_samples_per_channel) {
1637 // This destroys the beginning of the buffer, but will not cause any
1638 // problems.
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001639 sync_buffer_->ReplaceAtIndex(*algorithm_buffer_,
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001640 sync_buffer_->Size() -
1641 borrowed_samples_per_channel);
1642 sync_buffer_->PushFrontZeros(borrowed_samples_per_channel - length);
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001643 algorithm_buffer_->PopFront(length);
1644 assert(algorithm_buffer_->Empty());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001645 } else {
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001646 sync_buffer_->ReplaceAtIndex(*algorithm_buffer_,
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001647 borrowed_samples_per_channel,
1648 sync_buffer_->Size() -
1649 borrowed_samples_per_channel);
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001650 algorithm_buffer_->PopFront(borrowed_samples_per_channel);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001651 }
1652 }
1653
1654 // If last packet was decoded as an inband CNG, set mode to CNG instead.
1655 if (speech_type == AudioDecoder::kComfortNoise) {
1656 last_mode_ = kModeCodecInternalCng;
1657 }
1658 if (!play_dtmf) {
1659 dtmf_tone_generator_->Reset();
1660 }
1661 expand_->Reset();
1662 return 0;
1663}
1664
1665int NetEqImpl::DoPreemptiveExpand(int16_t* decoded_buffer,
1666 size_t decoded_length,
1667 AudioDecoder::SpeechType speech_type,
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001668 bool play_dtmf) {
Peter Kastingdce40cf2015-08-24 14:52:23 -07001669 const size_t required_samples =
1670 static_cast<size_t>(240 * fs_mult_); // Must have 30 ms.
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001671 size_t num_channels = algorithm_buffer_->Channels();
Peter Kastingdce40cf2015-08-24 14:52:23 -07001672 size_t borrowed_samples_per_channel = 0;
1673 size_t old_borrowed_samples_per_channel = 0;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001674 size_t decoded_length_per_channel = decoded_length / num_channels;
1675 if (decoded_length_per_channel < required_samples) {
1676 // Must move data from the |sync_buffer_| in order to get 30 ms.
Peter Kastingdce40cf2015-08-24 14:52:23 -07001677 borrowed_samples_per_channel =
1678 required_samples - decoded_length_per_channel;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001679 // Calculate how many of these were already played out.
Peter Kastingf045e4d2015-06-10 21:15:38 -07001680 old_borrowed_samples_per_channel =
Peter Kastingdce40cf2015-08-24 14:52:23 -07001681 (borrowed_samples_per_channel > sync_buffer_->FutureLength()) ?
1682 (borrowed_samples_per_channel - sync_buffer_->FutureLength()) : 0;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001683 memmove(&decoded_buffer[borrowed_samples_per_channel * num_channels],
1684 decoded_buffer,
1685 sizeof(int16_t) * decoded_length);
1686 sync_buffer_->ReadInterleavedFromEnd(borrowed_samples_per_channel,
1687 decoded_buffer);
1688 decoded_length = required_samples * num_channels;
1689 }
1690
Peter Kastingdce40cf2015-08-24 14:52:23 -07001691 size_t samples_added;
henrik.lundin@webrtc.org40d3fc62013-09-18 12:19:50 +00001692 PreemptiveExpand::ReturnCodes return_code = preemptive_expand_->Process(
Peter Kastingdce40cf2015-08-24 14:52:23 -07001693 decoded_buffer, decoded_length,
turaj@webrtc.org362a55e2013-09-20 16:25:28 +00001694 old_borrowed_samples_per_channel,
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00001695 algorithm_buffer_.get(), &samples_added);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001696 stats_.PreemptiveExpandedSamples(samples_added);
1697 switch (return_code) {
1698 case PreemptiveExpand::kSuccess:
1699 last_mode_ = kModePreemptiveExpandSuccess;
1700 break;
1701 case PreemptiveExpand::kSuccessLowEnergy:
1702 last_mode_ = kModePreemptiveExpandLowEnergy;
1703 break;
1704 case PreemptiveExpand::kNoStretch:
1705 last_mode_ = kModePreemptiveExpandFail;
1706 break;
1707 case PreemptiveExpand::kError:
1708 // TODO(hlundin): Map to kModeError instead?
1709 last_mode_ = kModePreemptiveExpandFail;
1710 return kPreemptiveExpandError;
1711 }
1712
1713 if (borrowed_samples_per_channel > 0) {
1714 // Copy borrowed samples back to the |sync_buffer_|.
1715 sync_buffer_->ReplaceAtIndex(
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001716 *algorithm_buffer_, borrowed_samples_per_channel,
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001717 sync_buffer_->Size() - borrowed_samples_per_channel);
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001718 algorithm_buffer_->PopFront(borrowed_samples_per_channel);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001719 }
1720
1721 // If last packet was decoded as an inband CNG, set mode to CNG instead.
1722 if (speech_type == AudioDecoder::kComfortNoise) {
1723 last_mode_ = kModeCodecInternalCng;
1724 }
1725 if (!play_dtmf) {
1726 dtmf_tone_generator_->Reset();
1727 }
1728 expand_->Reset();
1729 return 0;
1730}
1731
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001732int NetEqImpl::DoRfc3389Cng(PacketList* packet_list, bool play_dtmf) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001733 if (!packet_list->empty()) {
1734 // Must have exactly one SID frame at this point.
1735 assert(packet_list->size() == 1);
1736 Packet* packet = packet_list->front();
1737 packet_list->pop_front();
henrik.lundin@webrtc.org73deaad2013-01-31 13:32:51 +00001738 if (!decoder_database_->IsComfortNoise(packet->header.payloadType)) {
henrik.lundin@webrtc.org73deaad2013-01-31 13:32:51 +00001739 LOG(LS_ERROR) << "Trying to decode non-CNG payload as CNG.";
1740 return kOtherError;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001741 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001742 // UpdateParameters() deletes |packet|.
1743 if (comfort_noise_->UpdateParameters(packet) ==
1744 ComfortNoise::kInternalError) {
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001745 algorithm_buffer_->Zeros(output_size_samples_);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001746 return -comfort_noise_->internal_error_code();
1747 }
1748 }
1749 int cn_return = comfort_noise_->Generate(output_size_samples_,
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00001750 algorithm_buffer_.get());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001751 expand_->Reset();
1752 last_mode_ = kModeRfc3389Cng;
1753 if (!play_dtmf) {
1754 dtmf_tone_generator_->Reset();
1755 }
1756 if (cn_return == ComfortNoise::kInternalError) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001757 decoder_error_code_ = comfort_noise_->internal_error_code();
1758 return kComfortNoiseErrorCode;
1759 } else if (cn_return == ComfortNoise::kUnknownPayloadType) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001760 return kUnknownRtpPayloadType;
1761 }
1762 return 0;
1763}
1764
minyuel6d92bf52015-09-23 15:20:39 +02001765void NetEqImpl::DoCodecInternalCng(const int16_t* decoded_buffer,
1766 size_t decoded_length) {
1767 RTC_DCHECK(normal_.get());
1768 RTC_DCHECK(mute_factor_array_.get());
1769 normal_->Process(decoded_buffer, decoded_length, last_mode_,
1770 mute_factor_array_.get(), algorithm_buffer_.get());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001771 last_mode_ = kModeCodecInternalCng;
1772 expand_->Reset();
1773}
1774
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001775int NetEqImpl::DoDtmf(const DtmfEvent& dtmf_event, bool* play_dtmf) {
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001776 // This block of code, and the block further down handling |dtmf_switch|,
1777 // are commented out. Otherwise, playing out-of-band DTMF would fail in the
1778 // VoE test DtmfTest.ManualSuccessfullySendsOutOfBandTelephoneEvents. This is
1779 // equivalent to |dtmf_switch| always being false.
1780 //
1781 // See http://webrtc-codereview.appspot.com/1195004/ for discussion
1782 // on this issue. This change might cause some glitches at the point of
1783 // switch from audio to DTMF. Issue 1545 is filed to track this.
1784 //
1785 // bool dtmf_switch = false;
1786 // if ((last_mode_ != kModeDtmf) && dtmf_tone_generator_->initialized()) {
1787 // // Special case; see below.
1788 // // We must catch this before calling Generate, since |initialized| is
1789 // // modified in that call.
1790 // dtmf_switch = true;
1791 // }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001792
1793 int dtmf_return_value = 0;
1794 if (!dtmf_tone_generator_->initialized()) {
1795 // Initialize if not already done.
1796 dtmf_return_value = dtmf_tone_generator_->Init(fs_hz_, dtmf_event.event_no,
1797 dtmf_event.volume);
1798 }
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001799
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001800 if (dtmf_return_value == 0) {
1801 // Generate DTMF signal.
1802 dtmf_return_value = dtmf_tone_generator_->Generate(output_size_samples_,
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00001803 algorithm_buffer_.get());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001804 }
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001805
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001806 if (dtmf_return_value < 0) {
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001807 algorithm_buffer_->Zeros(output_size_samples_);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001808 return dtmf_return_value;
1809 }
1810
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001811 // if (dtmf_switch) {
1812 // // This is the special case where the previous operation was DTMF
1813 // // overdub, but the current instruction is "regular" DTMF. We must make
1814 // // sure that the DTMF does not have any discontinuities. The first DTMF
1815 // // sample that we generate now must be played out immediately, therefore
1816 // // it must be copied to the speech buffer.
1817 // // TODO(hlundin): This code seems incorrect. (Legacy.) Write test and
1818 // // verify correct operation.
1819 // assert(false);
1820 // // Must generate enough data to replace all of the |sync_buffer_|
1821 // // "future".
1822 // int required_length = sync_buffer_->FutureLength();
1823 // assert(dtmf_tone_generator_->initialized());
1824 // dtmf_return_value = dtmf_tone_generator_->Generate(required_length,
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001825 // algorithm_buffer_);
1826 // assert((size_t) required_length == algorithm_buffer_->Size());
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001827 // if (dtmf_return_value < 0) {
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001828 // algorithm_buffer_->Zeros(output_size_samples_);
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001829 // return dtmf_return_value;
1830 // }
1831 //
1832 // // Overwrite the "future" part of the speech buffer with the new DTMF
1833 // // data.
1834 // // TODO(hlundin): It seems that this overwriting has gone lost.
1835 // // Not adapted for multi-channel yet.
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001836 // assert(algorithm_buffer_->Channels() == 1);
1837 // if (algorithm_buffer_->Channels() != 1) {
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001838 // LOG(LS_WARNING) << "DTMF not supported for more than one channel";
1839 // return kStereoNotSupported;
1840 // }
1841 // // Shuffle the remaining data to the beginning of algorithm buffer.
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001842 // algorithm_buffer_->PopFront(sync_buffer_->FutureLength());
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001843 // }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001844
Peter Kastingb7e50542015-06-11 12:55:50 -07001845 sync_buffer_->IncreaseEndTimestamp(
1846 static_cast<uint32_t>(output_size_samples_));
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001847 expand_->Reset();
1848 last_mode_ = kModeDtmf;
1849
1850 // Set to false because the DTMF is already in the algorithm buffer.
1851 *play_dtmf = false;
1852 return 0;
1853}
1854
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001855void NetEqImpl::DoAlternativePlc(bool increase_timestamp) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001856 AudioDecoder* decoder = decoder_database_->GetActiveDecoder();
Peter Kastingdce40cf2015-08-24 14:52:23 -07001857 size_t length;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001858 if (decoder && decoder->HasDecodePlc()) {
1859 // Use the decoder's packet-loss concealment.
1860 // TODO(hlundin): Will probably need a longer buffer for multi-channel.
1861 int16_t decoded_buffer[kMaxFrameSize];
1862 length = decoder->DecodePlc(1, decoded_buffer);
Peter Kastingdce40cf2015-08-24 14:52:23 -07001863 if (length > 0)
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001864 algorithm_buffer_->PushBackInterleaved(decoded_buffer, length);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001865 } else {
1866 // Do simple zero-stuffing.
1867 length = output_size_samples_;
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001868 algorithm_buffer_->Zeros(length);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001869 // By not advancing the timestamp, NetEq inserts samples.
1870 stats_.AddZeros(length);
1871 }
1872 if (increase_timestamp) {
Peter Kastingb7e50542015-06-11 12:55:50 -07001873 sync_buffer_->IncreaseEndTimestamp(static_cast<uint32_t>(length));
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001874 }
1875 expand_->Reset();
1876}
1877
1878int NetEqImpl::DtmfOverdub(const DtmfEvent& dtmf_event, size_t num_channels,
1879 int16_t* output) const {
1880 size_t out_index = 0;
Peter Kastingdce40cf2015-08-24 14:52:23 -07001881 size_t overdub_length = output_size_samples_; // Default value.
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001882
1883 if (sync_buffer_->dtmf_index() > sync_buffer_->next_index()) {
1884 // Special operation for transition from "DTMF only" to "DTMF overdub".
1885 out_index = std::min(
1886 sync_buffer_->dtmf_index() - sync_buffer_->next_index(),
Peter Kastingdce40cf2015-08-24 14:52:23 -07001887 output_size_samples_);
1888 overdub_length = output_size_samples_ - out_index;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001889 }
1890
henrik.lundin@webrtc.orgfd11bbf2013-09-30 20:38:44 +00001891 AudioMultiVector dtmf_output(num_channels);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001892 int dtmf_return_value = 0;
1893 if (!dtmf_tone_generator_->initialized()) {
1894 dtmf_return_value = dtmf_tone_generator_->Init(fs_hz_, dtmf_event.event_no,
1895 dtmf_event.volume);
1896 }
1897 if (dtmf_return_value == 0) {
1898 dtmf_return_value = dtmf_tone_generator_->Generate(overdub_length,
1899 &dtmf_output);
Peter Kastingdce40cf2015-08-24 14:52:23 -07001900 assert(overdub_length == dtmf_output.Size());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001901 }
1902 dtmf_output.ReadInterleaved(overdub_length, &output[out_index]);
1903 return dtmf_return_value < 0 ? dtmf_return_value : 0;
1904}
1905
Peter Kastingdce40cf2015-08-24 14:52:23 -07001906int NetEqImpl::ExtractPackets(size_t required_samples,
1907 PacketList* packet_list) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001908 bool first_packet = true;
1909 uint8_t prev_payload_type = 0;
1910 uint32_t prev_timestamp = 0;
1911 uint16_t prev_sequence_number = 0;
1912 bool next_packet_available = false;
1913
henrik.lundin@webrtc.orge1d468c2013-01-30 07:37:20 +00001914 const RTPHeader* header = packet_buffer_->NextRtpHeader();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001915 assert(header);
1916 if (!header) {
Henrik Lundind67a2192015-08-03 12:54:37 +02001917 LOG(LS_ERROR) << "Packet buffer unexpectedly empty.";
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001918 return -1;
1919 }
turaj@webrtc.org7df97062013-08-02 18:07:13 +00001920 uint32_t first_timestamp = header->timestamp;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001921 int extracted_samples = 0;
1922
1923 // Packet extraction loop.
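  // Pull packets from the buffer until enough samples have been extracted to
  // cover |required_samples|, or until the next packet in the buffer is not a
  // direct continuation of the current one.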
1924 do {
1925 timestamp_ = header->timestamp;
Peter Kastingdce40cf2015-08-24 14:52:23 -07001926 size_t discard_count = 0;
henrik.lundin@webrtc.orge1d468c2013-01-30 07:37:20 +00001927 Packet* packet = packet_buffer_->GetNextPacket(&discard_count);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001928 // |header| may be invalid after the |packet_buffer_| operation.
1929 header = NULL;
1930 if (!packet) {
Henrik Lundind67a2192015-08-03 12:54:37 +02001931 LOG(LS_ERROR) << "Should always be able to extract a packet here";
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001932 assert(false); // Should always be able to extract a packet here.
1933 return -1;
1934 }
1935 stats_.PacketsDiscarded(discard_count);
henrik.lundin84f8cd62016-04-26 07:45:16 -07001936 stats_.StoreWaitingTime(packet->waiting_time->ElapsedMs());
ossudc431ce2016-08-31 08:51:13 -07001937 assert(!packet->payload.empty());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001938 packet_list->push_back(packet); // Store packet in list.
1939
1940 if (first_packet) {
1941 first_packet = false;
henrik.lundin48ed9302015-10-29 05:36:24 -07001942 if (nack_enabled_) {
1943 RTC_DCHECK(nack_);
1944 // TODO(henrik.lundin): Should we update this for all decoded packets?
1945 nack_->UpdateLastDecodedPacket(packet->header.sequenceNumber,
1946 packet->header.timestamp);
1947 }
1948 prev_sequence_number = packet->header.sequenceNumber;
1949 prev_timestamp = packet->header.timestamp;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001950 prev_payload_type = packet->header.payloadType;
1951 }
1952
1953 // Store number of extracted samples.
1954 int packet_duration = 0;
1955 AudioDecoder* decoder = decoder_database_->GetDecoder(
1956 packet->header.payloadType);
1957 if (decoder) {
ossu17e3fa12016-09-08 04:52:55 -07001958 if (packet->primary) {
1959 packet_duration = decoder->PacketDuration(packet->payload.data(),
1960 packet->payload.size());
minyue@webrtc.orgb28bfa72014-03-21 12:07:40 +00001961 } else {
ossu17e3fa12016-09-08 04:52:55 -07001962 packet_duration = decoder->PacketDurationRedundant(
1963 packet->payload.data(), packet->payload.size());
1964 stats_.SecondaryDecodedSamples(packet_duration);
minyue@webrtc.orgb28bfa72014-03-21 12:07:40 +00001965 }
ossu97ba30e2016-04-25 07:55:58 -07001966 } else if (!decoder_database_->IsComfortNoise(packet->header.payloadType)) {
Henrik Lundind67a2192015-08-03 12:54:37 +02001967 LOG(LS_WARNING) << "Unknown payload type "
1968 << static_cast<int>(packet->header.payloadType);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001969 assert(false);
1970 }
1971 if (packet_duration <= 0) {
1972 // Decoder did not return a packet duration. Assume that the packet
1973 // contains the same number of samples as the previous one.
Peter Kastingdce40cf2015-08-24 14:52:23 -07001974 packet_duration = rtc::checked_cast<int>(decoder_frame_length_);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001975 }
1976 extracted_samples = packet->header.timestamp - first_timestamp +
1977 packet_duration;
1978
1979 // Check what packet is available next.
1980 header = packet_buffer_->NextRtpHeader();
1981 next_packet_available = false;
1982 if (header && prev_payload_type == header->payloadType) {
1983 int16_t seq_no_diff = header->sequenceNumber - prev_sequence_number;
Peter Kastingdce40cf2015-08-24 14:52:23 -07001984 size_t ts_diff = header->timestamp - prev_timestamp;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001985 if (seq_no_diff == 1 ||
1986 (seq_no_diff == 0 && ts_diff == decoder_frame_length_)) {
1987 // The next sequence number is available, or the next part of a packet
1988 // that was split into pieces upon insertion.
1989 next_packet_available = true;
1990 }
1991 prev_sequence_number = header->sequenceNumber;
1992 }
Peter Kastingdce40cf2015-08-24 14:52:23 -07001993 } while (extracted_samples < rtc::checked_cast<int>(required_samples) &&
1994 next_packet_available);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001995
henrik.lundin@webrtc.org61217152014-09-22 08:30:07 +00001996 if (extracted_samples > 0) {
1997 // Delete old packets only when we are going to decode something. Otherwise,
1998 // we could end up in the situation where we never decode anything, since
1999 // all incoming packets are considered too old, but the buffer will also
2000 // never be flooded and flushed.
henrik.lundin@webrtc.org52b42cb2014-11-04 14:03:58 +00002001 packet_buffer_->DiscardAllOldPackets(timestamp_);
henrik.lundin@webrtc.org61217152014-09-22 08:30:07 +00002002 }
2003
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002004 return extracted_samples;
2005}
2006
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002007void NetEqImpl::UpdatePlcComponents(int fs_hz, size_t channels) {
2008 // Delete objects and create new ones.
2009 expand_.reset(expand_factory_->Create(background_noise_.get(),
2010 sync_buffer_.get(), &random_vector_,
Henrik Lundinbef77e22015-08-18 14:58:09 +02002011 &stats_, fs_hz, channels));
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002012 merge_.reset(new Merge(fs_hz, channels, expand_.get(), sync_buffer_.get()));
2013}
2014
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002015void NetEqImpl::SetSampleRateAndChannels(int fs_hz, size_t channels) {
Henrik Lundind67a2192015-08-03 12:54:37 +02002016 LOG(LS_VERBOSE) << "SetSampleRateAndChannels " << fs_hz << " " << channels;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002017 // TODO(hlundin): Change to an enumerator and skip assert.
2018 assert(fs_hz == 8000 || fs_hz == 16000 || fs_hz == 32000 || fs_hz == 48000);
2019 assert(channels > 0);
2020
2021 fs_hz_ = fs_hz;
2022 fs_mult_ = fs_hz / 8000;
Peter Kastingdce40cf2015-08-24 14:52:23 -07002023 output_size_samples_ = static_cast<size_t>(kOutputSizeMs * 8 * fs_mult_);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002024 decoder_frame_length_ = 3 * output_size_samples_; // Initialize to 30ms.
2025
2026 last_mode_ = kModeNormal;
2027
2028 // Create a new array of mute factors and set all to 1.
2029 mute_factor_array_.reset(new int16_t[channels]);
2030 for (size_t i = 0; i < channels; ++i) {
2031 mute_factor_array_[i] = 16384; // 1.0 in Q14.
2032 }
2033
ossu97ba30e2016-04-25 07:55:58 -07002034 ComfortNoiseDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
Karl Wiberg43766482015-08-27 15:22:11 +02002035 if (cng_decoder)
2036 cng_decoder->Reset();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002037
2038 // Reinit post-decode VAD with new sample rate.
2039 assert(vad_.get()); // Cannot be NULL here.
2040 vad_->Init();
2041
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00002042 // Delete algorithm buffer and create a new one.
henrik.lundin@webrtc.orgfd11bbf2013-09-30 20:38:44 +00002043 algorithm_buffer_.reset(new AudioMultiVector(channels));
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00002044
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002045 // Delete sync buffer and create a new one.
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00002046 sync_buffer_.reset(new SyncBuffer(channels, kSyncBufferSize * fs_mult_));
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002047
henrik.lundin@webrtc.orgea257842014-08-07 12:27:37 +00002048 // Delete BackgroundNoise object and create a new one.
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00002049 background_noise_.reset(new BackgroundNoise(channels));
henrik.lundin@webrtc.orgea257842014-08-07 12:27:37 +00002050 background_noise_->set_mode(background_noise_mode_);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002051
2052 // Reset random vector.
2053 random_vector_.Reset();
2054
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002055 UpdatePlcComponents(fs_hz, channels);
2056
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002057 // Move index so that we create a small set of future samples (all 0).
2058 sync_buffer_->set_next_index(sync_buffer_->next_index() -
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002059 expand_->overlap_length());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002060
henrik.lundin@webrtc.org40d3fc62013-09-18 12:19:50 +00002061 normal_.reset(new Normal(fs_hz, decoder_database_.get(), *background_noise_,
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00002062 expand_.get()));
henrik.lundin@webrtc.orgd9faa462014-01-14 10:18:45 +00002063 accelerate_.reset(
2064 accelerate_factory_->Create(fs_hz, channels, *background_noise_));
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002065 preemptive_expand_.reset(preemptive_expand_factory_->Create(
Peter Kastingdce40cf2015-08-24 14:52:23 -07002066 fs_hz, channels, *background_noise_, expand_->overlap_length()));
henrik.lundin@webrtc.org40d3fc62013-09-18 12:19:50 +00002067
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002068 // Delete ComfortNoise object and create a new one.
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00002069 comfort_noise_.reset(new ComfortNoise(fs_hz, decoder_database_.get(),
2070 sync_buffer_.get()));
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002071
2072 // Verify that |decoded_buffer_| is long enough.
2073 if (decoded_buffer_length_ < kMaxFrameSize * channels) {
2074 // Reallocate to larger size.
2075 decoded_buffer_length_ = kMaxFrameSize * channels;
2076 decoded_buffer_.reset(new int16_t[decoded_buffer_length_]);
2077 }
2078
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002079 // Create DecisionLogic if it is not created yet, then communicate new sample
2080 // rate and output size to DecisionLogic object.
2081 if (!decision_logic_.get()) {
henrik.lundin@webrtc.org7cbc4f92014-10-07 06:37:39 +00002082 CreateDecisionLogic();
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002083 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002084 decision_logic_->SetSampleRate(fs_hz_, output_size_samples_);
2085}
2086
henrik.lundin55480f52016-03-08 02:37:57 -08002087NetEqImpl::OutputType NetEqImpl::LastOutputType() {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002088 assert(vad_.get());
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00002089 assert(expand_.get());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002090 if (last_mode_ == kModeCodecInternalCng || last_mode_ == kModeRfc3389Cng) {
henrik.lundin55480f52016-03-08 02:37:57 -08002091 return OutputType::kCNG;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002092 } else if (last_mode_ == kModeExpand && expand_->MuteFactor(0) == 0) {
2093 // Expand mode has faded down to background noise only (very long expand).
henrik.lundin55480f52016-03-08 02:37:57 -08002094 return OutputType::kPLCCNG;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002095 } else if (last_mode_ == kModeExpand) {
henrik.lundin55480f52016-03-08 02:37:57 -08002096 return OutputType::kPLC;
wu@webrtc.org24301a62013-12-13 19:17:43 +00002097 } else if (vad_->running() && !vad_->active_speech()) {
henrik.lundin55480f52016-03-08 02:37:57 -08002098 return OutputType::kVadPassive;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002099 } else {
henrik.lundin55480f52016-03-08 02:37:57 -08002100 return OutputType::kNormalSpeech;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002101 }
2102}
2103
henrik.lundin@webrtc.org7cbc4f92014-10-07 06:37:39 +00002104void NetEqImpl::CreateDecisionLogic() {
Henrik Lundin47b17dc2016-05-10 10:20:59 +02002105 decision_logic_.reset(DecisionLogic::Create(
2106 fs_hz_, output_size_samples_, playout_mode_, decoder_database_.get(),
2107 *packet_buffer_.get(), delay_manager_.get(), buffer_level_filter_.get(),
2108 tick_timer_.get()));
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002109}
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002110} // namespace webrtc