/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_coding/neteq/neteq_impl.h"

#include <assert.h>
#include <memory.h>  // memset

#include <algorithm>
#include <vector>

#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/safe_conversions.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
#include "webrtc/modules/audio_coding/neteq/accelerate.h"
#include "webrtc/modules/audio_coding/neteq/background_noise.h"
#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
#include "webrtc/modules/audio_coding/neteq/comfort_noise.h"
#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
#include "webrtc/modules/audio_coding/neteq/defines.h"
#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
#include "webrtc/modules/audio_coding/neteq/dtmf_buffer.h"
#include "webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h"
#include "webrtc/modules/audio_coding/neteq/expand.h"
#include "webrtc/modules/audio_coding/neteq/merge.h"
#include "webrtc/modules/audio_coding/neteq/nack_tracker.h"
#include "webrtc/modules/audio_coding/neteq/normal.h"
#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
#include "webrtc/modules/audio_coding/neteq/packet.h"
#include "webrtc/modules/audio_coding/neteq/payload_splitter.h"
#include "webrtc/modules/audio_coding/neteq/post_decode_vad.h"
#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
#include "webrtc/modules/include/module_common_types.h"

namespace webrtc {

NetEqImpl::Dependencies::Dependencies(
    const NetEq::Config& config,
    const rtc::scoped_refptr<AudioDecoderFactory>& decoder_factory)
    : tick_timer(new TickTimer),
      buffer_level_filter(new BufferLevelFilter),
      decoder_database(new DecoderDatabase(decoder_factory)),
      delay_peak_detector(new DelayPeakDetector(tick_timer.get())),
      delay_manager(new DelayManager(config.max_packets_in_buffer,
                                     delay_peak_detector.get(),
                                     tick_timer.get())),
      dtmf_buffer(new DtmfBuffer(config.sample_rate_hz)),
      dtmf_tone_generator(new DtmfToneGenerator),
      packet_buffer(
          new PacketBuffer(config.max_packets_in_buffer, tick_timer.get())),
      payload_splitter(new PayloadSplitter),
      timestamp_scaler(new TimestampScaler(*decoder_database)),
      accelerate_factory(new AccelerateFactory),
      expand_factory(new ExpandFactory),
      preemptive_expand_factory(new PreemptiveExpandFactory) {}

NetEqImpl::Dependencies::~Dependencies() = default;

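// Illustrative usage sketch (comments only; not part of the original file).
// The Dependencies struct above bundles every sub-component NetEqImpl needs,
// so tests can inject mocks while production code uses the defaults built in
// the constructor above. Assuming a decoder factory such as
// CreateBuiltinAudioDecoderFactory() is available, construction and the
// typical call pattern would look roughly like this:
//
//   NetEq::Config config;
//   config.sample_rate_hz = 16000;
//   NetEqImpl::Dependencies deps(config, CreateBuiltinAudioDecoderFactory());
//   NetEqImpl neteq(config, std::move(deps));
//   // For each received RTP packet: neteq.InsertPacket(header, payload, now);
//   // Every 10 ms:                  neteq.GetAudio(&frame, &muted);
//
// The |create_components| constructor argument is assumed to default to true
// in neteq_impl.h.
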
NetEqImpl::NetEqImpl(const NetEq::Config& config,
                     Dependencies&& deps,
                     bool create_components)
    : tick_timer_(std::move(deps.tick_timer)),
      buffer_level_filter_(std::move(deps.buffer_level_filter)),
      decoder_database_(std::move(deps.decoder_database)),
      delay_manager_(std::move(deps.delay_manager)),
      delay_peak_detector_(std::move(deps.delay_peak_detector)),
      dtmf_buffer_(std::move(deps.dtmf_buffer)),
      dtmf_tone_generator_(std::move(deps.dtmf_tone_generator)),
      packet_buffer_(std::move(deps.packet_buffer)),
      payload_splitter_(std::move(deps.payload_splitter)),
      timestamp_scaler_(std::move(deps.timestamp_scaler)),
      vad_(new PostDecodeVad()),
      expand_factory_(std::move(deps.expand_factory)),
      accelerate_factory_(std::move(deps.accelerate_factory)),
      preemptive_expand_factory_(std::move(deps.preemptive_expand_factory)),
      last_mode_(kModeNormal),
      decoded_buffer_length_(kMaxFrameSize),
      decoded_buffer_(new int16_t[decoded_buffer_length_]),
      playout_timestamp_(0),
      new_codec_(false),
      timestamp_(0),
      reset_decoder_(false),
      ssrc_(0),
      first_packet_(true),
      error_code_(0),
      decoder_error_code_(0),
      background_noise_mode_(config.background_noise_mode),
      playout_mode_(config.playout_mode),
      enable_fast_accelerate_(config.enable_fast_accelerate),
      nack_enabled_(false),
      enable_muted_state_(config.enable_muted_state) {
  LOG(LS_INFO) << "NetEq config: " << config.ToString();
  int fs = config.sample_rate_hz;
  if (fs != 8000 && fs != 16000 && fs != 32000 && fs != 48000) {
    LOG(LS_ERROR) << "Sample rate " << fs << " Hz not supported. " <<
        "Changing to 8000 Hz.";
    fs = 8000;
  }
  delay_manager_->SetMaximumDelay(config.max_delay_ms);
  fs_hz_ = fs;
  fs_mult_ = fs / 8000;
  last_output_sample_rate_hz_ = fs;
  output_size_samples_ = static_cast<size_t>(kOutputSizeMs * 8 * fs_mult_);
  decoder_frame_length_ = 3 * output_size_samples_;
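  // Worked example, assuming kOutputSizeMs is 10 ms (as defined in
  // neteq_impl.h): at 48000 Hz, fs_mult_ is 6, so output_size_samples_ is
  // 10 * 8 * 6 = 480 samples per channel, i.e. one 10 ms output frame.
  // decoder_frame_length_ then starts at 1440 samples (30 ms) and is updated
  // once actual packets have been decoded.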
  WebRtcSpl_Init();
  if (create_components) {
    SetSampleRateAndChannels(fs, 1);  // Default is 1 channel.
  }
  RTC_DCHECK(!vad_->enabled());
  if (config.enable_post_decode_vad) {
    vad_->Enable();
  }
}

NetEqImpl::~NetEqImpl() = default;

int NetEqImpl::InsertPacket(const WebRtcRTPHeader& rtp_header,
                            rtc::ArrayView<const uint8_t> payload,
                            uint32_t receive_timestamp) {
  TRACE_EVENT0("webrtc", "NetEqImpl::InsertPacket");
  rtc::CritScope lock(&crit_sect_);
  int error =
      InsertPacketInternal(rtp_header, payload, receive_timestamp, false);
  if (error != 0) {
    error_code_ = error;
    return kFail;
  }
  return kOK;
}

int NetEqImpl::InsertSyncPacket(const WebRtcRTPHeader& rtp_header,
                                uint32_t receive_timestamp) {
  rtc::CritScope lock(&crit_sect_);
  const uint8_t kSyncPayload[] = { 's', 'y', 'n', 'c' };
  int error =
      InsertPacketInternal(rtp_header, kSyncPayload, receive_timestamp, true);

  if (error != 0) {
    error_code_ = error;
    return kFail;
  }
  return kOK;
}

namespace {
void SetAudioFrameActivityAndType(bool vad_enabled,
                                  NetEqImpl::OutputType type,
                                  AudioFrame::VADActivity last_vad_activity,
                                  AudioFrame* audio_frame) {
  switch (type) {
    case NetEqImpl::OutputType::kNormalSpeech: {
      audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
      audio_frame->vad_activity_ = AudioFrame::kVadActive;
      break;
    }
    case NetEqImpl::OutputType::kVadPassive: {
      // This should only be reached if the VAD is enabled.
      RTC_DCHECK(vad_enabled);
      audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
      audio_frame->vad_activity_ = AudioFrame::kVadPassive;
      break;
    }
    case NetEqImpl::OutputType::kCNG: {
      audio_frame->speech_type_ = AudioFrame::kCNG;
      audio_frame->vad_activity_ = AudioFrame::kVadPassive;
      break;
    }
    case NetEqImpl::OutputType::kPLC: {
      audio_frame->speech_type_ = AudioFrame::kPLC;
      audio_frame->vad_activity_ = last_vad_activity;
      break;
    }
    case NetEqImpl::OutputType::kPLCCNG: {
      audio_frame->speech_type_ = AudioFrame::kPLCCNG;
      audio_frame->vad_activity_ = AudioFrame::kVadPassive;
      break;
    }
    default:
      RTC_NOTREACHED();
  }
  if (!vad_enabled) {
    // Always set kVadUnknown when receive VAD is inactive.
    audio_frame->vad_activity_ = AudioFrame::kVadUnknown;
  }
}
}  // namespace

int NetEqImpl::GetAudio(AudioFrame* audio_frame, bool* muted) {
  TRACE_EVENT0("webrtc", "NetEqImpl::GetAudio");
  rtc::CritScope lock(&crit_sect_);
  int error = GetAudioInternal(audio_frame, muted);
  if (error != 0) {
    error_code_ = error;
    return kFail;
  }
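  // GetAudio always produces audio in 10 ms frames, so 100 frames per second
  // times samples_per_channel_ must equal the sample rate; the check below
  // encodes that invariant.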
  RTC_DCHECK_EQ(
      audio_frame->sample_rate_hz_,
      rtc::checked_cast<int>(audio_frame->samples_per_channel_ * 100));
  SetAudioFrameActivityAndType(vad_->enabled(), LastOutputType(),
                               last_vad_activity_, audio_frame);
  last_vad_activity_ = audio_frame->vad_activity_;
  last_output_sample_rate_hz_ = audio_frame->sample_rate_hz_;
  RTC_DCHECK(last_output_sample_rate_hz_ == 8000 ||
             last_output_sample_rate_hz_ == 16000 ||
             last_output_sample_rate_hz_ == 32000 ||
             last_output_sample_rate_hz_ == 48000)
      << "Unexpected sample rate " << last_output_sample_rate_hz_;
  return kOK;
}

int NetEqImpl::RegisterPayloadType(NetEqDecoder codec,
                                   const std::string& name,
                                   uint8_t rtp_payload_type) {
  rtc::CritScope lock(&crit_sect_);
  LOG(LS_VERBOSE) << "RegisterPayloadType "
                  << static_cast<int>(rtp_payload_type) << " "
                  << static_cast<int>(codec);
  int ret = decoder_database_->RegisterPayload(rtp_payload_type, codec, name);
  if (ret != DecoderDatabase::kOK) {
    switch (ret) {
      case DecoderDatabase::kInvalidRtpPayloadType:
        error_code_ = kInvalidRtpPayloadType;
        break;
      case DecoderDatabase::kCodecNotSupported:
        error_code_ = kCodecNotSupported;
        break;
      case DecoderDatabase::kDecoderExists:
        error_code_ = kDecoderExists;
        break;
      default:
        error_code_ = kOtherError;
    }
    return kFail;
  }
  return kOK;
}

int NetEqImpl::RegisterExternalDecoder(AudioDecoder* decoder,
                                       NetEqDecoder codec,
                                       const std::string& codec_name,
                                       uint8_t rtp_payload_type) {
  rtc::CritScope lock(&crit_sect_);
  LOG(LS_VERBOSE) << "RegisterExternalDecoder "
                  << static_cast<int>(rtp_payload_type) << " "
                  << static_cast<int>(codec);
  if (!decoder) {
    LOG(LS_ERROR) << "Cannot register external decoder with NULL pointer";
    assert(false);
    return kFail;
  }
  int ret = decoder_database_->InsertExternal(rtp_payload_type, codec,
                                              codec_name, decoder);
  if (ret != DecoderDatabase::kOK) {
    switch (ret) {
      case DecoderDatabase::kInvalidRtpPayloadType:
        error_code_ = kInvalidRtpPayloadType;
        break;
      case DecoderDatabase::kCodecNotSupported:
        error_code_ = kCodecNotSupported;
        break;
      case DecoderDatabase::kDecoderExists:
        error_code_ = kDecoderExists;
        break;
      case DecoderDatabase::kInvalidSampleRate:
        error_code_ = kInvalidSampleRate;
        break;
      case DecoderDatabase::kInvalidPointer:
        error_code_ = kInvalidPointer;
        break;
      default:
        error_code_ = kOtherError;
    }
    return kFail;
  }
  return kOK;
}

int NetEqImpl::RemovePayloadType(uint8_t rtp_payload_type) {
  rtc::CritScope lock(&crit_sect_);
  int ret = decoder_database_->Remove(rtp_payload_type);
  if (ret == DecoderDatabase::kOK) {
    return kOK;
  } else if (ret == DecoderDatabase::kDecoderNotFound) {
    error_code_ = kDecoderNotFound;
  } else {
    error_code_ = kOtherError;
  }
  return kFail;
}

bool NetEqImpl::SetMinimumDelay(int delay_ms) {
  rtc::CritScope lock(&crit_sect_);
  if (delay_ms >= 0 && delay_ms < 10000) {
    assert(delay_manager_.get());
    return delay_manager_->SetMinimumDelay(delay_ms);
  }
  return false;
}

bool NetEqImpl::SetMaximumDelay(int delay_ms) {
  rtc::CritScope lock(&crit_sect_);
  if (delay_ms >= 0 && delay_ms < 10000) {
    assert(delay_manager_.get());
    return delay_manager_->SetMaximumDelay(delay_ms);
  }
  return false;
}

int NetEqImpl::LeastRequiredDelayMs() const {
  rtc::CritScope lock(&crit_sect_);
  assert(delay_manager_.get());
  return delay_manager_->least_required_delay_ms();
}

int NetEqImpl::SetTargetDelay() {
  return kNotImplemented;
}

int NetEqImpl::TargetDelay() {
  return kNotImplemented;
}

int NetEqImpl::CurrentDelayMs() const {
  rtc::CritScope lock(&crit_sect_);
  if (fs_hz_ == 0)
    return 0;
  // Sum up the samples in the packet buffer with the future length of the sync
  // buffer, and divide the sum by the sample rate.
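  // For example, 960 buffered samples at 48000 Hz (48 samples per ms)
  // correspond to a delay of 20 ms.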
  const size_t delay_samples =
      packet_buffer_->NumSamplesInBuffer(decoder_database_.get(),
                                         decoder_frame_length_) +
      sync_buffer_->FutureLength();
  // The division below will truncate.
  const int delay_ms =
      static_cast<int>(delay_samples) / rtc::CheckedDivExact(fs_hz_, 1000);
  return delay_ms;
}

int NetEqImpl::FilteredCurrentDelayMs() const {
  rtc::CritScope lock(&crit_sect_);
  // Calculate the filtered packet buffer level in samples. The value from
  // |buffer_level_filter_| is in number of packets, represented in Q8.
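  // (Q8 means the packet count is scaled by 2^8; e.g. a filtered level of 512
  // represents 2.0 packets. Multiplying by |decoder_frame_length_| and
  // shifting right by 8 converts it to samples.)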
  const size_t packet_buffer_samples =
      (buffer_level_filter_->filtered_current_level() *
       decoder_frame_length_) >>
      8;
  // Sum up the filtered packet buffer level with the future length of the sync
  // buffer, and divide the sum by the sample rate.
  const size_t delay_samples =
      packet_buffer_samples + sync_buffer_->FutureLength();
  // The division below will truncate. The return value is in ms.
  return static_cast<int>(delay_samples) / rtc::CheckedDivExact(fs_hz_, 1000);
}

// Deprecated.
// TODO(henrik.lundin) Delete.
void NetEqImpl::SetPlayoutMode(NetEqPlayoutMode mode) {
  rtc::CritScope lock(&crit_sect_);
  if (mode != playout_mode_) {
    playout_mode_ = mode;
    CreateDecisionLogic();
  }
}

// Deprecated.
// TODO(henrik.lundin) Delete.
NetEqPlayoutMode NetEqImpl::PlayoutMode() const {
  rtc::CritScope lock(&crit_sect_);
  return playout_mode_;
}

int NetEqImpl::NetworkStatistics(NetEqNetworkStatistics* stats) {
  rtc::CritScope lock(&crit_sect_);
  assert(decoder_database_.get());
  const size_t total_samples_in_buffers =
      packet_buffer_->NumSamplesInBuffer(decoder_database_.get(),
                                         decoder_frame_length_) +
      sync_buffer_->FutureLength();
  assert(delay_manager_.get());
  assert(decision_logic_.get());
  stats_.GetNetworkStatistics(fs_hz_, total_samples_in_buffers,
                              decoder_frame_length_, *delay_manager_.get(),
                              *decision_logic_.get(), stats);
  return 0;
}

void NetEqImpl::GetRtcpStatistics(RtcpStatistics* stats) {
  rtc::CritScope lock(&crit_sect_);
  if (stats) {
    rtcp_.GetStatistics(false, stats);
  }
}

void NetEqImpl::GetRtcpStatisticsNoReset(RtcpStatistics* stats) {
  rtc::CritScope lock(&crit_sect_);
  if (stats) {
    rtcp_.GetStatistics(true, stats);
  }
}

void NetEqImpl::EnableVad() {
  rtc::CritScope lock(&crit_sect_);
  assert(vad_.get());
  vad_->Enable();
}

void NetEqImpl::DisableVad() {
  rtc::CritScope lock(&crit_sect_);
  assert(vad_.get());
  vad_->Disable();
}

rtc::Optional<uint32_t> NetEqImpl::GetPlayoutTimestamp() const {
  rtc::CritScope lock(&crit_sect_);
  if (first_packet_ || last_mode_ == kModeRfc3389Cng ||
      last_mode_ == kModeCodecInternalCng) {
    // We don't have a valid RTP timestamp until we have decoded our first
    // RTP packet. Also, the RTP timestamp is not accurate while playing CNG,
    // which is indicated by returning an empty value.
    return rtc::Optional<uint32_t>();
  }
  return rtc::Optional<uint32_t>(
      timestamp_scaler_->ToExternal(playout_timestamp_));
}

int NetEqImpl::last_output_sample_rate_hz() const {
  rtc::CritScope lock(&crit_sect_);
  return last_output_sample_rate_hz_;
}

int NetEqImpl::SetTargetNumberOfChannels() {
  return kNotImplemented;
}

int NetEqImpl::SetTargetSampleRate() {
  return kNotImplemented;
}

int NetEqImpl::LastError() const {
  rtc::CritScope lock(&crit_sect_);
  return error_code_;
}

int NetEqImpl::LastDecoderError() {
  rtc::CritScope lock(&crit_sect_);
  return decoder_error_code_;
}

void NetEqImpl::FlushBuffers() {
  rtc::CritScope lock(&crit_sect_);
  LOG(LS_VERBOSE) << "FlushBuffers";
  packet_buffer_->Flush();
  assert(sync_buffer_.get());
  assert(expand_.get());
  sync_buffer_->Flush();
  sync_buffer_->set_next_index(sync_buffer_->next_index() -
                               expand_->overlap_length());
  // Set to wait for new codec.
  first_packet_ = true;
}

void NetEqImpl::PacketBufferStatistics(int* current_num_packets,
                                       int* max_num_packets) const {
  rtc::CritScope lock(&crit_sect_);
  packet_buffer_->BufferStat(current_num_packets, max_num_packets);
}

void NetEqImpl::EnableNack(size_t max_nack_list_size) {
  rtc::CritScope lock(&crit_sect_);
  if (!nack_enabled_) {
    const int kNackThresholdPackets = 2;
    nack_.reset(NackTracker::Create(kNackThresholdPackets));
    nack_enabled_ = true;
    nack_->UpdateSampleRate(fs_hz_);
  }
  nack_->SetMaxNackListSize(max_nack_list_size);
}

void NetEqImpl::DisableNack() {
  rtc::CritScope lock(&crit_sect_);
  nack_.reset();
  nack_enabled_ = false;
}

std::vector<uint16_t> NetEqImpl::GetNackList(int64_t round_trip_time_ms) const {
  rtc::CritScope lock(&crit_sect_);
  if (!nack_enabled_) {
    return std::vector<uint16_t>();
  }
  RTC_DCHECK(nack_.get());
  return nack_->GetNackList(round_trip_time_ms);
}

const SyncBuffer* NetEqImpl::sync_buffer_for_test() const {
  rtc::CritScope lock(&crit_sect_);
  return sync_buffer_.get();
}

Operations NetEqImpl::last_operation_for_test() const {
  rtc::CritScope lock(&crit_sect_);
  return last_operation_;
}

// Methods below this line are private.

int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
                                    rtc::ArrayView<const uint8_t> payload,
                                    uint32_t receive_timestamp,
                                    bool is_sync_packet) {
  if (payload.empty()) {
    LOG_F(LS_ERROR) << "payload is empty";
    return kInvalidPointer;
  }
  // Sanity checks for sync-packets.
  if (is_sync_packet) {
    if (decoder_database_->IsDtmf(rtp_header.header.payloadType) ||
        decoder_database_->IsRed(rtp_header.header.payloadType) ||
        decoder_database_->IsComfortNoise(rtp_header.header.payloadType)) {
      LOG_F(LS_ERROR) << "Sync-packet with an unacceptable payload type "
                      << static_cast<int>(rtp_header.header.payloadType);
      return kSyncPacketNotAccepted;
    }
    if (first_packet_ || !current_rtp_payload_type_ ||
        rtp_header.header.payloadType != *current_rtp_payload_type_ ||
        rtp_header.header.ssrc != ssrc_) {
      // Even if |current_rtp_payload_type_| is empty, sync-packet isn't
      // accepted.
      LOG_F(LS_ERROR)
          << "Changing codec, SSRC or first packet with sync-packet.";
      return kSyncPacketNotAccepted;
    }
  }
  PacketList packet_list;
  RTPHeader main_header;
  {
    // Convert to Packet.
    // Create |packet| within this separate scope, since it should not be used
    // directly once it's been inserted in the packet list. This way, |packet|
    // is not defined outside of this block.
    Packet* packet = new Packet;
    packet->header.markerBit = false;
    packet->header.payloadType = rtp_header.header.payloadType;
    packet->header.sequenceNumber = rtp_header.header.sequenceNumber;
    packet->header.timestamp = rtp_header.header.timestamp;
    packet->header.ssrc = rtp_header.header.ssrc;
    packet->header.numCSRCs = 0;
    packet->payload_length = payload.size();
    packet->primary = true;
    // Waiting time will be set upon inserting the packet in the buffer.
    RTC_DCHECK(!packet->waiting_time);
    packet->payload = new uint8_t[packet->payload_length];
    packet->sync_packet = is_sync_packet;
    if (!packet->payload) {
      LOG_F(LS_ERROR) << "Payload pointer is NULL.";
    }
    assert(!payload.empty());  // Already checked above.
    memcpy(packet->payload, payload.data(), packet->payload_length);
    // Insert packet in a packet list.
    packet_list.push_back(packet);
    // Save main payloads header for later.
    memcpy(&main_header, &packet->header, sizeof(main_header));
  }

  bool update_sample_rate_and_channels = false;
  // Reinitialize NetEq if it's needed (changed SSRC or first call).
  if ((main_header.ssrc != ssrc_) || first_packet_) {
    // Note: |first_packet_| will be cleared further down in this method, once
    // the packet has been successfully inserted into the packet buffer.

    rtcp_.Init(main_header.sequenceNumber);

    // Flush the packet buffer and DTMF buffer.
    packet_buffer_->Flush();
    dtmf_buffer_->Flush();

    // Store new SSRC.
    ssrc_ = main_header.ssrc;

    // Update audio buffer timestamp.
    sync_buffer_->IncreaseEndTimestamp(main_header.timestamp - timestamp_);

    // Update codecs.
    timestamp_ = main_header.timestamp;

    // Reset timestamp scaling.
    timestamp_scaler_->Reset();

    // Trigger an update of sampling rate and the number of channels.
    update_sample_rate_and_channels = true;
  }

  // Update RTCP statistics, only for regular packets.
  if (!is_sync_packet)
    rtcp_.Update(main_header, receive_timestamp);

  // Check for RED payload type, and separate payloads into several packets.
  if (decoder_database_->IsRed(main_header.payloadType)) {
    assert(!is_sync_packet);  // We had a sanity check for this.
    if (payload_splitter_->SplitRed(&packet_list) != PayloadSplitter::kOK) {
      PacketBuffer::DeleteAllPackets(&packet_list);
      return kRedundancySplitError;
    }
    // Only accept a few RED payloads of the same type as the main data,
    // DTMF events and CNG.
    payload_splitter_->CheckRedPayloads(&packet_list, *decoder_database_);
    // Update the stored main payload header since the main payload has now
    // changed.
    memcpy(&main_header, &packet_list.front()->header, sizeof(main_header));
  }

  // Check payload types.
  if (decoder_database_->CheckPayloadTypes(packet_list) ==
      DecoderDatabase::kDecoderNotFound) {
    PacketBuffer::DeleteAllPackets(&packet_list);
    return kUnknownRtpPayloadType;
  }

  // Scale timestamp to internal domain (only for some codecs).
  timestamp_scaler_->ToInternal(&packet_list);

  // Process DTMF payloads. Cycle through the list of packets, and pick out any
  // DTMF payloads found.
  PacketList::iterator it = packet_list.begin();
  while (it != packet_list.end()) {
    Packet* current_packet = (*it);
    assert(current_packet);
    assert(current_packet->payload);
    if (decoder_database_->IsDtmf(current_packet->header.payloadType)) {
      assert(!current_packet->sync_packet);  // We had a sanity check for this.
      DtmfEvent event;
      int ret = DtmfBuffer::ParseEvent(
          current_packet->header.timestamp,
          current_packet->payload,
          current_packet->payload_length,
          &event);
      if (ret != DtmfBuffer::kOK) {
        PacketBuffer::DeleteAllPackets(&packet_list);
        return kDtmfParsingError;
      }
      if (dtmf_buffer_->InsertEvent(event) != DtmfBuffer::kOK) {
        PacketBuffer::DeleteAllPackets(&packet_list);
        return kDtmfInsertError;
      }
      // TODO(hlundin): Let the destructor of Packet handle the payload.
      delete [] current_packet->payload;
      delete current_packet;
      it = packet_list.erase(it);
    } else {
      ++it;
    }
  }

  // Check for FEC in packets, and separate payloads into several packets.
  int ret = payload_splitter_->SplitFec(&packet_list, decoder_database_.get());
  if (ret != PayloadSplitter::kOK) {
    PacketBuffer::DeleteAllPackets(&packet_list);
    switch (ret) {
      case PayloadSplitter::kUnknownPayloadType:
        return kUnknownRtpPayloadType;
      default:
        return kOtherError;
    }
  }

  // Split payloads into smaller chunks. This also verifies that all payloads
  // are of a known payload type. SplitAudio() method is protected against
  // sync-packets.
  ret = payload_splitter_->SplitAudio(&packet_list, *decoder_database_);
  if (ret != PayloadSplitter::kOK) {
    PacketBuffer::DeleteAllPackets(&packet_list);
    switch (ret) {
      case PayloadSplitter::kUnknownPayloadType:
        return kUnknownRtpPayloadType;
      case PayloadSplitter::kFrameSplitError:
        return kFrameSplitError;
      default:
        return kOtherError;
    }
  }

  // Update bandwidth estimate, if the packet is not sync-packet nor comfort
  // noise.
  if (!packet_list.empty() && !packet_list.front()->sync_packet &&
      !decoder_database_->IsComfortNoise(main_header.payloadType)) {
    // The list can be empty here if we got nothing but DTMF payloads.
    AudioDecoder* decoder =
        decoder_database_->GetDecoder(main_header.payloadType);
    assert(decoder);  // Should always get a valid object, since we have
                      // already checked that the payload types are known.
    decoder->IncomingPacket(packet_list.front()->payload,
                            packet_list.front()->payload_length,
                            packet_list.front()->header.sequenceNumber,
                            packet_list.front()->header.timestamp,
                            receive_timestamp);
  }

  if (nack_enabled_) {
    RTC_DCHECK(nack_);
    if (update_sample_rate_and_channels) {
      nack_->Reset();
    }
    nack_->UpdateLastReceivedPacket(packet_list.front()->header.sequenceNumber,
                                    packet_list.front()->header.timestamp);
  }

  // Insert packets in buffer.
  const size_t buffer_length_before_insert =
      packet_buffer_->NumPacketsInBuffer();
  ret = packet_buffer_->InsertPacketList(
      &packet_list,
      *decoder_database_,
      &current_rtp_payload_type_,
      &current_cng_rtp_payload_type_);
  if (ret == PacketBuffer::kFlushed) {
    // Reset DSP timestamp etc. if packet buffer flushed.
    new_codec_ = true;
    update_sample_rate_and_channels = true;
  } else if (ret != PacketBuffer::kOK) {
    PacketBuffer::DeleteAllPackets(&packet_list);
    return kOtherError;
  }

  if (first_packet_) {
    first_packet_ = false;
    // Update the codec on the next GetAudio call.
    new_codec_ = true;
  }

  if (current_rtp_payload_type_) {
    RTC_DCHECK(decoder_database_->GetDecoderInfo(*current_rtp_payload_type_))
        << "Payload type " << static_cast<int>(*current_rtp_payload_type_)
        << " is unknown where it shouldn't be";
  }

  if (update_sample_rate_and_channels && !packet_buffer_->Empty()) {
    // We do not use |current_rtp_payload_type_| to set |payload_type|, but
    // get the next RTP header from |packet_buffer_| to obtain the payload type.
    // The reason for it is the following corner case. If NetEq receives a
    // CNG packet with a sample rate different than the current CNG then it
    // flushes its buffer, assuming send codec must have been changed. However,
    // payload type of the hypothetically new send codec is not known.
    const RTPHeader* rtp_header = packet_buffer_->NextRtpHeader();
    assert(rtp_header);
    int payload_type = rtp_header->payloadType;
    size_t channels = 1;
    if (!decoder_database_->IsComfortNoise(payload_type)) {
      AudioDecoder* decoder = decoder_database_->GetDecoder(payload_type);
      assert(decoder);  // Payloads are already checked to be valid.
      channels = decoder->Channels();
    }
    const DecoderDatabase::DecoderInfo* decoder_info =
        decoder_database_->GetDecoderInfo(payload_type);
    assert(decoder_info);
    if (decoder_info->SampleRateHz() != fs_hz_ ||
        channels != algorithm_buffer_->Channels()) {
      SetSampleRateAndChannels(decoder_info->SampleRateHz(),
                               channels);
    }
    if (nack_enabled_) {
      RTC_DCHECK(nack_);
      // Update the sample rate even if the rate is not new, because of Reset().
      nack_->UpdateSampleRate(fs_hz_);
    }
  }

  // TODO(hlundin): Move this code to DelayManager class.
  const DecoderDatabase::DecoderInfo* dec_info =
      decoder_database_->GetDecoderInfo(main_header.payloadType);
  assert(dec_info);  // Already checked that the payload type is known.
  delay_manager_->LastDecoderType(dec_info->codec_type);
  if (delay_manager_->last_pack_cng_or_dtmf() == 0) {
    // Calculate the total speech length carried in each packet.
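    // For example, if this insert call added an RTP packet that was split
    // into three 960-sample frames, the packet carries 3 * 960 = 2880
    // samples, i.e. 60 ms of audio at 48000 Hz.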
    const size_t buffer_length_after_insert =
        packet_buffer_->NumPacketsInBuffer();

    if (buffer_length_after_insert > buffer_length_before_insert) {
      const size_t packet_length_samples =
          (buffer_length_after_insert - buffer_length_before_insert) *
          decoder_frame_length_;
      if (packet_length_samples != decision_logic_->packet_length_samples()) {
        decision_logic_->set_packet_length_samples(packet_length_samples);
        delay_manager_->SetPacketAudioLength(
            rtc::checked_cast<int>((1000 * packet_length_samples) / fs_hz_));
      }
    }

    // Update statistics.
    if ((int32_t) (main_header.timestamp - timestamp_) >= 0 &&
        !new_codec_) {
      // Only update statistics if incoming packet is not older than last played
      // out packet, and if new codec flag is not set.
      delay_manager_->Update(main_header.sequenceNumber, main_header.timestamp,
                             fs_hz_);
    }
  } else if (delay_manager_->last_pack_cng_or_dtmf() == -1) {
    // This is first "normal" packet after CNG or DTMF.
    // Reset packet time counter and measure time until next packet,
    // but don't update statistics.
    delay_manager_->set_last_pack_cng_or_dtmf(0);
    delay_manager_->ResetPacketIatCount();
  }
  return 0;
}

int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame, bool* muted) {
  PacketList packet_list;
  DtmfEvent dtmf_event;
  Operations operation;
  bool play_dtmf;
  *muted = false;
  tick_timer_->Increment();
  stats_.IncreaseCounter(output_size_samples_, fs_hz_);

  // Check for muted state.
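  // If muted output is enabled, the Expand component has faded to silence,
  // and no packets are waiting, skip audio generation entirely and just tell
  // the caller via |muted| that this frame carries no rendered audio.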
  if (enable_muted_state_ && expand_->Muted() && packet_buffer_->Empty()) {
    RTC_DCHECK_EQ(last_mode_, kModeExpand);
    playout_timestamp_ += static_cast<uint32_t>(output_size_samples_);
    audio_frame->sample_rate_hz_ = fs_hz_;
    audio_frame->samples_per_channel_ = output_size_samples_;
    audio_frame->timestamp_ =
        first_packet_
            ? 0
            : timestamp_scaler_->ToExternal(playout_timestamp_) -
                  static_cast<uint32_t>(audio_frame->samples_per_channel_);
    audio_frame->num_channels_ = sync_buffer_->Channels();
    stats_.ExpandedNoiseSamples(output_size_samples_);
    *muted = true;
    return 0;
  }

  int return_value = GetDecision(&operation, &packet_list, &dtmf_event,
                                 &play_dtmf);
  if (return_value != 0) {
    last_mode_ = kModeError;
    return return_value;
  }

  AudioDecoder::SpeechType speech_type;
  int length = 0;
  int decode_return_value = Decode(&packet_list, &operation,
                                   &length, &speech_type);

  assert(vad_.get());
  bool sid_frame_available =
      (operation == kRfc3389Cng && !packet_list.empty());
  vad_->Update(decoded_buffer_.get(), static_cast<size_t>(length), speech_type,
               sid_frame_available, fs_hz_);

  if (sid_frame_available || speech_type == AudioDecoder::kComfortNoise) {
    // Start a new stopwatch since we are decoding a new CNG packet.
    generated_noise_stopwatch_ = tick_timer_->GetNewStopwatch();
  }

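  // Each Do* helper in the switch below writes its result into
  // |algorithm_buffer_|, which is then appended to |sync_buffer_| and finally
  // read out into |audio_frame| further down.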
  algorithm_buffer_->Clear();
  switch (operation) {
    case kNormal: {
      DoNormal(decoded_buffer_.get(), length, speech_type, play_dtmf);
      break;
    }
    case kMerge: {
      DoMerge(decoded_buffer_.get(), length, speech_type, play_dtmf);
      break;
    }
    case kExpand: {
      return_value = DoExpand(play_dtmf);
      break;
    }
    case kAccelerate:
    case kFastAccelerate: {
      const bool fast_accelerate =
          enable_fast_accelerate_ && (operation == kFastAccelerate);
      return_value = DoAccelerate(decoded_buffer_.get(), length, speech_type,
                                  play_dtmf, fast_accelerate);
      break;
    }
    case kPreemptiveExpand: {
      return_value = DoPreemptiveExpand(decoded_buffer_.get(), length,
                                        speech_type, play_dtmf);
      break;
    }
    case kRfc3389Cng:
    case kRfc3389CngNoPacket: {
      return_value = DoRfc3389Cng(&packet_list, play_dtmf);
      break;
    }
    case kCodecInternalCng: {
      // This handles the case when there is no transmission and the decoder
      // should produce internal comfort noise.
      // TODO(hlundin): Write test for codec-internal CNG.
      DoCodecInternalCng(decoded_buffer_.get(), length);
      break;
    }
    case kDtmf: {
      // TODO(hlundin): Write test for this.
      return_value = DoDtmf(dtmf_event, &play_dtmf);
      break;
    }
    case kAlternativePlc: {
      // TODO(hlundin): Write test for this.
      DoAlternativePlc(false);
      break;
    }
    case kAlternativePlcIncreaseTimestamp: {
      // TODO(hlundin): Write test for this.
      DoAlternativePlc(true);
      break;
    }
    case kAudioRepetitionIncreaseTimestamp: {
      // TODO(hlundin): Write test for this.
      sync_buffer_->IncreaseEndTimestamp(
          static_cast<uint32_t>(output_size_samples_));
      // Skipping break on purpose. Execution should move on into the
      // next case.
      FALLTHROUGH();
    }
    case kAudioRepetition: {
      // TODO(hlundin): Write test for this.
      // Copy last |output_size_samples_| from |sync_buffer_| to
      // |algorithm_buffer|.
      algorithm_buffer_->PushBackFromIndex(
          *sync_buffer_, sync_buffer_->Size() - output_size_samples_);
      expand_->Reset();
      break;
    }
    case kUndefined: {
      LOG(LS_ERROR) << "Invalid operation kUndefined.";
      assert(false);  // This should not happen.
      last_mode_ = kModeError;
      return kInvalidOperation;
    }
  }  // End of switch.
  last_operation_ = operation;
  if (return_value < 0) {
    return return_value;
  }

  if (last_mode_ != kModeRfc3389Cng) {
    comfort_noise_->Reset();
  }

  // Copy from |algorithm_buffer| to |sync_buffer_|.
  sync_buffer_->PushBack(*algorithm_buffer_);

  // Extract data from |sync_buffer_| to |audio_frame|.
  size_t num_output_samples_per_channel = output_size_samples_;
  size_t num_output_samples = output_size_samples_ * sync_buffer_->Channels();
  if (num_output_samples > AudioFrame::kMaxDataSizeSamples) {
    LOG(LS_WARNING) << "Output array is too short. "
                    << AudioFrame::kMaxDataSizeSamples << " < "
                    << output_size_samples_ << " * "
                    << sync_buffer_->Channels();
    num_output_samples = AudioFrame::kMaxDataSizeSamples;
    num_output_samples_per_channel =
        AudioFrame::kMaxDataSizeSamples / sync_buffer_->Channels();
  }
  sync_buffer_->GetNextAudioInterleaved(num_output_samples_per_channel,
                                        audio_frame);
  audio_frame->sample_rate_hz_ = fs_hz_;
  if (sync_buffer_->FutureLength() < expand_->overlap_length()) {
    // The sync buffer should always contain |overlap_length| samples, but now
    // too many samples have been extracted. Reinstall the |overlap_length|
    // lookahead by moving the index.
    const size_t missing_lookahead_samples =
        expand_->overlap_length() - sync_buffer_->FutureLength();
    RTC_DCHECK_GE(sync_buffer_->next_index(), missing_lookahead_samples);
    sync_buffer_->set_next_index(sync_buffer_->next_index() -
                                 missing_lookahead_samples);
  }
  if (audio_frame->samples_per_channel_ != output_size_samples_) {
    LOG(LS_ERROR) << "audio_frame->samples_per_channel_ ("
                  << audio_frame->samples_per_channel_
                  << ") != output_size_samples_ (" << output_size_samples_
                  << ")";
    // TODO(minyue): treatment of under-run, filling zeros
    memset(audio_frame->data_, 0, num_output_samples * sizeof(int16_t));
    return kSampleUnderrun;
  }

  // Should always have overlap samples left in the |sync_buffer_|.
  RTC_DCHECK_GE(sync_buffer_->FutureLength(), expand_->overlap_length());

  if (play_dtmf) {
    return_value =
        DtmfOverdub(dtmf_event, sync_buffer_->Channels(), audio_frame->data_);
  }

  // Update the background noise parameters if last operation wrote data
  // straight from the decoder to the |sync_buffer_|. That is, none of the
  // operations that modify the signal can be followed by a parameter update.
  if ((last_mode_ == kModeNormal) ||
      (last_mode_ == kModeAccelerateFail) ||
      (last_mode_ == kModePreemptiveExpandFail) ||
      (last_mode_ == kModeRfc3389Cng) ||
      (last_mode_ == kModeCodecInternalCng)) {
    background_noise_->Update(*sync_buffer_, *vad_.get());
  }

  if (operation == kDtmf) {
    // DTMF data was written to the end of |sync_buffer_|.
    // Update index to end of DTMF data in |sync_buffer_|.
    sync_buffer_->set_dtmf_index(sync_buffer_->Size());
  }

  if (last_mode_ != kModeExpand) {
    // If last operation was not expand, calculate the |playout_timestamp_| from
    // the |sync_buffer_|. However, do not update the |playout_timestamp_| if it
    // would be moved "backwards".
    uint32_t temp_timestamp = sync_buffer_->end_timestamp() -
        static_cast<uint32_t>(sync_buffer_->FutureLength());
    if (static_cast<int32_t>(temp_timestamp - playout_timestamp_) > 0) {
      playout_timestamp_ = temp_timestamp;
    }
  } else {
    // Use dead reckoning to estimate the |playout_timestamp_|.
    playout_timestamp_ += static_cast<uint32_t>(output_size_samples_);
  }
  // Set the timestamp in the audio frame to zero before the first packet has
  // been inserted. Otherwise, subtract the frame size in samples to get the
  // timestamp of the first sample in the frame (playout_timestamp_ is the
  // last + 1).
  audio_frame->timestamp_ =
      first_packet_
          ? 0
          : timestamp_scaler_->ToExternal(playout_timestamp_) -
                static_cast<uint32_t>(audio_frame->samples_per_channel_);

  if (!(last_mode_ == kModeRfc3389Cng ||
      last_mode_ == kModeCodecInternalCng ||
      last_mode_ == kModeExpand)) {
    generated_noise_stopwatch_.reset();
  }

  if (decode_return_value) return decode_return_value;
  return return_value;
}

int NetEqImpl::GetDecision(Operations* operation,
                           PacketList* packet_list,
                           DtmfEvent* dtmf_event,
                           bool* play_dtmf) {
  // Initialize output variables.
  *play_dtmf = false;
  *operation = kUndefined;

  assert(sync_buffer_.get());
  uint32_t end_timestamp = sync_buffer_->end_timestamp();
  if (!new_codec_) {
    const uint32_t five_seconds_samples = 5 * fs_hz_;
    packet_buffer_->DiscardOldPackets(end_timestamp, five_seconds_samples);
  }
  const RTPHeader* header = packet_buffer_->NextRtpHeader();

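  // The stopwatch, when it exists, counts GetAudio ticks since noise
  // generation (expand or CNG) started; each tick corresponds to
  // |output_size_samples_| generated samples, which is added to the end
  // timestamp below when comparing against packet timestamps.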
henrik.lundinb1fb72b2016-05-03 08:18:47 -07001068 RTC_DCHECK(!generated_noise_stopwatch_ ||
1069 generated_noise_stopwatch_->ElapsedTicks() >= 1);
1070 uint64_t generated_noise_samples =
1071 generated_noise_stopwatch_
1072 ? (generated_noise_stopwatch_->ElapsedTicks() - 1) *
1073 output_size_samples_ +
1074 decision_logic_->noise_fast_forward()
1075 : 0;
1076
henrik.lundin@webrtc.orgca8cb952014-03-12 10:26:52 +00001077 if (decision_logic_->CngRfc3389On() || last_mode_ == kModeRfc3389Cng) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001078 // Because of timestamp peculiarities, we have to "manually" disallow using
1079 // a CNG packet with the same timestamp as the one that was last played.
1080 // This can happen when using redundancy and will cause the timing to shift.
henrik.lundin@webrtc.org24779fe2014-03-14 12:40:05 +00001081 while (header && decoder_database_->IsComfortNoise(header->payloadType) &&
1082 (end_timestamp >= header->timestamp ||
henrik.lundinb1fb72b2016-05-03 08:18:47 -07001083 end_timestamp + generated_noise_samples > header->timestamp)) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001084 // Don't use this packet, discard it.
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001085 if (packet_buffer_->DiscardNextPacket() != PacketBuffer::kOK) {
1086 assert(false); // Must be ok by design.
1087 }
1088 // Check buffer again.
1089 if (!new_codec_) {
henrik.lundin@webrtc.org52b42cb2014-11-04 14:03:58 +00001090 packet_buffer_->DiscardOldPackets(end_timestamp, 5 * fs_hz_);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001091 }
1092 header = packet_buffer_->NextRtpHeader();
1093 }
1094 }
1095
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00001096 assert(expand_.get());
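// Number of samples in |sync_buffer_| that have not yet been played out,
// excluding the part that overlaps with the expand operation.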
turaj@webrtc.org362a55e2013-09-20 16:25:28 +00001097 const int samples_left = static_cast<int>(sync_buffer_->FutureLength() -
1098 expand_->overlap_length());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001099 if (last_mode_ == kModeAccelerateSuccess ||
1100 last_mode_ == kModeAccelerateLowEnergy ||
1101 last_mode_ == kModePreemptiveExpandSuccess ||
1102 last_mode_ == kModePreemptiveExpandLowEnergy) {
1103 // Subtract (samples_left + output_size_samples_) from sampleMemory.
Peter Kastingdce40cf2015-08-24 14:52:23 -07001104 decision_logic_->AddSampleMemory(
1105 -(samples_left + rtc::checked_cast<int>(output_size_samples_)));
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001106 }
1107
1108 // Check if it is time to play a DTMF event.
Peter Kastingb7e50542015-06-11 12:55:50 -07001109 if (dtmf_buffer_->GetEvent(
1110 static_cast<uint32_t>(
henrik.lundinb1fb72b2016-05-03 08:18:47 -07001111 end_timestamp + generated_noise_samples),
Peter Kastingb7e50542015-06-11 12:55:50 -07001112 dtmf_event)) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001113 *play_dtmf = true;
1114 }
1115
1116 // Get instruction.
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00001117 assert(sync_buffer_.get());
1118 assert(expand_.get());
henrik.lundinb1fb72b2016-05-03 08:18:47 -07001119 generated_noise_samples =
1120 generated_noise_stopwatch_
1121 ? generated_noise_stopwatch_->ElapsedTicks() * output_size_samples_ +
1122 decision_logic_->noise_fast_forward()
1123 : 0;
1124 *operation = decision_logic_->GetDecision(
1125 *sync_buffer_, *expand_, decoder_frame_length_, header, last_mode_,
1126 *play_dtmf, generated_noise_samples, &reset_decoder_);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001127
1128 // Check if we already have enough samples in the |sync_buffer_|. If so,
1129 // change decision to normal, unless the decision was merge, accelerate, or
1130 // preemptive expand.
Peter Kastingdce40cf2015-08-24 14:52:23 -07001131 if (samples_left >= rtc::checked_cast<int>(output_size_samples_) &&
1132 *operation != kMerge &&
1133 *operation != kAccelerate &&
1134 *operation != kFastAccelerate &&
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001135 *operation != kPreemptiveExpand) {
1136 *operation = kNormal;
1137 return 0;
1138 }
1139
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00001140 decision_logic_->ExpandDecision(*operation);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001141
1142 // Check conditions for reset.
1143 if (new_codec_ || *operation == kUndefined) {
1144 // The only valid reason to get kUndefined is that new_codec_ is set.
1145 assert(new_codec_);
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001146 if (*play_dtmf && !header) {
1147 timestamp_ = dtmf_event->timestamp;
1148 } else {
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001149 if (!header) {
Henrik Lundind67a2192015-08-03 12:54:37 +02001150 LOG(LS_ERROR) << "Packet missing where it shouldn't.";
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001151 return -1;
1152 }
1153 timestamp_ = header->timestamp;
ossu108ecec2016-07-08 08:45:18 -07001154 if (*operation == kRfc3389CngNoPacket &&
1155 decoder_database_->IsComfortNoise(header->payloadType)) {
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001156 // Change decision to CNG packet, since we do have a CNG packet, but it
1157 // was considered too early to use. Now, use it anyway.
1158 *operation = kRfc3389Cng;
1159 } else if (*operation != kRfc3389Cng) {
1160 *operation = kNormal;
1161 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001162 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001163 // Adjust |sync_buffer_| timestamp before setting |end_timestamp| to the
1164 // new value.
1165 sync_buffer_->IncreaseEndTimestamp(timestamp_ - end_timestamp);
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001166 end_timestamp = timestamp_;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001167 new_codec_ = false;
1168 decision_logic_->SoftReset();
1169 buffer_level_filter_->Reset();
1170 delay_manager_->Reset();
1171 stats_.ResetMcu();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001172 }
1173
Peter Kastingdce40cf2015-08-24 14:52:23 -07001174 size_t required_samples = output_size_samples_;
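// 10 ms corresponds to 80 samples at 8 kHz; |fs_mult_| scales this to the
// current sample rate.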
1175 const size_t samples_10_ms = static_cast<size_t>(80 * fs_mult_);
1176 const size_t samples_20_ms = 2 * samples_10_ms;
1177 const size_t samples_30_ms = 3 * samples_10_ms;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001178
1179 switch (*operation) {
1180 case kExpand: {
1181 timestamp_ = end_timestamp;
1182 return 0;
1183 }
1184 case kRfc3389CngNoPacket:
1185 case kCodecInternalCng: {
1186 return 0;
1187 }
1188 case kDtmf: {
1189 // TODO(hlundin): Write test for this.
1190 // Update timestamp.
1191 timestamp_ = end_timestamp;
henrik.lundinb1fb72b2016-05-03 08:18:47 -07001192 const uint64_t generated_noise_samples =
1193 generated_noise_stopwatch_
1194 ? generated_noise_stopwatch_->ElapsedTicks() *
1195 output_size_samples_ +
1196 decision_logic_->noise_fast_forward()
1197 : 0;
1198 if (generated_noise_samples > 0 && last_mode_ != kModeDtmf) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001199 // Make a jump in timestamp due to the recently played comfort noise.
Peter Kastingb7e50542015-06-11 12:55:50 -07001200 uint32_t timestamp_jump =
henrik.lundinb1fb72b2016-05-03 08:18:47 -07001201 static_cast<uint32_t>(generated_noise_samples);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001202 sync_buffer_->IncreaseEndTimestamp(timestamp_jump);
1203 timestamp_ += timestamp_jump;
1204 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001205 return 0;
1206 }
Henrik Lundincf808d22015-05-27 14:33:29 +02001207 case kAccelerate:
1208 case kFastAccelerate: {
1209 // In order to do an accelerate we need at least 30 ms of audio data.
Peter Kastingdce40cf2015-08-24 14:52:23 -07001210 if (samples_left >= static_cast<int>(samples_30_ms)) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001211 // Already have enough data, so we do not need to extract any more.
1212 decision_logic_->set_sample_memory(samples_left);
1213 decision_logic_->set_prev_time_scale(true);
1214 return 0;
Peter Kastingdce40cf2015-08-24 14:52:23 -07001215 } else if (samples_left >= static_cast<int>(samples_10_ms) &&
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001216 decoder_frame_length_ >= samples_30_ms) {
1217 // Avoid decoding more data as it might overflow the playout buffer.
1218 *operation = kNormal;
1219 return 0;
Peter Kastingdce40cf2015-08-24 14:52:23 -07001220 } else if (samples_left < static_cast<int>(samples_20_ms) &&
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001221 decoder_frame_length_ < samples_30_ms) {
1222 // Build up decoded data by decoding at least 20 ms of audio data. Do
1223 // not perform accelerate yet, but wait until we only need to do one
1224 // decoding.
1225 required_samples = 2 * output_size_samples_;
1226 *operation = kNormal;
1227 }
1228 // If none of the above is true, we have one of two possible situations:
1229 // (1) 20 ms <= samples_left < 30 ms and decoder_frame_length_ < 30 ms; or
1230 // (2) samples_left < 10 ms and decoder_frame_length_ >= 30 ms.
1231 // In either case, we move on with the accelerate decision, and decode one
1232 // frame now.
1233 break;
1234 }
1235 case kPreemptiveExpand: {
1236 // In order to do a preemptive expand we need at least 30 ms of decoded
1237 // audio data.
Peter Kastingdce40cf2015-08-24 14:52:23 -07001238 if ((samples_left >= static_cast<int>(samples_30_ms)) ||
1239 (samples_left >= static_cast<int>(samples_10_ms) &&
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001240 decoder_frame_length_ >= samples_30_ms)) {
1241 // Already have enough data, so we do not need to extract any more.
1242 // Or, avoid decoding more data as it might overflow the playout buffer.
1243 // Still try preemptive expand, though.
1244 decision_logic_->set_sample_memory(samples_left);
1245 decision_logic_->set_prev_time_scale(true);
1246 return 0;
1247 }
Peter Kastingdce40cf2015-08-24 14:52:23 -07001248 if (samples_left < static_cast<int>(samples_20_ms) &&
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001249 decoder_frame_length_ < samples_30_ms) {
1250 // Build up decoded data by decoding at least 20 ms of audio data.
1251 // Still try to perform preemptive expand.
1252 required_samples = 2 * output_size_samples_;
1253 }
1254 // Move on with the preemptive expand decision.
1255 break;
1256 }
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00001257 case kMerge: {
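// Make sure that the decoding covers at least as many future samples as the
// merge operation requires.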
1258 required_samples =
1259 std::max(merge_->RequiredFutureSamples(), required_samples);
1260 break;
1261 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001262 default: {
1263 // Do nothing.
1264 }
1265 }
1266
1267 // Get packets from buffer.
1268 int extracted_samples = 0;
1269 if (header &&
1270 *operation != kAlternativePlc &&
1271 *operation != kAlternativePlcIncreaseTimestamp &&
1272 *operation != kAudioRepetition &&
1273 *operation != kAudioRepetitionIncreaseTimestamp) {
1274 sync_buffer_->IncreaseEndTimestamp(header->timestamp - end_timestamp);
1275 if (decision_logic_->CngOff()) {
1276 // Adjustment of timestamp only corresponds to an actual packet loss
1277 // if comfort noise is not played. If comfort noise was just played,
1278 // this adjustment of timestamp is only done to get back in sync with the
1279 // stream timestamp; no loss to report.
1280 stats_.LostSamples(header->timestamp - end_timestamp);
1281 }
1282
1283 if (*operation != kRfc3389Cng) {
1284 // We are about to decode and use a non-CNG packet.
1285 decision_logic_->SetCngOff();
1286 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001287
1288 extracted_samples = ExtractPackets(required_samples, packet_list);
1289 if (extracted_samples < 0) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001290 return kPacketBufferCorruption;
1291 }
1292 }
1293
Henrik Lundincf808d22015-05-27 14:33:29 +02001294 if (*operation == kAccelerate || *operation == kFastAccelerate ||
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001295 *operation == kPreemptiveExpand) {
1296 decision_logic_->set_sample_memory(samples_left + extracted_samples);
1297 decision_logic_->set_prev_time_scale(true);
1298 }
1299
Henrik Lundincf808d22015-05-27 14:33:29 +02001300 if (*operation == kAccelerate || *operation == kFastAccelerate) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001301 // Check that we have enough data (30 ms) to do an accelerate.
Peter Kastingdce40cf2015-08-24 14:52:23 -07001302 if (extracted_samples + samples_left < static_cast<int>(samples_30_ms)) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001303 // TODO(hlundin): Write test for this.
1304 // Not enough, do normal operation instead.
1305 *operation = kNormal;
1306 }
1307 }
1308
1309 timestamp_ = end_timestamp;
1310 return 0;
1311}
1312
1313int NetEqImpl::Decode(PacketList* packet_list, Operations* operation,
1314 int* decoded_length,
1315 AudioDecoder::SpeechType* speech_type) {
1316 *speech_type = AudioDecoder::kSpeech;
minyuel6d92bf52015-09-23 15:20:39 +02001317
1318 // When packet_list is empty, we may be in kCodecInternalCng mode, and for
1319 // that we use the current active decoder.
1320 AudioDecoder* decoder = decoder_database_->GetActiveDecoder();
1321
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001322 if (!packet_list->empty()) {
1323 const Packet* packet = packet_list->front();
pkasting@chromium.org0e81fdf2015-02-02 23:54:03 +00001324 uint8_t payload_type = packet->header.payloadType;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001325 if (!decoder_database_->IsComfortNoise(payload_type)) {
1326 decoder = decoder_database_->GetDecoder(payload_type);
1327 assert(decoder);
1328 if (!decoder) {
Henrik Lundind67a2192015-08-03 12:54:37 +02001329 LOG(LS_WARNING) << "Unknown payload type "
1330 << static_cast<int>(payload_type);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001331 PacketBuffer::DeleteAllPackets(packet_list);
1332 return kDecoderNotFound;
1333 }
1334 bool decoder_changed;
1335 decoder_database_->SetActiveDecoder(payload_type, &decoder_changed);
1336 if (decoder_changed) {
1337 // We have a new decoder. Re-init some values.
1338 const DecoderDatabase::DecoderInfo* decoder_info = decoder_database_
1339 ->GetDecoderInfo(payload_type);
1340 assert(decoder_info);
1341 if (!decoder_info) {
Henrik Lundind67a2192015-08-03 12:54:37 +02001342 LOG(LS_WARNING) << "Unknown payload type "
1343 << static_cast<int>(payload_type);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001344 PacketBuffer::DeleteAllPackets(packet_list);
1345 return kDecoderNotFound;
1346 }
tina.legrand@webrtc.orgba5a6c32014-03-23 09:58:48 +00001347 // If the sampling rate or number of channels has changed, we need to
1348 // reset.
kwibergc0f2dcf2016-05-31 06:28:03 -07001349 if (decoder_info->SampleRateHz() != fs_hz_ ||
henrik.lundin@webrtc.org6dba1eb2015-03-18 09:47:08 +00001350 decoder->Channels() != algorithm_buffer_->Channels()) {
tina.legrand@webrtc.orgba5a6c32014-03-23 09:58:48 +00001351 // TODO(tlegrand): Add unittest to cover this event.
kwibergc0f2dcf2016-05-31 06:28:03 -07001352 SetSampleRateAndChannels(decoder_info->SampleRateHz(),
1353 decoder->Channels());
turaj@webrtc.orga6101d72013-10-01 22:01:09 +00001354 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001355 sync_buffer_->set_end_timestamp(timestamp_);
1356 playout_timestamp_ = timestamp_;
1357 }
1358 }
1359 }
1360
1361 if (reset_decoder_) {
1362 // TODO(hlundin): Write test for this.
Karl Wiberg43766482015-08-27 15:22:11 +02001363 if (decoder)
1364 decoder->Reset();
1365
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001366 // Reset comfort noise decoder.
ossu97ba30e2016-04-25 07:55:58 -07001367 ComfortNoiseDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
Karl Wiberg43766482015-08-27 15:22:11 +02001368 if (cng_decoder)
1369 cng_decoder->Reset();
1370
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001371 reset_decoder_ = false;
1372 }
1373
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001374 *decoded_length = 0;
1375 // Update codec-internal PLC state.
1376 if ((*operation == kMerge) && decoder && decoder->HasDecodePlc()) {
1377 decoder->DecodePlc(1, &decoded_buffer_[*decoded_length]);
1378 }
1379
minyuel6d92bf52015-09-23 15:20:39 +02001380 int return_value;
1381 if (*operation == kCodecInternalCng) {
1382 RTC_DCHECK(packet_list->empty());
1383 return_value = DecodeCng(decoder, decoded_length, speech_type);
1384 } else {
1385 return_value = DecodeLoop(packet_list, *operation, decoder,
1386 decoded_length, speech_type);
1387 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001388
1389 if (*decoded_length < 0) {
1390 // Error returned from the decoder.
1391 *decoded_length = 0;
Peter Kastingb7e50542015-06-11 12:55:50 -07001392 sync_buffer_->IncreaseEndTimestamp(
1393 static_cast<uint32_t>(decoder_frame_length_));
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001394 int error_code = 0;
1395 if (decoder)
1396 error_code = decoder->ErrorCode();
1397 if (error_code != 0) {
1398 // Got some error code from the decoder.
1399 decoder_error_code_ = error_code;
1400 return_value = kDecoderErrorCode;
Henrik Lundind67a2192015-08-03 12:54:37 +02001401 LOG(LS_WARNING) << "Decoder returned error code: " << error_code;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001402 } else {
1403 // Decoder does not implement error codes. Return generic error.
1404 return_value = kOtherDecoderError;
Henrik Lundind67a2192015-08-03 12:54:37 +02001405 LOG(LS_WARNING) << "Decoder error (no error code)";
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001406 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001407 *operation = kExpand; // Do expansion to get data instead.
1408 }
1409 if (*speech_type != AudioDecoder::kComfortNoise) {
1410 // Don't increment the timestamp if the codec returned CNG speech type,
1411 // since in this case, we will increment the CNGplayedTS counter.
1412 // Increase with number of samples per channel.
1413 assert(*decoded_length == 0 ||
henrik.lundin@webrtc.org6dba1eb2015-03-18 09:47:08 +00001414 (decoder && decoder->Channels() == sync_buffer_->Channels()));
turaj@webrtc.org362a55e2013-09-20 16:25:28 +00001415 sync_buffer_->IncreaseEndTimestamp(
1416 *decoded_length / static_cast<int>(sync_buffer_->Channels()));
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001417 }
1418 return return_value;
1419}
1420
minyuel6d92bf52015-09-23 15:20:39 +02001421int NetEqImpl::DecodeCng(AudioDecoder* decoder, int* decoded_length,
1422 AudioDecoder::SpeechType* speech_type) {
1423 if (!decoder) {
1424 // This happens when active decoder is not defined.
1425 *decoded_length = -1;
1426 return 0;
1427 }
1428
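// Ask the CNG decoder for more audio until at least one full output frame
// has been produced.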
1429 while (*decoded_length < rtc::checked_cast<int>(output_size_samples_)) {
1430 const int length = decoder->Decode(
1431 nullptr, 0, fs_hz_,
1432 (decoded_buffer_length_ - *decoded_length) * sizeof(int16_t),
1433 &decoded_buffer_[*decoded_length], speech_type);
1434 if (length > 0) {
1435 *decoded_length += length;
minyuel6d92bf52015-09-23 15:20:39 +02001436 } else {
1437 // Error.
1438 LOG(LS_WARNING) << "Failed to decode CNG";
1439 *decoded_length = -1;
1440 break;
1441 }
1442 if (*decoded_length > static_cast<int>(decoded_buffer_length_)) {
1443 // Guard against overflow.
1444 LOG(LS_WARNING) << "Decoded too much CNG.";
1445 return kDecodedTooMuch;
1446 }
1447 }
1448 return 0;
1449}
1450
1451int NetEqImpl::DecodeLoop(PacketList* packet_list, const Operations& operation,
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001452 AudioDecoder* decoder, int* decoded_length,
1453 AudioDecoder::SpeechType* speech_type) {
1454 Packet* packet = NULL;
1455 if (!packet_list->empty()) {
1456 packet = packet_list->front();
1457 }
minyuel6d92bf52015-09-23 15:20:39 +02001458
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001459 // Do decoding.
1460 while (packet &&
1461 !decoder_database_->IsComfortNoise(packet->header.payloadType)) {
1462 assert(decoder); // At this point, we must have a decoder object.
1463 // The number of channels in the |sync_buffer_| should be the same as the
1464 // number of decoder channels.
henrik.lundin@webrtc.org6dba1eb2015-03-18 09:47:08 +00001465 assert(sync_buffer_->Channels() == decoder->Channels());
1466 assert(decoded_buffer_length_ >= kMaxFrameSize * decoder->Channels());
minyuel6d92bf52015-09-23 15:20:39 +02001467 assert(operation == kNormal || operation == kAccelerate ||
1468 operation == kFastAccelerate || operation == kMerge ||
1469 operation == kPreemptiveExpand);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001470 packet_list->pop_front();
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001471 size_t payload_length = packet->payload_length;
Peter Kasting36b7cc32015-06-11 19:57:18 -07001472 int decode_length;
turaj@webrtc.org7b75ac62013-09-26 00:27:56 +00001473 if (packet->sync_packet) {
1474 // Decode to silence with the same frame size as the last decode.
henrik.lundin@webrtc.org6dba1eb2015-03-18 09:47:08 +00001475 memset(&decoded_buffer_[*decoded_length], 0,
1476 decoder_frame_length_ * decoder->Channels() *
1477 sizeof(decoded_buffer_[0]));
Peter Kastingdce40cf2015-08-24 14:52:23 -07001478 decode_length = rtc::checked_cast<int>(decoder_frame_length_);
turaj@webrtc.org7b75ac62013-09-26 00:27:56 +00001479 } else if (!packet->primary) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001480 // This is a redundant payload; call the special decoder method.
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001481 decode_length = decoder->DecodeRedundant(
henrik.lundin@webrtc.org1eda4e32015-02-25 10:02:29 +00001482 packet->payload, packet->payload_length, fs_hz_,
minyue@webrtc.org7f7d7e32015-03-16 12:30:37 +00001483 (decoded_buffer_length_ - *decoded_length) * sizeof(int16_t),
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001484 &decoded_buffer_[*decoded_length], speech_type);
1485 } else {
henrik.lundin@webrtc.org1eda4e32015-02-25 10:02:29 +00001486 decode_length =
minyue@webrtc.org7f7d7e32015-03-16 12:30:37 +00001487 decoder->Decode(
1488 packet->payload, packet->payload_length, fs_hz_,
1489 (decoded_buffer_length_ - *decoded_length) * sizeof(int16_t),
1490 &decoded_buffer_[*decoded_length], speech_type);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001491 }
1492
1493 delete[] packet->payload;
1494 delete packet;
turaj@webrtc.org58cd3162013-10-31 15:15:55 +00001495 packet = NULL;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001496 if (decode_length > 0) {
1497 *decoded_length += decode_length;
1498 // Update |decoder_frame_length_| with number of samples per channel.
henrik.lundin@webrtc.org6dba1eb2015-03-18 09:47:08 +00001499 decoder_frame_length_ =
Peter Kastingdce40cf2015-08-24 14:52:23 -07001500 static_cast<size_t>(decode_length) / decoder->Channels();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001501 } else if (decode_length < 0) {
1502 // Error.
Henrik Lundind67a2192015-08-03 12:54:37 +02001503 LOG(LS_WARNING) << "Decode " << decode_length << " " << payload_length;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001504 *decoded_length = -1;
1505 PacketBuffer::DeleteAllPackets(packet_list);
1506 break;
1507 }
1508 if (*decoded_length > static_cast<int>(decoded_buffer_length_)) {
1509 // Guard against overflow.
Henrik Lundind67a2192015-08-03 12:54:37 +02001510 LOG(LS_WARNING) << "Decoded too much.";
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001511 PacketBuffer::DeleteAllPackets(packet_list);
1512 return kDecodedTooMuch;
1513 }
1514 if (!packet_list->empty()) {
1515 packet = packet_list->front();
1516 } else {
1517 packet = NULL;
1518 }
1519 } // End of decode loop.
1520
turaj@webrtc.org58cd3162013-10-31 15:15:55 +00001521 // If the list is not empty at this point, either a decoding error terminated
1522 // the while-loop, or the list must hold exactly one CNG packet.
1523 assert(packet_list->empty() || *decoded_length < 0 ||
1524 (packet_list->size() == 1 && packet &&
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001525 decoder_database_->IsComfortNoise(packet->header.payloadType)));
1526 return 0;
1527}
1528
1529void NetEqImpl::DoNormal(const int16_t* decoded_buffer, size_t decoded_length,
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001530 AudioDecoder::SpeechType speech_type, bool play_dtmf) {
henrik.lundin@webrtc.org40d3fc62013-09-18 12:19:50 +00001531 assert(normal_.get());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001532 assert(mute_factor_array_.get());
henrik.lundin@webrtc.org40d3fc62013-09-18 12:19:50 +00001533 normal_->Process(decoded_buffer, decoded_length, last_mode_,
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00001534 mute_factor_array_.get(), algorithm_buffer_.get());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001535 if (decoded_length != 0) {
1536 last_mode_ = kModeNormal;
1537 }
1538
1539 // If last packet was decoded as an inband CNG, set mode to CNG instead.
1540 if ((speech_type == AudioDecoder::kComfortNoise)
1541 || ((last_mode_ == kModeCodecInternalCng)
1542 && (decoded_length == 0))) {
1543 // TODO(hlundin): Remove second part of || statement above.
1544 last_mode_ = kModeCodecInternalCng;
1545 }
1546
1547 if (!play_dtmf) {
1548 dtmf_tone_generator_->Reset();
1549 }
1550}
1551
1552void NetEqImpl::DoMerge(int16_t* decoded_buffer, size_t decoded_length,
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001553 AudioDecoder::SpeechType speech_type, bool play_dtmf) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001554 assert(mute_factor_array_.get());
henrik.lundin@webrtc.org40d3fc62013-09-18 12:19:50 +00001555 assert(merge_.get());
Peter Kastingdce40cf2015-08-24 14:52:23 -07001556 size_t new_length = merge_->Process(decoded_buffer, decoded_length,
1557 mute_factor_array_.get(),
1558 algorithm_buffer_.get());
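// Number of samples that the merge operation produced beyond the decoded
// input; these are attributed to expansion in the statistics below.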
1559 size_t expand_length_correction = new_length -
1560 decoded_length / algorithm_buffer_->Channels();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001561
1562 // Update in-call and post-call statistics.
1563 if (expand_->MuteFactor(0) == 0) {
1564 // Expand generates only noise.
minyue@webrtc.orgc11348b2015-02-10 08:35:38 +00001565 stats_.ExpandedNoiseSamples(expand_length_correction);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001566 } else {
1567 // Expansion generates more than just noise.
minyue@webrtc.orgc11348b2015-02-10 08:35:38 +00001568 stats_.ExpandedVoiceSamples(expand_length_correction);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001569 }
1570
1571 last_mode_ = kModeMerge;
1572 // If last packet was decoded as an inband CNG, set mode to CNG instead.
1573 if (speech_type == AudioDecoder::kComfortNoise) {
1574 last_mode_ = kModeCodecInternalCng;
1575 }
1576 expand_->Reset();
1577 if (!play_dtmf) {
1578 dtmf_tone_generator_->Reset();
1579 }
1580}
1581
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001582int NetEqImpl::DoExpand(bool play_dtmf) {
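// Run the expand operation until |sync_buffer_| holds at least one full
// output frame beyond the expand overlap.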
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001583 while ((sync_buffer_->FutureLength() - expand_->overlap_length()) <
Peter Kastingdce40cf2015-08-24 14:52:23 -07001584 output_size_samples_) {
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001585 algorithm_buffer_->Clear();
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00001586 int return_value = expand_->Process(algorithm_buffer_.get());
Peter Kastingdce40cf2015-08-24 14:52:23 -07001587 size_t length = algorithm_buffer_->Size();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001588
1589 // Update in-call and post-call statistics.
1590 if (expand_->MuteFactor(0) == 0) {
1591 // Expand operation generates only noise.
1592 stats_.ExpandedNoiseSamples(length);
1593 } else {
1594 // Expand operation generates more than only noise.
1595 // Expand operation generates more than just noise.
1596 }
1597
1598 last_mode_ = kModeExpand;
1599
1600 if (return_value < 0) {
1601 return return_value;
1602 }
1603
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001604 sync_buffer_->PushBack(*algorithm_buffer_);
1605 algorithm_buffer_->Clear();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001606 }
1607 if (!play_dtmf) {
1608 dtmf_tone_generator_->Reset();
1609 }
henrik.lundinb1fb72b2016-05-03 08:18:47 -07001610
1611 if (!generated_noise_stopwatch_) {
1612 // Start a new stopwatch since we may be covering for a lost CNG packet.
1613 generated_noise_stopwatch_ = tick_timer_->GetNewStopwatch();
1614 }
1615
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001616 return 0;
1617}
1618
Henrik Lundincf808d22015-05-27 14:33:29 +02001619int NetEqImpl::DoAccelerate(int16_t* decoded_buffer,
1620 size_t decoded_length,
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001621 AudioDecoder::SpeechType speech_type,
Henrik Lundincf808d22015-05-27 14:33:29 +02001622 bool play_dtmf,
1623 bool fast_accelerate) {
Peter Kastingdce40cf2015-08-24 14:52:23 -07001624 const size_t required_samples =
1625 static_cast<size_t>(240 * fs_mult_); // Must have 30 ms.
turaj@webrtc.org362a55e2013-09-20 16:25:28 +00001626 size_t borrowed_samples_per_channel = 0;
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001627 size_t num_channels = algorithm_buffer_->Channels();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001628 size_t decoded_length_per_channel = decoded_length / num_channels;
1629 if (decoded_length_per_channel < required_samples) {
1630 // Must move data from the |sync_buffer_| in order to get 30 ms.
turaj@webrtc.org362a55e2013-09-20 16:25:28 +00001631 borrowed_samples_per_channel = static_cast<int>(required_samples -
1632 decoded_length_per_channel);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001633 memmove(&decoded_buffer[borrowed_samples_per_channel * num_channels],
1634 decoded_buffer,
1635 sizeof(int16_t) * decoded_length);
1636 sync_buffer_->ReadInterleavedFromEnd(borrowed_samples_per_channel,
1637 decoded_buffer);
1638 decoded_length = required_samples * num_channels;
1639 }
1640
Peter Kastingdce40cf2015-08-24 14:52:23 -07001641 size_t samples_removed;
Henrik Lundincf808d22015-05-27 14:33:29 +02001642 Accelerate::ReturnCodes return_code =
1643 accelerate_->Process(decoded_buffer, decoded_length, fast_accelerate,
1644 algorithm_buffer_.get(), &samples_removed);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001645 stats_.AcceleratedSamples(samples_removed);
1646 switch (return_code) {
1647 case Accelerate::kSuccess:
1648 last_mode_ = kModeAccelerateSuccess;
1649 break;
1650 case Accelerate::kSuccessLowEnergy:
1651 last_mode_ = kModeAccelerateLowEnergy;
1652 break;
1653 case Accelerate::kNoStretch:
1654 last_mode_ = kModeAccelerateFail;
1655 break;
1656 case Accelerate::kError:
1657 // TODO(hlundin): Map to kModeError instead?
1658 last_mode_ = kModeAccelerateFail;
1659 return kAccelerateError;
1660 }
1661
1662 if (borrowed_samples_per_channel > 0) {
1663 // Copy borrowed samples back to the |sync_buffer_|.
turaj@webrtc.org362a55e2013-09-20 16:25:28 +00001664 size_t length = algorithm_buffer_->Size();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001665 if (length < borrowed_samples_per_channel) {
1666 // This destroys the beginning of the buffer, but will not cause any
1667 // problems.
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001668 sync_buffer_->ReplaceAtIndex(*algorithm_buffer_,
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001669 sync_buffer_->Size() -
1670 borrowed_samples_per_channel);
1671 sync_buffer_->PushFrontZeros(borrowed_samples_per_channel - length);
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001672 algorithm_buffer_->PopFront(length);
1673 assert(algorithm_buffer_->Empty());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001674 } else {
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001675 sync_buffer_->ReplaceAtIndex(*algorithm_buffer_,
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001676 borrowed_samples_per_channel,
1677 sync_buffer_->Size() -
1678 borrowed_samples_per_channel);
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001679 algorithm_buffer_->PopFront(borrowed_samples_per_channel);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001680 }
1681 }
1682
1683 // If last packet was decoded as an inband CNG, set mode to CNG instead.
1684 if (speech_type == AudioDecoder::kComfortNoise) {
1685 last_mode_ = kModeCodecInternalCng;
1686 }
1687 if (!play_dtmf) {
1688 dtmf_tone_generator_->Reset();
1689 }
1690 expand_->Reset();
1691 return 0;
1692}
1693
1694int NetEqImpl::DoPreemptiveExpand(int16_t* decoded_buffer,
1695 size_t decoded_length,
1696 AudioDecoder::SpeechType speech_type,
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001697 bool play_dtmf) {
Peter Kastingdce40cf2015-08-24 14:52:23 -07001698 const size_t required_samples =
1699 static_cast<size_t>(240 * fs_mult_); // Must have 30 ms.
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001700 size_t num_channels = algorithm_buffer_->Channels();
Peter Kastingdce40cf2015-08-24 14:52:23 -07001701 size_t borrowed_samples_per_channel = 0;
1702 size_t old_borrowed_samples_per_channel = 0;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001703 size_t decoded_length_per_channel = decoded_length / num_channels;
1704 if (decoded_length_per_channel < required_samples) {
1705 // Must move data from the |sync_buffer_| in order to get 30 ms.
Peter Kastingdce40cf2015-08-24 14:52:23 -07001706 borrowed_samples_per_channel =
1707 required_samples - decoded_length_per_channel;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001708 // Calculate how many of these were already played out.
Peter Kastingf045e4d2015-06-10 21:15:38 -07001709 old_borrowed_samples_per_channel =
Peter Kastingdce40cf2015-08-24 14:52:23 -07001710 (borrowed_samples_per_channel > sync_buffer_->FutureLength()) ?
1711 (borrowed_samples_per_channel - sync_buffer_->FutureLength()) : 0;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001712 memmove(&decoded_buffer[borrowed_samples_per_channel * num_channels],
1713 decoded_buffer,
1714 sizeof(int16_t) * decoded_length);
1715 sync_buffer_->ReadInterleavedFromEnd(borrowed_samples_per_channel,
1716 decoded_buffer);
1717 decoded_length = required_samples * num_channels;
1718 }
1719
Peter Kastingdce40cf2015-08-24 14:52:23 -07001720 size_t samples_added;
henrik.lundin@webrtc.org40d3fc62013-09-18 12:19:50 +00001721 PreemptiveExpand::ReturnCodes return_code = preemptive_expand_->Process(
Peter Kastingdce40cf2015-08-24 14:52:23 -07001722 decoded_buffer, decoded_length,
turaj@webrtc.org362a55e2013-09-20 16:25:28 +00001723 old_borrowed_samples_per_channel,
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00001724 algorithm_buffer_.get(), &samples_added);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001725 stats_.PreemptiveExpandedSamples(samples_added);
1726 switch (return_code) {
1727 case PreemptiveExpand::kSuccess:
1728 last_mode_ = kModePreemptiveExpandSuccess;
1729 break;
1730 case PreemptiveExpand::kSuccessLowEnergy:
1731 last_mode_ = kModePreemptiveExpandLowEnergy;
1732 break;
1733 case PreemptiveExpand::kNoStretch:
1734 last_mode_ = kModePreemptiveExpandFail;
1735 break;
1736 case PreemptiveExpand::kError:
1737 // TODO(hlundin): Map to kModeError instead?
1738 last_mode_ = kModePreemptiveExpandFail;
1739 return kPreemptiveExpandError;
1740 }
1741
1742 if (borrowed_samples_per_channel > 0) {
1743 // Copy borrowed samples back to the |sync_buffer_|.
1744 sync_buffer_->ReplaceAtIndex(
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001745 *algorithm_buffer_, borrowed_samples_per_channel,
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001746 sync_buffer_->Size() - borrowed_samples_per_channel);
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001747 algorithm_buffer_->PopFront(borrowed_samples_per_channel);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001748 }
1749
1750 // If last packet was decoded as an inband CNG, set mode to CNG instead.
1751 if (speech_type == AudioDecoder::kComfortNoise) {
1752 last_mode_ = kModeCodecInternalCng;
1753 }
1754 if (!play_dtmf) {
1755 dtmf_tone_generator_->Reset();
1756 }
1757 expand_->Reset();
1758 return 0;
1759}
1760
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001761int NetEqImpl::DoRfc3389Cng(PacketList* packet_list, bool play_dtmf) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001762 if (!packet_list->empty()) {
1763 // Must have exactly one SID frame at this point.
1764 assert(packet_list->size() == 1);
1765 Packet* packet = packet_list->front();
1766 packet_list->pop_front();
henrik.lundin@webrtc.org73deaad2013-01-31 13:32:51 +00001767 if (!decoder_database_->IsComfortNoise(packet->header.payloadType)) {
henrik.lundin@webrtc.org73deaad2013-01-31 13:32:51 +00001768 LOG(LS_ERROR) << "Trying to decode non-CNG payload as CNG.";
1769 return kOtherError;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001770 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001771 // UpdateParameters() deletes |packet|.
1772 if (comfort_noise_->UpdateParameters(packet) ==
1773 ComfortNoise::kInternalError) {
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001774 algorithm_buffer_->Zeros(output_size_samples_);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001775 return -comfort_noise_->internal_error_code();
1776 }
1777 }
1778 int cn_return = comfort_noise_->Generate(output_size_samples_,
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00001779 algorithm_buffer_.get());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001780 expand_->Reset();
1781 last_mode_ = kModeRfc3389Cng;
1782 if (!play_dtmf) {
1783 dtmf_tone_generator_->Reset();
1784 }
1785 if (cn_return == ComfortNoise::kInternalError) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001786 decoder_error_code_ = comfort_noise_->internal_error_code();
1787 return kComfortNoiseErrorCode;
1788 } else if (cn_return == ComfortNoise::kUnknownPayloadType) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001789 return kUnknownRtpPayloadType;
1790 }
1791 return 0;
1792}
1793
minyuel6d92bf52015-09-23 15:20:39 +02001794void NetEqImpl::DoCodecInternalCng(const int16_t* decoded_buffer,
1795 size_t decoded_length) {
1796 RTC_DCHECK(normal_.get());
1797 RTC_DCHECK(mute_factor_array_.get());
1798 normal_->Process(decoded_buffer, decoded_length, last_mode_,
1799 mute_factor_array_.get(), algorithm_buffer_.get());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001800 last_mode_ = kModeCodecInternalCng;
1801 expand_->Reset();
1802}
1803
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001804int NetEqImpl::DoDtmf(const DtmfEvent& dtmf_event, bool* play_dtmf) {
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001805 // This block of code, and the block further down handling |dtmf_switch|,
1806 // are commented out. Otherwise, playing out-of-band DTMF would fail in the
1807 // VoE test DtmfTest.ManualSuccessfullySendsOutOfBandTelephoneEvents. This
1808 // is equivalent to |dtmf_switch| always being false.
1809 //
1810 // See http://webrtc-codereview.appspot.com/1195004/ for a discussion of
1811 // this issue. This change might cause some glitches at the point of
1812 // switching from audio to DTMF. Issue 1545 is filed to track this.
1813 //
1814 // bool dtmf_switch = false;
1815 // if ((last_mode_ != kModeDtmf) && dtmf_tone_generator_->initialized()) {
1816 // // Special case; see below.
1817 // // We must catch this before calling Generate, since |initialized| is
1818 // // modified in that call.
1819 // dtmf_switch = true;
1820 // }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001821
1822 int dtmf_return_value = 0;
1823 if (!dtmf_tone_generator_->initialized()) {
1824 // Initialize if not already done.
1825 dtmf_return_value = dtmf_tone_generator_->Init(fs_hz_, dtmf_event.event_no,
1826 dtmf_event.volume);
1827 }
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001828
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001829 if (dtmf_return_value == 0) {
1830 // Generate DTMF signal.
1831 dtmf_return_value = dtmf_tone_generator_->Generate(output_size_samples_,
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00001832 algorithm_buffer_.get());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001833 }
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001834
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001835 if (dtmf_return_value < 0) {
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001836 algorithm_buffer_->Zeros(output_size_samples_);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001837 return dtmf_return_value;
1838 }
1839
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001840 // if (dtmf_switch) {
1841 // // This is the special case where the previous operation was DTMF
1842 // // overdub, but the current instruction is "regular" DTMF. We must make
1843 // // sure that the DTMF does not have any discontinuities. The first DTMF
1844 // // sample that we generate now must be played out immediately, therefore
1845 // // it must be copied to the speech buffer.
1846 // // TODO(hlundin): This code seems incorrect. (Legacy.) Write test and
1847 // // verify correct operation.
1848 // assert(false);
1849 // // Must generate enough data to replace all of the |sync_buffer_|
1850 // // "future".
1851 // int required_length = sync_buffer_->FutureLength();
1852 // assert(dtmf_tone_generator_->initialized());
1853 // dtmf_return_value = dtmf_tone_generator_->Generate(required_length,
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001854 // algorithm_buffer_);
1855 // assert((size_t) required_length == algorithm_buffer_->Size());
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001856 // if (dtmf_return_value < 0) {
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001857 // algorithm_buffer_->Zeros(output_size_samples_);
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001858 // return dtmf_return_value;
1859 // }
1860 //
1861 // // Overwrite the "future" part of the speech buffer with the new DTMF
1862 // // data.
1863 // // TODO(hlundin): It seems that this overwriting has gone lost.
1864 // // Not adapted for multi-channel yet.
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001865 // assert(algorithm_buffer_->Channels() == 1);
1866 // if (algorithm_buffer_->Channels() != 1) {
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001867 // LOG(LS_WARNING) << "DTMF not supported for more than one channel";
1868 // return kStereoNotSupported;
1869 // }
1870 // // Shuffle the remaining data to the beginning of algorithm buffer.
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001871 // algorithm_buffer_->PopFront(sync_buffer_->FutureLength());
turaj@webrtc.org4d06db52013-03-27 18:31:42 +00001872 // }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001873
Peter Kastingb7e50542015-06-11 12:55:50 -07001874 sync_buffer_->IncreaseEndTimestamp(
1875 static_cast<uint32_t>(output_size_samples_));
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001876 expand_->Reset();
1877 last_mode_ = kModeDtmf;
1878
1879 // Set to false because the DTMF is already in the algorithm buffer.
1880 *play_dtmf = false;
1881 return 0;
1882}
1883
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001884void NetEqImpl::DoAlternativePlc(bool increase_timestamp) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001885 AudioDecoder* decoder = decoder_database_->GetActiveDecoder();
Peter Kastingdce40cf2015-08-24 14:52:23 -07001886 size_t length;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001887 if (decoder && decoder->HasDecodePlc()) {
1888 // Use the decoder's packet-loss concealment.
1889 // TODO(hlundin): Will probably need a longer buffer for multi-channel.
1890 int16_t decoded_buffer[kMaxFrameSize];
1891 length = decoder->DecodePlc(1, decoded_buffer);
Peter Kastingdce40cf2015-08-24 14:52:23 -07001892 if (length > 0)
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001893 algorithm_buffer_->PushBackInterleaved(decoded_buffer, length);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001894 } else {
1895 // Do simple zero-stuffing.
1896 length = output_size_samples_;
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00001897 algorithm_buffer_->Zeros(length);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001898 // By not advancing the timestamp, NetEq inserts samples.
1899 stats_.AddZeros(length);
1900 }
1901 if (increase_timestamp) {
Peter Kastingb7e50542015-06-11 12:55:50 -07001902 sync_buffer_->IncreaseEndTimestamp(static_cast<uint32_t>(length));
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001903 }
1904 expand_->Reset();
1905}
1906
1907int NetEqImpl::DtmfOverdub(const DtmfEvent& dtmf_event, size_t num_channels,
1908 int16_t* output) const {
1909 size_t out_index = 0;
Peter Kastingdce40cf2015-08-24 14:52:23 -07001910 size_t overdub_length = output_size_samples_; // Default value.
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001911
1912 if (sync_buffer_->dtmf_index() > sync_buffer_->next_index()) {
1913 // Special operation for transition from "DTMF only" to "DTMF overdub".
1914 out_index = std::min(
1915 sync_buffer_->dtmf_index() - sync_buffer_->next_index(),
Peter Kastingdce40cf2015-08-24 14:52:23 -07001916 output_size_samples_);
1917 overdub_length = output_size_samples_ - out_index;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001918 }
1919
henrik.lundin@webrtc.orgfd11bbf2013-09-30 20:38:44 +00001920 AudioMultiVector dtmf_output(num_channels);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001921 int dtmf_return_value = 0;
1922 if (!dtmf_tone_generator_->initialized()) {
1923 dtmf_return_value = dtmf_tone_generator_->Init(fs_hz_, dtmf_event.event_no,
1924 dtmf_event.volume);
1925 }
1926 if (dtmf_return_value == 0) {
1927 dtmf_return_value = dtmf_tone_generator_->Generate(overdub_length,
1928 &dtmf_output);
Peter Kastingdce40cf2015-08-24 14:52:23 -07001929 assert(overdub_length == dtmf_output.Size());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001930 }
1931 dtmf_output.ReadInterleaved(overdub_length, &output[out_index]);
1932 return dtmf_return_value < 0 ? dtmf_return_value : 0;
1933}
1934
Peter Kastingdce40cf2015-08-24 14:52:23 -07001935int NetEqImpl::ExtractPackets(size_t required_samples,
1936 PacketList* packet_list) {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001937 bool first_packet = true;
1938 uint8_t prev_payload_type = 0;
1939 uint32_t prev_timestamp = 0;
1940 uint16_t prev_sequence_number = 0;
1941 bool next_packet_available = false;
1942
henrik.lundin@webrtc.orge1d468c2013-01-30 07:37:20 +00001943 const RTPHeader* header = packet_buffer_->NextRtpHeader();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001944 assert(header);
1945 if (!header) {
Henrik Lundind67a2192015-08-03 12:54:37 +02001946 LOG(LS_ERROR) << "Packet buffer unexpectedly empty.";
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001947 return -1;
1948 }
turaj@webrtc.org7df97062013-08-02 18:07:13 +00001949 uint32_t first_timestamp = header->timestamp;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001950 int extracted_samples = 0;
1951
1952 // Packet extraction loop.
1953 do {
1954 timestamp_ = header->timestamp;
Peter Kastingdce40cf2015-08-24 14:52:23 -07001955 size_t discard_count = 0;
henrik.lundin@webrtc.orge1d468c2013-01-30 07:37:20 +00001956 Packet* packet = packet_buffer_->GetNextPacket(&discard_count);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001957 // |header| may be invalid after the |packet_buffer_| operation.
1958 header = NULL;
1959 if (!packet) {
Henrik Lundind67a2192015-08-03 12:54:37 +02001960 LOG(LS_ERROR) << "Should always be able to extract a packet here";
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001961 assert(false); // Should always be able to extract a packet here.
1962 return -1;
1963 }
1964 stats_.PacketsDiscarded(discard_count);
henrik.lundin84f8cd62016-04-26 07:45:16 -07001965 stats_.StoreWaitingTime(packet->waiting_time->ElapsedMs());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001966 assert(packet->payload_length > 0);
1967 packet_list->push_back(packet); // Store packet in list.
1968
1969 if (first_packet) {
1970 first_packet = false;
henrik.lundin48ed9302015-10-29 05:36:24 -07001971 if (nack_enabled_) {
1972 RTC_DCHECK(nack_);
1973 // TODO(henrik.lundin): Should we update this for all decoded packets?
1974 nack_->UpdateLastDecodedPacket(packet->header.sequenceNumber,
1975 packet->header.timestamp);
1976 }
1977 prev_sequence_number = packet->header.sequenceNumber;
1978 prev_timestamp = packet->header.timestamp;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00001979 prev_payload_type = packet->header.payloadType;
1980 }
1981
1982 // Store number of extracted samples.
1983 int packet_duration = 0;
1984 AudioDecoder* decoder = decoder_database_->GetDecoder(
1985 packet->header.payloadType);
1986 if (decoder) {
minyue@webrtc.orgb28bfa72014-03-21 12:07:40 +00001987 if (packet->sync_packet) {
Peter Kastingdce40cf2015-08-24 14:52:23 -07001988 packet_duration = rtc::checked_cast<int>(decoder_frame_length_);
minyue@webrtc.orgb28bfa72014-03-21 12:07:40 +00001989 } else {
minyue@webrtc.org2c1bcf22015-02-17 10:17:09 +00001990 if (packet->primary) {
1991 packet_duration = decoder->PacketDuration(packet->payload,
1992 packet->payload_length);
1993 } else {
1994 packet_duration = decoder->
1995 PacketDurationRedundant(packet->payload, packet->payload_length);
1996 stats_.SecondaryDecodedSamples(packet_duration);
1997 }
minyue@webrtc.orgb28bfa72014-03-21 12:07:40 +00001998 }
ossu97ba30e2016-04-25 07:55:58 -07001999 } else if (!decoder_database_->IsComfortNoise(packet->header.payloadType)) {
Henrik Lundind67a2192015-08-03 12:54:37 +02002000 LOG(LS_WARNING) << "Unknown payload type "
2001 << static_cast<int>(packet->header.payloadType);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002002 assert(false);
2003 }
2004 if (packet_duration <= 0) {
2005 // Decoder did not return a packet duration. Assume that the packet
2006 // contains the same number of samples as the previous one.
Peter Kastingdce40cf2015-08-24 14:52:23 -07002007 packet_duration = rtc::checked_cast<int>(decoder_frame_length_);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002008 }
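// Total number of samples extracted so far, counted from the timestamp of
// the first extracted packet.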
2009 extracted_samples = packet->header.timestamp - first_timestamp +
2010 packet_duration;
2011
2012 // Check what packet is available next.
2013 header = packet_buffer_->NextRtpHeader();
2014 next_packet_available = false;
2015 if (header && prev_payload_type == header->payloadType) {
2016 int16_t seq_no_diff = header->sequenceNumber - prev_sequence_number;
Peter Kastingdce40cf2015-08-24 14:52:23 -07002017 size_t ts_diff = header->timestamp - prev_timestamp;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002018 if (seq_no_diff == 1 ||
2019 (seq_no_diff == 0 && ts_diff == decoder_frame_length_)) {
2020 // The next sequence number is available, or the next part of a packet
2021 // that was split into pieces upon insertion.
2022 next_packet_available = true;
2023 }
2024 prev_sequence_number = header->sequenceNumber;
2025 }
Peter Kastingdce40cf2015-08-24 14:52:23 -07002026 } while (extracted_samples < rtc::checked_cast<int>(required_samples) &&
2027 next_packet_available);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002028
henrik.lundin@webrtc.org61217152014-09-22 08:30:07 +00002029 if (extracted_samples > 0) {
2030 // Delete old packets only when we are going to decode something. Otherwise,
2031 // we could end up in the situation where we never decode anything, since
2032 // all incoming packets are considered too old, but the buffer never gets
2033 // full enough to be flushed.
henrik.lundin@webrtc.org52b42cb2014-11-04 14:03:58 +00002034 packet_buffer_->DiscardAllOldPackets(timestamp_);
henrik.lundin@webrtc.org61217152014-09-22 08:30:07 +00002035 }
2036
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002037 return extracted_samples;
2038}
2039
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002040void NetEqImpl::UpdatePlcComponents(int fs_hz, size_t channels) {
2041 // Delete objects and create new ones.
2042 expand_.reset(expand_factory_->Create(background_noise_.get(),
2043 sync_buffer_.get(), &random_vector_,
Henrik Lundinbef77e22015-08-18 14:58:09 +02002044 &stats_, fs_hz, channels));
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002045 merge_.reset(new Merge(fs_hz, channels, expand_.get(), sync_buffer_.get()));
2046}
2047
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002048void NetEqImpl::SetSampleRateAndChannels(int fs_hz, size_t channels) {
Henrik Lundind67a2192015-08-03 12:54:37 +02002049 LOG(LS_VERBOSE) << "SetSampleRateAndChannels " << fs_hz << " " << channels;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002050 // TODO(hlundin): Change to an enumerator and skip assert.
2051 assert(fs_hz == 8000 || fs_hz == 16000 || fs_hz == 32000 || fs_hz == 48000);
2052 assert(channels > 0);
2053
2054 fs_hz_ = fs_hz;
2055 fs_mult_ = fs_hz / 8000;
Peter Kastingdce40cf2015-08-24 14:52:23 -07002056 output_size_samples_ = static_cast<size_t>(kOutputSizeMs * 8 * fs_mult_);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002057 decoder_frame_length_ = 3 * output_size_samples_; // Initialize to 30ms.
2058
2059 last_mode_ = kModeNormal;
2060
2061 // Create a new array of mute factors and set all to 1.
2062 mute_factor_array_.reset(new int16_t[channels]);
2063 for (size_t i = 0; i < channels; ++i) {
2064 mute_factor_array_[i] = 16384; // 1.0 in Q14.
2065 }
2066
ossu97ba30e2016-04-25 07:55:58 -07002067 ComfortNoiseDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
Karl Wiberg43766482015-08-27 15:22:11 +02002068 if (cng_decoder)
2069 cng_decoder->Reset();
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002070
2071 // Reinit post-decode VAD with new sample rate.
2072 assert(vad_.get()); // Cannot be NULL here.
2073 vad_->Init();
2074
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00002075 // Delete algorithm buffer and create a new one.
henrik.lundin@webrtc.orgfd11bbf2013-09-30 20:38:44 +00002076 algorithm_buffer_.reset(new AudioMultiVector(channels));
henrik.lundin@webrtc.orgc487c6a2013-09-02 07:59:30 +00002077
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002078 // Delete sync buffer and create a new one.
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00002079 sync_buffer_.reset(new SyncBuffer(channels, kSyncBufferSize * fs_mult_));
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002080
henrik.lundin@webrtc.orgea257842014-08-07 12:27:37 +00002081 // Delete BackgroundNoise object and create a new one.
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00002082 background_noise_.reset(new BackgroundNoise(channels));
henrik.lundin@webrtc.orgea257842014-08-07 12:27:37 +00002083 background_noise_->set_mode(background_noise_mode_);
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002084
2085 // Reset random vector.
2086 random_vector_.Reset();
2087
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002088 UpdatePlcComponents(fs_hz, channels);
2089
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002090 // Move index so that we create a small set of future samples (all 0).
2091 sync_buffer_->set_next_index(sync_buffer_->next_index() -
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002092 expand_->overlap_length());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002093
henrik.lundin@webrtc.org40d3fc62013-09-18 12:19:50 +00002094 normal_.reset(new Normal(fs_hz, decoder_database_.get(), *background_noise_,
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00002095 expand_.get()));
henrik.lundin@webrtc.orgd9faa462014-01-14 10:18:45 +00002096 accelerate_.reset(
2097 accelerate_factory_->Create(fs_hz, channels, *background_noise_));
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002098 preemptive_expand_.reset(preemptive_expand_factory_->Create(
Peter Kastingdce40cf2015-08-24 14:52:23 -07002099 fs_hz, channels, *background_noise_, expand_->overlap_length()));
henrik.lundin@webrtc.org40d3fc62013-09-18 12:19:50 +00002100
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002101 // Delete ComfortNoise object and create a new one.
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00002102 comfort_noise_.reset(new ComfortNoise(fs_hz, decoder_database_.get(),
2103 sync_buffer_.get()));
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002104
2105 // Verify that |decoded_buffer_| is long enough.
2106 if (decoded_buffer_length_ < kMaxFrameSize * channels) {
2107 // Reallocate to larger size.
2108 decoded_buffer_length_ = kMaxFrameSize * channels;
2109 decoded_buffer_.reset(new int16_t[decoded_buffer_length_]);
2110 }
2111
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002112 // Create DecisionLogic if it is not created yet, then communicate new sample
2113 // rate and output size to DecisionLogic object.
2114 if (!decision_logic_.get()) {
henrik.lundin@webrtc.org7cbc4f92014-10-07 06:37:39 +00002115 CreateDecisionLogic();
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002116 }
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002117 decision_logic_->SetSampleRate(fs_hz_, output_size_samples_);
2118}
2119
henrik.lundin55480f52016-03-08 02:37:57 -08002120NetEqImpl::OutputType NetEqImpl::LastOutputType() {
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002121 assert(vad_.get());
henrik.lundin@webrtc.org0d5da252013-09-18 21:12:38 +00002122 assert(expand_.get());
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002123 if (last_mode_ == kModeCodecInternalCng || last_mode_ == kModeRfc3389Cng) {
henrik.lundin55480f52016-03-08 02:37:57 -08002124 return OutputType::kCNG;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002125 } else if (last_mode_ == kModeExpand && expand_->MuteFactor(0) == 0) {
2126 // Expand mode has faded down to background noise only (very long expand).
henrik.lundin55480f52016-03-08 02:37:57 -08002127 return OutputType::kPLCCNG;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002128 } else if (last_mode_ == kModeExpand) {
henrik.lundin55480f52016-03-08 02:37:57 -08002129 return OutputType::kPLC;
wu@webrtc.org24301a62013-12-13 19:17:43 +00002130 } else if (vad_->running() && !vad_->active_speech()) {
henrik.lundin55480f52016-03-08 02:37:57 -08002131 return OutputType::kVadPassive;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002132 } else {
henrik.lundin55480f52016-03-08 02:37:57 -08002133 return OutputType::kNormalSpeech;
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002134 }
2135}
2136
henrik.lundin@webrtc.org7cbc4f92014-10-07 06:37:39 +00002137void NetEqImpl::CreateDecisionLogic() {
Henrik Lundin47b17dc2016-05-10 10:20:59 +02002138 decision_logic_.reset(DecisionLogic::Create(
2139 fs_hz_, output_size_samples_, playout_mode_, decoder_database_.get(),
2140 *packet_buffer_.get(), delay_manager_.get(), buffer_level_filter_.get(),
2141 tick_timer_.get()));
turaj@webrtc.org8d1cdaa2014-04-11 18:47:55 +00002142}
henrik.lundin@webrtc.orgd94659d2013-01-29 12:09:21 +00002143} // namespace webrtc