/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_mixer/audio_mixer_impl.h"

#include <algorithm>
#include <functional>

#include "webrtc/modules/audio_mixer/audio_frame_manipulator.h"
#include "webrtc/modules/audio_mixer/audio_mixer_defines.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/utility/include/audio_frame_operations.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"

namespace webrtc {
namespace {

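// Bundles an audio source's frame with the metadata used to rank it for
// mixing: mute state, the frame's VAD activity and, for unmuted frames, the
// calculated frame energy.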
class SourceFrame {
 public:
  SourceFrame(MixerAudioSource* p, AudioFrame* a, bool m, bool was_mixed_before)
      : audio_source_(p),
        audio_frame_(a),
        muted_(m),
        was_mixed_before_(was_mixed_before) {
    if (!muted_) {
      energy_ = NewMixerCalculateEnergy(*a);
    }
  }

  // a.shouldMixBefore(b) is used to select mixer participants: unmuted frames
  // are preferred over muted ones, VAD-active frames over passive ones, and
  // higher-energy frames over lower-energy ones.
  bool shouldMixBefore(const SourceFrame& other) const {
    if (muted_ != other.muted_) {
      return other.muted_;
    }

    auto our_activity = audio_frame_->vad_activity_;
    auto other_activity = other.audio_frame_->vad_activity_;

    if (our_activity != other_activity) {
      return our_activity == AudioFrame::kVadActive;
    }

    return energy_ > other.energy_;
  }

  MixerAudioSource* audio_source_;
  AudioFrame* audio_frame_;
  bool muted_;
  uint32_t energy_;
  bool was_mixed_before_;
};

// Remixes a frame between stereo and mono.
void RemixFrame(AudioFrame* frame, size_t number_of_channels) {
  RTC_DCHECK(number_of_channels == 1 || number_of_channels == 2);
  if (frame->num_channels_ == 1 && number_of_channels == 2) {
    AudioFrameOperations::MonoToStereo(frame);
  } else if (frame->num_channels_ == 2 && number_of_channels == 1) {
    AudioFrameOperations::StereoToMono(frame);
  }
}

// Mix |frame| into |mixed_frame|, with saturation protection and upmixing.
// These effects are applied to |frame| itself prior to mixing. Assumes that
// |mixed_frame| always has at least as many channels as |frame|. At most two
// channels (stereo) are supported.
//
void MixFrames(AudioFrame* mixed_frame, AudioFrame* frame, bool use_limiter) {
  RTC_DCHECK_GE(mixed_frame->num_channels_, frame->num_channels_);
  if (use_limiter) {
    // Divide by two to avoid saturation in the mixing.
    // This is only meaningful if the limiter will be used.
    *frame >>= 1;
  }
  RTC_DCHECK_EQ(frame->num_channels_, mixed_frame->num_channels_);
  *mixed_frame += *frame;
}

}  // namespace

MixerAudioSource::MixerAudioSource() : _mixHistory(new NewMixHistory()) {}

MixerAudioSource::~MixerAudioSource() {
  delete _mixHistory;
}

bool MixerAudioSource::IsMixed() const {
  return _mixHistory->IsMixed();
}

NewMixHistory::NewMixHistory() : is_mixed_(0) {}

NewMixHistory::~NewMixHistory() {}

bool NewMixHistory::IsMixed() const {
  return is_mixed_;
}

bool NewMixHistory::WasMixed() const {
  // WasMixed() and IsMixed() return the same value; the different name
  // reflects the caller's perspective, since AudioMixerImpl queries the
  // status that was set during the previous mixing round.
  return IsMixed();
}

int32_t NewMixHistory::SetIsMixed(const bool mixed) {
  is_mixed_ = mixed;
  return 0;
}

void NewMixHistory::ResetMixedStatus() {
  is_mixed_ = false;
}

std::unique_ptr<AudioMixer> AudioMixer::Create(int id) {
  AudioMixerImpl* mixer = new AudioMixerImpl(id);
  if (!mixer->Init()) {
    delete mixer;
    return NULL;
  }
  return std::unique_ptr<AudioMixer>(mixer);
}

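// Typical use of the mixer (illustrative sketch only; it assumes that
// SetMixabilityStatus() and Mix() are reachable through the AudioMixer
// interface returned by Create(), and |some_source| stands for a hypothetical
// MixerAudioSource implementation):
//
//   std::unique_ptr<AudioMixer> mixer = AudioMixer::Create(0);
//   mixer->SetMixabilityStatus(&some_source, true);  // Register a source.
//   AudioFrame mixed;
//   mixer->Mix(48000, 2, &mixed);  // Pull one frame of mixed stereo audio.
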
AudioMixerImpl::AudioMixerImpl(int id)
    : id_(id),
      output_frequency_(kDefaultFrequency),
      sample_size_(0),
      audio_source_list_(),
      additional_audio_source_list_(),
      num_mixed_audio_sources_(0),
      use_limiter_(true),
      time_stamp_(0) {
  thread_checker_.DetachFromThread();
}

AudioMixerImpl::~AudioMixerImpl() {}

bool AudioMixerImpl::Init() {
  crit_.reset(CriticalSectionWrapper::CreateCriticalSection());
  if (crit_.get() == NULL)
    return false;

  cb_crit_.reset(CriticalSectionWrapper::CreateCriticalSection());
  if (cb_crit_.get() == NULL)
    return false;

  Config config;
  config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
  limiter_.reset(AudioProcessing::Create(config));
  if (!limiter_.get())
    return false;

  if (SetOutputFrequency(kDefaultFrequency) == -1)
    return false;

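  // Configure the AudioProcessing instance as a fixed-digital limiter: no
  // compression gain and the limiter enabled. Any configuration failure
  // below aborts Init().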
  if (limiter_->gain_control()->set_mode(GainControl::kFixedDigital) !=
      limiter_->kNoError)
    return false;

  // We smoothly limit the mixed frame to -7 dBFS. -6 would correspond to the
  // divide-by-2 but -7 is used instead to give a bit of headroom since the
  // AGC is not a hard limiter.
  if (limiter_->gain_control()->set_target_level_dbfs(7) != limiter_->kNoError)
    return false;

  if (limiter_->gain_control()->set_compression_gain_db(0) !=
      limiter_->kNoError)
    return false;

  if (limiter_->gain_control()->enable_limiter(true) != limiter_->kNoError)
    return false;

  if (limiter_->gain_control()->Enable(true) != limiter_->kNoError)
    return false;

  return true;
}

void AudioMixerImpl::Mix(int sample_rate,
                         size_t number_of_channels,
                         AudioFrame* audio_frame_for_mixing) {
  RTC_DCHECK(number_of_channels == 1 || number_of_channels == 2);
  RTC_DCHECK(thread_checker_.CalledOnValidThread());
  AudioFrameList mixList;
  AudioFrameList additionalFramesList;
  std::map<int, MixerAudioSource*> mixedAudioSourcesMap;
  {
    CriticalSectionScoped cs(cb_crit_.get());
    Frequency mixing_frequency;

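    // Map the requested sample rate onto one of the supported mixing
    // frequencies; unsupported rates are a programming error.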
    switch (sample_rate) {
      case 8000:
        mixing_frequency = kNbInHz;
        break;
      case 16000:
        mixing_frequency = kWbInHz;
        break;
      case 32000:
        mixing_frequency = kSwbInHz;
        break;
      case 48000:
        mixing_frequency = kFbInHz;
        break;
      default:
        RTC_NOTREACHED();
        return;
    }

    if (OutputFrequency() != mixing_frequency) {
      SetOutputFrequency(mixing_frequency);
    }

    mixList = UpdateToMix(kMaximumAmountOfMixedAudioSources);
    GetAdditionalAudio(&additionalFramesList);
  }

  for (FrameAndMuteInfo& frame_and_mute : mixList) {
    RemixFrame(frame_and_mute.frame, number_of_channels);
  }
  for (FrameAndMuteInfo& frame_and_mute : additionalFramesList) {
    RemixFrame(frame_and_mute.frame, number_of_channels);
  }

  audio_frame_for_mixing->UpdateFrame(
      -1, time_stamp_, NULL, 0, output_frequency_, AudioFrame::kNormalSpeech,
      AudioFrame::kVadPassive, number_of_channels);

  time_stamp_ += static_cast<uint32_t>(sample_size_);

  use_limiter_ = num_mixed_audio_sources_ > 1;

  // We only use the limiter if it supports the output sample rate and
  // we're actually mixing multiple streams.
  MixFromList(audio_frame_for_mixing, mixList, id_, use_limiter_);

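  // Mix the anonymous sources under the output lock; if nothing was mixed,
  // output silence, otherwise run the limiter on the mixed frame.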
  {
    CriticalSectionScoped cs(crit_.get());
    MixAnonomouslyFromList(audio_frame_for_mixing, additionalFramesList);

    if (audio_frame_for_mixing->samples_per_channel_ == 0) {
      // Nothing was mixed, set the audio samples to silence.
      audio_frame_for_mixing->samples_per_channel_ = sample_size_;
      audio_frame_for_mixing->Mute();
    } else {
      // Only call the limiter if we have something to mix.
      LimitMixedAudio(audio_frame_for_mixing);
    }
  }

  // Pass the final result to the level indicator.
  audio_level_.ComputeLevel(*audio_frame_for_mixing);

  return;
}

int32_t AudioMixerImpl::SetOutputFrequency(const Frequency& frequency) {
  CriticalSectionScoped cs(crit_.get());

  output_frequency_ = frequency;
  sample_size_ =
      static_cast<size_t>((output_frequency_ * kFrameDurationInMs) / 1000);

  return 0;
}

AudioMixer::Frequency AudioMixerImpl::OutputFrequency() const {
  CriticalSectionScoped cs(crit_.get());
  return output_frequency_;
}

int32_t AudioMixerImpl::SetMixabilityStatus(MixerAudioSource* audio_source,
                                            bool mixable) {
  if (!mixable) {
    // Anonymous audio sources are in a separate list. Make sure that the
    // audio source is in audio_source_list_ if it is being mixed.
    SetAnonymousMixabilityStatus(audio_source, false);
  }
  size_t numMixedAudioSources;
  {
    CriticalSectionScoped cs(cb_crit_.get());
    const bool isMixed = IsAudioSourceInList(*audio_source, audio_source_list_);
    // API must be called with a new state.
    if (!(mixable ^ isMixed)) {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, id_,
                   "Mixable is already %s", isMixed ? "on" : "off");
      return -1;
    }
    bool success = false;
    if (mixable) {
      success = AddAudioSourceToList(audio_source, &audio_source_list_);
    } else {
      success = RemoveAudioSourceFromList(audio_source, &audio_source_list_);
    }
    if (!success) {
      WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, id_,
                   "failed to %s audio_source", mixable ? "add" : "remove");
      RTC_NOTREACHED();
      return -1;
    }

    size_t numMixedNonAnonymous = audio_source_list_.size();
    if (numMixedNonAnonymous > kMaximumAmountOfMixedAudioSources) {
      numMixedNonAnonymous = kMaximumAmountOfMixedAudioSources;
    }
    numMixedAudioSources =
        numMixedNonAnonymous + additional_audio_source_list_.size();
  }
  // A MixerAudioSource was added or removed. Make sure the number of mixed
  // audio sources is updated under the output lock.
  CriticalSectionScoped cs(crit_.get());
  num_mixed_audio_sources_ = numMixedAudioSources;
  return 0;
}

bool AudioMixerImpl::MixabilityStatus(
    const MixerAudioSource& audio_source) const {
  CriticalSectionScoped cs(cb_crit_.get());
  return IsAudioSourceInList(audio_source, audio_source_list_);
}

int32_t AudioMixerImpl::SetAnonymousMixabilityStatus(
    MixerAudioSource* audio_source,
    bool anonymous) {
  CriticalSectionScoped cs(cb_crit_.get());
  if (IsAudioSourceInList(*audio_source, additional_audio_source_list_)) {
    if (anonymous) {
      return 0;
    }
    if (!RemoveAudioSourceFromList(audio_source,
                                   &additional_audio_source_list_)) {
      WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, id_,
                   "unable to remove audio_source from anonymous list");
      RTC_NOTREACHED();
      return -1;
    }
    return AddAudioSourceToList(audio_source, &audio_source_list_) ? 0 : -1;
  }
  if (!anonymous) {
    return 0;
  }
  const bool mixable =
      RemoveAudioSourceFromList(audio_source, &audio_source_list_);
  if (!mixable) {
    WEBRTC_TRACE(
        kTraceWarning, kTraceAudioMixerServer, id_,
        "audio_source must be registered before turning it into anonymous");
    // Setting anonymous status is only possible if MixerAudioSource is
    // already registered.
    return -1;
  }
  return AddAudioSourceToList(audio_source, &additional_audio_source_list_)
             ? 0
             : -1;
}

bool AudioMixerImpl::AnonymousMixabilityStatus(
    const MixerAudioSource& audio_source) const {
  CriticalSectionScoped cs(cb_crit_.get());
  return IsAudioSourceInList(audio_source, additional_audio_source_list_);
}

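// Selects which audio sources to mix this round: frames are collected from
// all registered sources, sorted with SourceFrame::shouldMixBefore(), and up
// to |maxAudioFrameCounter| unmuted frames are picked. Newly added frames are
// ramped in, and frames that drop out of the mix are ramped out, to avoid
// discontinuities.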
AudioFrameList AudioMixerImpl::UpdateToMix(size_t maxAudioFrameCounter) const {
  AudioFrameList result;
  std::vector<SourceFrame> audioSourceMixingDataList;

  // Get audio from the audio sources and put it in the SourceFrame vector.
  for (MixerAudioSource* audio_source : audio_source_list_) {
    auto audio_frame_with_info = audio_source->GetAudioFrameWithMuted(
        id_, static_cast<int>(output_frequency_));

    auto audio_frame_info = audio_frame_with_info.audio_frame_info;
    AudioFrame* audio_source_audio_frame = audio_frame_with_info.audio_frame;

    if (audio_frame_info == MixerAudioSource::AudioFrameInfo::kError) {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, id_,
                   "failed to GetAudioFrameWithMuted() from participant");
      continue;
    }
    audioSourceMixingDataList.emplace_back(
        audio_source, audio_source_audio_frame,
        audio_frame_info == MixerAudioSource::AudioFrameInfo::kMuted,
        audio_source->_mixHistory->WasMixed());
  }

  // Sort the sources by mixing priority (see SourceFrame::shouldMixBefore).
  std::sort(audioSourceMixingDataList.begin(), audioSourceMixingDataList.end(),
            std::mem_fn(&SourceFrame::shouldMixBefore));

  // Go through the list in order and put the selected frames in |result|.
  for (SourceFrame& p : audioSourceMixingDataList) {
    // Filter muted.
    if (p.muted_) {
      p.audio_source_->_mixHistory->SetIsMixed(false);
      continue;
    }

    // Add frame to result vector for mixing.
    bool is_mixed = false;
    if (maxAudioFrameCounter > 0) {
      --maxAudioFrameCounter;
      if (!p.was_mixed_before_) {
        NewMixerRampIn(p.audio_frame_);
      }
      result.emplace_back(p.audio_frame_, false);
      is_mixed = true;
    }

    // Ramp out sources that were mixed before but did not make the cut this
    // round.
    if (p.was_mixed_before_ && !is_mixed) {
      NewMixerRampOut(p.audio_frame_);
      result.emplace_back(p.audio_frame_, false);
    }

    p.audio_source_->_mixHistory->SetIsMixed(is_mixed);
  }
  return result;
}

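// Collects frames from the anonymous ("additional") audio sources. These are
// mixed on top of the regular sources and are not counted against
// kMaximumAmountOfMixedAudioSources.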
void AudioMixerImpl::GetAdditionalAudio(
    AudioFrameList* additionalFramesList) const {
  WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, id_,
               "GetAdditionalAudio(additionalFramesList)");
  // The GetAudioFrameWithMuted() callback may result in the audio source being
  // removed from additional_audio_source_list_. If that happens it will
  // invalidate any iterators. Create a copy of the audio sources list such
  // that it can be traversed safely.
  MixerAudioSourceList additionalAudioSourceList;
  additionalAudioSourceList.insert(additionalAudioSourceList.begin(),
                                   additional_audio_source_list_.begin(),
                                   additional_audio_source_list_.end());

  for (MixerAudioSourceList::const_iterator audio_source =
           additionalAudioSourceList.begin();
       audio_source != additionalAudioSourceList.end(); ++audio_source) {
    auto audio_frame_with_info =
        (*audio_source)->GetAudioFrameWithMuted(id_, output_frequency_);
    auto ret = audio_frame_with_info.audio_frame_info;
    AudioFrame* audio_frame = audio_frame_with_info.audio_frame;
    if (ret == MixerAudioSource::AudioFrameInfo::kError) {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, id_,
                   "failed to GetAudioFrameWithMuted() from audio_source");
      continue;
    }
    if (audio_frame->samples_per_channel_ == 0) {
      // Empty frame. Don't use it.
      continue;
    }
    additionalFramesList->push_back(FrameAndMuteInfo(
        audio_frame, ret == MixerAudioSource::AudioFrameInfo::kMuted));
  }
}

bool AudioMixerImpl::IsAudioSourceInList(
    const MixerAudioSource& audio_source,
    const MixerAudioSourceList& audioSourceList) const {
  WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, id_,
               "IsAudioSourceInList(audio_source,audioSourceList)");
  return std::find(audioSourceList.begin(), audioSourceList.end(),
                   &audio_source) != audioSourceList.end();
}

bool AudioMixerImpl::AddAudioSourceToList(
    MixerAudioSource* audio_source,
    MixerAudioSourceList* audioSourceList) const {
  WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, id_,
               "AddAudioSourceToList(audio_source, audioSourceList)");
  audioSourceList->push_back(audio_source);
  // Make sure that the mixed status is correct for new MixerAudioSource.
  audio_source->_mixHistory->ResetMixedStatus();
  return true;
}

bool AudioMixerImpl::RemoveAudioSourceFromList(
    MixerAudioSource* audio_source,
    MixerAudioSourceList* audioSourceList) const {
  WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, id_,
               "RemoveAudioSourceFromList(audio_source, audioSourceList)");
  auto iter =
      std::find(audioSourceList->begin(), audioSourceList->end(), audio_source);
  if (iter != audioSourceList->end()) {
    audioSourceList->erase(iter);
    // AudioSource is no longer mixed, reset to default.
    audio_source->_mixHistory->ResetMixedStatus();
    return true;
  } else {
    return false;
  }
}

int32_t AudioMixerImpl::MixFromList(AudioFrame* mixedAudio,
                                    const AudioFrameList& audioFrameList,
                                    int32_t id,
                                    bool use_limiter) {
  WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, id,
               "MixFromList(mixedAudio, audioFrameList)");
  if (audioFrameList.empty())
    return 0;

  uint32_t position = 0;

  if (audioFrameList.size() == 1) {
    mixedAudio->timestamp_ = audioFrameList.front().frame->timestamp_;
    mixedAudio->elapsed_time_ms_ =
        audioFrameList.front().frame->elapsed_time_ms_;
  } else {
    // TODO(wu): Issue 3390.
    // Audio frame timestamp is only supported in one channel case.
    mixedAudio->timestamp_ = 0;
    mixedAudio->elapsed_time_ms_ = -1;
  }

  for (AudioFrameList::const_iterator iter = audioFrameList.begin();
       iter != audioFrameList.end(); ++iter) {
    if (!iter->muted) {
      MixFrames(mixedAudio, iter->frame, use_limiter);
    }

    position++;
  }

  return 0;
}

// TODO(andrew): consolidate this function with MixFromList.
int32_t AudioMixerImpl::MixAnonomouslyFromList(
    AudioFrame* mixedAudio,
    const AudioFrameList& audioFrameList) const {
  WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, id_,
               "MixAnonomouslyFromList(mixedAudio, audioFrameList)");

  if (audioFrameList.empty())
    return 0;

  for (AudioFrameList::const_iterator iter = audioFrameList.begin();
       iter != audioFrameList.end(); ++iter) {
    if (!iter->muted) {
      MixFrames(mixedAudio, iter->frame, use_limiter_);
    }
  }
  return 0;
}

bool AudioMixerImpl::LimitMixedAudio(AudioFrame* mixedAudio) const {
  if (!use_limiter_) {
    return true;
  }

  // Smoothly limit the mixed frame.
  const int error = limiter_->ProcessStream(mixedAudio);

  // And now we can safely restore the level. This procedure results in
  // some loss of resolution, deemed acceptable.
  //
  // It's possible to apply the gain in the AGC (with a target level of 0 dBFS
  // and compression gain of 6 dB). However, in the transition frame when this
  // is enabled (moving from one to two audio sources) it has the potential to
  // create discontinuities in the mixed frame.
  //
  // Instead we double the frame (with addition since left-shifting a
  // negative value is undefined).
  *mixedAudio += *mixedAudio;

  if (error != limiter_->kNoError) {
    WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, id_,
                 "Error from AudioProcessing: %d", error);
    RTC_NOTREACHED();
    return false;
  }
  return true;
}

int AudioMixerImpl::GetOutputAudioLevel() {
  const int level = audio_level_.Level();
  WEBRTC_TRACE(kTraceStateInfo, kTraceAudioMixerServer, id_,
               "GetOutputAudioLevel() => level=%d", level);
  return level;
}

int AudioMixerImpl::GetOutputAudioLevelFullRange() {
  const int level = audio_level_.LevelFullRange();
  WEBRTC_TRACE(kTraceStateInfo, kTraceAudioMixerServer, id_,
               "GetOutputAudioLevelFullRange() => level=%d", level);
  return level;
}
}  // namespace webrtc