/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_mixer/audio_mixer_impl.h"

#include <algorithm>
#include <functional>

#include "webrtc/modules/audio_mixer/audio_frame_manipulator.h"
#include "webrtc/modules/audio_mixer/audio_mixer_defines.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/utility/include/audio_frame_operations.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"

namespace webrtc {
namespace {

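// Bookkeeping for one audio source during a mixing round: the source, its
// current frame, its mute state and (for unmuted frames) its energy, which
// together determine the source's priority when selecting what to mix.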
class SourceFrame {
 public:
  SourceFrame(MixerAudioSource* p, AudioFrame* a, bool m, bool was_mixed_before)
      : audio_source_(p),
        audio_frame_(a),
        muted_(m),
        energy_(0),
        was_mixed_before_(was_mixed_before) {
    if (!muted_) {
      energy_ = NewMixerCalculateEnergy(*a);
    }
  }

  // Returns true if this frame should be mixed in preference to |other|:
  // unmuted before muted, VAD-active before passive, then higher energy first.
  bool shouldMixBefore(const SourceFrame& other) const {
    if (muted_ != other.muted_) {
      return other.muted_;
    }

    auto our_activity = audio_frame_->vad_activity_;
    auto other_activity = other.audio_frame_->vad_activity_;

    if (our_activity != other_activity) {
      return our_activity == AudioFrame::kVadActive;
    }

    return energy_ > other.energy_;
  }

  MixerAudioSource* audio_source_;
  AudioFrame* audio_frame_;
  bool muted_;
  uint32_t energy_;
  bool was_mixed_before_;
};

// Remixes a frame between stereo and mono.
void RemixFrame(AudioFrame* frame, size_t number_of_channels) {
  RTC_DCHECK(number_of_channels == 1 || number_of_channels == 2);
  if (frame->num_channels_ == 1 && number_of_channels == 2) {
    AudioFrameOperations::MonoToStereo(frame);
  } else if (frame->num_channels_ == 2 && number_of_channels == 1) {
    AudioFrameOperations::StereoToMono(frame);
  }
}

// Mixes |frame| into |mixed_frame|, with saturation protection and upmixing.
// These effects are applied to |frame| itself prior to mixing. Assumes that
// |mixed_frame| always has at least as many channels as |frame|. At most
// stereo is supported.
void MixFrames(AudioFrame* mixed_frame, AudioFrame* frame, bool use_limiter) {
  RTC_DCHECK_GE(mixed_frame->num_channels_, frame->num_channels_);
  if (use_limiter) {
    // Divide by two to avoid saturation in the mixing.
    // This is only meaningful if the limiter will be used.
    *frame >>= 1;
  }
  RTC_DCHECK_EQ(frame->num_channels_, mixed_frame->num_channels_);
  *mixed_frame += *frame;
}

}  // namespace

MixerAudioSource::MixerAudioSource() : _mixHistory(new NewMixHistory()) {}

MixerAudioSource::~MixerAudioSource() {
  delete _mixHistory;
}

bool MixerAudioSource::IsMixed() const {
  return _mixHistory->IsMixed();
}

NewMixHistory::NewMixHistory() : is_mixed_(false) {}

NewMixHistory::~NewMixHistory() {}

bool NewMixHistory::IsMixed() const {
  return is_mixed_;
}

bool NewMixHistory::WasMixed() const {
  // From the mixer's point of view, the mix status has not yet been updated
  // for the current round when this is queried, so "was mixed" and "is mixed"
  // are the same value here.
  return IsMixed();
}

int32_t NewMixHistory::SetIsMixed(const bool mixed) {
  is_mixed_ = mixed;
  return 0;
}

void NewMixHistory::ResetMixedStatus() {
  is_mixed_ = false;
}

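// Factory function: constructs an AudioMixerImpl and runs Init(); returns
// nullptr if initialization fails.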
std::unique_ptr<AudioMixer> AudioMixer::Create(int id) {
  AudioMixerImpl* mixer = new AudioMixerImpl(id);
  if (!mixer->Init()) {
    delete mixer;
    return nullptr;
  }
  return std::unique_ptr<AudioMixer>(mixer);
}

AudioMixerImpl::AudioMixerImpl(int id)
    : id_(id),
      output_frequency_(kDefaultFrequency),
      sample_size_(0),
      audio_source_list_(),
      additional_audio_source_list_(),
      num_mixed_audio_sources_(0),
      use_limiter_(true),
      time_stamp_(0) {
  thread_checker_.DetachFromThread();
}

AudioMixerImpl::~AudioMixerImpl() {}

bool AudioMixerImpl::Init() {
  crit_.reset(CriticalSectionWrapper::CreateCriticalSection());
  if (crit_.get() == nullptr)
    return false;

  cb_crit_.reset(CriticalSectionWrapper::CreateCriticalSection());
  if (cb_crit_.get() == nullptr)
    return false;

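  // The AudioProcessing instance below is used purely as a limiter: a fixed
  // digital AGC with the limiter enabled and no extra compression gain.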
  Config config;
  config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
  limiter_.reset(AudioProcessing::Create(config));
  if (!limiter_.get())
    return false;

  if (SetOutputFrequency(kDefaultFrequency) == -1)
    return false;

  if (limiter_->gain_control()->set_mode(GainControl::kFixedDigital) !=
      limiter_->kNoError)
    return false;

  // We smoothly limit the mixed frame to -7 dBFS. -6 would correspond to the
  // divide-by-2 but -7 is used instead to give a bit of headroom since the
  // AGC is not a hard limiter.
  if (limiter_->gain_control()->set_target_level_dbfs(7) != limiter_->kNoError)
    return false;

  if (limiter_->gain_control()->set_compression_gain_db(0) !=
      limiter_->kNoError)
    return false;

  if (limiter_->gain_control()->enable_limiter(true) != limiter_->kNoError)
    return false;

  if (limiter_->gain_control()->Enable(true) != limiter_->kNoError)
    return false;

  return true;
}

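// Produces one frame of mixed audio: selects the sources to mix under the
// callback lock, remixes each selected frame to the requested channel count,
// sums the selected sources (through the limiter when more than one source is
// mixed), then adds the anonymous sources and updates the output audio level.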
void AudioMixerImpl::Mix(int sample_rate,
                         size_t number_of_channels,
                         AudioFrame* audio_frame_for_mixing) {
  RTC_DCHECK(number_of_channels == 1 || number_of_channels == 2);
  RTC_DCHECK(thread_checker_.CalledOnValidThread());
  AudioFrameList mixList;
  AudioFrameList additionalFramesList;
  {
    CriticalSectionScoped cs(cb_crit_.get());
    Frequency mixing_frequency;

    switch (sample_rate) {
      case 8000:
        mixing_frequency = kNbInHz;
        break;
      case 16000:
        mixing_frequency = kWbInHz;
        break;
      case 32000:
        mixing_frequency = kSwbInHz;
        break;
      case 48000:
        mixing_frequency = kFbInHz;
        break;
      default:
        RTC_NOTREACHED();
        return;
    }

    if (OutputFrequency() != mixing_frequency) {
      SetOutputFrequency(mixing_frequency);
    }

    mixList = UpdateToMix(kMaximumAmountOfMixedAudioSources);
    GetAdditionalAudio(&additionalFramesList);
  }

  for (FrameAndMuteInfo& frame_and_mute : mixList) {
    RemixFrame(frame_and_mute.frame, number_of_channels);
  }
  for (FrameAndMuteInfo& frame_and_mute : additionalFramesList) {
    RemixFrame(frame_and_mute.frame, number_of_channels);
  }

  audio_frame_for_mixing->UpdateFrame(
      -1, time_stamp_, nullptr, 0, output_frequency_,
      AudioFrame::kNormalSpeech, AudioFrame::kVadPassive, number_of_channels);

  time_stamp_ += static_cast<uint32_t>(sample_size_);

  // Use the limiter only when we are actually mixing multiple streams.
  use_limiter_ = num_mixed_audio_sources_ > 1;

  MixFromList(audio_frame_for_mixing, mixList, id_, use_limiter_);

  {
    CriticalSectionScoped cs(crit_.get());
    MixAnonomouslyFromList(audio_frame_for_mixing, additionalFramesList);

    if (audio_frame_for_mixing->samples_per_channel_ == 0) {
      // Nothing was mixed, set the audio samples to silence.
      audio_frame_for_mixing->samples_per_channel_ = sample_size_;
      audio_frame_for_mixing->Mute();
    } else {
      // Only call the limiter if we have something to mix.
      LimitMixedAudio(audio_frame_for_mixing);
    }
  }

  // Pass the final result to the level indicator.
  audio_level_.ComputeLevel(*audio_frame_for_mixing);
}

int32_t AudioMixerImpl::SetOutputFrequency(const Frequency& frequency) {
  CriticalSectionScoped cs(crit_.get());

  output_frequency_ = frequency;
  sample_size_ =
      static_cast<size_t>((output_frequency_ * kFrameDurationInMs) / 1000);

  return 0;
}

AudioMixer::Frequency AudioMixerImpl::OutputFrequency() const {
  CriticalSectionScoped cs(crit_.get());
  return output_frequency_;
}

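// Adds |audio_source| to, or removes it from, the set of sources considered
// for mixing. A source that is currently anonymous is first moved back to the
// regular list so that unregistering it works as expected.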
int32_t AudioMixerImpl::SetMixabilityStatus(MixerAudioSource* audio_source,
                                            bool mixable) {
  if (!mixable) {
    // Anonymous audio sources are in a separate list. Make sure that the
    // audio source is in audio_source_list_ if it is being mixed.
    SetAnonymousMixabilityStatus(audio_source, false);
  }
  size_t numMixedAudioSources;
  {
    CriticalSectionScoped cs(cb_crit_.get());
    const bool isMixed = IsAudioSourceInList(*audio_source, audio_source_list_);
    // API must be called with a new state.
    if (mixable == isMixed) {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, id_,
                   "Mixable is already %s", isMixed ? "ON" : "OFF");
      return -1;
    }
    bool success = false;
    if (mixable) {
      success = AddAudioSourceToList(audio_source, &audio_source_list_);
    } else {
      success = RemoveAudioSourceFromList(audio_source, &audio_source_list_);
    }
    if (!success) {
      WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, id_,
                   "failed to %s audio_source", mixable ? "add" : "remove");
      RTC_NOTREACHED();
      return -1;
    }

    size_t numMixedNonAnonymous = audio_source_list_.size();
    if (numMixedNonAnonymous > kMaximumAmountOfMixedAudioSources) {
      numMixedNonAnonymous = kMaximumAmountOfMixedAudioSources;
    }
    numMixedAudioSources =
        numMixedNonAnonymous + additional_audio_source_list_.size();
  }
  // A MixerAudioSource was added or removed. Update the cached number of
  // mixed sources, which is read during mixing, under the main lock.
  CriticalSectionScoped cs(crit_.get());
  num_mixed_audio_sources_ = numMixedAudioSources;
  return 0;
}

bool AudioMixerImpl::MixabilityStatus(
    const MixerAudioSource& audio_source) const {
  CriticalSectionScoped cs(cb_crit_.get());
  return IsAudioSourceInList(audio_source, audio_source_list_);
}

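// Anonymous sources live on a separate list; they are mixed on top of the
// regularly selected sources and do not count toward
// kMaximumAmountOfMixedAudioSources.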
int32_t AudioMixerImpl::SetAnonymousMixabilityStatus(
    MixerAudioSource* audio_source,
    bool anonymous) {
  CriticalSectionScoped cs(cb_crit_.get());
  if (IsAudioSourceInList(*audio_source, additional_audio_source_list_)) {
    if (anonymous) {
      return 0;
    }
    if (!RemoveAudioSourceFromList(audio_source,
                                   &additional_audio_source_list_)) {
      WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, id_,
                   "unable to remove audio_source from anonymous list");
      RTC_NOTREACHED();
      return -1;
    }
    return AddAudioSourceToList(audio_source, &audio_source_list_) ? 0 : -1;
  }
  if (!anonymous) {
    return 0;
  }
  const bool mixable =
      RemoveAudioSourceFromList(audio_source, &audio_source_list_);
  if (!mixable) {
    WEBRTC_TRACE(
        kTraceWarning, kTraceAudioMixerServer, id_,
        "audio_source must be registered before it can be made anonymous");
    // Setting anonymous status is only possible if the MixerAudioSource is
    // already registered.
    return -1;
  }
  return AddAudioSourceToList(audio_source, &additional_audio_source_list_)
             ? 0
             : -1;
}

bool AudioMixerImpl::AnonymousMixabilityStatus(
    const MixerAudioSource& audio_source) const {
  CriticalSectionScoped cs(cb_crit_.get());
  return IsAudioSourceInList(audio_source, additional_audio_source_list_);
}

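// Selects up to |maxAudioFrameCounter| frames to mix: frames are collected
// from all registered sources, sorted with SourceFrame::shouldMixBefore
// (unmuted, then VAD-active, then loudest first), and the top of the list is
// mixed. Newly mixed sources are ramped in; sources that drop out of the mix
// are ramped out one last time.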
AudioFrameList AudioMixerImpl::UpdateToMix(size_t maxAudioFrameCounter) const {
  AudioFrameList result;
  std::vector<SourceFrame> audioSourceMixingDataList;

  // Get audio source audio and put it in the struct vector.
  for (MixerAudioSource* audio_source : audio_source_list_) {
    auto audio_frame_with_info = audio_source->GetAudioFrameWithMuted(
        id_, static_cast<int>(output_frequency_));

    auto audio_frame_info = audio_frame_with_info.audio_frame_info;
    AudioFrame* audio_source_audio_frame = audio_frame_with_info.audio_frame;

    if (audio_frame_info == MixerAudioSource::AudioFrameInfo::kError) {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, id_,
                   "failed to GetAudioFrameWithMuted() from audio source");
      continue;
    }
    audioSourceMixingDataList.emplace_back(
        audio_source, audio_source_audio_frame,
        audio_frame_info == MixerAudioSource::AudioFrameInfo::kMuted,
        audio_source->_mixHistory->WasMixed());
  }

  // Sort so that the most important frames come first
  // (see SourceFrame::shouldMixBefore).
  std::sort(audioSourceMixingDataList.begin(), audioSourceMixingDataList.end(),
            std::mem_fn(&SourceFrame::shouldMixBefore));

  // Go through the sorted list in order and add frames to |result| until the
  // frame limit is reached.
  for (SourceFrame& p : audioSourceMixingDataList) {
    // Filter out muted sources.
    if (p.muted_) {
      p.audio_source_->_mixHistory->SetIsMixed(false);
      continue;
    }

    // Add the frame to the result vector for mixing.
    bool is_mixed = false;
    if (maxAudioFrameCounter > 0) {
      --maxAudioFrameCounter;
      if (!p.was_mixed_before_) {
        NewMixerRampIn(p.audio_frame_);
      }
      result.emplace_back(p.audio_frame_, false);
      is_mixed = true;
    }

    // Ramp out sources that were mixed last round but did not make it this
    // round; they are still added so that the ramp-out is audible.
    if (p.was_mixed_before_ && !is_mixed) {
      NewMixerRampOut(p.audio_frame_);
      result.emplace_back(p.audio_frame_, false);
    }

    p.audio_source_->_mixHistory->SetIsMixed(is_mixed);
  }
  return result;
}

void AudioMixerImpl::GetAdditionalAudio(
    AudioFrameList* additionalFramesList) const {
  WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, id_,
               "GetAdditionalAudio(additionalFramesList)");
  // The GetAudioFrameWithMuted() callback may result in the audio source being
  // removed from additional_audio_source_list_. If that happens it will
  // invalidate any iterators. Create a copy of the audio sources list so
  // that the list can be traversed safely.
  MixerAudioSourceList additionalAudioSourceList;
  additionalAudioSourceList.insert(additionalAudioSourceList.begin(),
                                   additional_audio_source_list_.begin(),
                                   additional_audio_source_list_.end());

  for (MixerAudioSourceList::const_iterator audio_source =
           additionalAudioSourceList.begin();
       audio_source != additionalAudioSourceList.end(); ++audio_source) {
    auto audio_frame_with_info =
        (*audio_source)->GetAudioFrameWithMuted(id_, output_frequency_);
    auto ret = audio_frame_with_info.audio_frame_info;
    AudioFrame* audio_frame = audio_frame_with_info.audio_frame;
    if (ret == MixerAudioSource::AudioFrameInfo::kError) {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, id_,
                   "failed to GetAudioFrameWithMuted() from audio_source");
      continue;
    }
    if (audio_frame->samples_per_channel_ == 0) {
      // Empty frame. Don't use it.
      continue;
    }
    additionalFramesList->push_back(FrameAndMuteInfo(
        audio_frame, ret == MixerAudioSource::AudioFrameInfo::kMuted));
  }
}

bool AudioMixerImpl::IsAudioSourceInList(
    const MixerAudioSource& audio_source,
    const MixerAudioSourceList& audioSourceList) const {
  WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, id_,
               "IsAudioSourceInList(audio_source, audioSourceList)");
  return std::find(audioSourceList.begin(), audioSourceList.end(),
                   &audio_source) != audioSourceList.end();
}

bool AudioMixerImpl::AddAudioSourceToList(
    MixerAudioSource* audio_source,
    MixerAudioSourceList* audioSourceList) const {
  WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, id_,
               "AddAudioSourceToList(audio_source, audioSourceList)");
  audioSourceList->push_back(audio_source);
  // Make sure that the mixed status is correct for the new MixerAudioSource.
  audio_source->_mixHistory->ResetMixedStatus();
  return true;
}

bool AudioMixerImpl::RemoveAudioSourceFromList(
    MixerAudioSource* audio_source,
    MixerAudioSourceList* audioSourceList) const {
  WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, id_,
               "RemoveAudioSourceFromList(audio_source, audioSourceList)");
  auto iter =
      std::find(audioSourceList->begin(), audioSourceList->end(), audio_source);
  if (iter != audioSourceList->end()) {
    audioSourceList->erase(iter);
    // The audio source is no longer mixed; reset its status to the default.
    audio_source->_mixHistory->ResetMixedStatus();
    return true;
  } else {
    return false;
  }
}

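// Sums the unmuted frames in |audioFrameList| into |mixedAudio|. The frame
// timestamp is only propagated when exactly one frame is mixed.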
int32_t AudioMixerImpl::MixFromList(AudioFrame* mixedAudio,
                                    const AudioFrameList& audioFrameList,
                                    int32_t id,
                                    bool use_limiter) {
  WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, id,
               "MixFromList(mixedAudio, audioFrameList)");
  if (audioFrameList.empty())
    return 0;

  if (audioFrameList.size() == 1) {
    mixedAudio->timestamp_ = audioFrameList.front().frame->timestamp_;
    mixedAudio->elapsed_time_ms_ =
        audioFrameList.front().frame->elapsed_time_ms_;
  } else {
    // TODO(wu): Issue 3390.
    // Audio frame timestamp is only supported in one channel case.
    mixedAudio->timestamp_ = 0;
    mixedAudio->elapsed_time_ms_ = -1;
  }

  for (AudioFrameList::const_iterator iter = audioFrameList.begin();
       iter != audioFrameList.end(); ++iter) {
    if (!iter->muted) {
      MixFrames(mixedAudio, iter->frame, use_limiter);
    }
  }

  return 0;
}

// TODO(andrew): consolidate this function with MixFromList.
int32_t AudioMixerImpl::MixAnonomouslyFromList(
    AudioFrame* mixedAudio,
    const AudioFrameList& audioFrameList) const {
  WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, id_,
               "MixAnonomouslyFromList(mixedAudio, audioFrameList)");

  if (audioFrameList.empty())
    return 0;

  for (AudioFrameList::const_iterator iter = audioFrameList.begin();
       iter != audioFrameList.end(); ++iter) {
    if (!iter->muted) {
      MixFrames(mixedAudio, iter->frame, use_limiter_);
    }
  }
  return 0;
}

bool AudioMixerImpl::LimitMixedAudio(AudioFrame* mixedAudio) const {
  if (!use_limiter_) {
    return true;
  }

  // Smoothly limit the mixed frame.
  const int error = limiter_->ProcessStream(mixedAudio);

  // And now we can safely restore the level. This procedure results in
  // some loss of resolution, deemed acceptable.
  //
  // It's possible to apply the gain in the AGC (with a target level of 0 dBFS
  // and compression gain of 6 dB). However, in the transition frame when this
  // is enabled (moving from one to two audio sources) it has the potential to
  // create discontinuities in the mixed frame.
  //
  // Instead we double the frame (with addition since left-shifting a
  // negative value is undefined).
  *mixedAudio += *mixedAudio;

  if (error != limiter_->kNoError) {
    WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, id_,
                 "Error from AudioProcessing: %d", error);
    RTC_NOTREACHED();
    return false;
  }
  return true;
}

int AudioMixerImpl::GetOutputAudioLevel() {
  const int level = audio_level_.Level();
  WEBRTC_TRACE(kTraceStateInfo, kTraceAudioMixerServer, id_,
               "GetOutputAudioLevel() => level=%d", level);
  return level;
}

int AudioMixerImpl::GetOutputAudioLevelFullRange() {
  const int level = audio_level_.LevelFullRange();
  WEBRTC_TRACE(kTraceStateInfo, kTraceAudioMixerServer, id_,
               "GetOutputAudioLevelFullRange() => level=%d", level);
  return level;
}
}  // namespace webrtc