Remove the type parameter from NetEq::GetAudio

The output type is already conveyed through the AudioFrame output parameter
(its speech_type_ and vad_activity_ fields), so the separate argument is
redundant.

Rename the type NetEqOutputType to just OutputType, since it is now
internal to NetEq.
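
A rough sketch of what this looks like at a call site (illustrative only;
the helper function, variable names, and include paths below are assumptions,
not part of this change):

  // Sketch; header paths are from the 2016 tree and may differ.
  #include "webrtc/modules/audio_coding/neteq/include/neteq.h"
  #include "webrtc/modules/include/module_common_types.h"  // webrtc::AudioFrame

  // Pulls one frame of decoded audio from an already-configured NetEq.
  void PullAudio(webrtc::NetEq* neteq) {
    webrtc::AudioFrame audio_frame;

    // Before: a separate out-parameter carried the output type.
    //   webrtc::NetEqOutputType type;
    //   neteq->GetAudio(&audio_frame, &type);

    // After: only the frame is passed; NetEq sets
    // audio_frame.speech_type_ and audio_frame.vad_activity_ itself
    // (see SetAudioFrameActivityAndType in the diff below).
    if (neteq->GetAudio(&audio_frame) != webrtc::NetEq::kOK) {
      // Handle the error, e.g. by inspecting neteq->LastError().
    }
  }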

BUG=webrtc:5607

Review URL: https://codereview.webrtc.org/1769883002

Cr-Commit-Position: refs/heads/master@{#11903}
diff --git a/webrtc/modules/audio_coding/neteq/neteq_impl.cc b/webrtc/modules/audio_coding/neteq/neteq_impl.cc
index fc74f2d..b4cc915 100644
--- a/webrtc/modules/audio_coding/neteq/neteq_impl.cc
+++ b/webrtc/modules/audio_coding/neteq/neteq_impl.cc
@@ -150,33 +150,33 @@
 
 namespace {
 void SetAudioFrameActivityAndType(bool vad_enabled,
-                                  NetEqOutputType type,
+                                  NetEqImpl::OutputType type,
                                   AudioFrame::VADActivity last_vad_activity,
                                   AudioFrame* audio_frame) {
   switch (type) {
-    case kOutputNormal: {
+    case NetEqImpl::OutputType::kNormalSpeech: {
       audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
       audio_frame->vad_activity_ = AudioFrame::kVadActive;
       break;
     }
-    case kOutputVADPassive: {
+    case NetEqImpl::OutputType::kVadPassive: {
       // This should only be reached if the VAD is enabled.
       RTC_DCHECK(vad_enabled);
       audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
       audio_frame->vad_activity_ = AudioFrame::kVadPassive;
       break;
     }
-    case kOutputCNG: {
+    case NetEqImpl::OutputType::kCNG: {
       audio_frame->speech_type_ = AudioFrame::kCNG;
       audio_frame->vad_activity_ = AudioFrame::kVadPassive;
       break;
     }
-    case kOutputPLC: {
+    case NetEqImpl::OutputType::kPLC: {
       audio_frame->speech_type_ = AudioFrame::kPLC;
       audio_frame->vad_activity_ = last_vad_activity;
       break;
     }
-    case kOutputPLCtoCNG: {
+    case NetEqImpl::OutputType::kPLCCNG: {
       audio_frame->speech_type_ = AudioFrame::kPLCCNG;
       audio_frame->vad_activity_ = AudioFrame::kVadPassive;
       break;
@@ -191,7 +191,7 @@
 }
 }
 
-int NetEqImpl::GetAudio(AudioFrame* audio_frame, NetEqOutputType* type) {
+int NetEqImpl::GetAudio(AudioFrame* audio_frame) {
   TRACE_EVENT0("webrtc", "NetEqImpl::GetAudio");
   rtc::CritScope lock(&crit_sect_);
   int error = GetAudioInternal(audio_frame);
@@ -202,9 +202,6 @@
     error_code_ = error;
     return kFail;
   }
-  if (type) {
-    *type = LastOutputType();
-  }
   SetAudioFrameActivityAndType(vad_->enabled(), LastOutputType(),
                                last_vad_activity_, audio_frame);
   last_vad_activity_ = audio_frame->vad_activity_;
@@ -2068,20 +2065,20 @@
   decision_logic_->SetSampleRate(fs_hz_, output_size_samples_);
 }
 
-NetEqOutputType NetEqImpl::LastOutputType() {
+NetEqImpl::OutputType NetEqImpl::LastOutputType() {
   assert(vad_.get());
   assert(expand_.get());
   if (last_mode_ == kModeCodecInternalCng || last_mode_ == kModeRfc3389Cng) {
-    return kOutputCNG;
+    return OutputType::kCNG;
   } else if (last_mode_ == kModeExpand && expand_->MuteFactor(0) == 0) {
     // Expand mode has faded down to background noise only (very long expand).
-    return kOutputPLCtoCNG;
+    return OutputType::kPLCCNG;
   } else if (last_mode_ == kModeExpand) {
-    return kOutputPLC;
+    return OutputType::kPLC;
   } else if (vad_->running() && !vad_->active_speech()) {
-    return kOutputVADPassive;
+    return OutputType::kVadPassive;
   } else {
-    return kOutputNormal;
+    return OutputType::kNormalSpeech;
   }
 }