henrike@webrtc.org28e20752013-07-10 00:45:36 +00001/*
2 * libjingle
3 * Copyright 2004 Google Inc.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met:
7 *
8 * 1. Redistributions of source code must retain the above copyright notice,
9 * this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright notice,
11 * this list of conditions and the following disclaimer in the documentation
12 * and/or other materials provided with the distribution.
13 * 3. The name of the author may not be used to endorse or promote products
14 * derived from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */
27
28#ifdef HAVE_CONFIG_H
29#include <config.h>
30#endif
31
32#ifdef HAVE_WEBRTC_VOICE
33
34#include "talk/media/webrtc/webrtcvoiceengine.h"
35
36#include <algorithm>
37#include <cstdio>
38#include <string>
39#include <vector>
40
buildbot@webrtc.orga09a9992014-08-13 17:26:08 +000041#include "talk/media/base/audiorenderer.h"
42#include "talk/media/base/constants.h"
43#include "talk/media/base/streamparams.h"
44#include "talk/media/base/voiceprocessor.h"
45#include "talk/media/webrtc/webrtcvoe.h"
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +000046#include "webrtc/base/base64.h"
47#include "webrtc/base/byteorder.h"
48#include "webrtc/base/common.h"
49#include "webrtc/base/helpers.h"
50#include "webrtc/base/logging.h"
51#include "webrtc/base/stringencode.h"
52#include "webrtc/base/stringutils.h"
mallinath@webrtc.orga27be8e2013-09-27 23:04:10 +000053#include "webrtc/common.h"
henrike@webrtc.org28e20752013-07-10 00:45:36 +000054#include "webrtc/modules/audio_processing/include/audio_processing.h"
buildbot@webrtc.orgb4c7b092014-08-25 12:11:58 +000055#include "webrtc/video_engine/include/vie_network.h"
henrike@webrtc.org28e20752013-07-10 00:45:36 +000056
57#ifdef WIN32
58#include <objbase.h> // NOLINT
59#endif
60
61namespace cricket {
62
63struct CodecPref {
64 const char* name;
65 int clockrate;
66 int channels;
67 int payload_type;
68 bool is_multi_rate;
69};
70
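// Table of locally preferred codecs. The payload_type column supplies the
// default payload type we advertise, and a codec's position in this table
// determines the preference order used when sorting in ConstructCodecs().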
71static const CodecPref kCodecPrefs[] = {
72 { "OPUS", 48000, 2, 111, true },
73 { "ISAC", 16000, 1, 103, true },
74 { "ISAC", 32000, 1, 104, true },
75 { "CELT", 32000, 1, 109, true },
76 { "CELT", 32000, 2, 110, true },
henrik.lundin@webrtc.org8038d422014-11-11 08:38:24 +000077 // G722 should be advertised as 8000 Hz because of the RFC "bug".
henrik.lundin@webrtc.orgf85dbce2014-11-07 12:25:00 +000078 { "G722", 8000, 1, 9, false },
henrike@webrtc.org28e20752013-07-10 00:45:36 +000079 { "ILBC", 8000, 1, 102, false },
80 { "PCMU", 8000, 1, 0, false },
81 { "PCMA", 8000, 1, 8, false },
82 { "CN", 48000, 1, 107, false },
83 { "CN", 32000, 1, 106, false },
84 { "CN", 16000, 1, 105, false },
85 { "CN", 8000, 1, 13, false },
86 { "red", 8000, 1, 127, false },
87 { "telephone-event", 8000, 1, 126, false },
88};
89
90// For Linux/Mac, using the default device is done by specifying index 0 for
91// VoE 4.0 and not -1 (which was the case for VoE 3.5).
92//
93// On Windows Vista and newer, Microsoft introduced the concept of "Default
94// Communications Device". This means that there are two types of default
95// devices (old Wave Audio style default and Default Communications Device).
96//
 97// On Windows systems which only support Wave Audio style default, use either
98// -1 or 0 to select the default device.
99//
100// On Windows systems which support both "Default Communication Device" and
101// old Wave Audio style default, use -1 for Default Communications Device and
102// -2 for Wave Audio style default, which is what we want to use for clips.
103// It's not clear yet whether the -2 index is handled properly on other OSes.
104
105#ifdef WIN32
106static const int kDefaultAudioDeviceId = -1;
107static const int kDefaultSoundclipDeviceId = -2;
108#else
109static const int kDefaultAudioDeviceId = 0;
110#endif
111
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000112static const char kIsacCodecName[] = "ISAC";
113static const char kL16CodecName[] = "L16";
henrik.lundin@webrtc.orgf85dbce2014-11-07 12:25:00 +0000114static const char kG722CodecName[] = "G722";
minyue@webrtc.org2dc6f312014-10-31 05:33:10 +0000115
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000116// Parameter used for NACK.
117// This value is equivalent to 5 seconds of audio data at 20 ms per packet.
118static const int kNackMaxPackets = 250;
minyue@webrtc.org2dc6f312014-10-31 05:33:10 +0000119
120// Codec parameters for Opus.
henrike@webrtc.org1e09a712013-07-26 19:17:59 +0000121// draft-spittka-payload-rtp-opus-03
minyue@webrtc.org2dc6f312014-10-31 05:33:10 +0000122
123// Recommended bitrates:
124// 8-12 kb/s for NB speech,
125// 16-20 kb/s for WB speech,
126// 28-40 kb/s for FB speech,
127// 48-64 kb/s for FB mono music, and
128// 64-128 kb/s for FB stereo music.
129// The current implementation applies the following values to mono signals,
130// and multiplies them by 2 for stereo.
131static const int kOpusBitrateNb = 12000;
132static const int kOpusBitrateWb = 20000;
133static const int kOpusBitrateFb = 32000;
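// For example, with no explicit bitrate and maxplaybackrate <= 8000, the send
// bitrate defaults to kOpusBitrateNb, doubled to 24000 bps when stereo=1; see
// GetOpusBitrate() below.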
134
henrike@webrtc.org1e09a712013-07-26 19:17:59 +0000135// Opus bitrate should be in the range 6000 to 510000 bps.
136static const int kOpusMinBitrate = 6000;
137static const int kOpusMaxBitrate = 510000;
buildbot@webrtc.org5d639b32014-09-10 07:57:12 +0000138
wu@webrtc.orgde305012013-10-31 15:40:38 +0000139// Default audio dscp value.
140// See http://tools.ietf.org/html/rfc2474 for details.
141// See also http://tools.ietf.org/html/draft-jennings-rtcweb-qos-00
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +0000142static const rtc::DiffServCodePoint kAudioDscpValue = rtc::DSCP_EF;
henrike@webrtc.org1e09a712013-07-26 19:17:59 +0000143
sergeyu@chromium.orga59696b2013-09-13 23:48:58 +0000144// Ensure we open the file in a writeable path on ChromeOS and Android. This
145// workaround can be removed when it's possible to specify a filename for audio
146// option based AEC dumps.
henrike@webrtc.org1e09a712013-07-26 19:17:59 +0000147//
148// TODO(grunell): Use a string in the options instead of hardcoding it here
149// and let the embedder choose the filename (crbug.com/264223).
150//
sergeyu@chromium.orga59696b2013-09-13 23:48:58 +0000151// NOTE(ajm): Don't use hardcoded paths on platforms not explicitly specified
152// below.
153#if defined(CHROMEOS)
henrike@webrtc.org1e09a712013-07-26 19:17:59 +0000154static const char kAecDumpByAudioOptionFilename[] = "/tmp/audio.aecdump";
sergeyu@chromium.orga59696b2013-09-13 23:48:58 +0000155#elif defined(ANDROID)
156static const char kAecDumpByAudioOptionFilename[] = "/sdcard/audio.aecdump";
henrike@webrtc.org1e09a712013-07-26 19:17:59 +0000157#else
158static const char kAecDumpByAudioOptionFilename[] = "audio.aecdump";
159#endif
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000160
161// Dumps an AudioCodec in RFC 2327-ish format.
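// Example output: "ISAC/16000/1 (103)".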
162static std::string ToString(const AudioCodec& codec) {
163 std::stringstream ss;
164 ss << codec.name << "/" << codec.clockrate << "/" << codec.channels
165 << " (" << codec.id << ")";
166 return ss.str();
167}
168static std::string ToString(const webrtc::CodecInst& codec) {
169 std::stringstream ss;
170 ss << codec.plname << "/" << codec.plfreq << "/" << codec.channels
171 << " (" << codec.pltype << ")";
172 return ss.str();
173}
174
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +0000175static void LogMultiline(rtc::LoggingSeverity sev, char* text) {
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000176 const char* delim = "\r\n";
177 for (char* tok = strtok(text, delim); tok; tok = strtok(NULL, delim)) {
178 LOG_V(sev) << tok;
179 }
180}
181
 182// Severity is an integer because it is assumed to come from the command line.
183static int SeverityToFilter(int severity) {
184 int filter = webrtc::kTraceNone;
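  // The cases below intentionally fall through (no break statements), so a
  // more verbose severity enables its own trace bits plus those of all the
  // less verbose severities.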
185 switch (severity) {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +0000186 case rtc::LS_VERBOSE:
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000187 filter |= webrtc::kTraceAll;
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +0000188 case rtc::LS_INFO:
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000189 filter |= (webrtc::kTraceStateInfo | webrtc::kTraceInfo);
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +0000190 case rtc::LS_WARNING:
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000191 filter |= (webrtc::kTraceTerseInfo | webrtc::kTraceWarning);
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +0000192 case rtc::LS_ERROR:
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000193 filter |= (webrtc::kTraceError | webrtc::kTraceCritical);
194 }
195 return filter;
196}
197
198static bool IsCodecMultiRate(const webrtc::CodecInst& codec) {
199 for (size_t i = 0; i < ARRAY_SIZE(kCodecPrefs); ++i) {
200 if (_stricmp(kCodecPrefs[i].name, codec.plname) == 0 &&
201 kCodecPrefs[i].clockrate == codec.plfreq) {
202 return kCodecPrefs[i].is_multi_rate;
203 }
204 }
205 return false;
206}
207
henrike@webrtc.org704bf9e2014-02-27 17:52:04 +0000208static bool IsTelephoneEventCodec(const std::string& name) {
209 return _stricmp(name.c_str(), "telephone-event") == 0;
210}
211
212static bool IsCNCodec(const std::string& name) {
213 return _stricmp(name.c_str(), "CN") == 0;
214}
215
216static bool IsRedCodec(const std::string& name) {
217 return _stricmp(name.c_str(), "red") == 0;
218}
219
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000220static bool FindCodec(const std::vector<AudioCodec>& codecs,
221 const AudioCodec& codec,
222 AudioCodec* found_codec) {
223 for (std::vector<AudioCodec>::const_iterator it = codecs.begin();
224 it != codecs.end(); ++it) {
225 if (it->Matches(codec)) {
226 if (found_codec != NULL) {
227 *found_codec = *it;
228 }
229 return true;
230 }
231 }
232 return false;
233}
wu@webrtc.org1d1ffc92013-10-16 18:12:02 +0000234
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000235static bool IsNackEnabled(const AudioCodec& codec) {
236 return codec.HasFeedbackParam(FeedbackParam(kRtcpFbParamNack,
237 kParamValueEmpty));
238}
239
wu@webrtc.org1d1ffc92013-10-16 18:12:02 +0000240// Gets the default set of options applied to the engine. Historically, these
241// were supplied as a combination of flags from the channel manager (ec, agc,
242// ns, and highpass) and the rest hardcoded in InitInternal.
243static AudioOptions GetDefaultEngineOptions() {
244 AudioOptions options;
245 options.echo_cancellation.Set(true);
246 options.auto_gain_control.Set(true);
247 options.noise_suppression.Set(true);
248 options.highpass_filter.Set(true);
249 options.stereo_swapping.Set(false);
250 options.typing_detection.Set(true);
251 options.conference_mode.Set(false);
252 options.adjust_agc_delta.Set(0);
253 options.experimental_agc.Set(false);
254 options.experimental_aec.Set(false);
sergeyu@chromium.org9cf037b2014-02-07 19:03:26 +0000255 options.experimental_ns.Set(false);
wu@webrtc.org1d1ffc92013-10-16 18:12:02 +0000256 options.aec_dump.Set(false);
257 return options;
258}
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000259
260class WebRtcSoundclipMedia : public SoundclipMedia {
261 public:
262 explicit WebRtcSoundclipMedia(WebRtcVoiceEngine *engine)
263 : engine_(engine), webrtc_channel_(-1) {
264 engine_->RegisterSoundclip(this);
265 }
266
267 virtual ~WebRtcSoundclipMedia() {
268 engine_->UnregisterSoundclip(this);
269 if (webrtc_channel_ != -1) {
270 // We shouldn't have to call Disable() here. DeleteChannel() should call
271 // StopPlayout() while deleting the channel. We should fix the bug
272 // inside WebRTC and remove the Disable() call bellow. This work is
273 // tracked by bug http://b/issue?id=5382855.
274 PlaySound(NULL, 0, 0);
275 Disable();
276 if (engine_->voe_sc()->base()->DeleteChannel(webrtc_channel_)
277 == -1) {
278 LOG_RTCERR1(DeleteChannel, webrtc_channel_);
279 }
280 }
281 }
282
283 bool Init() {
wu@webrtc.org4551b792013-10-09 15:37:36 +0000284 if (!engine_->voe_sc()) {
285 return false;
286 }
sergeyu@chromium.org5bc25c42013-12-05 00:24:06 +0000287 webrtc_channel_ = engine_->CreateSoundclipVoiceChannel();
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000288 if (webrtc_channel_ == -1) {
289 LOG_RTCERR0(CreateChannel);
290 return false;
291 }
292 return true;
293 }
294
295 bool Enable() {
296 if (engine_->voe_sc()->base()->StartPlayout(webrtc_channel_) == -1) {
297 LOG_RTCERR1(StartPlayout, webrtc_channel_);
298 return false;
299 }
300 return true;
301 }
302
303 bool Disable() {
304 if (engine_->voe_sc()->base()->StopPlayout(webrtc_channel_) == -1) {
305 LOG_RTCERR1(StopPlayout, webrtc_channel_);
306 return false;
307 }
308 return true;
309 }
310
311 virtual bool PlaySound(const char *buf, int len, int flags) {
312 // The voe file api is not available in chrome.
313 if (!engine_->voe_sc()->file()) {
314 return false;
315 }
316 // Must stop playing the current sound (if any), because we are about to
317 // modify the stream.
318 if (engine_->voe_sc()->file()->StopPlayingFileLocally(webrtc_channel_)
319 == -1) {
320 LOG_RTCERR1(StopPlayingFileLocally, webrtc_channel_);
321 return false;
322 }
323
324 if (buf) {
325 stream_.reset(new WebRtcSoundclipStream(buf, len));
326 stream_->set_loop((flags & SF_LOOP) != 0);
327 stream_->Rewind();
328
329 // Play it.
330 if (engine_->voe_sc()->file()->StartPlayingFileLocally(
331 webrtc_channel_, stream_.get()) == -1) {
332 LOG_RTCERR2(StartPlayingFileLocally, webrtc_channel_, stream_.get());
333 LOG(LS_ERROR) << "Unable to start soundclip";
334 return false;
335 }
336 } else {
337 stream_.reset();
338 }
339 return true;
340 }
341
342 int GetLastEngineError() const { return engine_->voe_sc()->error(); }
343
344 private:
345 WebRtcVoiceEngine *engine_;
346 int webrtc_channel_;
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +0000347 rtc::scoped_ptr<WebRtcSoundclipStream> stream_;
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000348};
349
350WebRtcVoiceEngine::WebRtcVoiceEngine()
351 : voe_wrapper_(new VoEWrapper()),
352 voe_wrapper_sc_(new VoEWrapper()),
wu@webrtc.org4551b792013-10-09 15:37:36 +0000353 voe_wrapper_sc_initialized_(false),
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000354 tracing_(new VoETraceWrapper()),
355 adm_(NULL),
356 adm_sc_(NULL),
357 log_filter_(SeverityToFilter(kDefaultLogSeverity)),
358 is_dumping_aec_(false),
359 desired_local_monitor_enable_(false),
360 tx_processor_ssrc_(0),
361 rx_processor_ssrc_(0) {
362 Construct();
363}
364
365WebRtcVoiceEngine::WebRtcVoiceEngine(VoEWrapper* voe_wrapper,
366 VoEWrapper* voe_wrapper_sc,
367 VoETraceWrapper* tracing)
368 : voe_wrapper_(voe_wrapper),
369 voe_wrapper_sc_(voe_wrapper_sc),
wu@webrtc.org4551b792013-10-09 15:37:36 +0000370 voe_wrapper_sc_initialized_(false),
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000371 tracing_(tracing),
372 adm_(NULL),
373 adm_sc_(NULL),
374 log_filter_(SeverityToFilter(kDefaultLogSeverity)),
375 is_dumping_aec_(false),
376 desired_local_monitor_enable_(false),
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000377 tx_processor_ssrc_(0),
378 rx_processor_ssrc_(0) {
379 Construct();
380}
381
382void WebRtcVoiceEngine::Construct() {
383 SetTraceFilter(log_filter_);
384 initialized_ = false;
385 LOG(LS_VERBOSE) << "WebRtcVoiceEngine::WebRtcVoiceEngine";
386 SetTraceOptions("");
387 if (tracing_->SetTraceCallback(this) == -1) {
388 LOG_RTCERR0(SetTraceCallback);
389 }
390 if (voe_wrapper_->base()->RegisterVoiceEngineObserver(*this) == -1) {
391 LOG_RTCERR0(RegisterVoiceEngineObserver);
392 }
393 // Clear the default agc state.
394 memset(&default_agc_config_, 0, sizeof(default_agc_config_));
395
396 // Load our audio codec list.
397 ConstructCodecs();
398
399 // Load our RTP Header extensions.
400 rtp_header_extensions_.push_back(
401 RtpHeaderExtension(kRtpAudioLevelHeaderExtension,
402 kRtpAudioLevelHeaderExtensionDefaultId));
403 rtp_header_extensions_.push_back(
404 RtpHeaderExtension(kRtpAbsoluteSenderTimeHeaderExtension,
405 kRtpAbsoluteSenderTimeHeaderExtensionDefaultId));
406 options_ = GetDefaultEngineOptions();
407}
408
409static bool IsOpus(const AudioCodec& codec) {
410 return (_stricmp(codec.name.c_str(), kOpusCodecName) == 0);
411}
412
413static bool IsIsac(const AudioCodec& codec) {
414 return (_stricmp(codec.name.c_str(), kIsacCodecName) == 0);
415}
416
417// True if params["stereo"] == "1"
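// (typically signaled via an SDP fmtp line such as "a=fmtp:111 stereo=1").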
418static bool IsOpusStereoEnabled(const AudioCodec& codec) {
buildbot@webrtc.orgd27d9ae2014-06-19 01:56:46 +0000419 int value;
420 return codec.GetParam(kCodecParamStereo, &value) && value == 1;
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000421}
422
buildbot@webrtc.org9d446f22014-10-23 12:22:06 +0000423// Uses params[kCodecParamMaxAverageBitrate] if it is defined, and codec.bitrate
 424// otherwise. If the resulting value (from either source) is <= 0, the default
 425// configuration is used. If the value is outside the feasible Opus bitrate
 426// range, it is clamped. Returns the Opus bitrate to use for sending.
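// For example, maxaveragebitrate=600000 would be clamped down to
// kOpusMaxBitrate (510000), and maxaveragebitrate=4000 would be raised to
// kOpusMinBitrate (6000).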
buildbot@webrtc.org879fac82014-10-30 07:50:13 +0000427static int GetOpusBitrate(const AudioCodec& codec, int max_playback_rate) {
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000428 int bitrate = 0;
buildbot@webrtc.org9d446f22014-10-23 12:22:06 +0000429 bool use_param = true;
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000430 if (!codec.GetParam(kCodecParamMaxAverageBitrate, &bitrate)) {
buildbot@webrtc.org9d446f22014-10-23 12:22:06 +0000431 bitrate = codec.bitrate;
432 use_param = false;
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000433 }
buildbot@webrtc.org9d446f22014-10-23 12:22:06 +0000434 if (bitrate <= 0) {
minyue@webrtc.org2dc6f312014-10-31 05:33:10 +0000435 if (max_playback_rate <= 8000) {
436 bitrate = kOpusBitrateNb;
437 } else if (max_playback_rate <= 16000) {
438 bitrate = kOpusBitrateWb;
439 } else {
440 bitrate = kOpusBitrateFb;
441 }
442
443 if (IsOpusStereoEnabled(codec)) {
444 bitrate *= 2;
445 }
buildbot@webrtc.org9d446f22014-10-23 12:22:06 +0000446 } else if (bitrate < kOpusMinBitrate || bitrate > kOpusMaxBitrate) {
447 bitrate = (bitrate < kOpusMinBitrate) ? kOpusMinBitrate : kOpusMaxBitrate;
448 std::string rate_source =
449 use_param ? "Codec parameter \"maxaveragebitrate\"" :
450 "Supplied Opus bitrate";
451 LOG(LS_WARNING) << rate_source
452 << " is invalid and is replaced by: "
453 << bitrate;
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000454 }
455 return bitrate;
456}
457
buildbot@webrtc.orgfbd13282014-06-19 19:50:55 +0000458// Return true if params[kCodecParamUseInbandFec] == "1", false
buildbot@webrtc.orgd27d9ae2014-06-19 01:56:46 +0000459// otherwise.
buildbot@webrtc.orgae740dd2014-06-17 10:56:41 +0000460static bool IsOpusFecEnabled(const AudioCodec& codec) {
buildbot@webrtc.orgd27d9ae2014-06-19 01:56:46 +0000461 int value;
462 return codec.GetParam(kCodecParamUseInbandFec, &value) && value == 1;
463}
buildbot@webrtc.orgae740dd2014-06-17 10:56:41 +0000464
buildbot@webrtc.org5d639b32014-09-10 07:57:12 +0000465// Returns kOpusDefaultMaxPlaybackRate if params[kCodecParamMaxPlaybackRate] is not
466// defined. Returns the value of params[kCodecParamMaxPlaybackRate] otherwise.
467static int GetOpusMaxPlaybackRate(const AudioCodec& codec) {
468 int value;
469 if (codec.GetParam(kCodecParamMaxPlaybackRate, &value)) {
470 return value;
471 }
472 return kOpusDefaultMaxPlaybackRate;
473}
474
475static void GetOpusConfig(const AudioCodec& codec, webrtc::CodecInst* voe_codec,
476 bool* enable_codec_fec, int* max_playback_rate) {
477 *enable_codec_fec = IsOpusFecEnabled(codec);
478 *max_playback_rate = GetOpusMaxPlaybackRate(codec);
479
480 // If OPUS, change what we send according to the "stereo" codec
481 // parameter, and not the "channels" parameter. We set
482 // voe_codec.channels to 2 if "stereo=1" and 1 otherwise. If
buildbot@webrtc.org9d446f22014-10-23 12:22:06 +0000483 // the bitrate is not specified, i.e. is <= zero, we set it to the
buildbot@webrtc.org5d639b32014-09-10 07:57:12 +0000484 // appropriate default value for mono or stereo Opus.
485
buildbot@webrtc.org9d446f22014-10-23 12:22:06 +0000486 voe_codec->channels = IsOpusStereoEnabled(codec) ? 2 : 1;
buildbot@webrtc.org879fac82014-10-30 07:50:13 +0000487 voe_codec->rate = GetOpusBitrate(codec, *max_playback_rate);
buildbot@webrtc.org5d639b32014-09-10 07:57:12 +0000488}
489
henrik.lundin@webrtc.orgf85dbce2014-11-07 12:25:00 +0000490// Changes RTP timestamp rate of G722. This is due to the "bug" in the RFC
491// which says that G722 should be advertised as 8 kHz although it is a 16 kHz
492// codec.
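// (RFC 3551 keeps G722's RTP clock rate at 8000 Hz, for compatibility with an
// erroneous assignment in RFC 1890, even though the codec samples at 16 kHz.)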
493static void MaybeFixupG722(webrtc::CodecInst* voe_codec, int new_plfreq) {
494 if (_stricmp(voe_codec->plname, kG722CodecName) == 0) {
495 // If the ASSERT triggers, the codec definition in WebRTC VoiceEngine
496 // has changed, and this special case is no longer needed.
497 ASSERT(voe_codec->plfreq != new_plfreq);
498 voe_codec->plfreq = new_plfreq;
499 }
500}
501
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000502void WebRtcVoiceEngine::ConstructCodecs() {
503 LOG(LS_INFO) << "WebRtc VoiceEngine codecs:";
504 int ncodecs = voe_wrapper_->codec()->NumOfCodecs();
505 for (int i = 0; i < ncodecs; ++i) {
506 webrtc::CodecInst voe_codec;
henrik.lundin@webrtc.org8038d422014-11-11 08:38:24 +0000507 if (GetVoeCodec(i, &voe_codec)) {
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000508 // Skip uncompressed formats.
509 if (_stricmp(voe_codec.plname, kL16CodecName) == 0) {
510 continue;
511 }
512
513 const CodecPref* pref = NULL;
514 for (size_t j = 0; j < ARRAY_SIZE(kCodecPrefs); ++j) {
515 if (_stricmp(kCodecPrefs[j].name, voe_codec.plname) == 0 &&
516 kCodecPrefs[j].clockrate == voe_codec.plfreq &&
517 kCodecPrefs[j].channels == voe_codec.channels) {
518 pref = &kCodecPrefs[j];
519 break;
520 }
521 }
522
523 if (pref) {
524 // Use the payload type that we've configured in our pref table;
525 // use the offset in our pref table to determine the sort order.
526 AudioCodec codec(pref->payload_type, voe_codec.plname, voe_codec.plfreq,
527 voe_codec.rate, voe_codec.channels,
528 ARRAY_SIZE(kCodecPrefs) - (pref - kCodecPrefs));
529 LOG(LS_INFO) << ToString(codec);
530 if (IsIsac(codec)) {
minyue@webrtc.org26236952014-10-29 02:27:08 +0000531 // Indicate auto-bitrate in signaling.
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000532 codec.bitrate = 0;
533 }
534 if (IsOpus(codec)) {
535 // Only add fmtp parameters that differ from the spec.
536 if (kPreferredMinPTime != kOpusDefaultMinPTime) {
537 codec.params[kCodecParamMinPTime] =
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +0000538 rtc::ToString(kPreferredMinPTime);
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000539 }
540 if (kPreferredMaxPTime != kOpusDefaultMaxPTime) {
541 codec.params[kCodecParamMaxPTime] =
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +0000542 rtc::ToString(kPreferredMaxPTime);
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000543 }
544 // TODO(hellner): Add ptime, sprop-stereo, stereo and useinbandfec
545 // when they can be set to values other than the default.
546 }
547 codecs_.push_back(codec);
548 } else {
549 LOG(LS_WARNING) << "Unexpected codec: " << ToString(voe_codec);
550 }
551 }
552 }
553 // Make sure they are in local preference order.
554 std::sort(codecs_.begin(), codecs_.end(), &AudioCodec::Preferable);
555}
556
henrik.lundin@webrtc.org8038d422014-11-11 08:38:24 +0000557bool WebRtcVoiceEngine::GetVoeCodec(int index, webrtc::CodecInst* codec) {
558 if (voe_wrapper_->codec()->GetCodec(index, *codec) == -1) {
559 return false;
henrik.lundin@webrtc.orgf85dbce2014-11-07 12:25:00 +0000560 }
henrik.lundin@webrtc.org8038d422014-11-11 08:38:24 +0000561 // Change the sample rate of G722 to 8000 to match SDP.
562 MaybeFixupG722(codec, 8000);
563 return true;
henrik.lundin@webrtc.orgf85dbce2014-11-07 12:25:00 +0000564}
565
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000566WebRtcVoiceEngine::~WebRtcVoiceEngine() {
567 LOG(LS_VERBOSE) << "WebRtcVoiceEngine::~WebRtcVoiceEngine";
568 if (voe_wrapper_->base()->DeRegisterVoiceEngineObserver() == -1) {
569 LOG_RTCERR0(DeRegisterVoiceEngineObserver);
570 }
571 if (adm_) {
572 voe_wrapper_.reset();
573 adm_->Release();
574 adm_ = NULL;
575 }
576 if (adm_sc_) {
577 voe_wrapper_sc_.reset();
578 adm_sc_->Release();
579 adm_sc_ = NULL;
580 }
581
582 // Test to see if the media processor was deregistered properly
583 ASSERT(SignalRxMediaFrame.is_empty());
584 ASSERT(SignalTxMediaFrame.is_empty());
585
586 tracing_->SetTraceCallback(NULL);
587}
588
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +0000589bool WebRtcVoiceEngine::Init(rtc::Thread* worker_thread) {
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000590 LOG(LS_INFO) << "WebRtcVoiceEngine::Init";
591 bool res = InitInternal();
592 if (res) {
593 LOG(LS_INFO) << "WebRtcVoiceEngine::Init Done!";
594 } else {
595 LOG(LS_ERROR) << "WebRtcVoiceEngine::Init failed";
596 Terminate();
597 }
598 return res;
599}
600
601bool WebRtcVoiceEngine::InitInternal() {
602 // Temporarily turn logging level up for the Init call
603 int old_filter = log_filter_;
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +0000604 int extended_filter = log_filter_ | SeverityToFilter(rtc::LS_INFO);
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000605 SetTraceFilter(extended_filter);
606 SetTraceOptions("");
607
608 // Init WebRtc VoiceEngine.
609 if (voe_wrapper_->base()->Init(adm_) == -1) {
610 LOG_RTCERR0_EX(Init, voe_wrapper_->error());
611 SetTraceFilter(old_filter);
612 return false;
613 }
614
615 SetTraceFilter(old_filter);
616 SetTraceOptions(log_options_);
617
618 // Log the VoiceEngine version info
619 char buffer[1024] = "";
620 voe_wrapper_->base()->GetVersion(buffer);
621 LOG(LS_INFO) << "WebRtc VoiceEngine Version:";
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +0000622 LogMultiline(rtc::LS_INFO, buffer);
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000623
624 // Save the default AGC configuration settings. This must happen before
625 // calling SetOptions or the default will be overwritten.
626 if (voe_wrapper_->processing()->GetAgcConfig(default_agc_config_) == -1) {
627 LOG_RTCERR0(GetAgcConfig);
628 return false;
629 }
630
631 // Set defaults for options, so that ApplyOptions applies them explicitly
632 // when we clear option (channel) overrides. External clients can still
633 // modify the defaults via SetOptions (on the media engine).
634 if (!SetOptions(GetDefaultEngineOptions())) {
635 return false;
636 }
637
638 // Print our codec list again for the call diagnostic log
639 LOG(LS_INFO) << "WebRtc VoiceEngine codecs:";
640 for (std::vector<AudioCodec>::const_iterator it = codecs_.begin();
641 it != codecs_.end(); ++it) {
642 LOG(LS_INFO) << ToString(*it);
643 }
644
645 // Disable the DTMF playout when a tone is sent.
646 // PlayDtmfTone will be used if local playout is needed.
647 if (voe_wrapper_->dtmf()->SetDtmfFeedbackStatus(false) == -1) {
648 LOG_RTCERR1(SetDtmfFeedbackStatus, false);
649 }
650
651 initialized_ = true;
652 return true;
653}
654
655bool WebRtcVoiceEngine::EnsureSoundclipEngineInit() {
656 if (voe_wrapper_sc_initialized_) {
657 return true;
658 }
659 // Note that, if initialization fails, voe_wrapper_sc_initialized_ will still
660 // be false, so subsequent calls to EnsureSoundclipEngineInit will
661 // probably just fail again. That's acceptable behavior.
662#if defined(LINUX) && !defined(HAVE_LIBPULSE)
663 voe_wrapper_sc_->hw()->SetAudioDeviceLayer(webrtc::kAudioLinuxAlsa);
664#endif
665
666 // Initialize the VoiceEngine instance that we'll use to play out sound clips.
667 if (voe_wrapper_sc_->base()->Init(adm_sc_) == -1) {
668 LOG_RTCERR0_EX(Init, voe_wrapper_sc_->error());
669 return false;
670 }
671
672 // On Windows, tell it to use the default sound (not communication) devices.
673 // First check whether there is a valid sound device for playback.
674 // TODO(juberti): Clean this up when we support setting the soundclip device.
675#ifdef WIN32
676 // The SetPlayoutDevice may not be implemented in the case of external ADM.
 677 // TODO(ronghuawu): We should only check adm_sc_ here, but the current
 678 // PeerConnection interface never sets adm_sc_, so we need to check both
679 // in order to determine if the external adm is used.
680 if (!adm_ && !adm_sc_) {
681 int num_of_devices = 0;
682 if (voe_wrapper_sc_->hw()->GetNumOfPlayoutDevices(num_of_devices) != -1 &&
683 num_of_devices > 0) {
684 if (voe_wrapper_sc_->hw()->SetPlayoutDevice(kDefaultSoundclipDeviceId)
685 == -1) {
686 LOG_RTCERR1_EX(SetPlayoutDevice, kDefaultSoundclipDeviceId,
687 voe_wrapper_sc_->error());
688 return false;
689 }
690 } else {
691 LOG(LS_WARNING) << "No valid sound playout device found.";
692 }
693 }
694#endif
695 voe_wrapper_sc_initialized_ = true;
696 LOG(LS_INFO) << "Initialized WebRtc soundclip engine.";
697 return true;
698}
699
700void WebRtcVoiceEngine::Terminate() {
701 LOG(LS_INFO) << "WebRtcVoiceEngine::Terminate";
702 initialized_ = false;
703
704 StopAecDump();
705
706 if (voe_wrapper_sc_) {
707 voe_wrapper_sc_initialized_ = false;
708 voe_wrapper_sc_->base()->Terminate();
709 }
710 voe_wrapper_->base()->Terminate();
711 desired_local_monitor_enable_ = false;
712}
713
714int WebRtcVoiceEngine::GetCapabilities() {
715 return AUDIO_SEND | AUDIO_RECV;
716}
717
718VoiceMediaChannel *WebRtcVoiceEngine::CreateChannel() {
719 WebRtcVoiceMediaChannel* ch = new WebRtcVoiceMediaChannel(this);
720 if (!ch->valid()) {
721 delete ch;
722 ch = NULL;
723 }
724 return ch;
725}
726
727SoundclipMedia *WebRtcVoiceEngine::CreateSoundclip() {
728 if (!EnsureSoundclipEngineInit()) {
729 LOG(LS_ERROR) << "Unable to create soundclip: soundclip engine failed to "
730 << "initialize.";
731 return NULL;
732 }
733 WebRtcSoundclipMedia *soundclip = new WebRtcSoundclipMedia(this);
734 if (!soundclip->Init() || !soundclip->Enable()) {
735 delete soundclip;
736 return NULL;
737 }
738 return soundclip;
739}
740
741bool WebRtcVoiceEngine::SetOptions(const AudioOptions& options) {
742 if (!ApplyOptions(options)) {
743 return false;
744 }
745 options_ = options;
746 return true;
747}
748
749bool WebRtcVoiceEngine::SetOptionOverrides(const AudioOptions& overrides) {
750 LOG(LS_INFO) << "Setting option overrides: " << overrides.ToString();
751 if (!ApplyOptions(overrides)) {
752 return false;
753 }
754 option_overrides_ = overrides;
755 return true;
756}
757
758bool WebRtcVoiceEngine::ClearOptionOverrides() {
759 LOG(LS_INFO) << "Clearing option overrides.";
760 AudioOptions options = options_;
 761 // Only call ApplyOptions if |option_overrides_| contains overridden options.
 762 // ApplyOptions affects NS, AGC, and other options that are shared between
763 // all WebRtcVoiceEngineChannels.
764 if (option_overrides_ == AudioOptions()) {
765 return true;
766 }
767
768 if (!ApplyOptions(options)) {
769 return false;
770 }
771 option_overrides_ = AudioOptions();
772 return true;
773}
774
775// AudioOptions defaults are set in InitInternal (for options with corresponding
776// MediaEngineInterface flags) and in SetOptions(int) for flagless options.
777bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
778 AudioOptions options = options_in; // The options are modified below.
779 // kEcConference is AEC with high suppression.
780 webrtc::EcModes ec_mode = webrtc::kEcConference;
781 webrtc::AecmModes aecm_mode = webrtc::kAecmSpeakerphone;
782 webrtc::AgcModes agc_mode = webrtc::kAgcAdaptiveAnalog;
783 webrtc::NsModes ns_mode = webrtc::kNsHighSuppression;
784 bool aecm_comfort_noise = false;
785 if (options.aecm_generate_comfort_noise.Get(&aecm_comfort_noise)) {
786 LOG(LS_VERBOSE) << "Comfort noise explicitly set to "
787 << aecm_comfort_noise << " (default is false).";
788 }
789
790#if defined(IOS)
791 // On iOS, VPIO provides built-in EC and AGC.
792 options.echo_cancellation.Set(false);
793 options.auto_gain_control.Set(false);
794#elif defined(ANDROID)
795 ec_mode = webrtc::kEcAecm;
796#endif
797
798#if defined(IOS) || defined(ANDROID)
799 // Set the AGC mode for iOS as well despite disabling it above, to avoid
800 // unsupported configuration errors from webrtc.
801 agc_mode = webrtc::kAgcFixedDigital;
802 options.typing_detection.Set(false);
803 options.experimental_agc.Set(false);
804 options.experimental_aec.Set(false);
805 options.experimental_ns.Set(false);
806#endif
807
808 LOG(LS_INFO) << "Applying audio options: " << options.ToString();
809
810 webrtc::VoEAudioProcessing* voep = voe_wrapper_->processing();
811
812 bool echo_cancellation;
813 if (options.echo_cancellation.Get(&echo_cancellation)) {
814 if (voep->SetEcStatus(echo_cancellation, ec_mode) == -1) {
815 LOG_RTCERR2(SetEcStatus, echo_cancellation, ec_mode);
816 return false;
817 } else {
818 LOG(LS_VERBOSE) << "Echo control set to " << echo_cancellation
819 << " with mode " << ec_mode;
820 }
821#if !defined(ANDROID)
822 // TODO(ajm): Remove the error return on Android from webrtc.
823 if (voep->SetEcMetricsStatus(echo_cancellation) == -1) {
824 LOG_RTCERR1(SetEcMetricsStatus, echo_cancellation);
825 return false;
826 }
827#endif
828 if (ec_mode == webrtc::kEcAecm) {
829 if (voep->SetAecmMode(aecm_mode, aecm_comfort_noise) != 0) {
830 LOG_RTCERR2(SetAecmMode, aecm_mode, aecm_comfort_noise);
831 return false;
832 }
833 }
834 }
835
836 bool auto_gain_control;
837 if (options.auto_gain_control.Get(&auto_gain_control)) {
838 if (voep->SetAgcStatus(auto_gain_control, agc_mode) == -1) {
839 LOG_RTCERR2(SetAgcStatus, auto_gain_control, agc_mode);
840 return false;
841 } else {
842 LOG(LS_VERBOSE) << "Auto gain set to " << auto_gain_control
843 << " with mode " << agc_mode;
844 }
845 }
846
847 if (options.tx_agc_target_dbov.IsSet() ||
848 options.tx_agc_digital_compression_gain.IsSet() ||
849 options.tx_agc_limiter.IsSet()) {
850 // Override default_agc_config_. Generally, an unset option means "leave
851 // the VoE bits alone" in this function, so we want whatever is set to be
852 // stored as the new "default". If we didn't, then setting e.g.
853 // tx_agc_target_dbov would reset digital compression gain and limiter
854 // settings.
855 // Also, if we don't update default_agc_config_, then adjust_agc_delta
856 // would be an offset from the original values, and not whatever was set
857 // explicitly.
858 default_agc_config_.targetLeveldBOv =
859 options.tx_agc_target_dbov.GetWithDefaultIfUnset(
860 default_agc_config_.targetLeveldBOv);
861 default_agc_config_.digitalCompressionGaindB =
862 options.tx_agc_digital_compression_gain.GetWithDefaultIfUnset(
863 default_agc_config_.digitalCompressionGaindB);
864 default_agc_config_.limiterEnable =
865 options.tx_agc_limiter.GetWithDefaultIfUnset(
866 default_agc_config_.limiterEnable);
867 if (voe_wrapper_->processing()->SetAgcConfig(default_agc_config_) == -1) {
868 LOG_RTCERR3(SetAgcConfig,
869 default_agc_config_.targetLeveldBOv,
870 default_agc_config_.digitalCompressionGaindB,
871 default_agc_config_.limiterEnable);
872 return false;
873 }
874 }
875
876 bool noise_suppression;
877 if (options.noise_suppression.Get(&noise_suppression)) {
878 if (voep->SetNsStatus(noise_suppression, ns_mode) == -1) {
879 LOG_RTCERR2(SetNsStatus, noise_suppression, ns_mode);
880 return false;
881 } else {
882 LOG(LS_VERBOSE) << "Noise suppression set to " << noise_suppression
883 << " with mode " << ns_mode;
884 }
885 }
886
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000887 bool highpass_filter;
888 if (options.highpass_filter.Get(&highpass_filter)) {
889 LOG(LS_INFO) << "High pass filter enabled? " << highpass_filter;
890 if (voep->EnableHighPassFilter(highpass_filter) == -1) {
891 LOG_RTCERR1(SetHighpassFilterStatus, highpass_filter);
892 return false;
893 }
894 }
895
896 bool stereo_swapping;
897 if (options.stereo_swapping.Get(&stereo_swapping)) {
898 LOG(LS_INFO) << "Stereo swapping enabled? " << stereo_swapping;
899 voep->EnableStereoChannelSwapping(stereo_swapping);
900 if (voep->IsStereoChannelSwappingEnabled() != stereo_swapping) {
901 LOG_RTCERR1(EnableStereoChannelSwapping, stereo_swapping);
902 return false;
903 }
904 }
905
906 bool typing_detection;
907 if (options.typing_detection.Get(&typing_detection)) {
908 LOG(LS_INFO) << "Typing detection is enabled? " << typing_detection;
909 if (voep->SetTypingDetectionStatus(typing_detection) == -1) {
910 // In case of error, log the info and continue
911 LOG_RTCERR1(SetTypingDetectionStatus, typing_detection);
912 }
913 }
914
915 int adjust_agc_delta;
916 if (options.adjust_agc_delta.Get(&adjust_agc_delta)) {
917 LOG(LS_INFO) << "Adjust agc delta is " << adjust_agc_delta;
918 if (!AdjustAgcLevel(adjust_agc_delta)) {
919 return false;
920 }
921 }
922
923 bool aec_dump;
924 if (options.aec_dump.Get(&aec_dump)) {
925 LOG(LS_INFO) << "Aec dump is enabled? " << aec_dump;
926 if (aec_dump)
927 StartAecDump(kAecDumpByAudioOptionFilename);
928 else
929 StopAecDump();
930 }
931
buildbot@webrtc.org1f8a2372014-08-28 10:52:44 +0000932 webrtc::Config config;
933
934 experimental_aec_.SetFrom(options.experimental_aec);
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000935 bool experimental_aec;
buildbot@webrtc.org1f8a2372014-08-28 10:52:44 +0000936 if (experimental_aec_.Get(&experimental_aec)) {
937 LOG(LS_INFO) << "Experimental aec is enabled? " << experimental_aec;
938 config.Set<webrtc::DelayCorrection>(
939 new webrtc::DelayCorrection(experimental_aec));
940 }
941
942#ifdef USE_WEBRTC_DEV_BRANCH
943 experimental_ns_.SetFrom(options.experimental_ns);
944 bool experimental_ns;
945 if (experimental_ns_.Get(&experimental_ns)) {
946 LOG(LS_INFO) << "Experimental ns is enabled? " << experimental_ns;
947 config.Set<webrtc::ExperimentalNs>(
948 new webrtc::ExperimentalNs(experimental_ns));
949 }
950#endif
951
952 // We check audioproc for the benefit of tests, since FakeWebRtcVoiceEngine
953 // returns NULL on audio_processing().
954 webrtc::AudioProcessing* audioproc = voe_wrapper_->base()->audio_processing();
955 if (audioproc) {
956 audioproc->SetExtraOptions(config);
957 }
958
959#ifndef USE_WEBRTC_DEV_BRANCH
960 bool experimental_ns;
961 if (options.experimental_ns.Get(&experimental_ns)) {
962 LOG(LS_INFO) << "Experimental ns is enabled? " << experimental_ns;
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000963 // We check audioproc for the benefit of tests, since FakeWebRtcVoiceEngine
964 // returns NULL on audio_processing().
965 if (audioproc) {
buildbot@webrtc.org1f8a2372014-08-28 10:52:44 +0000966 if (audioproc->EnableExperimentalNs(experimental_ns) == -1) {
967 LOG_RTCERR1(EnableExperimentalNs, experimental_ns);
968 return false;
969 }
970 } else {
971 LOG(LS_VERBOSE) << "Experimental noise suppression set to "
972 << experimental_ns;
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000973 }
974 }
buildbot@webrtc.org1f8a2372014-08-28 10:52:44 +0000975#endif
buildbot@webrtc.org13d67762014-05-02 17:33:29 +0000976
977 uint32 recording_sample_rate;
978 if (options.recording_sample_rate.Get(&recording_sample_rate)) {
979 LOG(LS_INFO) << "Recording sample rate is " << recording_sample_rate;
980 if (voe_wrapper_->hw()->SetRecordingSampleRate(recording_sample_rate)) {
981 LOG_RTCERR1(SetRecordingSampleRate, recording_sample_rate);
982 }
983 }
984
985 uint32 playout_sample_rate;
986 if (options.playout_sample_rate.Get(&playout_sample_rate)) {
987 LOG(LS_INFO) << "Playout sample rate is " << playout_sample_rate;
988 if (voe_wrapper_->hw()->SetPlayoutSampleRate(playout_sample_rate)) {
989 LOG_RTCERR1(SetPlayoutSampleRate, playout_sample_rate);
990 }
991 }
992
993 return true;
994}
995
996bool WebRtcVoiceEngine::SetDelayOffset(int offset) {
997 voe_wrapper_->processing()->SetDelayOffsetMs(offset);
998 if (voe_wrapper_->processing()->DelayOffsetMs() != offset) {
999 LOG_RTCERR1(SetDelayOffsetMs, offset);
1000 return false;
1001 }
1002
1003 return true;
1004}
1005
1006struct ResumeEntry {
1007 ResumeEntry(WebRtcVoiceMediaChannel *c, bool p, SendFlags s)
1008 : channel(c),
1009 playout(p),
1010 send(s) {
1011 }
1012
1013 WebRtcVoiceMediaChannel *channel;
1014 bool playout;
1015 SendFlags send;
1016};
1017
1018// TODO(juberti): Refactor this so that the core logic can be used to set the
1019// soundclip device. At that time, reinstate the soundclip pause/resume code.
1020bool WebRtcVoiceEngine::SetDevices(const Device* in_device,
1021 const Device* out_device) {
1022#if !defined(IOS)
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001023 int in_id = in_device ? rtc::FromString<int>(in_device->id) :
buildbot@webrtc.org13d67762014-05-02 17:33:29 +00001024 kDefaultAudioDeviceId;
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001025 int out_id = out_device ? rtc::FromString<int>(out_device->id) :
buildbot@webrtc.org13d67762014-05-02 17:33:29 +00001026 kDefaultAudioDeviceId;
1027 // The device manager uses -1 as the default device, which was the case for
1028 // VoE 3.5. VoE 4.0, however, uses 0 as the default in Linux and Mac.
1029#ifndef WIN32
1030 if (-1 == in_id) {
1031 in_id = kDefaultAudioDeviceId;
1032 }
1033 if (-1 == out_id) {
1034 out_id = kDefaultAudioDeviceId;
1035 }
1036#endif
1037
1038 std::string in_name = (in_id != kDefaultAudioDeviceId) ?
1039 in_device->name : "Default device";
1040 std::string out_name = (out_id != kDefaultAudioDeviceId) ?
1041 out_device->name : "Default device";
1042 LOG(LS_INFO) << "Setting microphone to (id=" << in_id << ", name=" << in_name
1043 << ") and speaker to (id=" << out_id << ", name=" << out_name
1044 << ")";
1045
1046 // If we're running the local monitor, we need to stop it first.
1047 bool ret = true;
1048 if (!PauseLocalMonitor()) {
1049 LOG(LS_WARNING) << "Failed to pause local monitor";
1050 ret = false;
1051 }
1052
1053 // Must also pause all audio playback and capture.
1054 for (ChannelList::const_iterator i = channels_.begin();
1055 i != channels_.end(); ++i) {
1056 WebRtcVoiceMediaChannel *channel = *i;
1057 if (!channel->PausePlayout()) {
1058 LOG(LS_WARNING) << "Failed to pause playout";
1059 ret = false;
1060 }
1061 if (!channel->PauseSend()) {
1062 LOG(LS_WARNING) << "Failed to pause send";
1063 ret = false;
1064 }
1065 }
1066
1067 // Find the recording device id in VoiceEngine and set recording device.
1068 if (!FindWebRtcAudioDeviceId(true, in_name, in_id, &in_id)) {
1069 ret = false;
1070 }
1071 if (ret) {
1072 if (voe_wrapper_->hw()->SetRecordingDevice(in_id) == -1) {
1073 LOG_RTCERR2(SetRecordingDevice, in_name, in_id);
1074 ret = false;
1075 }
buildbot@webrtc.org6b21b712014-07-31 15:08:53 +00001076 webrtc::AudioProcessing* ap = voe()->base()->audio_processing();
1077 if (ap)
1078 ap->Initialize();
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001079 }
1080
1081 // Find the playout device id in VoiceEngine and set playout device.
1082 if (!FindWebRtcAudioDeviceId(false, out_name, out_id, &out_id)) {
1083 LOG(LS_WARNING) << "Failed to find VoiceEngine device id for " << out_name;
1084 ret = false;
1085 }
1086 if (ret) {
1087 if (voe_wrapper_->hw()->SetPlayoutDevice(out_id) == -1) {
sergeyu@chromium.org5bc25c42013-12-05 00:24:06 +00001088 LOG_RTCERR2(SetPlayoutDevice, out_name, out_id);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001089 ret = false;
1090 }
1091 }
1092
1093 // Resume all audio playback and capture.
1094 for (ChannelList::const_iterator i = channels_.begin();
1095 i != channels_.end(); ++i) {
1096 WebRtcVoiceMediaChannel *channel = *i;
1097 if (!channel->ResumePlayout()) {
1098 LOG(LS_WARNING) << "Failed to resume playout";
1099 ret = false;
1100 }
1101 if (!channel->ResumeSend()) {
1102 LOG(LS_WARNING) << "Failed to resume send";
1103 ret = false;
1104 }
1105 }
1106
1107 // Resume local monitor.
1108 if (!ResumeLocalMonitor()) {
1109 LOG(LS_WARNING) << "Failed to resume local monitor";
1110 ret = false;
1111 }
1112
1113 if (ret) {
1114 LOG(LS_INFO) << "Set microphone to (id=" << in_id <<" name=" << in_name
1115 << ") and speaker to (id="<< out_id << " name=" << out_name
1116 << ")";
1117 }
1118
1119 return ret;
1120#else
1121 return true;
wu@webrtc.orgcecfd182013-10-30 05:18:12 +00001122#endif // !IOS
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001123}
1124
1125bool WebRtcVoiceEngine::FindWebRtcAudioDeviceId(
1126 bool is_input, const std::string& dev_name, int dev_id, int* rtc_id) {
1127 // In Linux, VoiceEngine uses the same device dev_id as the device manager.
wu@webrtc.orgcecfd182013-10-30 05:18:12 +00001128#if defined(LINUX) || defined(ANDROID)
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001129 *rtc_id = dev_id;
1130 return true;
1131#else
1132 // In Windows and Mac, we need to find the VoiceEngine device id by name
1133 // unless the input dev_id is the default device id.
1134 if (kDefaultAudioDeviceId == dev_id) {
1135 *rtc_id = dev_id;
1136 return true;
1137 }
1138
1139 // Get the number of VoiceEngine audio devices.
1140 int count = 0;
1141 if (is_input) {
1142 if (-1 == voe_wrapper_->hw()->GetNumOfRecordingDevices(count)) {
1143 LOG_RTCERR0(GetNumOfRecordingDevices);
1144 return false;
1145 }
1146 } else {
1147 if (-1 == voe_wrapper_->hw()->GetNumOfPlayoutDevices(count)) {
1148 LOG_RTCERR0(GetNumOfPlayoutDevices);
1149 return false;
1150 }
1151 }
1152
1153 for (int i = 0; i < count; ++i) {
1154 char name[128];
1155 char guid[128];
1156 if (is_input) {
1157 voe_wrapper_->hw()->GetRecordingDeviceName(i, name, guid);
1158 LOG(LS_VERBOSE) << "VoiceEngine microphone " << i << ": " << name;
1159 } else {
1160 voe_wrapper_->hw()->GetPlayoutDeviceName(i, name, guid);
1161 LOG(LS_VERBOSE) << "VoiceEngine speaker " << i << ": " << name;
1162 }
1163
1164 std::string webrtc_name(name);
1165 if (dev_name.compare(0, webrtc_name.size(), webrtc_name) == 0) {
1166 *rtc_id = i;
1167 return true;
1168 }
1169 }
1170 LOG(LS_WARNING) << "VoiceEngine cannot find device: " << dev_name;
1171 return false;
1172#endif
1173}
1174
1175bool WebRtcVoiceEngine::GetOutputVolume(int* level) {
1176 unsigned int ulevel;
1177 if (voe_wrapper_->volume()->GetSpeakerVolume(ulevel) == -1) {
1178 LOG_RTCERR1(GetSpeakerVolume, level);
1179 return false;
1180 }
1181 *level = ulevel;
1182 return true;
1183}
1184
1185bool WebRtcVoiceEngine::SetOutputVolume(int level) {
1186 ASSERT(level >= 0 && level <= 255);
1187 if (voe_wrapper_->volume()->SetSpeakerVolume(level) == -1) {
1188 LOG_RTCERR1(SetSpeakerVolume, level);
1189 return false;
1190 }
1191 return true;
1192}
1193
1194int WebRtcVoiceEngine::GetInputLevel() {
1195 unsigned int ulevel;
1196 return (voe_wrapper_->volume()->GetSpeechInputLevel(ulevel) != -1) ?
1197 static_cast<int>(ulevel) : -1;
1198}
1199
1200bool WebRtcVoiceEngine::SetLocalMonitor(bool enable) {
1201 desired_local_monitor_enable_ = enable;
1202 return ChangeLocalMonitor(desired_local_monitor_enable_);
1203}
1204
1205bool WebRtcVoiceEngine::ChangeLocalMonitor(bool enable) {
1206 // The voe file api is not available in chrome.
1207 if (!voe_wrapper_->file()) {
1208 return false;
1209 }
1210 if (enable && !monitor_) {
1211 monitor_.reset(new WebRtcMonitorStream);
1212 if (voe_wrapper_->file()->StartRecordingMicrophone(monitor_.get()) == -1) {
1213 LOG_RTCERR1(StartRecordingMicrophone, monitor_.get());
1214 // Must call Stop() because there are some cases where Start will report
1215 // failure but still change the state, and if we leave VE in the on state
1216 // then it could crash later when trying to invoke methods on our monitor.
1217 voe_wrapper_->file()->StopRecordingMicrophone();
1218 monitor_.reset();
1219 return false;
1220 }
1221 } else if (!enable && monitor_) {
1222 voe_wrapper_->file()->StopRecordingMicrophone();
1223 monitor_.reset();
1224 }
1225 return true;
1226}
1227
1228bool WebRtcVoiceEngine::PauseLocalMonitor() {
1229 return ChangeLocalMonitor(false);
1230}
1231
1232bool WebRtcVoiceEngine::ResumeLocalMonitor() {
1233 return ChangeLocalMonitor(desired_local_monitor_enable_);
1234}
1235
1236const std::vector<AudioCodec>& WebRtcVoiceEngine::codecs() {
1237 return codecs_;
1238}
1239
1240bool WebRtcVoiceEngine::FindCodec(const AudioCodec& in) {
1241 return FindWebRtcCodec(in, NULL);
1242}
1243
1244// Get the VoiceEngine codec that matches |in|, with the supplied settings.
1245bool WebRtcVoiceEngine::FindWebRtcCodec(const AudioCodec& in,
1246 webrtc::CodecInst* out) {
1247 int ncodecs = voe_wrapper_->codec()->NumOfCodecs();
1248 for (int i = 0; i < ncodecs; ++i) {
1249 webrtc::CodecInst voe_codec;
henrik.lundin@webrtc.org8038d422014-11-11 08:38:24 +00001250 if (GetVoeCodec(i, &voe_codec)) {
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001251 AudioCodec codec(voe_codec.pltype, voe_codec.plname, voe_codec.plfreq,
1252 voe_codec.rate, voe_codec.channels, 0);
1253 bool multi_rate = IsCodecMultiRate(voe_codec);
1254 // Allow arbitrary rates for ISAC to be specified.
1255 if (multi_rate) {
1256 // Set codec.bitrate to 0 so the check for codec.Matches() passes.
1257 codec.bitrate = 0;
1258 }
1259 if (codec.Matches(in)) {
1260 if (out) {
1261 // Fixup the payload type.
1262 voe_codec.pltype = in.id;
1263
1264 // Set bitrate if specified.
1265 if (multi_rate && in.bitrate != 0) {
1266 voe_codec.rate = in.bitrate;
1267 }
1268
henrik.lundin@webrtc.orgf85dbce2014-11-07 12:25:00 +00001269 // Reset G722 sample rate to 16000 to match WebRTC.
1270 MaybeFixupG722(&voe_codec, 16000);
1271
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001272 // Apply codec-specific settings.
1273 if (IsIsac(codec)) {
1274 // If ISAC and an explicit bitrate is not specified,
minyue@webrtc.org26236952014-10-29 02:27:08 +00001275 // enable auto bitrate adjustment.
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001276 voe_codec.rate = (in.bitrate > 0) ? in.bitrate : -1;
1277 }
1278 *out = voe_codec;
1279 }
1280 return true;
1281 }
1282 }
1283 }
1284 return false;
1285}
1286const std::vector<RtpHeaderExtension>&
1287WebRtcVoiceEngine::rtp_header_extensions() const {
1288 return rtp_header_extensions_;
1289}
1290
1291void WebRtcVoiceEngine::SetLogging(int min_sev, const char* filter) {
1292 // if min_sev == -1, we keep the current log level.
1293 if (min_sev >= 0) {
1294 SetTraceFilter(SeverityToFilter(min_sev));
1295 }
1296 log_options_ = filter;
1297 SetTraceOptions(initialized_ ? log_options_ : "");
1298}
1299
1300int WebRtcVoiceEngine::GetLastEngineError() {
1301 return voe_wrapper_->error();
1302}
1303
1304void WebRtcVoiceEngine::SetTraceFilter(int filter) {
1305 log_filter_ = filter;
1306 tracing_->SetTraceFilter(filter);
1307}
1308
 1309// We support three different logging settings for VoiceEngine:
1310// 1. Observer callback that goes into talk diagnostic logfile.
1311// Use --logfile and --loglevel
1312//
1313// 2. Encrypted VoiceEngine log for debugging VoiceEngine.
1314// Use --voice_loglevel --voice_logfilter "tracefile file_name"
1315//
1316// 3. EC log and dump for debugging QualityEngine.
1317// Use --voice_loglevel --voice_logfilter "recordEC file_name"
1318//
1319// For more details see: "https://sites.google.com/a/google.com/wavelet/Home/
1320// Magic-Flute--RTC-Engine-/Magic-Flute-Command-Line-Parameters"
1321void WebRtcVoiceEngine::SetTraceOptions(const std::string& options) {
1322 // Set encrypted trace file.
1323 std::vector<std::string> opts;
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001324 rtc::tokenize(options, ' ', '"', '"', &opts);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001325 std::vector<std::string>::iterator tracefile =
1326 std::find(opts.begin(), opts.end(), "tracefile");
1327 if (tracefile != opts.end() && ++tracefile != opts.end()) {
1328 // Write encrypted debug output (at same loglevel) to file
1329 // EncryptedTraceFile no longer supported.
1330 if (tracing_->SetTraceFile(tracefile->c_str()) == -1) {
1331 LOG_RTCERR1(SetTraceFile, *tracefile);
1332 }
1333 }
1334
wu@webrtc.org97077a32013-10-25 21:18:33 +00001335 // Allow trace options to override the trace filter. We default
1336 // it to log_filter_ (as a translation of libjingle log levels)
1337 // elsewhere, but this allows clients to explicitly set webrtc
1338 // log levels.
1339 std::vector<std::string>::iterator tracefilter =
1340 std::find(opts.begin(), opts.end(), "tracefilter");
1341 if (tracefilter != opts.end() && ++tracefilter != opts.end()) {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001342 if (!tracing_->SetTraceFilter(rtc::FromString<int>(*tracefilter))) {
wu@webrtc.org97077a32013-10-25 21:18:33 +00001343 LOG_RTCERR1(SetTraceFilter, *tracefilter);
1344 }
1345 }
1346
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001347 // Set AEC dump file
1348 std::vector<std::string>::iterator recordEC =
1349 std::find(opts.begin(), opts.end(), "recordEC");
1350 if (recordEC != opts.end()) {
1351 ++recordEC;
1352 if (recordEC != opts.end())
1353 StartAecDump(recordEC->c_str());
1354 else
1355 StopAecDump();
1356 }
1357}
1358
1359// Ignore spammy trace messages, mostly from the stats API when we haven't
1360// gotten RTCP info yet from the remote side.
1361bool WebRtcVoiceEngine::ShouldIgnoreTrace(const std::string& trace) {
1362 static const char* kTracesToIgnore[] = {
1363 "\tfailed to GetReportBlockInformation",
1364 "GetRecCodec() failed to get received codec",
1365 "GetReceivedRtcpStatistics: Could not get received RTP statistics",
1366 "GetRemoteRTCPData() failed to measure statistics due to lack of received RTP and/or RTCP packets", // NOLINT
1367 "GetRemoteRTCPData() failed to retrieve sender info for remote side",
1368 "GetRTPStatistics() failed to measure RTT since no RTP packets have been received yet", // NOLINT
1369 "GetRTPStatistics() failed to read RTP statistics from the RTP/RTCP module",
1370 "GetRTPStatistics() failed to retrieve RTT from the RTP/RTCP module",
1371 "SenderInfoReceived No received SR",
1372 "StatisticsRTP() no statistics available",
1373 "TransmitMixer::TypingDetection() VE_TYPING_NOISE_WARNING message has been posted", // NOLINT
1374 "TransmitMixer::TypingDetection() pending noise-saturation warning exists", // NOLINT
1375 "GetRecPayloadType() failed to retrieve RX payload type (error=10026)", // NOLINT
1376 "StopPlayingFileAsMicrophone() isnot playing (error=8088)",
1377 NULL
1378 };
1379 for (const char* const* p = kTracesToIgnore; *p; ++p) {
1380 if (trace.find(*p) != std::string::npos) {
1381 return true;
1382 }
1383 }
1384 return false;
1385}
1386
1387void WebRtcVoiceEngine::Print(webrtc::TraceLevel level, const char* trace,
1388 int length) {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001389 rtc::LoggingSeverity sev = rtc::LS_VERBOSE;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001390 if (level == webrtc::kTraceError || level == webrtc::kTraceCritical)
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001391 sev = rtc::LS_ERROR;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001392 else if (level == webrtc::kTraceWarning)
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001393 sev = rtc::LS_WARNING;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001394 else if (level == webrtc::kTraceStateInfo || level == webrtc::kTraceInfo)
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001395 sev = rtc::LS_INFO;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001396 else if (level == webrtc::kTraceTerseInfo)
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001397 sev = rtc::LS_INFO;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001398
1399 // Skip past boilerplate prefix text
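  // (The first 71 characters of each WebRTC trace line are generated prefix
  // text; only the message body that follows is interesting for logging.)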
1400 if (length < 72) {
1401 std::string msg(trace, length);
1402 LOG(LS_ERROR) << "Malformed webrtc log message: ";
1403 LOG_V(sev) << msg;
1404 } else {
1405 std::string msg(trace + 71, length - 72);
1406 if (!ShouldIgnoreTrace(msg)) {
1407 LOG_V(sev) << "webrtc: " << msg;
1408 }
1409 }
1410}
1411
1412void WebRtcVoiceEngine::CallbackOnError(int channel_num, int err_code) {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001413 rtc::CritScope lock(&channels_cs_);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001414 WebRtcVoiceMediaChannel* channel = NULL;
1415 uint32 ssrc = 0;
1416 LOG(LS_WARNING) << "VoiceEngine error " << err_code << " reported on channel "
1417 << channel_num << ".";
1418 if (FindChannelAndSsrc(channel_num, &channel, &ssrc)) {
1419 ASSERT(channel != NULL);
1420 channel->OnError(ssrc, err_code);
1421 } else {
1422 LOG(LS_ERROR) << "VoiceEngine channel " << channel_num
1423 << " could not be found in channel list when error reported.";
1424 }
1425}
1426
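// Finds the WebRtcVoiceMediaChannel and ssrc that correspond to the given
// VoE channel number. Returns false if no registered channel matches.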
1427bool WebRtcVoiceEngine::FindChannelAndSsrc(
1428 int channel_num, WebRtcVoiceMediaChannel** channel, uint32* ssrc) const {
1429 ASSERT(channel != NULL && ssrc != NULL);
1430
1431 *channel = NULL;
1432 *ssrc = 0;
1433 // Find corresponding channel and ssrc
1434 for (ChannelList::const_iterator it = channels_.begin();
1435 it != channels_.end(); ++it) {
1436 ASSERT(*it != NULL);
1437 if ((*it)->FindSsrc(channel_num, ssrc)) {
1438 *channel = *it;
1439 return true;
1440 }
1441 }
1442
1443 return false;
1444}
1445
1446 // Searches through the WebRtcVoiceMediaChannels to find the voice engine
1447 // channel number corresponding to the given ssrc and direction.
1448bool WebRtcVoiceEngine::FindChannelNumFromSsrc(
1449 uint32 ssrc, MediaProcessorDirection direction, int* channel_num) {
1450 ASSERT(channel_num != NULL);
1451 ASSERT(direction == MPD_RX || direction == MPD_TX);
1452
1453 *channel_num = -1;
1454 // Find corresponding channel for ssrc.
1455 for (ChannelList::const_iterator it = channels_.begin();
1456 it != channels_.end(); ++it) {
1457 ASSERT(*it != NULL);
1458 if (direction & MPD_RX) {
1459 *channel_num = (*it)->GetReceiveChannelNum(ssrc);
1460 }
1461 if (*channel_num == -1 && (direction & MPD_TX)) {
1462 *channel_num = (*it)->GetSendChannelNum(ssrc);
1463 }
1464 if (*channel_num != -1) {
1465 return true;
1466 }
1467 }
1468 LOG(LS_WARNING) << "FindChannelNumFromSsrc: no channel found for ssrc " << ssrc;
1469 return false;
1470}
1471
1472void WebRtcVoiceEngine::RegisterChannel(WebRtcVoiceMediaChannel *channel) {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001473 rtc::CritScope lock(&channels_cs_);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001474 channels_.push_back(channel);
1475}
1476
1477void WebRtcVoiceEngine::UnregisterChannel(WebRtcVoiceMediaChannel *channel) {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001478 rtc::CritScope lock(&channels_cs_);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001479 ChannelList::iterator i = std::find(channels_.begin(),
1480 channels_.end(),
1481 channel);
1482 if (i != channels_.end()) {
1483 channels_.erase(i);
1484 }
1485}
1486
1487void WebRtcVoiceEngine::RegisterSoundclip(WebRtcSoundclipMedia *soundclip) {
1488 soundclips_.push_back(soundclip);
1489}
1490
1491void WebRtcVoiceEngine::UnregisterSoundclip(WebRtcSoundclipMedia *soundclip) {
1492 SoundclipList::iterator i = std::find(soundclips_.begin(),
1493 soundclips_.end(),
1494 soundclip);
1495 if (i != soundclips_.end()) {
1496 soundclips_.erase(i);
1497 }
1498}
1499
1500// Adjusts the default AGC target level by the specified delta.
1501// NB: If we start messing with other config fields, we'll want
1502// to save the current webrtc::AgcConfig as well.
1503bool WebRtcVoiceEngine::AdjustAgcLevel(int delta) {
1504 webrtc::AgcConfig config = default_agc_config_;
1505 config.targetLeveldBOv -= delta;
1506
1507 LOG(LS_INFO) << "Adjusting AGC level from default -"
1508 << default_agc_config_.targetLeveldBOv << "dB to -"
1509 << config.targetLeveldBOv << "dB";
1510
1511 if (voe_wrapper_->processing()->SetAgcConfig(config) == -1) {
1512 LOG_RTCERR1(SetAgcConfig, config.targetLeveldBOv);
1513 return false;
1514 }
1515 return true;
1516}
1517
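// Replaces the audio device modules used for calls (|adm|) and soundclips
// (|adm_sc|). Must be called before Init(); takes a reference on the new
// modules and releases any previously set ones.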
1518bool WebRtcVoiceEngine::SetAudioDeviceModule(webrtc::AudioDeviceModule* adm,
1519 webrtc::AudioDeviceModule* adm_sc) {
1520 if (initialized_) {
1521 LOG(LS_WARNING) << "SetAudioDeviceModule can not be called after Init.";
1522 return false;
1523 }
1524 if (adm_) {
1525 adm_->Release();
1526 adm_ = NULL;
1527 }
1528 if (adm) {
1529 adm_ = adm;
1530 adm_->AddRef();
1531 }
1532
1533 if (adm_sc_) {
1534 adm_sc_->Release();
1535 adm_sc_ = NULL;
1536 }
1537 if (adm_sc) {
1538 adm_sc_ = adm_sc;
1539 adm_sc_->AddRef();
1540 }
1541 return true;
1542}
1543
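// Starts an AEC dump to an already opened platform file. Any dump in
// progress is stopped first; the file is closed again if recording cannot
// be started.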
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001544bool WebRtcVoiceEngine::StartAecDump(rtc::PlatformFile file) {
1545 FILE* aec_dump_file_stream = rtc::FdopenPlatformFileForWriting(file);
wu@webrtc.orga8910d22014-01-23 22:12:45 +00001546 if (!aec_dump_file_stream) {
1547 LOG(LS_ERROR) << "Could not open AEC dump file stream.";
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001548 if (!rtc::ClosePlatformFile(file))
wu@webrtc.orga8910d22014-01-23 22:12:45 +00001549 LOG(LS_WARNING) << "Could not close file.";
1550 return false;
1551 }
wu@webrtc.orga9890802013-12-13 00:21:03 +00001552 StopAecDump();
wu@webrtc.orga8910d22014-01-23 22:12:45 +00001553 if (voe_wrapper_->processing()->StartDebugRecording(aec_dump_file_stream) !=
wu@webrtc.orga9890802013-12-13 00:21:03 +00001554 webrtc::AudioProcessing::kNoError) {
wu@webrtc.orga8910d22014-01-23 22:12:45 +00001555 LOG_RTCERR0(StartDebugRecording);
1556 fclose(aec_dump_file_stream);
wu@webrtc.orga9890802013-12-13 00:21:03 +00001557 return false;
1558 }
1559 is_dumping_aec_ = true;
1560 return true;
wu@webrtc.orga9890802013-12-13 00:21:03 +00001561}
1562
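// Connects |voice_processor| to the Rx or Tx media frame signal for |ssrc|.
// The first processor connected for a direction also registers this engine
// as the external media processor with VoE for the corresponding channel.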
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001563bool WebRtcVoiceEngine::RegisterProcessor(
1564 uint32 ssrc,
1565 VoiceProcessor* voice_processor,
1566 MediaProcessorDirection direction) {
1567 bool register_with_webrtc = false;
1568 int channel_id = -1;
1569 bool success = false;
1570 uint32* processor_ssrc = NULL;
1571 bool found_channel = FindChannelNumFromSsrc(ssrc, direction, &channel_id);
1572 if (voice_processor == NULL || !found_channel) {
1573 LOG(LS_WARNING) << "Media Processing Registration Failed. ssrc: " << ssrc
1574 << " foundChannel: " << found_channel;
1575 return false;
1576 }
1577
1578 webrtc::ProcessingTypes processing_type;
1579 {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001580 rtc::CritScope cs(&signal_media_critical_);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001581 if (direction == MPD_RX) {
1582 processing_type = webrtc::kPlaybackAllChannelsMixed;
1583 if (SignalRxMediaFrame.is_empty()) {
1584 register_with_webrtc = true;
1585 processor_ssrc = &rx_processor_ssrc_;
1586 }
1587 SignalRxMediaFrame.connect(voice_processor,
1588 &VoiceProcessor::OnFrame);
1589 } else {
1590 processing_type = webrtc::kRecordingPerChannel;
1591 if (SignalTxMediaFrame.is_empty()) {
1592 register_with_webrtc = true;
1593 processor_ssrc = &tx_processor_ssrc_;
1594 }
1595 SignalTxMediaFrame.connect(voice_processor,
1596 &VoiceProcessor::OnFrame);
1597 }
1598 }
1599 if (register_with_webrtc) {
1600 // TODO(janahan): when registering, consider instantiating a
1601 // VoeMediaProcess object instead of making the engine extend the interface.
1602 if (voe()->media() && voe()->media()->
1603 RegisterExternalMediaProcessing(channel_id,
1604 processing_type,
1605 *this) != -1) {
1606 LOG(LS_INFO) << "Media Processing Registration Succeeded. channel:"
1607 << channel_id;
1608 *processor_ssrc = ssrc;
1609 success = true;
1610 } else {
1611 LOG_RTCERR2(RegisterExternalMediaProcessing,
1612 channel_id,
1613 processing_type);
1614 success = false;
1615 }
1616 } else {
1617 // If we don't have to register with the engine, we only needed to
1618 // connect a new processor, so just report success.
1619 success = true;
1620 }
1621 return success;
1622}
1623
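// Disconnects |voice_processor| from the frame signal for the given channel
// direction and, if it was the last listener, deregisters external media
// processing from VoE for that channel.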
1624bool WebRtcVoiceEngine::UnregisterProcessorChannel(
1625 MediaProcessorDirection channel_direction,
1626 uint32 ssrc,
1627 VoiceProcessor* voice_processor,
1628 MediaProcessorDirection processor_direction) {
1629 bool success = true;
1630 FrameSignal* signal;
1631 webrtc::ProcessingTypes processing_type;
1632 uint32* processor_ssrc = NULL;
1633 if (channel_direction == MPD_RX) {
1634 signal = &SignalRxMediaFrame;
1635 processing_type = webrtc::kPlaybackAllChannelsMixed;
1636 processor_ssrc = &rx_processor_ssrc_;
1637 } else {
1638 signal = &SignalTxMediaFrame;
1639 processing_type = webrtc::kRecordingPerChannel;
1640 processor_ssrc = &tx_processor_ssrc_;
1641 }
1642
1643 int deregister_id = -1;
1644 {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001645 rtc::CritScope cs(&signal_media_critical_);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001646 if ((processor_direction & channel_direction) != 0 && !signal->is_empty()) {
1647 signal->disconnect(voice_processor);
1648 int channel_id = -1;
1649 bool found_channel = FindChannelNumFromSsrc(ssrc,
1650 channel_direction,
1651 &channel_id);
1652 if (signal->is_empty() && found_channel) {
1653 deregister_id = channel_id;
1654 }
1655 }
1656 }
1657 if (deregister_id != -1) {
1658 if (voe()->media() &&
1659 voe()->media()->DeRegisterExternalMediaProcessing(deregister_id,
1660 processing_type) != -1) {
1661 *processor_ssrc = 0;
1662 LOG(LS_INFO) << "Media Processing DeRegistration Succeeded. channel:"
1663 << deregister_id;
1664 } else {
1665 LOG_RTCERR2(DeRegisterExternalMediaProcessing,
1666 deregister_id,
1667 processing_type);
1668 success = false;
1669 }
1670 }
1671 return success;
1672}
1673
1674bool WebRtcVoiceEngine::UnregisterProcessor(
1675 uint32 ssrc,
1676 VoiceProcessor* voice_processor,
1677 MediaProcessorDirection direction) {
1678 bool success = true;
1679 if (voice_processor == NULL) {
1680 LOG(LS_WARNING) << "Media Processing Deregistration Failed. ssrc: "
1681 << ssrc;
1682 return false;
1683 }
1684 if (!UnregisterProcessorChannel(MPD_RX, ssrc, voice_processor, direction)) {
1685 success = false;
1686 }
1687 if (!UnregisterProcessorChannel(MPD_TX, ssrc, voice_processor, direction)) {
1688 success = false;
1689 }
1690 return success;
1691}
1692
1693 // Implements a method from the webrtc::VoEMediaProcess interface.
1694// Do not lock mux_channel_cs_ in this callback.
1695void WebRtcVoiceEngine::Process(int channel,
1696 webrtc::ProcessingTypes type,
1697 int16_t audio10ms[],
1698 int length,
1699 int sampling_freq,
1700 bool is_stereo) {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001701 rtc::CritScope cs(&signal_media_critical_);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001702 AudioFrame frame(audio10ms, length, sampling_freq, is_stereo);
1703 if (type == webrtc::kPlaybackAllChannelsMixed) {
1704 SignalRxMediaFrame(rx_processor_ssrc_, MPD_RX, &frame);
1705 } else if (type == webrtc::kRecordingPerChannel) {
1706 SignalTxMediaFrame(tx_processor_ssrc_, MPD_TX, &frame);
1707 } else {
1708 LOG(LS_WARNING) << "Media Processing invoked unexpectedly."
1709 << " channel: " << channel << " type: " << type
1710 << " tx_ssrc: " << tx_processor_ssrc_
1711 << " rx_ssrc: " << rx_processor_ssrc_;
1712 }
1713}
1714
1715void WebRtcVoiceEngine::StartAecDump(const std::string& filename) {
1716 if (!is_dumping_aec_) {
1717 // Start dumping AEC when we are not dumping.
1718 if (voe_wrapper_->processing()->StartDebugRecording(
1719 filename.c_str()) != webrtc::AudioProcessing::kNoError) {
wu@webrtc.orga9890802013-12-13 00:21:03 +00001720 LOG_RTCERR1(StartDebugRecording, filename.c_str());
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001721 } else {
1722 is_dumping_aec_ = true;
1723 }
1724 }
1725}
1726
1727void WebRtcVoiceEngine::StopAecDump() {
1728 if (is_dumping_aec_) {
1729 // Stop dumping AEC when we are dumping.
1730 if (voe_wrapper_->processing()->StopDebugRecording() !=
1731 webrtc::AudioProcessing::kNoError) {
1732 LOG_RTCERR0(StopDebugRecording);
1733 }
1734 is_dumping_aec_ = false;
1735 }
1736}
1737
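// Creates a VoE channel on the given engine wrapper using the shared
// |voe_config_|.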
sergeyu@chromium.org5bc25c42013-12-05 00:24:06 +00001738int WebRtcVoiceEngine::CreateVoiceChannel(VoEWrapper* voice_engine_wrapper) {
sergeyu@chromium.org5bc25c42013-12-05 00:24:06 +00001739 return voice_engine_wrapper->base()->CreateChannel(voe_config_);
sergeyu@chromium.org5bc25c42013-12-05 00:24:06 +00001740}
1741
1742int WebRtcVoiceEngine::CreateMediaVoiceChannel() {
1743 return CreateVoiceChannel(voe_wrapper_.get());
1744}
1745
1746int WebRtcVoiceEngine::CreateSoundclipVoiceChannel() {
1747 return CreateVoiceChannel(voe_wrapper_sc_.get());
1748}
1749
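// Adapter between an AudioRenderer (the local audio source) and a VoE
// channel: once started it becomes the renderer's sink and forwards each
// OnData() callback into VoE through webrtc::AudioTransport. Rough usage
// sketch:
//   webrtc::AudioTransport* transport =
//       engine()->voe()->base()->audio_transport();
//   WebRtcVoiceChannelRenderer* r =
//       new WebRtcVoiceChannelRenderer(channel, transport);
//   r->Start(renderer);  // audio now flows into the VoE channel
//   r->Stop();           // detaches from the renderer again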
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00001750class WebRtcVoiceMediaChannel::WebRtcVoiceChannelRenderer
1751 : public AudioRenderer::Sink {
1752 public:
1753 WebRtcVoiceChannelRenderer(int ch,
1754 webrtc::AudioTransport* voe_audio_transport)
1755 : channel_(ch),
1756 voe_audio_transport_(voe_audio_transport),
1757 renderer_(NULL) {
1758 }
1759 virtual ~WebRtcVoiceChannelRenderer() {
1760 Stop();
1761 }
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00001762
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00001763 // Starts rendering by setting a sink on the renderer so that it delivers
1764 // data callbacks.
henrike@webrtc.orga7b98182014-02-21 15:51:43 +00001765 // This method is called on the libjingle worker thread.
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00001766 // TODO(xians): Make sure Start() is called only once.
1767 void Start(AudioRenderer* renderer) {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001768 rtc::CritScope lock(&lock_);
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00001769 ASSERT(renderer != NULL);
henrike@webrtc.orga7b98182014-02-21 15:51:43 +00001770 if (renderer_ != NULL) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00001771 ASSERT(renderer_ == renderer);
1772 return;
1773 }
1774
1775 // TODO(xians): Remove AddChannel() call after Chrome turns on APM
1776 // in getUserMedia by default.
1777 renderer->AddChannel(channel_);
1778 renderer->SetSink(this);
1779 renderer_ = renderer;
1780 }
1781
1782 // Stops rendering by setting the sink of the renderer to NULL. No data
1783 // callbacks will be received after this method returns.
henrike@webrtc.orga7b98182014-02-21 15:51:43 +00001784 // This method is called on the libjingle worker thread.
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00001785 void Stop() {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001786 rtc::CritScope lock(&lock_);
henrike@webrtc.orga7b98182014-02-21 15:51:43 +00001787 if (renderer_ == NULL)
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00001788 return;
1789
1790 renderer_->RemoveChannel(channel_);
1791 renderer_->SetSink(NULL);
1792 renderer_ = NULL;
1793 }
1794
1795 // AudioRenderer::Sink implementation.
henrike@webrtc.orga7b98182014-02-21 15:51:43 +00001796 // This method is called on the audio thread.
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00001797 virtual void OnData(const void* audio_data,
1798 int bits_per_sample,
1799 int sample_rate,
1800 int number_of_channels,
1801 int number_of_frames) OVERRIDE {
henrike@webrtc.orga7b98182014-02-21 15:51:43 +00001802 voe_audio_transport_->OnData(channel_,
1803 audio_data,
1804 bits_per_sample,
1805 sample_rate,
1806 number_of_channels,
1807 number_of_frames);
henrike@webrtc.orga7b98182014-02-21 15:51:43 +00001808 }
1809
1810 // Callback from the |renderer_| when it is going away. If Start() has
1811 // never been called, this callback won't be triggered.
1812 virtual void OnClose() OVERRIDE {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001813 rtc::CritScope lock(&lock_);
henrike@webrtc.orga7b98182014-02-21 15:51:43 +00001814 // Set |renderer_| to NULL to make sure no more callback will get into
1815 // the renderer.
1816 renderer_ = NULL;
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00001817 }
1818
1819 // Accessor to the VoE channel ID.
1820 int channel() const { return channel_; }
1821
1822 private:
1823 const int channel_;
1824 webrtc::AudioTransport* const voe_audio_transport_;
1825
1826 // Raw pointer to AudioRenderer owned by LocalAudioTrackHandler.
1827 // PeerConnection makes sure the pointer is invalidated before the object
1828 // goes away.
1829 AudioRenderer* renderer_;
henrike@webrtc.orga7b98182014-02-21 15:51:43 +00001830
1831 // Protects |renderer_| in Start(), Stop() and OnClose().
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001832 rtc::CriticalSection lock_;
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00001833};
1834
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001835// WebRtcVoiceMediaChannel
1836WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel(WebRtcVoiceEngine *engine)
1837 : WebRtcMediaChannel<VoiceMediaChannel, WebRtcVoiceEngine>(
1838 engine,
sergeyu@chromium.org5bc25c42013-12-05 00:24:06 +00001839 engine->CreateMediaVoiceChannel()),
minyue@webrtc.org26236952014-10-29 02:27:08 +00001840 send_bitrate_setting_(false),
1841 send_bitrate_bps_(0),
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001842 options_(),
1843 dtmf_allowed_(false),
1844 desired_playout_(false),
1845 nack_enabled_(false),
1846 playout_(false),
wu@webrtc.org967bfff2013-09-19 05:49:50 +00001847 typing_noise_detected_(false),
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001848 desired_send_(SEND_NOTHING),
1849 send_(SEND_NOTHING),
buildbot@webrtc.orgb4c7b092014-08-25 12:11:58 +00001850 shared_bwe_vie_(NULL),
1851 shared_bwe_vie_channel_(-1),
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001852 default_receive_ssrc_(0) {
1853 engine->RegisterChannel(this);
1854 LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel "
1855 << voe_channel();
1856
wu@webrtc.org9dba5252013-08-05 20:36:57 +00001857 ConfigureSendChannel(voe_channel());
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001858}
1859
1860WebRtcVoiceMediaChannel::~WebRtcVoiceMediaChannel() {
1861 LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::~WebRtcVoiceMediaChannel "
1862 << voe_channel();
buildbot@webrtc.org6e5c7842014-09-19 06:46:37 +00001863 SetupSharedBandwidthEstimation(NULL, -1);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001864
wu@webrtc.org9dba5252013-08-05 20:36:57 +00001865 // Remove any remaining send streams, the default channel will be deleted
1866 // later.
1867 while (!send_channels_.empty())
1868 RemoveSendStream(send_channels_.begin()->first);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001869
1870 // Unregister ourselves from the engine.
1871 engine()->UnregisterChannel(this);
1872 // Remove any remaining streams.
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00001873 while (!receive_channels_.empty()) {
1874 RemoveRecvStream(receive_channels_.begin()->first);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001875 }
1876
wu@webrtc.org9dba5252013-08-05 20:36:57 +00001877 // Delete the default channel.
1878 DeleteChannel(voe_channel());
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001879}
1880
1881bool WebRtcVoiceMediaChannel::SetOptions(const AudioOptions& options) {
1882 LOG(LS_INFO) << "Setting voice channel options: "
1883 << options.ToString();
1884
wu@webrtc.orgde305012013-10-31 15:40:38 +00001885 // Check if DSCP value is changed from previous.
1886 bool dscp_option_changed = (options_.dscp != options.dscp);
1887
wu@webrtc.org9dba5252013-08-05 20:36:57 +00001888 // TODO(xians): Add support to set different options for different send
1889 // streams after we support multiple APMs.
1890
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001891 // We retain all of the existing options, and apply the given ones
1892 // on top. This means there is no way to "clear" options such that
1893 // they go back to the engine default.
1894 options_.SetAll(options);
1895
1896 if (send_ != SEND_NOTHING) {
1897 if (!engine()->SetOptionOverrides(options_)) {
1898 LOG(LS_WARNING) <<
1899 "Failed to engine SetOptionOverrides during channel SetOptions.";
1900 return false;
1901 }
1902 } else {
1903 // The option overrides will be applied when we actually start sending.
1904 }
1905
wu@webrtc.org97077a32013-10-25 21:18:33 +00001906 // Receiver-side auto gain control happens per channel, so set it here from
1907 // options. Note that, like conference mode, setting it on the engine won't
1908 // have the desired effect, since voice channels don't inherit options from
1909 // the media engine when those options are applied per-channel.
1910 bool rx_auto_gain_control;
1911 if (options.rx_auto_gain_control.Get(&rx_auto_gain_control)) {
1912 if (engine()->voe()->processing()->SetRxAgcStatus(
1913 voe_channel(), rx_auto_gain_control,
1914 webrtc::kAgcFixedDigital) == -1) {
1915 LOG_RTCERR1(SetRxAgcStatus, rx_auto_gain_control);
1916 return false;
1917 } else {
1918 LOG(LS_VERBOSE) << "Rx auto gain set to " << rx_auto_gain_control
1919 << " with mode " << webrtc::kAgcFixedDigital;
1920 }
1921 }
1922 if (options.rx_agc_target_dbov.IsSet() ||
1923 options.rx_agc_digital_compression_gain.IsSet() ||
1924 options.rx_agc_limiter.IsSet()) {
1925 webrtc::AgcConfig config;
1926 // If only some of the options are being overridden, get the current
1927 // settings for the channel and bail if they aren't available.
1928 if (!options.rx_agc_target_dbov.IsSet() ||
1929 !options.rx_agc_digital_compression_gain.IsSet() ||
1930 !options.rx_agc_limiter.IsSet()) {
1931 if (engine()->voe()->processing()->GetRxAgcConfig(
1932 voe_channel(), config) != 0) {
1933 LOG(LS_ERROR) << "Failed to get default rx agc configuration for "
1934 << "channel " << voe_channel() << ". Since not all rx "
1935 << "agc options are specified, unable to safely set rx "
1936 << "agc options.";
1937 return false;
1938 }
1939 }
1940 config.targetLeveldBOv =
1941 options.rx_agc_target_dbov.GetWithDefaultIfUnset(
1942 config.targetLeveldBOv);
1943 config.digitalCompressionGaindB =
1944 options.rx_agc_digital_compression_gain.GetWithDefaultIfUnset(
1945 config.digitalCompressionGaindB);
1946 config.limiterEnable = options.rx_agc_limiter.GetWithDefaultIfUnset(
1947 config.limiterEnable);
1948 if (engine()->voe()->processing()->SetRxAgcConfig(
1949 voe_channel(), config) == -1) {
1950 LOG_RTCERR4(SetRxAgcConfig, voe_channel(), config.targetLeveldBOv,
1951 config.digitalCompressionGaindB, config.limiterEnable);
1952 return false;
1953 }
1954 }
wu@webrtc.orgde305012013-10-31 15:40:38 +00001955 if (dscp_option_changed) {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00001956 rtc::DiffServCodePoint dscp = rtc::DSCP_DEFAULT;
henrika@webrtc.orgaebb1ad2014-01-14 10:00:58 +00001957 if (options_.dscp.GetWithDefaultIfUnset(false))
wu@webrtc.orgde305012013-10-31 15:40:38 +00001958 dscp = kAudioDscpValue;
1959 if (MediaChannel::SetDscp(dscp) != 0) {
1960 LOG(LS_WARNING) << "Failed to set DSCP settings for audio channel";
1961 }
1962 }
wu@webrtc.org97077a32013-10-25 21:18:33 +00001963
buildbot@webrtc.orgb4c7b092014-08-25 12:11:58 +00001964 // Force update of Video Engine BWE forwarding to reflect experiment setting.
1965 if (!SetupSharedBandwidthEstimation(shared_bwe_vie_,
1966 shared_bwe_vie_channel_)) {
1967 return false;
1968 }
1969
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001970 LOG(LS_INFO) << "Set voice channel options. Current options: "
1971 << options_.ToString();
1972 return true;
1973}
1974
1975bool WebRtcVoiceMediaChannel::SetRecvCodecs(
1976 const std::vector<AudioCodec>& codecs) {
1977 // Set the payload types to be used for incoming media.
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001978 LOG(LS_INFO) << "Setting receive voice codecs:";
1979
1980 std::vector<AudioCodec> new_codecs;
1981 // Find all new codecs. We allow adding new codecs but don't allow changing
1982 // the payload type of codecs that are already configured since we might
1983 // already be receiving packets with that payload type.
1984 for (std::vector<AudioCodec>::const_iterator it = codecs.begin();
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00001985 it != codecs.end(); ++it) {
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001986 AudioCodec old_codec;
1987 if (FindCodec(recv_codecs_, *it, &old_codec)) {
1988 if (old_codec.id != it->id) {
1989 LOG(LS_ERROR) << it->name << " payload type changed.";
1990 return false;
1991 }
1992 } else {
1993 new_codecs.push_back(*it);
1994 }
1995 }
1996 if (new_codecs.empty()) {
1997 // There are no new codecs to configure. Already configured codecs are
1998 // never removed.
1999 return true;
2000 }
2001
2002 if (playout_) {
2003 // Receive codecs cannot be changed while playing, so we temporarily
2004 // pause playout.
2005 PausePlayout();
2006 }
2007
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002008 bool ret = true;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002009 for (std::vector<AudioCodec>::const_iterator it = new_codecs.begin();
2010 it != new_codecs.end() && ret; ++it) {
2011 webrtc::CodecInst voe_codec;
2012 if (engine()->FindWebRtcCodec(*it, &voe_codec)) {
2013 LOG(LS_INFO) << ToString(*it);
2014 voe_codec.pltype = it->id;
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002015 if (default_receive_ssrc_ == 0) {
2016 // Set the receive codecs on the default channel explicitly if the
2017 // default channel is not used by |receive_channels_|; this happens in
2018 // conference mode or in non-conference mode when there is no playout
2019 // channel.
2020 // TODO(xians): Figure out how we use the default channel in conference
2021 // mode.
2022 if (engine()->voe()->codec()->SetRecPayloadType(
2023 voe_channel(), voe_codec) == -1) {
2024 LOG_RTCERR2(SetRecPayloadType, voe_channel(), ToString(voe_codec));
2025 ret = false;
2026 }
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002027 }
2028
2029 // Set the receive codecs on all receiving channels.
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002030 for (ChannelMap::iterator it = receive_channels_.begin();
2031 it != receive_channels_.end() && ret; ++it) {
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002032 if (engine()->voe()->codec()->SetRecPayloadType(
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002033 it->second->channel(), voe_codec) == -1) {
2034 LOG_RTCERR2(SetRecPayloadType, it->second->channel(),
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002035 ToString(voe_codec));
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002036 ret = false;
2037 }
2038 }
2039 } else {
2040 LOG(LS_WARNING) << "Unknown codec " << ToString(*it);
2041 ret = false;
2042 }
2043 }
2044 if (ret) {
2045 recv_codecs_ = codecs;
2046 }
2047
2048 if (desired_playout_ && !playout_) {
2049 ResumePlayout();
2050 }
2051 return ret;
2052}
2053
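// Configures the send side of a single VoE channel from the negotiated codec
// list: the first usable codec becomes the send codec, and RED, NACK, Opus
// in-band FEC and maximum playback rate, telephone-event and CN/VAD are set
// up according to the remaining entries.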
2054bool WebRtcVoiceMediaChannel::SetSendCodecs(
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00002055 int channel, const std::vector<AudioCodec>& codecs) {
buildbot@webrtc.orgae740dd2014-06-17 10:56:41 +00002056 // Disable VAD, NACK, RED, and FEC unless we know the other side wants them.
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00002057 engine()->voe()->codec()->SetVADStatus(channel, false);
2058 engine()->voe()->rtp()->SetNACKStatus(channel, false, 0);
buildbot@webrtc.orgae740dd2014-06-17 10:56:41 +00002059#ifdef USE_WEBRTC_DEV_BRANCH
2060 engine()->voe()->rtp()->SetREDStatus(channel, false);
2061 engine()->voe()->codec()->SetFECStatus(channel, false);
2062#else
2063 // TODO(minyue): Remove code under #else case after new WebRTC roll.
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00002064 engine()->voe()->rtp()->SetFECStatus(channel, false);
buildbot@webrtc.orgae740dd2014-06-17 10:56:41 +00002065#endif // USE_WEBRTC_DEV_BRANCH
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002066
2067 // Scan through the list to figure out the codec to use for sending, along
2068 // with the proper configuration for VAD and DTMF.
henrike@webrtc.org704bf9e2014-02-27 17:52:04 +00002069 bool found_send_codec = false;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002070 webrtc::CodecInst send_codec;
2071 memset(&send_codec, 0, sizeof(send_codec));
2072
wu@webrtc.org05e7b442014-04-01 17:44:24 +00002073 bool nack_enabled = nack_enabled_;
buildbot@webrtc.org3ffa1f92014-07-02 19:51:26 +00002074 bool enable_codec_fec = false;
wu@webrtc.org05e7b442014-04-01 17:44:24 +00002075
minyue@webrtc.org26236952014-10-29 02:27:08 +00002076 int opus_max_playback_rate = 0;
buildbot@webrtc.org5d639b32014-09-10 07:57:12 +00002077
henrike@webrtc.org704bf9e2014-02-27 17:52:04 +00002078 // Set send codec (the first non-telephone-event/CN codec)
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002079 for (std::vector<AudioCodec>::const_iterator it = codecs.begin();
2080 it != codecs.end(); ++it) {
2081 // Ignore codecs we don't know about. The negotiation step should prevent
2082 // this, but double-check to be sure.
2083 webrtc::CodecInst voe_codec;
2084 if (!engine()->FindWebRtcCodec(*it, &voe_codec)) {
henrika@webrtc.orgaebb1ad2014-01-14 10:00:58 +00002085 LOG(LS_WARNING) << "Unknown codec " << ToString(*it);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002086 continue;
2087 }
2088
henrike@webrtc.org704bf9e2014-02-27 17:52:04 +00002089 if (IsTelephoneEventCodec(it->name) || IsCNCodec(it->name)) {
2090 // Skip telephone-event/CN codec, which will be handled later.
2091 continue;
2092 }
2093
henrike@webrtc.org704bf9e2014-02-27 17:52:04 +00002094 // We'll use the first codec in the list to actually send audio data.
2095 // Be sure to use the payload type requested by the remote side.
buildbot@webrtc.orgae740dd2014-06-17 10:56:41 +00002096 // "red", for RED audio, is a special case where the actual codec to be
henrike@webrtc.org704bf9e2014-02-27 17:52:04 +00002097 // used is specified in params.
2098 if (IsRedCodec(it->name)) {
2099 // Parse out the RED parameters. If we fail, just ignore RED;
2100 // we don't support all possible params/usage scenarios.
2101 if (!GetRedSendCodec(*it, codecs, &send_codec)) {
2102 continue;
2103 }
2104
2105 // Enable redundant encoding of the specified codec. Treat any
2106 // failure as a fatal internal error.
buildbot@webrtc.orgae740dd2014-06-17 10:56:41 +00002107#ifdef USE_WEBRTC_DEV_BRANCH
2108 LOG(LS_INFO) << "Enabling RED on channel " << channel;
2109 if (engine()->voe()->rtp()->SetREDStatus(channel, true, it->id) == -1) {
2110 LOG_RTCERR3(SetREDStatus, channel, true, it->id);
2111#else
2112 // TODO(minyue): Remove code under #else case after new WebRTC roll.
henrike@webrtc.org704bf9e2014-02-27 17:52:04 +00002113 LOG(LS_INFO) << "Enabling FEC";
2114 if (engine()->voe()->rtp()->SetFECStatus(channel, true, it->id) == -1) {
2115 LOG_RTCERR3(SetFECStatus, channel, true, it->id);
buildbot@webrtc.orgae740dd2014-06-17 10:56:41 +00002116#endif // USE_WEBRTC_DEV_BRANCH
henrike@webrtc.org704bf9e2014-02-27 17:52:04 +00002117 return false;
2118 }
2119 } else {
2120 send_codec = voe_codec;
wu@webrtc.org05e7b442014-04-01 17:44:24 +00002121 nack_enabled = IsNackEnabled(*it);
buildbot@webrtc.org5d639b32014-09-10 07:57:12 +00002122 // When Opus is the send codec, enable in-band FEC if requested and set the
2123 // maximum playback rate.
2124 if (IsOpus(*it)) {
minyue@webrtc.org26236952014-10-29 02:27:08 +00002125 GetOpusConfig(*it, &send_codec, &enable_codec_fec,
2126 &opus_max_playback_rate);
buildbot@webrtc.org5d639b32014-09-10 07:57:12 +00002127 }
henrike@webrtc.org704bf9e2014-02-27 17:52:04 +00002128 }
2129 found_send_codec = true;
2130 break;
2131 }
2132
wu@webrtc.org05e7b442014-04-01 17:44:24 +00002133 if (nack_enabled_ != nack_enabled) {
2134 SetNack(channel, nack_enabled);
2135 nack_enabled_ = nack_enabled;
2136 }
2137
henrike@webrtc.org704bf9e2014-02-27 17:52:04 +00002138 if (!found_send_codec) {
2139 LOG(LS_WARNING) << "Received empty list of codecs.";
2140 return false;
2141 }
2142
2143 // Set the codec immediately, since SetVADStatus() depends on whether
2144 // the current codec is mono or stereo.
2145 if (!SetSendCodec(channel, send_codec))
2146 return false;
2147
buildbot@webrtc.org3ffa1f92014-07-02 19:51:26 +00002148 // FEC should be enabled after SetSendCodec.
2149 if (enable_codec_fec) {
2150 LOG(LS_INFO) << "Attempt to enable codec internal FEC on channel "
2151 << channel;
2152#ifdef USE_WEBRTC_DEV_BRANCH
2153 if (engine()->voe()->codec()->SetFECStatus(channel, true) == -1) {
2154 // Enable codec internal FEC. Treat any failure as fatal internal error.
2155 LOG_RTCERR2(SetFECStatus, channel, true);
2156 return false;
2157 }
2158#endif // USE_WEBRTC_DEV_BRANCH
2159 }
2160
buildbot@webrtc.org5d639b32014-09-10 07:57:12 +00002161 // maxplaybackrate should be set after SetSendCodec.
minyue@webrtc.org26236952014-10-29 02:27:08 +00002162 // If opus_max_playback_rate <= 0, the default maximum playback rate of 48 kHz
2163 // will be used.
2164 if (opus_max_playback_rate > 0) {
buildbot@webrtc.org5d639b32014-09-10 07:57:12 +00002165 LOG(LS_INFO) << "Attempt to set maximum playback rate to "
minyue@webrtc.org26236952014-10-29 02:27:08 +00002166 << opus_max_playback_rate
buildbot@webrtc.org5d639b32014-09-10 07:57:12 +00002167 << " Hz on channel "
2168 << channel;
2169#ifdef USE_WEBRTC_DEV_BRANCH
buildbot@webrtc.org5d639b32014-09-10 07:57:12 +00002170 if (engine()->voe()->codec()->SetOpusMaxPlaybackRate(
minyue@webrtc.org26236952014-10-29 02:27:08 +00002171 channel, opus_max_playback_rate) == -1) {
buildbot@webrtc.org5d639b32014-09-10 07:57:12 +00002172 LOG(LS_WARNING) << "Could not set maximum playback rate.";
2173 }
2174#endif
2175 }
2176
henrike@webrtc.org704bf9e2014-02-27 17:52:04 +00002177 // Always update the |send_codec_| to the currently set send codec.
2178 send_codec_.reset(new webrtc::CodecInst(send_codec));
2179
minyue@webrtc.org26236952014-10-29 02:27:08 +00002180 if (send_bitrate_setting_) {
2181 SetSendBitrateInternal(send_bitrate_bps_);
henrike@webrtc.org704bf9e2014-02-27 17:52:04 +00002182 }
2183
2184 // Loop through the codecs list again to config the telephone-event/CN codec.
2185 for (std::vector<AudioCodec>::const_iterator it = codecs.begin();
2186 it != codecs.end(); ++it) {
2187 // Ignore codecs we don't know about. The negotiation step should prevent
2188 // this, but double-check to be sure.
2189 webrtc::CodecInst voe_codec;
2190 if (!engine()->FindWebRtcCodec(*it, &voe_codec)) {
2191 LOG(LS_WARNING) << "Unknown codec " << ToString(*it);
2192 continue;
2193 }
2194
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002195 // Find the DTMF telephone event "codec" and tell VoiceEngine channels
2196 // about it.
henrike@webrtc.org704bf9e2014-02-27 17:52:04 +00002197 if (IsTelephoneEventCodec(it->name)) {
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00002198 if (engine()->voe()->dtmf()->SetSendTelephoneEventPayloadType(
2199 channel, it->id) == -1) {
2200 LOG_RTCERR2(SetSendTelephoneEventPayloadType, channel, it->id);
2201 return false;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002202 }
henrike@webrtc.org704bf9e2014-02-27 17:52:04 +00002203 } else if (IsCNCodec(it->name)) {
2204 // Turn voice activity detection/comfort noise on if supported.
2205 // Set the wideband CN payload type appropriately.
2206 // (narrowband always uses the static payload type 13).
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002207 webrtc::PayloadFrequencies cn_freq;
2208 switch (it->clockrate) {
2209 case 8000:
2210 cn_freq = webrtc::kFreq8000Hz;
2211 break;
2212 case 16000:
2213 cn_freq = webrtc::kFreq16000Hz;
2214 break;
2215 case 32000:
2216 cn_freq = webrtc::kFreq32000Hz;
2217 break;
2218 default:
2219 LOG(LS_WARNING) << "CN frequency " << it->clockrate
2220 << " not supported.";
2221 continue;
2222 }
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00002223 // Set the CN payloadtype and the VAD status.
2224 // The CN payload type for 8000 Hz clockrate is fixed at 13.
2225 if (cn_freq != webrtc::kFreq8000Hz) {
2226 if (engine()->voe()->codec()->SetSendCNPayloadType(
2227 channel, it->id, cn_freq) == -1) {
2228 LOG_RTCERR3(SetSendCNPayloadType, channel, it->id, cn_freq);
2229 // TODO(ajm): This failure condition will be removed from VoE.
2230 // Restore the return here when we update to a new enough webrtc.
2231 //
2232 // Not returning false because the SetSendCNPayloadType will fail if
2233 // the channel is already sending.
2234 // This can happen if the remote description is applied twice, for
2235 // example in the case of ROAP on top of JSEP, where both side will
2236 // send the offer.
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002237 }
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00002238 }
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00002239 // Only turn on VAD if we have a CN payload type that matches the
2240 // clockrate for the codec we are going to use.
2241 if (it->clockrate == send_codec.plfreq) {
2242 LOG(LS_INFO) << "Enabling VAD";
2243 if (engine()->voe()->codec()->SetVADStatus(channel, true) == -1) {
2244 LOG_RTCERR2(SetVADStatus, channel, true);
2245 return false;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002246 }
2247 }
2248 }
wu@webrtc.org1d1ffc92013-10-16 18:12:02 +00002249 }
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00002250 return true;
2251}
2252
2253bool WebRtcVoiceMediaChannel::SetSendCodecs(
2254 const std::vector<AudioCodec>& codecs) {
2255 dtmf_allowed_ = false;
2256 for (std::vector<AudioCodec>::const_iterator it = codecs.begin();
2257 it != codecs.end(); ++it) {
2258 // Find the DTMF telephone event "codec".
2259 if (_stricmp(it->name.c_str(), "telephone-event") == 0 ||
2260 _stricmp(it->name.c_str(), "audio/telephone-event") == 0) {
2261 dtmf_allowed_ = true;
2262 }
2263 }
2264
2265 // Cache the codecs in order to configure the channel created later.
2266 send_codecs_ = codecs;
2267 for (ChannelMap::iterator iter = send_channels_.begin();
2268 iter != send_channels_.end(); ++iter) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002269 if (!SetSendCodecs(iter->second->channel(), codecs)) {
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00002270 return false;
2271 }
2272 }
2273
wu@webrtc.org05e7b442014-04-01 17:44:24 +00002274 // Set nack status on receive channels and update |nack_enabled_|.
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00002275 SetNack(receive_channels_, nack_enabled_);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002276 return true;
2277}
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002278
2279void WebRtcVoiceMediaChannel::SetNack(const ChannelMap& channels,
2280 bool nack_enabled) {
2281 for (ChannelMap::const_iterator it = channels.begin();
2282 it != channels.end(); ++it) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002283 SetNack(it->second->channel(), nack_enabled);
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002284 }
2285}
2286
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00002287void WebRtcVoiceMediaChannel::SetNack(int channel, bool nack_enabled) {
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002288 if (nack_enabled) {
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00002289 LOG(LS_INFO) << "Enabling NACK for channel " << channel;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002290 engine()->voe()->rtp()->SetNACKStatus(channel, true, kNackMaxPackets);
2291 } else {
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00002292 LOG(LS_INFO) << "Disabling NACK for channel " << channel;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002293 engine()->voe()->rtp()->SetNACKStatus(channel, false, 0);
2294 }
2295}
2296
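// Applies |send_codec| to every send channel.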
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002297bool WebRtcVoiceMediaChannel::SetSendCodec(
2298 const webrtc::CodecInst& send_codec) {
2299 LOG(LS_INFO) << "Selected voice codec " << ToString(send_codec)
2300 << ", bitrate=" << send_codec.rate;
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002301 for (ChannelMap::iterator iter = send_channels_.begin();
2302 iter != send_channels_.end(); ++iter) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002303 if (!SetSendCodec(iter->second->channel(), send_codec))
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002304 return false;
2305 }
2306
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002307 return true;
2308}
2309
2310bool WebRtcVoiceMediaChannel::SetSendCodec(
2311 int channel, const webrtc::CodecInst& send_codec) {
2312 LOG(LS_INFO) << "Send channel " << channel << " selected voice codec "
2313 << ToString(send_codec) << ", bitrate=" << send_codec.rate;
2314
wu@webrtc.org05e7b442014-04-01 17:44:24 +00002315 webrtc::CodecInst current_codec;
2316 if (engine()->voe()->codec()->GetSendCodec(channel, current_codec) == 0 &&
2317 (send_codec == current_codec)) {
2318 // Codec is already configured, we can return without setting it again.
2319 return true;
2320 }
2321
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002322 if (engine()->voe()->codec()->SetSendCodec(channel, send_codec) == -1) {
2323 LOG_RTCERR2(SetSendCodec, channel, ToString(send_codec));
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002324 return false;
2325 }
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002326 return true;
2327}
2328
2329bool WebRtcVoiceMediaChannel::SetRecvRtpHeaderExtensions(
2330 const std::vector<RtpHeaderExtension>& extensions) {
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00002331 if (receive_extensions_ == extensions) {
2332 return true;
2333 }
2334
2335 // The default channel may or may not be in |receive_channels_|. Set the RTP
2336 // header extensions for the default channel regardless.
2337 if (!SetChannelRecvRtpHeaderExtensions(voe_channel(), extensions)) {
2338 return false;
2339 }
henrike@webrtc.org79047f92014-03-06 23:46:59 +00002340
2341 // Loop through all receive channels and enable/disable the extensions.
2342 for (ChannelMap::const_iterator channel_it = receive_channels_.begin();
2343 channel_it != receive_channels_.end(); ++channel_it) {
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00002344 if (!SetChannelRecvRtpHeaderExtensions(channel_it->second->channel(),
2345 extensions)) {
henrike@webrtc.org79047f92014-03-06 23:46:59 +00002346 return false;
2347 }
2348 }
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00002349
2350 receive_extensions_ = extensions;
2351 return true;
2352}
2353
2354bool WebRtcVoiceMediaChannel::SetChannelRecvRtpHeaderExtensions(
2355 int channel_id, const std::vector<RtpHeaderExtension>& extensions) {
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00002356 const RtpHeaderExtension* audio_level_extension =
2357 FindHeaderExtension(extensions, kRtpAudioLevelHeaderExtension);
2358 if (!SetHeaderExtension(
2359 &webrtc::VoERTP_RTCP::SetReceiveAudioLevelIndicationStatus, channel_id,
2360 audio_level_extension)) {
2361 return false;
2362 }
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00002363
2364 const RtpHeaderExtension* send_time_extension =
2365 FindHeaderExtension(extensions, kRtpAbsoluteSenderTimeHeaderExtension);
2366 if (!SetHeaderExtension(
2367 &webrtc::VoERTP_RTCP::SetReceiveAbsoluteSenderTimeStatus, channel_id,
2368 send_time_extension)) {
2369 return false;
2370 }
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002371 return true;
2372}
2373
2374bool WebRtcVoiceMediaChannel::SetSendRtpHeaderExtensions(
2375 const std::vector<RtpHeaderExtension>& extensions) {
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00002376 if (send_extensions_ == extensions) {
2377 return true;
2378 }
2379
2380 // The default channel may or may not be in |send_channels_|. Set the RTP
2381 // header extensions for the default channel regardless.
2382
2383 if (!SetChannelSendRtpHeaderExtensions(voe_channel(), extensions)) {
2384 return false;
2385 }
2386
2387 // Loop through all send channels and enable/disable the extensions.
2388 for (ChannelMap::const_iterator channel_it = send_channels_.begin();
2389 channel_it != send_channels_.end(); ++channel_it) {
2390 if (!SetChannelSendRtpHeaderExtensions(channel_it->second->channel(),
2391 extensions)) {
2392 return false;
2393 }
2394 }
2395
2396 send_extensions_ = extensions;
2397 return true;
2398}
2399
2400bool WebRtcVoiceMediaChannel::SetChannelSendRtpHeaderExtensions(
2401 int channel_id, const std::vector<RtpHeaderExtension>& extensions) {
henrike@webrtc.org79047f92014-03-06 23:46:59 +00002402 const RtpHeaderExtension* audio_level_extension =
2403 FindHeaderExtension(extensions, kRtpAudioLevelHeaderExtension);
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00002404
henrike@webrtc.org79047f92014-03-06 23:46:59 +00002405 if (!SetHeaderExtension(
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00002406 &webrtc::VoERTP_RTCP::SetSendAudioLevelIndicationStatus, channel_id,
henrike@webrtc.org79047f92014-03-06 23:46:59 +00002407 audio_level_extension)) {
2408 return false;
2409 }
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00002410
2411 const RtpHeaderExtension* send_time_extension =
2412 FindHeaderExtension(extensions, kRtpAbsoluteSenderTimeHeaderExtension);
henrike@webrtc.org79047f92014-03-06 23:46:59 +00002413 if (!SetHeaderExtension(
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00002414 &webrtc::VoERTP_RTCP::SetSendAbsoluteSenderTimeStatus, channel_id,
henrike@webrtc.org79047f92014-03-06 23:46:59 +00002415 send_time_extension)) {
2416 return false;
2417 }
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002418
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002419 return true;
2420}
2421
2422bool WebRtcVoiceMediaChannel::SetPlayout(bool playout) {
2423 desired_playout_ = playout;
2424 return ChangePlayout(desired_playout_);
2425}
2426
2427bool WebRtcVoiceMediaChannel::PausePlayout() {
2428 return ChangePlayout(false);
2429}
2430
2431bool WebRtcVoiceMediaChannel::ResumePlayout() {
2432 return ChangePlayout(desired_playout_);
2433}
2434
2435bool WebRtcVoiceMediaChannel::ChangePlayout(bool playout) {
2436 if (playout_ == playout) {
2437 return true;
2438 }
2439
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002440 // Change the playout of all channels to the new state.
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002441 bool result = true;
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002442 if (receive_channels_.empty()) {
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002443 // Only toggle the default channel if we don't have any other channels.
2444 result = SetPlayout(voe_channel(), playout);
2445 }
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002446 for (ChannelMap::iterator it = receive_channels_.begin();
2447 it != receive_channels_.end() && result; ++it) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002448 if (!SetPlayout(it->second->channel(), playout)) {
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002449 LOG(LS_ERROR) << "SetPlayout " << playout << " on channel "
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002450 << it->second->channel() << " failed";
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002451 result = false;
2452 }
2453 }
2454
2455 if (result) {
2456 playout_ = playout;
2457 }
2458 return result;
2459}
2460
2461bool WebRtcVoiceMediaChannel::SetSend(SendFlags send) {
2462 desired_send_ = send;
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002463 if (!send_channels_.empty())
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002464 return ChangeSend(desired_send_);
2465 return true;
2466}
2467
2468bool WebRtcVoiceMediaChannel::PauseSend() {
2469 return ChangeSend(SEND_NOTHING);
2470}
2471
2472bool WebRtcVoiceMediaChannel::ResumeSend() {
2473 return ChangeSend(desired_send_);
2474}
2475
2476bool WebRtcVoiceMediaChannel::ChangeSend(SendFlags send) {
2477 if (send_ == send) {
2478 return true;
2479 }
2480
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002481 // Apply the engine option overrides when we start sending from the microphone.
2482 if (send == SEND_MICROPHONE)
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002483 engine()->SetOptionOverrides(options_);
2484
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002485 // Change the settings on each send channel.
2486 for (ChannelMap::iterator iter = send_channels_.begin();
2487 iter != send_channels_.end(); ++iter) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002488 if (!ChangeSend(iter->second->channel(), send))
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002489 return false;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002490 }
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002491
2492 // Clear the engine option overrides after we stop sending.
2493 if (send == SEND_NOTHING)
2494 engine()->ClearOptionOverrides();
2495
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002496 send_ = send;
2497 return true;
2498}
2499
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002500bool WebRtcVoiceMediaChannel::ChangeSend(int channel, SendFlags send) {
2501 if (send == SEND_MICROPHONE) {
2502 if (engine()->voe()->base()->StartSend(channel) == -1) {
2503 LOG_RTCERR1(StartSend, channel);
2504 return false;
2505 }
2506 if (engine()->voe()->file() &&
2507 engine()->voe()->file()->StopPlayingFileAsMicrophone(channel) == -1) {
2508 LOG_RTCERR1(StopPlayingFileAsMicrophone, channel);
2509 return false;
2510 }
2511 } else { // SEND_NOTHING
2512 ASSERT(send == SEND_NOTHING);
2513 if (engine()->voe()->base()->StopSend(channel) == -1) {
2514 LOG_RTCERR1(StopSend, channel);
2515 return false;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002516 }
2517 }
2518
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002519 return true;
2520}
2521
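// Prepares a VoE channel for sending: registers this object as the external
// transport, enables RTCP, resets the receive codecs and applies the current
// send RTP header extensions.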
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00002522// TODO(ronghuawu): Change this method to return bool.
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002523void WebRtcVoiceMediaChannel::ConfigureSendChannel(int channel) {
2524 if (engine()->voe()->network()->RegisterExternalTransport(
2525 channel, *this) == -1) {
2526 LOG_RTCERR2(RegisterExternalTransport, channel, this);
2527 }
2528
2529 // Enable RTCP (for quality stats and feedback messages)
2530 EnableRtcp(channel);
2531
2532 // Reset all recv codecs; they will be enabled via SetRecvCodecs.
2533 ResetRecvCodecs(channel);
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00002534
2535 // Set RTP header extension for the new channel.
2536 SetChannelSendRtpHeaderExtensions(channel, send_extensions_);
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002537}
2538
2539bool WebRtcVoiceMediaChannel::DeleteChannel(int channel) {
2540 if (engine()->voe()->network()->DeRegisterExternalTransport(channel) == -1) {
2541 LOG_RTCERR1(DeRegisterExternalTransport, channel);
2542 }
2543
2544 if (engine()->voe()->base()->DeleteChannel(channel) == -1) {
2545 LOG_RTCERR1(DeleteChannel, channel);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002546 return false;
2547 }
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002548
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002549 return true;
2550}
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002551
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002552bool WebRtcVoiceMediaChannel::AddSendStream(const StreamParams& sp) {
2553 // If the default channel is already used for sending, create a new channel;
2554 // otherwise use the default channel for sending.
2555 int channel = GetSendChannelNum(sp.first_ssrc());
2556 if (channel != -1) {
2557 LOG(LS_ERROR) << "Stream already exists with ssrc " << sp.first_ssrc();
2558 return false;
2559 }
2560
2561 bool default_channel_is_available = true;
2562 for (ChannelMap::const_iterator iter = send_channels_.begin();
2563 iter != send_channels_.end(); ++iter) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002564 if (IsDefaultChannel(iter->second->channel())) {
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002565 default_channel_is_available = false;
2566 break;
2567 }
2568 }
2569 if (default_channel_is_available) {
2570 channel = voe_channel();
2571 } else {
2572 // Create a new channel for sending audio data.
sergeyu@chromium.org5bc25c42013-12-05 00:24:06 +00002573 channel = engine()->CreateMediaVoiceChannel();
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002574 if (channel == -1) {
2575 LOG_RTCERR0(CreateChannel);
2576 return false;
2577 }
2578
2579 ConfigureSendChannel(channel);
2580 }
2581
2582 // Save the channel to send_channels_, so that RemoveSendStream() can still
2583 // delete the channel in case failure happens below.
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002584 webrtc::AudioTransport* audio_transport =
2585 engine()->voe()->base()->audio_transport();
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002586 send_channels_.insert(std::make_pair(
2587 sp.first_ssrc(),
2588 new WebRtcVoiceChannelRenderer(channel, audio_transport)));
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002589
2590 // Set the send (local) SSRC.
2591 // If there are multiple send SSRCs, we can only set the first one here, and
2592 // the rest need to be set after SetSendCodec has been called
2593 // (when a codec requires multiple SSRCs).
2594 if (engine()->voe()->rtp()->SetLocalSSRC(channel, sp.first_ssrc()) == -1) {
2595 LOG_RTCERR2(SetSendSSRC, channel, sp.first_ssrc());
2596 return false;
2597 }
2598
2599 // At this point the channel's local SSRC has been updated. If the channel is
2600 // the default channel, make sure that all the receive channels are updated as
2601 // well. Receive channels have to have the same SSRC as the default channel in
2602 // order to send receiver reports with this SSRC.
2603 if (IsDefaultChannel(channel)) {
2604 for (ChannelMap::const_iterator it = receive_channels_.begin();
2605 it != receive_channels_.end(); ++it) {
2606 // Only update the SSRC for non-default channels.
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002607 if (!IsDefaultChannel(it->second->channel())) {
2608 if (engine()->voe()->rtp()->SetLocalSSRC(it->second->channel(),
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002609 sp.first_ssrc()) != 0) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002610 LOG_RTCERR2(SetLocalSSRC, it->second->channel(), sp.first_ssrc());
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002611 return false;
2612 }
2613 }
2614 }
2615 }
2616
2617 if (engine()->voe()->rtp()->SetRTCP_CNAME(channel, sp.cname.c_str()) == -1) {
buildbot@webrtc.orgb4c7b092014-08-25 12:11:58 +00002618 LOG_RTCERR2(SetRTCP_CNAME, channel, sp.cname);
2619 return false;
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002620 }
2621
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00002622 // Set the current codecs to be used for the new channel.
2623 if (!send_codecs_.empty() && !SetSendCodecs(channel, send_codecs_))
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002624 return false;
2625
2626 return ChangeSend(channel, desired_send_);
2627}
2628
2629bool WebRtcVoiceMediaChannel::RemoveSendStream(uint32 ssrc) {
2630 ChannelMap::iterator it = send_channels_.find(ssrc);
2631 if (it == send_channels_.end()) {
2632 LOG(LS_WARNING) << "Try to remove stream with ssrc " << ssrc
2633 << " which doesn't exist.";
2634 return false;
2635 }
2636
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002637 int channel = it->second->channel();
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002638 ChangeSend(channel, SEND_NOTHING);
2639
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002640 // Delete the WebRtcVoiceChannelRenderer object connected to the channel,
2641 // this will disconnect the audio renderer with the send channel.
2642 delete it->second;
2643 send_channels_.erase(it);
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002644
2645 if (IsDefaultChannel(channel)) {
2646 // Do not delete the default channel since the receive channels depend on
2647 // the default channel; recycle it instead.
2648 ChangeSend(channel, SEND_NOTHING);
2649 } else {
2650 // Clean up and delete the send channel.
2651 LOG(LS_INFO) << "Removing audio send stream " << ssrc
2652 << " with VoiceEngine channel #" << channel << ".";
2653 if (!DeleteChannel(channel))
2654 return false;
2655 }
2656
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002657 if (send_channels_.empty())
2658 ChangeSend(SEND_NOTHING);
2659
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002660 return true;
2661}
2662
2663bool WebRtcVoiceMediaChannel::AddRecvStream(const StreamParams& sp) {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00002664 rtc::CritScope lock(&receive_channels_cs_);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002665
2666 if (!VERIFY(sp.ssrcs.size() == 1))
2667 return false;
2668 uint32 ssrc = sp.first_ssrc();
2669
wu@webrtc.org78187522013-10-07 23:32:02 +00002670 if (ssrc == 0) {
2671 LOG(LS_WARNING) << "AddRecvStream with 0 ssrc is not supported.";
2672 return false;
2673 }
2674
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002675 if (receive_channels_.find(ssrc) != receive_channels_.end()) {
2676 LOG(LS_ERROR) << "Stream already exists with ssrc " << ssrc;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002677 return false;
2678 }
2679
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002680 // Reuse the default channel for the recv stream in a non-conference call
2681 // when the default channel is not being used.
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002682 webrtc::AudioTransport* audio_transport =
2683 engine()->voe()->base()->audio_transport();
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002684 if (!InConferenceMode() && default_receive_ssrc_ == 0) {
2685 LOG(LS_INFO) << "Recv stream " << sp.first_ssrc()
2686 << " reuse default channel";
2687 default_receive_ssrc_ = sp.first_ssrc();
2688 receive_channels_.insert(std::make_pair(
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002689 default_receive_ssrc_,
2690 new WebRtcVoiceChannelRenderer(voe_channel(), audio_transport)));
buildbot@webrtc.orgb4c7b092014-08-25 12:11:58 +00002691 if (!SetupSharedBweOnChannel(voe_channel())) {
2692 return false;
2693 }
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002694 return SetPlayout(voe_channel(), playout_);
2695 }
2696
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002697 // Create a new channel for receiving audio data.
sergeyu@chromium.org5bc25c42013-12-05 00:24:06 +00002698 int channel = engine()->CreateMediaVoiceChannel();
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002699 if (channel == -1) {
2700 LOG_RTCERR0(CreateChannel);
2701 return false;
2702 }
2703
wu@webrtc.org78187522013-10-07 23:32:02 +00002704 if (!ConfigureRecvChannel(channel)) {
2705 DeleteChannel(channel);
2706 return false;
2707 }
2708
2709 receive_channels_.insert(
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002710 std::make_pair(
2711 ssrc, new WebRtcVoiceChannelRenderer(channel, audio_transport)));
wu@webrtc.org78187522013-10-07 23:32:02 +00002712
2713 LOG(LS_INFO) << "New audio stream " << ssrc
2714 << " registered to VoiceEngine channel #"
2715 << channel << ".";
2716 return true;
2717}
2718
2719bool WebRtcVoiceMediaChannel::ConfigureRecvChannel(int channel) {
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002720 // Configure to use external transport, like our default channel.
2721 if (engine()->voe()->network()->RegisterExternalTransport(
2722 channel, *this) == -1) {
2723 LOG_RTCERR2(SetExternalTransport, channel, this);
2724 return false;
2725 }
2726
2727 // Use the same SSRC as our default channel (so the RTCP reports are correct).
henrika@webrtc.orgaebb1ad2014-01-14 10:00:58 +00002728 unsigned int send_ssrc = 0;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002729 webrtc::VoERTP_RTCP* rtp = engine()->voe()->rtp();
2730 if (rtp->GetLocalSSRC(voe_channel(), send_ssrc) == -1) {
henrika@webrtc.orgaebb1ad2014-01-14 10:00:58 +00002731 LOG_RTCERR1(GetSendSSRC, channel);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002732 return false;
2733 }
2734 if (rtp->SetLocalSSRC(channel, send_ssrc) == -1) {
henrika@webrtc.orgaebb1ad2014-01-14 10:00:58 +00002735 LOG_RTCERR1(SetSendSSRC, channel);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002736 return false;
2737 }
2738
2739 // Use the same recv payload types as our default channel.
2740 ResetRecvCodecs(channel);
2741 if (!recv_codecs_.empty()) {
2742 for (std::vector<AudioCodec>::const_iterator it = recv_codecs_.begin();
2743 it != recv_codecs_.end(); ++it) {
2744 webrtc::CodecInst voe_codec;
2745 if (engine()->FindWebRtcCodec(*it, &voe_codec)) {
2746 voe_codec.pltype = it->id;
2747 voe_codec.rate = 0; // Needed to make GetRecPayloadType work for ISAC
2748 if (engine()->voe()->codec()->GetRecPayloadType(
2749 voe_channel(), voe_codec) != -1) {
2750 if (engine()->voe()->codec()->SetRecPayloadType(
2751 channel, voe_codec) == -1) {
2752 LOG_RTCERR2(SetRecPayloadType, channel, ToString(voe_codec));
2753 return false;
2754 }
2755 }
2756 }
2757 }
2758 }
2759
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002760 if (InConferenceMode()) {
2761 // To be on par with video, voe_channel() is not used for receiving in
2762 // a conference call.
2763 if (receive_channels_.empty() && default_receive_ssrc_ == 0 && playout_) {
2764 // This is the first stream in a multi-user meeting. We can now
2765 // disable playback of the default stream, since the default stream
2766 // has probably received some initial packets before the new stream
2767 // was added. Otherwise the CN state from the default channel would
2768 // be mixed in with the other streams throughout the whole meeting,
2769 // which might be disturbing.
2770 LOG(LS_INFO) << "Disabling playback on the default voice channel";
2771 SetPlayout(voe_channel(), false);
2772 }
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002773 }
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00002774 SetNack(channel, nack_enabled_);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002775
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00002776 // Set RTP header extension for the new channel.
2777 if (!SetChannelRecvRtpHeaderExtensions(channel, receive_extensions_)) {
2778 return false;
2779 }
2780
buildbot@webrtc.orgb4c7b092014-08-25 12:11:58 +00002781 // Set up channel to be able to forward incoming packets to video engine BWE.
2782 if (!SetupSharedBweOnChannel(channel)) {
2783 return false;
2784 }
2785
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002786 return SetPlayout(channel, playout_);
2787}
2788
2789bool WebRtcVoiceMediaChannel::RemoveRecvStream(uint32 ssrc) {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00002790 rtc::CritScope lock(&receive_channels_cs_);
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002791 ChannelMap::iterator it = receive_channels_.find(ssrc);
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002792 if (it == receive_channels_.end()) {
2793 LOG(LS_WARNING) << "Try to remove stream with ssrc " << ssrc
2794 << " which doesn't exist.";
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002795 return false;
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002796 }
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002797
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002798 // Delete the WebRtcVoiceChannelRenderer object connected to the channel;
2799 // this disconnects the audio renderer from the receive channel.
2800 // Cache the channel before the deletion.
2801 const int channel = it->second->channel();
2802 delete it->second;
2803 receive_channels_.erase(it);
2804
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002805 if (ssrc == default_receive_ssrc_) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002806 ASSERT(IsDefaultChannel(channel));
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002807 // Recycle the default channel used for this recv stream; do not delete it.
2808 if (playout_)
2809 SetPlayout(voe_channel(), false);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002810
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002811 default_receive_ssrc_ = 0;
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002812 return true;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002813 }
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002814
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002815 LOG(LS_INFO) << "Removing audio stream " << ssrc
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002816 << " with VoiceEngine channel #" << channel << ".";
2817 if (!DeleteChannel(channel))
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002818 return false;
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002819
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002820 bool enable_default_channel_playout = false;
2821 if (receive_channels_.empty()) {
2822 // The last stream was removed. We can now enable the default
2823 // channel for new channels to be played out immediately without
2824 // waiting for AddStream messages.
2825 // We do this for both conference mode and non-conference mode.
2826 // TODO(oja): Does the default channel still have its CN state?
2827 enable_default_channel_playout = true;
2828 }
2829 if (!InConferenceMode() && receive_channels_.size() == 1 &&
2830 default_receive_ssrc_ != 0) {
2831 // Only the default channel is active, enable the playout on default
2832 // channel.
2833 enable_default_channel_playout = true;
2834 }
2835 if (enable_default_channel_playout && playout_) {
2836 LOG(LS_INFO) << "Enabling playback on the default voice channel";
2837 SetPlayout(voe_channel(), true);
2838 }
2839
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002840 return true;
2841}
2842
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002843bool WebRtcVoiceMediaChannel::SetRemoteRenderer(uint32 ssrc,
2844 AudioRenderer* renderer) {
2845 ChannelMap::iterator it = receive_channels_.find(ssrc);
2846 if (it == receive_channels_.end()) {
2847 if (renderer) {
2848 // Return an error if trying to set a valid renderer with an invalid ssrc.
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002849 LOG(LS_ERROR) << "SetRemoteRenderer failed with ssrc "<< ssrc;
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002850 return false;
2851 }
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002852
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002853 // The channel likely has gone away, do nothing.
2854 return true;
2855 }
2856
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002857 if (renderer)
2858 it->second->Start(renderer);
2859 else
2860 it->second->Stop();
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002861
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002862 return true;
2863}
2864
2865bool WebRtcVoiceMediaChannel::SetLocalRenderer(uint32 ssrc,
2866 AudioRenderer* renderer) {
wu@webrtc.org9dba5252013-08-05 20:36:57 +00002867 ChannelMap::iterator it = send_channels_.find(ssrc);
2868 if (it == send_channels_.end()) {
2869 if (renderer) {
2870 // Return an error if trying to set a valid renderer with an invalid ssrc.
2871 LOG(LS_ERROR) << "SetLocalRenderer failed with ssrc "<< ssrc;
2872 return false;
2873 }
2874
2875 // The channel likely has gone away, do nothing.
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002876 return true;
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002877 }
2878
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002879 if (renderer)
2880 it->second->Start(renderer);
2881 else
2882 it->second->Stop();
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002883
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002884 return true;
2885}
2886
2887bool WebRtcVoiceMediaChannel::GetActiveStreams(
2888 AudioInfo::StreamList* actives) {
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002889 // In conference mode, the default channel should not be in
2890 // |receive_channels_|.
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002891 actives->clear();
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002892 for (ChannelMap::iterator it = receive_channels_.begin();
2893 it != receive_channels_.end(); ++it) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002894 int level = GetOutputLevel(it->second->channel());
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002895 if (level > 0) {
2896 actives->push_back(std::make_pair(it->first, level));
2897 }
2898 }
2899 return true;
2900}
2901
2902int WebRtcVoiceMediaChannel::GetOutputLevel() {
2903 // Return the highest output level of all streams.
2904 int highest = GetOutputLevel(voe_channel());
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002905 for (ChannelMap::iterator it = receive_channels_.begin();
2906 it != receive_channels_.end(); ++it) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002907 int level = GetOutputLevel(it->second->channel());
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00002908 highest = rtc::_max(level, highest);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002909 }
2910 return highest;
2911}
2912
2913int WebRtcVoiceMediaChannel::GetTimeSinceLastTyping() {
2914 int ret;
2915 if (engine()->voe()->processing()->TimeSinceLastTyping(ret) == -1) {
2916 // In case of error, log the info and continue
2917 LOG_RTCERR0(TimeSinceLastTyping);
2918 ret = -1;
2919 } else {
2920 ret *= 1000; // We return ms, webrtc returns seconds.
2921 }
2922 return ret;
2923}
2924
2925void WebRtcVoiceMediaChannel::SetTypingDetectionParameters(int time_window,
2926 int cost_per_typing, int reporting_threshold, int penalty_decay,
2927 int type_event_delay) {
2928 if (engine()->voe()->processing()->SetTypingDetectionParameters(
2929 time_window, cost_per_typing,
2930 reporting_threshold, penalty_decay, type_event_delay) == -1) {
2931 // In case of error, log the info and continue
2932 LOG_RTCERR5(SetTypingDetectionParameters, time_window,
2933 cost_per_typing, reporting_threshold, penalty_decay,
2934 type_event_delay);
2935 }
2936}
2937
2938bool WebRtcVoiceMediaChannel::SetOutputScaling(
2939 uint32 ssrc, double left, double right) {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00002940 rtc::CritScope lock(&receive_channels_cs_);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002941 // Collect the channels to scale the output volume.
2942 std::vector<int> channels;
2943 if (0 == ssrc) { // Collect all channels, including the default one.
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00002944 // Default channel is not in receive_channels_ if it is not being used for
2945 // playout.
2946 if (default_receive_ssrc_ == 0)
2947 channels.push_back(voe_channel());
2948 for (ChannelMap::const_iterator it = receive_channels_.begin();
2949 it != receive_channels_.end(); ++it) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00002950 channels.push_back(it->second->channel());
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002951 }
2952 } else { // Collect only the channel of the specified ssrc.
2953 int channel = GetReceiveChannelNum(ssrc);
2954 if (-1 == channel) {
2955 LOG(LS_WARNING) << "Cannot find channel for ssrc:" << ssrc;
2956 return false;
2957 }
2958 channels.push_back(channel);
2959 }
2960
2961 // Scale the output volume for the collected channels. We first normalize to
2962 // scale the volume and then set the left and right pan.
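// For example, left = 0.2 and right = 0.8 yield scale = 0.8; the channel
// output volume scaling is then set to 0.8 and the pan to (0.25, 1.0).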
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00002963 float scale = static_cast<float>(rtc::_max(left, right));
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002964 if (scale > 0.0001f) {
2965 left /= scale;
2966 right /= scale;
2967 }
2968 for (std::vector<int>::const_iterator it = channels.begin();
2969 it != channels.end(); ++it) {
2970 if (-1 == engine()->voe()->volume()->SetChannelOutputVolumeScaling(
2971 *it, scale)) {
2972 LOG_RTCERR2(SetChannelOutputVolumeScaling, *it, scale);
2973 return false;
2974 }
2975 if (-1 == engine()->voe()->volume()->SetOutputVolumePan(
2976 *it, static_cast<float>(left), static_cast<float>(right))) {
2977 LOG_RTCERR3(SetOutputVolumePan, *it, left, right);
2978 // Do not return on failure; SetOutputVolumePan is not available on all
2979 // platforms.
2980 }
2981 LOG(LS_INFO) << "SetOutputScaling to left=" << left * scale
2982 << " right=" << right * scale
2983 << " for channel " << *it << " and ssrc " << ssrc;
2984 }
2985 return true;
2986}
2987
2988bool WebRtcVoiceMediaChannel::GetOutputScaling(
2989 uint32 ssrc, double* left, double* right) {
2990 if (!left || !right) return false;
2991
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00002992 rtc::CritScope lock(&receive_channels_cs_);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00002993 // Determine which channel based on ssrc.
2994 int channel = (0 == ssrc) ? voe_channel() : GetReceiveChannelNum(ssrc);
2995 if (channel == -1) {
2996 LOG(LS_WARNING) << "Cannot find channel for ssrc:" << ssrc;
2997 return false;
2998 }
2999
3000 float scaling;
3001 if (-1 == engine()->voe()->volume()->GetChannelOutputVolumeScaling(
3002 channel, scaling)) {
3003 LOG_RTCERR2(GetChannelOutputVolumeScaling, channel, scaling);
3004 return false;
3005 }
3006
3007 float left_pan;
3008 float right_pan;
3009 if (-1 == engine()->voe()->volume()->GetOutputVolumePan(
3010 channel, left_pan, right_pan)) {
3011 LOG_RTCERR3(GetOutputVolumePan, channel, left_pan, right_pan);
3012 // If GetOutputVolumePan fails, we use the default left and right pan.
3013 left_pan = 1.0f;
3014 right_pan = 1.0f;
3015 }
3016
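// Combine the overall volume scaling with the per-channel pan to recover
// the effective left/right gains that were set via SetOutputScaling().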
3017 *left = scaling * left_pan;
3018 *right = scaling * right_pan;
3019 return true;
3020}
3021
3022bool WebRtcVoiceMediaChannel::SetRingbackTone(const char *buf, int len) {
3023 ringback_tone_.reset(new WebRtcSoundclipStream(buf, len));
3024 return true;
3025}
3026
3027bool WebRtcVoiceMediaChannel::PlayRingbackTone(uint32 ssrc,
3028 bool play, bool loop) {
3029 if (!ringback_tone_) {
3030 return false;
3031 }
3032
3033 // The VoE file API is not available in Chrome.
3034 if (!engine()->voe()->file()) {
3035 return false;
3036 }
3037
3038 // Determine which VoiceEngine channel to play on.
3039 int channel = (ssrc == 0) ? voe_channel() : GetReceiveChannelNum(ssrc);
3040 if (channel == -1) {
3041 return false;
3042 }
3043
3044 // Make sure the ringtone is cued properly, and play it out.
3045 if (play) {
3046 ringback_tone_->set_loop(loop);
3047 ringback_tone_->Rewind();
3048 if (engine()->voe()->file()->StartPlayingFileLocally(channel,
3049 ringback_tone_.get()) == -1) {
3050 LOG_RTCERR2(StartPlayingFileLocally, channel, ringback_tone_.get());
3051 LOG(LS_ERROR) << "Unable to start ringback tone";
3052 return false;
3053 }
3054 ringback_channels_.insert(channel);
3055 LOG(LS_INFO) << "Started ringback on channel " << channel;
3056 } else {
3057 if (engine()->voe()->file()->IsPlayingFileLocally(channel) == 1 &&
3058 engine()->voe()->file()->StopPlayingFileLocally(channel) == -1) {
3059 LOG_RTCERR1(StopPlayingFileLocally, channel);
3060 return false;
3061 }
3062 LOG(LS_INFO) << "Stopped ringback on channel " << channel;
3063 ringback_channels_.erase(channel);
3064 }
3065
3066 return true;
3067}
3068
3069bool WebRtcVoiceMediaChannel::CanInsertDtmf() {
3070 return dtmf_allowed_;
3071}
3072
3073bool WebRtcVoiceMediaChannel::InsertDtmf(uint32 ssrc, int event,
3074 int duration, int flags) {
3075 if (!dtmf_allowed_) {
3076 return false;
3077 }
3078
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003079 // Send the event.
3080 if (flags & cricket::DF_SEND) {
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00003081 int channel = -1;
3082 if (ssrc == 0) {
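// With ssrc == 0, send on the default channel if it is in use for
// sending; otherwise fall back to the first send channel.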
3083 bool default_channel_is_inuse = false;
3084 for (ChannelMap::const_iterator iter = send_channels_.begin();
3085 iter != send_channels_.end(); ++iter) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00003086 if (IsDefaultChannel(iter->second->channel())) {
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00003087 default_channel_is_inuse = true;
3088 break;
3089 }
3090 }
3091 if (default_channel_is_inuse) {
3092 channel = voe_channel();
3093 } else if (!send_channels_.empty()) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00003094 channel = send_channels_.begin()->second->channel();
wu@webrtc.orgcadf9042013-08-30 21:24:16 +00003095 }
3096 } else {
3097 channel = GetSendChannelNum(ssrc);
3098 }
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003099 if (channel == -1) {
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003100 LOG(LS_WARNING) << "InsertDtmf - The specified ssrc "
3101 << ssrc << " is not in use.";
3102 return false;
3103 }
3104 // Send DTMF using out-of-band DTMF. ("true", as 3rd arg)
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003105 if (engine()->voe()->dtmf()->SendTelephoneEvent(
3106 channel, event, true, duration) == -1) {
3107 LOG_RTCERR4(SendTelephoneEvent, channel, event, true, duration);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003108 return false;
3109 }
3110 }
3111
3112 // Play the event.
3113 if (flags & cricket::DF_PLAY) {
3114 // Play DTMF tone locally.
3115 if (engine()->voe()->dtmf()->PlayDtmfTone(event, duration) == -1) {
3116 LOG_RTCERR2(PlayDtmfTone, event, duration);
3117 return false;
3118 }
3119 }
3120
3121 return true;
3122}
3123
wu@webrtc.orga9890802013-12-13 00:21:03 +00003124void WebRtcVoiceMediaChannel::OnPacketReceived(
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00003125 rtc::Buffer* packet, const rtc::PacketTime& packet_time) {
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003126 // Pick which channel to send this packet to. If this packet doesn't match
3127 // any multiplexed streams, just send it to the default channel. Otherwise,
3128 // send it to the specific decoder instance for that stream.
3129 int which_channel = GetReceiveChannelNum(
3130 ParseSsrc(packet->data(), packet->length(), false));
3131 if (which_channel == -1) {
3132 which_channel = voe_channel();
3133 }
3134
3135 // Stop any ringback that might be playing on the channel.
3136 // It's possible the ringback has already stopped, in which case we'll just
3137 // use the opportunity to remove the channel from ringback_channels_.
3138 if (engine()->voe()->file()) {
3139 const std::set<int>::iterator it = ringback_channels_.find(which_channel);
3140 if (it != ringback_channels_.end()) {
3141 if (engine()->voe()->file()->IsPlayingFileLocally(
3142 which_channel) == 1) {
3143 engine()->voe()->file()->StopPlayingFileLocally(which_channel);
3144 LOG(LS_INFO) << "Stopped ringback on channel " << which_channel
3145 << " due to incoming media";
3146 }
3147 ringback_channels_.erase(which_channel);
3148 }
3149 }
3150
3151 // Pass it off to the decoder.
henrike@webrtc.org28654cb2013-07-22 21:07:49 +00003152 engine()->voe()->network()->ReceivedRTPPacket(
3153 which_channel,
3154 packet->data(),
buildbot@webrtc.orgb4c7b092014-08-25 12:11:58 +00003155 static_cast<unsigned int>(packet->length()),
3156 webrtc::PacketTime(packet_time.timestamp, packet_time.not_before));
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003157}
3158
wu@webrtc.orga9890802013-12-13 00:21:03 +00003159void WebRtcVoiceMediaChannel::OnRtcpReceived(
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00003160 rtc::Buffer* packet, const rtc::PacketTime& packet_time) {
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003161 // Sending channels need all RTCP packets with feedback information.
3162 // Even sender reports can contain attached report blocks.
3163 // Receiving channels need sender reports in order to create
3164 // correct receiver reports.
3165 int type = 0;
3166 if (!GetRtcpType(packet->data(), packet->length(), &type)) {
3167 LOG(LS_WARNING) << "Failed to parse type from received RTCP packet";
3168 return;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003169 }
3170
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003171 // If it is a sender report, find the channel that is listening.
3172 bool has_sent_to_default_channel = false;
3173 if (type == kRtcpTypeSR) {
3174 int which_channel = GetReceiveChannelNum(
3175 ParseSsrc(packet->data(), packet->length(), true));
3176 if (which_channel != -1) {
3177 engine()->voe()->network()->ReceivedRTCPPacket(
3178 which_channel,
3179 packet->data(),
3180 static_cast<unsigned int>(packet->length()));
3181
3182 if (IsDefaultChannel(which_channel))
3183 has_sent_to_default_channel = true;
3184 }
3185 }
3186
3187 // An SR may contain RRs, and any RR entry may correspond to any one of the
3188 // send channels. So all RTCP packets must be forwarded to all send
3189 // channels. VoE will filter out the RRs internally.
3190 for (ChannelMap::iterator iter = send_channels_.begin();
3191 iter != send_channels_.end(); ++iter) {
3192 // Make sure we do not send the same packet to the default channel twice.
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00003193 if (IsDefaultChannel(iter->second->channel()) &&
3194 has_sent_to_default_channel)
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003195 continue;
3196
3197 engine()->voe()->network()->ReceivedRTCPPacket(
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00003198 iter->second->channel(),
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003199 packet->data(),
3200 static_cast<unsigned int>(packet->length()));
3201 }
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003202}
3203
3204bool WebRtcVoiceMediaChannel::MuteStream(uint32 ssrc, bool muted) {
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003205 int channel = (ssrc == 0) ? voe_channel() : GetSendChannelNum(ssrc);
3206 if (channel == -1) {
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003207 LOG(LS_WARNING) << "The specified ssrc " << ssrc << " is not in use.";
3208 return false;
3209 }
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003210 if (engine()->voe()->volume()->SetInputMute(channel, muted) == -1) {
3211 LOG_RTCERR2(SetInputMute, channel, muted);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003212 return false;
3213 }
buildbot@webrtc.org6b21b712014-07-31 15:08:53 +00003214 // We set the AGC to mute state only when all the channels are muted.
3215 // This implementation is not ideal; instead we should signal the AGC when
3216 // the mic channel is muted/unmuted. We can't do it today because there
3217 // is no good way to know which stream maps to the mic channel.
3218 bool all_muted = muted;
3219 for (ChannelMap::const_iterator iter = send_channels_.begin();
3220 iter != send_channels_.end() && all_muted; ++iter) {
3221 if (engine()->voe()->volume()->GetInputMute(iter->second->channel(),
3222 all_muted)) {
3223 LOG_RTCERR1(GetInputMute, iter->second->channel());
3224 return false;
3225 }
3226 }
3227
3228 webrtc::AudioProcessing* ap = engine()->voe()->base()->audio_processing();
3229 if (ap)
3230 ap->set_output_will_be_muted(all_muted);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003231 return true;
3232}
3233
minyue@webrtc.org26236952014-10-29 02:27:08 +00003234// TODO(minyue): SetMaxSendBandwidth() is subject to be renamed to
3235// SetMaxSendBitrate() in future.
sergeyu@chromium.org4b26e2e2014-01-15 23:15:54 +00003236bool WebRtcVoiceMediaChannel::SetMaxSendBandwidth(int bps) {
minyue@webrtc.org26236952014-10-29 02:27:08 +00003237 LOG(LS_INFO) << "WebRtcVoiceMediaChannel::SetMaxSendBandwidth.";
sergeyu@chromium.org4b26e2e2014-01-15 23:15:54 +00003238
minyue@webrtc.org26236952014-10-29 02:27:08 +00003239 return SetSendBitrateInternal(bps);
sergeyu@chromium.org4b26e2e2014-01-15 23:15:54 +00003240}
3241
minyue@webrtc.org26236952014-10-29 02:27:08 +00003242bool WebRtcVoiceMediaChannel::SetSendBitrateInternal(int bps) {
3243 LOG(LS_INFO) << "WebRtcVoiceMediaChannel::SetSendBitrateInternal.";
sergeyu@chromium.org4b26e2e2014-01-15 23:15:54 +00003244
minyue@webrtc.org26236952014-10-29 02:27:08 +00003245 send_bitrate_setting_ = true;
3246 send_bitrate_bps_ = bps;
wu@webrtc.org1d1ffc92013-10-16 18:12:02 +00003247
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003248 if (!send_codec_) {
wu@webrtc.org1d1ffc92013-10-16 18:12:02 +00003249 LOG(LS_INFO) << "The send codec has not been set up yet. "
minyue@webrtc.org26236952014-10-29 02:27:08 +00003250 << "The send bitrate setting will be applied later.";
wu@webrtc.org1d1ffc92013-10-16 18:12:02 +00003251 return true;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003252 }
3253
minyue@webrtc.org26236952014-10-29 02:27:08 +00003254 // Bitrate is auto by default.
sergeyu@chromium.org4b26e2e2014-01-15 23:15:54 +00003255 // TODO(bemasc): Fix this so that if SetMaxSendBandwidth(50) is followed by
3256 // SetMaxSendBandwidth(0), the second call removes the previous limit.
3257 if (bps <= 0)
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003258 return true;
3259
3260 webrtc::CodecInst codec = *send_codec_;
3261 bool is_multi_rate = IsCodecMultiRate(codec);
3262
3263 if (is_multi_rate) {
3264 // If codec is multi-rate then just set the bitrate.
3265 codec.rate = bps;
3266 if (!SetSendCodec(codec)) {
3267 LOG(LS_INFO) << "Failed to set codec " << codec.plname
3268 << " to bitrate " << bps << " bps.";
3269 return false;
3270 }
3271 return true;
3272 } else {
3273 // If the codec is not multi-rate and |bps| is less than the fixed
3274 // bitrate, then fail. If |bps| equals or exceeds the fixed bitrate,
3275 // then ignore the request.
3276 if (bps < codec.rate) {
3277 LOG(LS_INFO) << "Failed to set codec " << codec.plname
3278 << " to bitrate " << bps << " bps"
3279 << ", requires at least " << codec.rate << " bps.";
3280 return false;
3281 }
3282 return true;
3283 }
3284}
3285
3286bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info) {
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003287 bool echo_metrics_on = false;
3288 // These can take on valid negative values, so use the lowest possible level
3289 // as default rather than -1.
3290 int echo_return_loss = -100;
3291 int echo_return_loss_enhancement = -100;
3292 // These can also be negative, but in practice -1 is only used to signal
3293 // insufficient data, since the resolution is limited to multiples of 4 ms.
3294 int echo_delay_median_ms = -1;
3295 int echo_delay_std_ms = -1;
3296 if (engine()->voe()->processing()->GetEcMetricsStatus(
3297 echo_metrics_on) != -1 && echo_metrics_on) {
3298 // TODO(ajm): we may want to use VoECallReport::GetEchoMetricsSummary
3299 // here, but it appears to be unsuitable currently. Revisit after this is
3300 // investigated: http://b/issue?id=5666755
3301 int erl, erle, rerl, anlp;
3302 if (engine()->voe()->processing()->GetEchoMetrics(
3303 erl, erle, rerl, anlp) != -1) {
3304 echo_return_loss = erl;
3305 echo_return_loss_enhancement = erle;
3306 }
3307
3308 int median, std;
3309 if (engine()->voe()->processing()->GetEcDelayMetrics(median, std) != -1) {
3310 echo_delay_median_ms = median;
3311 echo_delay_std_ms = std;
3312 }
3313 }
3314
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003315 webrtc::CallStatistics cs;
3316 unsigned int ssrc;
3317 webrtc::CodecInst codec;
3318 unsigned int level;
3319
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003320 for (ChannelMap::const_iterator channel_iter = send_channels_.begin();
3321 channel_iter != send_channels_.end(); ++channel_iter) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00003322 const int channel = channel_iter->second->channel();
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003323
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003324 // Fill in the sender info, based on what we know, and what the
3325 // remote side told us it got from its RTCP report.
3326 VoiceSenderInfo sinfo;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003327
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003328 if (engine()->voe()->rtp()->GetRTCPStatistics(channel, cs) == -1 ||
3329 engine()->voe()->rtp()->GetLocalSSRC(channel, ssrc) == -1) {
3330 continue;
3331 }
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003332
sergeyu@chromium.org5bc25c42013-12-05 00:24:06 +00003333 sinfo.add_ssrc(ssrc);
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003334 sinfo.codec_name = send_codec_.get() ? send_codec_->plname : "";
3335 sinfo.bytes_sent = cs.bytesSent;
3336 sinfo.packets_sent = cs.packetsSent;
3337 // RTT isn't known until an RTCP report is received. Until then, VoiceEngine
3338 // returns 0 to indicate an error value.
3339 sinfo.rtt_ms = (cs.rttMs > 0) ? cs.rttMs : -1;
3340
3341 // Get data from the last remote RTCP report. Use default values if no data
3342 // available.
3343 sinfo.fraction_lost = -1.0;
3344 sinfo.jitter_ms = -1;
3345 sinfo.packets_lost = -1;
3346 sinfo.ext_seqnum = -1;
3347 std::vector<webrtc::ReportBlock> receive_blocks;
3348 if (engine()->voe()->rtp()->GetRemoteRTCPReportBlocks(
3349 channel, &receive_blocks) != -1 &&
3350 engine()->voe()->codec()->GetSendCodec(channel, codec) != -1) {
3351 std::vector<webrtc::ReportBlock>::iterator iter;
3352 for (iter = receive_blocks.begin(); iter != receive_blocks.end();
3353 ++iter) {
3354 // Lookup report for send ssrc only.
sergeyu@chromium.org5bc25c42013-12-05 00:24:06 +00003355 if (iter->source_SSRC == sinfo.ssrc()) {
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003356 // Convert Q8 to floating point.
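// (RFC 3550: the fraction lost is the loss fraction multiplied by 256
// and truncated to an 8-bit integer.)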
3357 sinfo.fraction_lost = static_cast<float>(iter->fraction_lost) / 256;
3358 // Convert samples to milliseconds.
3359 if (codec.plfreq / 1000 > 0) {
3360 sinfo.jitter_ms = iter->interarrival_jitter / (codec.plfreq / 1000);
3361 }
3362 sinfo.packets_lost = iter->cumulative_num_packets_lost;
3363 sinfo.ext_seqnum = iter->extended_highest_sequence_number;
3364 break;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003365 }
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003366 }
3367 }
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003368
3369 // Local speech level.
3370 sinfo.audio_level = (engine()->voe()->volume()->
3371 GetSpeechInputLevelFullRange(level) != -1) ? level : -1;
3372
3373 // TODO(xians): We are injecting the same APM logging to all the send
3374 // channels here because there is no good way to know which send channel
3375 // is using the APM. The correct fix is to allow the send channels to have
3376 // their own APM so that we can feed the correct APM logging to different
3377 // send channels. See issue crbug/264611.
3378 sinfo.echo_return_loss = echo_return_loss;
3379 sinfo.echo_return_loss_enhancement = echo_return_loss_enhancement;
3380 sinfo.echo_delay_median_ms = echo_delay_median_ms;
3381 sinfo.echo_delay_std_ms = echo_delay_std_ms;
mallinath@webrtc.orga27be8e2013-09-27 23:04:10 +00003382 // TODO(ajm): Re-enable this metric once we have a reliable implementation.
3383 sinfo.aec_quality_min = -1;
wu@webrtc.org967bfff2013-09-19 05:49:50 +00003384 sinfo.typing_noise_detected = typing_noise_detected_;
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003385
3386 info->senders.push_back(sinfo);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003387 }
3388
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00003389 // Build the list of receivers, one for each receiving channel, or 1 in
3390 // a 1:1 call.
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003391 std::vector<int> channels;
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00003392 for (ChannelMap::const_iterator it = receive_channels_.begin();
3393 it != receive_channels_.end(); ++it) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00003394 channels.push_back(it->second->channel());
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003395 }
3396 if (channels.empty()) {
3397 channels.push_back(voe_channel());
3398 }
3399
3400 // Get the SSRC and stats for each receiver, based on our own calculations.
3401 for (std::vector<int>::const_iterator it = channels.begin();
3402 it != channels.end(); ++it) {
3403 memset(&cs, 0, sizeof(cs));
3404 if (engine()->voe()->rtp()->GetRemoteSSRC(*it, ssrc) != -1 &&
3405 engine()->voe()->rtp()->GetRTCPStatistics(*it, cs) != -1 &&
3406 engine()->voe()->codec()->GetRecCodec(*it, codec) != -1) {
3407 VoiceReceiverInfo rinfo;
sergeyu@chromium.org5bc25c42013-12-05 00:24:06 +00003408 rinfo.add_ssrc(ssrc);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003409 rinfo.bytes_rcvd = cs.bytesReceived;
3410 rinfo.packets_rcvd = cs.packetsReceived;
3411 // The next four fields are from the most recently sent RTCP report.
3412 // Convert Q8 to floating point.
3413 rinfo.fraction_lost = static_cast<float>(cs.fractionLost) / (1 << 8);
3414 rinfo.packets_lost = cs.cumulativeLost;
3415 rinfo.ext_seqnum = cs.extendedMax;
buildbot@webrtc.orgb525a9d2014-06-03 09:42:15 +00003416#ifdef USE_WEBRTC_DEV_BRANCH
3417 rinfo.capture_start_ntp_time_ms = cs.capture_start_ntp_time_ms_;
3418#endif
buildbot@webrtc.org7e71b772014-06-13 01:14:01 +00003419 if (codec.pltype != -1) {
3420 rinfo.codec_name = codec.plname;
3421 }
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003422 // Convert samples to milliseconds.
3423 if (codec.plfreq / 1000 > 0) {
3424 rinfo.jitter_ms = cs.jitterSamples / (codec.plfreq / 1000);
3425 }
3426
3427 // Get jitter buffer and total delay (alg + jitter + playout) stats.
3428 webrtc::NetworkStatistics ns;
3429 if (engine()->voe()->neteq() &&
3430 engine()->voe()->neteq()->GetNetworkStatistics(
3431 *it, ns) != -1) {
3432 rinfo.jitter_buffer_ms = ns.currentBufferSize;
3433 rinfo.jitter_buffer_preferred_ms = ns.preferredBufferSize;
3434 rinfo.expand_rate =
henrike@webrtc.org28654cb2013-07-22 21:07:49 +00003435 static_cast<float>(ns.currentExpandRate) / (1 << 14);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003436 }
henrike@webrtc.orgb8c254a2014-02-14 23:38:45 +00003437
3438 webrtc::AudioDecodingCallStats ds;
3439 if (engine()->voe()->neteq() &&
3440 engine()->voe()->neteq()->GetDecodingCallStatistics(
3441 *it, &ds) != -1) {
3442 rinfo.decoding_calls_to_silence_generator =
3443 ds.calls_to_silence_generator;
3444 rinfo.decoding_calls_to_neteq = ds.calls_to_neteq;
3445 rinfo.decoding_normal = ds.decoded_normal;
3446 rinfo.decoding_plc = ds.decoded_plc;
3447 rinfo.decoding_cng = ds.decoded_cng;
3448 rinfo.decoding_plc_cng = ds.decoded_plc_cng;
3449 }
3450
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003451 if (engine()->voe()->sync()) {
sergeyu@chromium.orga23f0ca2013-11-13 22:48:52 +00003452 int jitter_buffer_delay_ms = 0;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003453 int playout_buffer_delay_ms = 0;
3454 engine()->voe()->sync()->GetDelayEstimate(
sergeyu@chromium.orga23f0ca2013-11-13 22:48:52 +00003455 *it, &jitter_buffer_delay_ms, &playout_buffer_delay_ms);
3456 rinfo.delay_estimate_ms = jitter_buffer_delay_ms +
3457 playout_buffer_delay_ms;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003458 }
3459
3460 // Get speech level.
3461 rinfo.audio_level = (engine()->voe()->volume()->
3462 GetSpeechOutputLevelFullRange(*it, level) != -1) ? level : -1;
3463 info->receivers.push_back(rinfo);
3464 }
3465 }
3466
3467 return true;
3468}
3469
3470void WebRtcVoiceMediaChannel::GetLastMediaError(
3471 uint32* ssrc, VoiceMediaChannel::Error* error) {
3472 ASSERT(ssrc != NULL);
3473 ASSERT(error != NULL);
3474 FindSsrc(voe_channel(), ssrc);
3475 *error = WebRtcErrorToChannelError(GetLastEngineError());
3476}
3477
3478bool WebRtcVoiceMediaChannel::FindSsrc(int channel_num, uint32* ssrc) {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00003479 rtc::CritScope lock(&receive_channels_cs_);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003480 ASSERT(ssrc != NULL);
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003481 if (channel_num == -1 && send_ != SEND_NOTHING) {
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003482 // Sometimes the VoiceEngine core will report an error with channel_num = -1.
3483 // This means the error is not limited to a specific channel. Signal the
3484 // message using ssrc=0. If the current channel is sending, use this
3485 // channel for sending the message.
3486 *ssrc = 0;
3487 return true;
3488 } else {
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003489 // Check whether this is a sending channel.
3490 for (ChannelMap::const_iterator it = send_channels_.begin();
3491 it != send_channels_.end(); ++it) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00003492 if (it->second->channel() == channel_num) {
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003493 // This is a sending channel.
3494 uint32 local_ssrc = 0;
3495 if (engine()->voe()->rtp()->GetLocalSSRC(
3496 channel_num, local_ssrc) != -1) {
3497 *ssrc = local_ssrc;
3498 }
3499 return true;
3500 }
3501 }
3502
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003503 // Check whether this is a receiving channel.
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00003504 for (ChannelMap::const_iterator it = receive_channels_.begin();
3505 it != receive_channels_.end(); ++it) {
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00003506 if (it->second->channel() == channel_num) {
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003507 *ssrc = it->first;
3508 return true;
3509 }
3510 }
3511 }
3512 return false;
3513}
3514
3515void WebRtcVoiceMediaChannel::OnError(uint32 ssrc, int error) {
wu@webrtc.org967bfff2013-09-19 05:49:50 +00003516 if (error == VE_TYPING_NOISE_WARNING) {
3517 typing_noise_detected_ = true;
3518 } else if (error == VE_TYPING_NOISE_OFF_WARNING) {
3519 typing_noise_detected_ = false;
3520 }
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003521 SignalMediaError(ssrc, WebRtcErrorToChannelError(error));
3522}
3523
3524int WebRtcVoiceMediaChannel::GetOutputLevel(int channel) {
3525 unsigned int ulevel;
3526 int ret =
3527 engine()->voe()->volume()->GetSpeechOutputLevel(channel, ulevel);
3528 return (ret == 0) ? static_cast<int>(ulevel) : -1;
3529}
3530
3531int WebRtcVoiceMediaChannel::GetReceiveChannelNum(uint32 ssrc) {
henrike@webrtc.org1e09a712013-07-26 19:17:59 +00003532 ChannelMap::iterator it = receive_channels_.find(ssrc);
3533 if (it != receive_channels_.end())
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00003534 return it->second->channel();
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003535 return (ssrc == default_receive_ssrc_) ? voe_channel() : -1;
3536}
3537
3538int WebRtcVoiceMediaChannel::GetSendChannelNum(uint32 ssrc) {
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003539 ChannelMap::iterator it = send_channels_.find(ssrc);
3540 if (it != send_channels_.end())
mallinath@webrtc.org67ee6b92014-02-03 16:57:16 +00003541 return it->second->channel();
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003542
3543 return -1;
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003544}
3545
buildbot@webrtc.orgb4c7b092014-08-25 12:11:58 +00003546bool WebRtcVoiceMediaChannel::SetupSharedBandwidthEstimation(
3547 webrtc::VideoEngine* vie, int vie_channel) {
3548 shared_bwe_vie_ = vie;
3549 shared_bwe_vie_channel_ = vie_channel;
3550
3551 if (!SetupSharedBweOnChannel(voe_channel())) {
3552 return false;
3553 }
3554 for (ChannelMap::iterator it = receive_channels_.begin();
3555 it != receive_channels_.end(); ++it) {
3556 if (!SetupSharedBweOnChannel(it->second->channel())) {
3557 return false;
3558 }
3559 }
3560 return true;
3561}
3562
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003563bool WebRtcVoiceMediaChannel::GetRedSendCodec(const AudioCodec& red_codec,
3564 const std::vector<AudioCodec>& all_codecs, webrtc::CodecInst* send_codec) {
3565 // Get the RED encodings from the parameter with no name. This may
3566 // change based on what is discussed on the Jingle list.
3567 // The encoding parameter is of the form "a/b"; we only support the case
3568 // where a == b. Verify this and parse out the value into red_pt.
3569 // If the parameter value is absent (as it will be until we wire up the
3570 // signaling of this message), use the second codec specified (i.e. the
3571 // one after "red") as the encoding parameter.
3572 int red_pt = -1;
3573 std::string red_params;
3574 CodecParameterMap::const_iterator it = red_codec.params.find("");
3575 if (it != red_codec.params.end()) {
3576 red_params = it->second;
3577 std::vector<std::string> red_pts;
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00003578 if (rtc::split(red_params, '/', &red_pts) != 2 ||
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003579 red_pts[0] != red_pts[1] ||
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00003580 !rtc::FromString(red_pts[0], &red_pt)) {
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003581 LOG(LS_WARNING) << "RED params " << red_params << " not supported.";
3582 return false;
3583 }
3584 } else if (red_codec.params.empty()) {
3585 LOG(LS_WARNING) << "RED params not present, using defaults";
3586 if (all_codecs.size() > 1) {
3587 red_pt = all_codecs[1].id;
3588 }
3589 }
3590
3591 // Try to find red_pt in |all_codecs|.
3592 std::vector<AudioCodec>::const_iterator codec;
3593 for (codec = all_codecs.begin(); codec != all_codecs.end(); ++codec) {
3594 if (codec->id == red_pt)
3595 break;
3596 }
3597
3598 // If we find the right codec, FindWebRtcCodec() fills in |send_codec|,
3599 // which is what we pass to SetSendCodec, with the desired payload type.
3600 if (codec == all_codecs.end() ||
3601 !engine()->FindWebRtcCodec(*codec, send_codec)) {
3602 // No suitable codec was found to carry inside RED.
3603 LOG(LS_WARNING) << "RED params " << red_params << " are invalid.";
3604 return false;
3605 }
3606
3607 return true;
3608}
3609
3610bool WebRtcVoiceMediaChannel::EnableRtcp(int channel) {
3611 if (engine()->voe()->rtp()->SetRTCPStatus(channel, true) == -1) {
wu@webrtc.org9dba5252013-08-05 20:36:57 +00003612 LOG_RTCERR2(SetRTCPStatus, channel, 1);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003613 return false;
3614 }
3615 // TODO(juberti): Enable VQMon and RTCP XR reports, once we know what
3616 // we want to do with them.
3617 // engine()->voe().EnableVQMon(voe_channel(), true);
3618 // engine()->voe().EnableRTCP_XR(voe_channel(), true);
3619 return true;
3620}
3621
3622bool WebRtcVoiceMediaChannel::ResetRecvCodecs(int channel) {
3623 int ncodecs = engine()->voe()->codec()->NumOfCodecs();
3624 for (int i = 0; i < ncodecs; ++i) {
3625 webrtc::CodecInst voe_codec;
3626 if (engine()->voe()->codec()->GetCodec(i, voe_codec) != -1) {
3627 voe_codec.pltype = -1;
3628 if (engine()->voe()->codec()->SetRecPayloadType(
3629 channel, voe_codec) == -1) {
3630 LOG_RTCERR2(SetRecPayloadType, channel, ToString(voe_codec));
3631 return false;
3632 }
3633 }
3634 }
3635 return true;
3636}
3637
3638bool WebRtcVoiceMediaChannel::SetPlayout(int channel, bool playout) {
3639 if (playout) {
3640 LOG(LS_INFO) << "Starting playout for channel #" << channel;
3641 if (engine()->voe()->base()->StartPlayout(channel) == -1) {
3642 LOG_RTCERR1(StartPlayout, channel);
3643 return false;
3644 }
3645 } else {
3646 LOG(LS_INFO) << "Stopping playout for channel #" << channel;
3647 engine()->voe()->base()->StopPlayout(channel);
3648 }
3649 return true;
3650}
3651
3652uint32 WebRtcVoiceMediaChannel::ParseSsrc(const void* data, size_t len,
3653 bool rtcp) {
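// In an RTP packet the SSRC field starts at byte offset 8 of the fixed
// header; in an RTCP packet the sender SSRC starts at byte offset 4.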
3654 size_t ssrc_pos = (!rtcp) ? 8 : 4;
3655 uint32 ssrc = 0;
3656 if (len >= (ssrc_pos + sizeof(ssrc))) {
buildbot@webrtc.orgd4e598d2014-07-29 17:36:52 +00003657 ssrc = rtc::GetBE32(static_cast<const char*>(data) + ssrc_pos);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003658 }
3659 return ssrc;
3660}
3661
3662// Convert VoiceEngine error code into VoiceMediaChannel::Error enum.
3663VoiceMediaChannel::Error
3664 WebRtcVoiceMediaChannel::WebRtcErrorToChannelError(int err_code) {
3665 switch (err_code) {
3666 case 0:
3667 return ERROR_NONE;
3668 case VE_CANNOT_START_RECORDING:
3669 case VE_MIC_VOL_ERROR:
3670 case VE_GET_MIC_VOL_ERROR:
3671 case VE_CANNOT_ACCESS_MIC_VOL:
3672 return ERROR_REC_DEVICE_OPEN_FAILED;
3673 case VE_SATURATION_WARNING:
3674 return ERROR_REC_DEVICE_SATURATION;
3675 case VE_REC_DEVICE_REMOVED:
3676 return ERROR_REC_DEVICE_REMOVED;
3677 case VE_RUNTIME_REC_WARNING:
3678 case VE_RUNTIME_REC_ERROR:
3679 return ERROR_REC_RUNTIME_ERROR;
3680 case VE_CANNOT_START_PLAYOUT:
3681 case VE_SPEAKER_VOL_ERROR:
3682 case VE_GET_SPEAKER_VOL_ERROR:
3683 case VE_CANNOT_ACCESS_SPEAKER_VOL:
3684 return ERROR_PLAY_DEVICE_OPEN_FAILED;
3685 case VE_RUNTIME_PLAY_WARNING:
3686 case VE_RUNTIME_PLAY_ERROR:
3687 return ERROR_PLAY_RUNTIME_ERROR;
3688 case VE_TYPING_NOISE_WARNING:
3689 return ERROR_REC_TYPING_NOISE_DETECTED;
3690 default:
3691 return VoiceMediaChannel::ERROR_OTHER;
3692 }
3693}
3694
henrike@webrtc.org79047f92014-03-06 23:46:59 +00003695bool WebRtcVoiceMediaChannel::SetHeaderExtension(ExtensionSetterFunction setter,
3696 int channel_id, const RtpHeaderExtension* extension) {
3697 bool enable = false;
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00003698 int id = 0;
3699 std::string uri;
henrike@webrtc.org79047f92014-03-06 23:46:59 +00003700 if (extension) {
3701 enable = true;
3702 id = extension->id;
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00003703 uri = extension->uri;
henrike@webrtc.org79047f92014-03-06 23:46:59 +00003704 }
3705 if ((engine()->voe()->rtp()->*setter)(channel_id, enable, id) != 0) {
buildbot@webrtc.org150835e2014-05-06 15:54:38 +00003706 LOG_RTCERR4(*setter, uri, channel_id, enable, id);
henrike@webrtc.org79047f92014-03-06 23:46:59 +00003707 return false;
3708 }
3709 return true;
3710}
3711
buildbot@webrtc.orgb4c7b092014-08-25 12:11:58 +00003712bool WebRtcVoiceMediaChannel::SetupSharedBweOnChannel(int voe_channel) {
3713 webrtc::ViENetwork* vie_network = NULL;
3714 int vie_channel = -1;
3715 if (options_.combined_audio_video_bwe.GetWithDefaultIfUnset(false) &&
3716 shared_bwe_vie_ != NULL && shared_bwe_vie_channel_ != -1) {
3717 vie_network = webrtc::ViENetwork::GetInterface(shared_bwe_vie_);
3718 vie_channel = shared_bwe_vie_channel_;
3719 }
3720 if (engine()->voe()->rtp()->SetVideoEngineBWETarget(voe_channel, vie_network,
3721 vie_channel) == -1) {
3722 LOG_RTCERR3(SetVideoEngineBWETarget, voe_channel, vie_network, vie_channel);
3723 if (vie_network != NULL) {
3724 // Don't fail if we're tearing down.
3725 return false;
3726 }
3727 }
3728 return true;
3729}
3730
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003731int WebRtcSoundclipStream::Read(void *buf, int len) {
3732 size_t res = 0;
3733 mem_.Read(buf, len, &res, NULL);
henrike@webrtc.org28654cb2013-07-22 21:07:49 +00003734 return static_cast<int>(res);
henrike@webrtc.org28e20752013-07-10 00:45:36 +00003735}
3736
3737int WebRtcSoundclipStream::Rewind() {
3738 mem_.Rewind();
3739 // Return -1 when not looping so VoiceEngine stops instead of restarting.
3740 return (loop_) ? 0 : -1;
3741}
3742
3743} // namespace cricket
3744
3745#endif // HAVE_WEBRTC_VOICE