/*
 *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <stdio.h>

#include "rtc_base/flags.h"
#include "rtc_base/stringencode.h"
#include "system_wrappers/include/field_trial_default.h"
#include "test/field_trial.h"
#include "test/gtest.h"
#include "test/run_test.h"
#include "video/video_quality_test.h"

namespace webrtc {
namespace flags {

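// Maps the numeric --vinter_layer_pred / --sinter_layer_pred flag values to
// InterLayerPredMode: 0 - enabled, 1 - disabled, 2 - enabled only for key
// pictures (see the flag descriptions below).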
InterLayerPredMode IntToInterLayerPredMode(int inter_layer_pred) {
  if (inter_layer_pred == 0) {
    return InterLayerPredMode::kOn;
  } else if (inter_layer_pred == 1) {
    return InterLayerPredMode::kOff;
  } else {
    RTC_DCHECK_EQ(inter_layer_pred, 2);
    return InterLayerPredMode::kOnKeyPic;
  }
}

// Flags for video.
DEFINE_int(vwidth, 640, "Video width.");
size_t VideoWidth() {
  return static_cast<size_t>(FLAG_vwidth);
}

DEFINE_int(vheight, 480, "Video height.");
size_t VideoHeight() {
  return static_cast<size_t>(FLAG_vheight);
}

DEFINE_int(vfps, 30, "Video frames per second.");
int VideoFps() {
  return static_cast<int>(FLAG_vfps);
}

DEFINE_int(capture_device_index,
           0,
           "Capture device to select for video stream");
size_t GetCaptureDevice() {
  return static_cast<size_t>(FLAG_capture_device_index);
}

DEFINE_int(vtarget_bitrate, 400, "Video stream target bitrate in kbps.");
int VideoTargetBitrateKbps() {
  return static_cast<int>(FLAG_vtarget_bitrate);
}

DEFINE_int(vmin_bitrate, 100, "Video stream min bitrate in kbps.");
int VideoMinBitrateKbps() {
  return static_cast<int>(FLAG_vmin_bitrate);
}

DEFINE_int(vmax_bitrate, 2000, "Video stream max bitrate in kbps.");
int VideoMaxBitrateKbps() {
  return static_cast<int>(FLAG_vmax_bitrate);
}

DEFINE_bool(suspend_below_min_bitrate,
            false,
            "Suspends video below the configured min bitrate.");

DEFINE_int(vnum_temporal_layers,
           1,
           "Number of temporal layers for video. Set to 1-4 to override.");
int VideoNumTemporalLayers() {
  return static_cast<int>(FLAG_vnum_temporal_layers);
}

DEFINE_int(vnum_streams, 0, "Number of video streams to show or analyze.");
int VideoNumStreams() {
  return static_cast<int>(FLAG_vnum_streams);
}

DEFINE_int(vnum_spatial_layers, 1, "Number of video spatial layers to use.");
int VideoNumSpatialLayers() {
  return static_cast<int>(FLAG_vnum_spatial_layers);
}

DEFINE_int(vinter_layer_pred,
           2,
           "Video inter-layer prediction mode. "
           "0 - enabled, 1 - disabled, 2 - enabled only for key pictures.");
InterLayerPredMode VideoInterLayerPred() {
  return IntToInterLayerPredMode(FLAG_vinter_layer_pred);
}

DEFINE_string(
    vstream0,
    "",
    "Comma separated values describing VideoStream for video stream #0.");
std::string VideoStream0() {
  return static_cast<std::string>(FLAG_vstream0);
}

DEFINE_string(
    vstream1,
    "",
    "Comma separated values describing VideoStream for video stream #1.");
std::string VideoStream1() {
  return static_cast<std::string>(FLAG_vstream1);
}

DEFINE_string(
    vsl0,
    "",
    "Comma separated values describing SpatialLayer for video layer #0.");
std::string VideoSL0() {
  return static_cast<std::string>(FLAG_vsl0);
}

DEFINE_string(
    vsl1,
    "",
    "Comma separated values describing SpatialLayer for video layer #1.");
std::string VideoSL1() {
  return static_cast<std::string>(FLAG_vsl1);
}

DEFINE_int(vselected_tl,
           -1,
           "Temporal layer to show or analyze for video. -1 to disable "
           "filtering.");
int VideoSelectedTL() {
  return static_cast<int>(FLAG_vselected_tl);
}

DEFINE_int(vselected_stream,
           0,
           "ID of the stream to show or analyze for video. "
           "Set to the number of streams to show them all.");
int VideoSelectedStream() {
  return static_cast<int>(FLAG_vselected_stream);
}

DEFINE_int(vselected_sl,
           -1,
           "Spatial layer to show or analyze for video. -1 to disable "
           "filtering.");
int VideoSelectedSL() {
  return static_cast<int>(FLAG_vselected_sl);
}

// Flags for screenshare.
DEFINE_int(min_transmit_bitrate,
           400,
           "Min transmit bitrate incl. padding for screenshare.");
int ScreenshareMinTransmitBitrateKbps() {
  return FLAG_min_transmit_bitrate;
}

DEFINE_int(swidth, 1850, "Screenshare width (crops source).");
size_t ScreenshareWidth() {
  return static_cast<size_t>(FLAG_swidth);
}

DEFINE_int(sheight, 1110, "Screenshare height (crops source).");
size_t ScreenshareHeight() {
  return static_cast<size_t>(FLAG_sheight);
}

DEFINE_int(sfps, 5, "Frames per second for screenshare.");
int ScreenshareFps() {
  return static_cast<int>(FLAG_sfps);
}

DEFINE_int(starget_bitrate, 100, "Screenshare stream target bitrate in kbps.");
int ScreenshareTargetBitrateKbps() {
  return static_cast<int>(FLAG_starget_bitrate);
}

DEFINE_int(smin_bitrate, 100, "Screenshare stream min bitrate in kbps.");
int ScreenshareMinBitrateKbps() {
  return static_cast<int>(FLAG_smin_bitrate);
}

DEFINE_int(smax_bitrate, 2000, "Screenshare stream max bitrate in kbps.");
int ScreenshareMaxBitrateKbps() {
  return static_cast<int>(FLAG_smax_bitrate);
}

DEFINE_int(snum_temporal_layers,
           2,
           "Number of temporal layers to use in screenshare.");
int ScreenshareNumTemporalLayers() {
  return static_cast<int>(FLAG_snum_temporal_layers);
}

DEFINE_int(snum_streams,
           0,
           "Number of screenshare streams to show or analyze.");
int ScreenshareNumStreams() {
  return static_cast<int>(FLAG_snum_streams);
}

DEFINE_int(snum_spatial_layers,
           1,
           "Number of screenshare spatial layers to use.");
int ScreenshareNumSpatialLayers() {
  return static_cast<int>(FLAG_snum_spatial_layers);
}

DEFINE_int(sinter_layer_pred,
           0,
           "Screenshare inter-layer prediction mode. "
           "0 - enabled, 1 - disabled, 2 - enabled only for key pictures.");
InterLayerPredMode ScreenshareInterLayerPred() {
  return IntToInterLayerPredMode(FLAG_sinter_layer_pred);
}

DEFINE_string(
    sstream0,
    "",
    "Comma separated values describing VideoStream for screenshare stream #0.");
std::string ScreenshareStream0() {
  return static_cast<std::string>(FLAG_sstream0);
}

DEFINE_string(
    sstream1,
    "",
    "Comma separated values describing VideoStream for screenshare stream #1.");
std::string ScreenshareStream1() {
  return static_cast<std::string>(FLAG_sstream1);
}

DEFINE_string(
    ssl0,
    "",
    "Comma separated values describing SpatialLayer for screenshare layer #0.");
std::string ScreenshareSL0() {
  return static_cast<std::string>(FLAG_ssl0);
}

DEFINE_string(
    ssl1,
    "",
    "Comma separated values describing SpatialLayer for screenshare layer #1.");
std::string ScreenshareSL1() {
  return static_cast<std::string>(FLAG_ssl1);
}

DEFINE_int(sselected_tl,
           -1,
           "Temporal layer to show or analyze for screenshare. -1 to disable "
           "filtering.");
int ScreenshareSelectedTL() {
  return static_cast<int>(FLAG_sselected_tl);
}

DEFINE_int(sselected_stream,
           0,
           "ID of the stream to show or analyze for screenshare. "
           "Set to the number of streams to show them all.");
int ScreenshareSelectedStream() {
  return static_cast<int>(FLAG_sselected_stream);
}

DEFINE_int(sselected_sl,
           -1,
           "Spatial layer to show or analyze for screenshare. -1 to disable "
           "filtering.");
int ScreenshareSelectedSL() {
  return static_cast<int>(FLAG_sselected_sl);
}

DEFINE_bool(
    generate_slides,
    false,
    "Whether to use randomly generated slides or read them from files.");
bool GenerateSlides() {
  return static_cast<bool>(FLAG_generate_slides);
}

DEFINE_int(slide_change_interval,
           10,
           "Interval (in seconds) between simulated slide changes.");
int SlideChangeInterval() {
  return static_cast<int>(FLAG_slide_change_interval);
}

DEFINE_int(
    scroll_duration,
    0,
    "Duration (in seconds) during which a slide will be scrolled into place.");
int ScrollDuration() {
  return static_cast<int>(FLAG_scroll_duration);
}

DEFINE_string(slides,
              "",
              "Comma-separated list of *.yuv files to display as slides.");
std::vector<std::string> Slides() {
  std::vector<std::string> slides;
  std::string slides_list = FLAG_slides;
  rtc::tokenize(slides_list, ',', &slides);
  return slides;
}

// Flags common with screenshare and video loopback, with equal default values.
DEFINE_int(start_bitrate, 600, "Call start bitrate in kbps.");
int StartBitrateKbps() {
  return static_cast<int>(FLAG_start_bitrate);
}

DEFINE_string(codec, "VP8", "Video codec to use.");
std::string Codec() {
  return static_cast<std::string>(FLAG_codec);
}

DEFINE_bool(analyze_video,
            false,
            "Analyze video stream (if --duration is present)");
bool AnalyzeVideo() {
  return static_cast<bool>(FLAG_analyze_video);
}

DEFINE_bool(analyze_screenshare,
            false,
            "Analyze screenshare stream (if --duration is present)");
bool AnalyzeScreenshare() {
  return static_cast<bool>(FLAG_analyze_screenshare);
}

DEFINE_int(
    duration,
    0,
    "Duration of the test in seconds. If 0, rendered video will be shown "
    "instead.");
int DurationSecs() {
  return static_cast<int>(FLAG_duration);
}

DEFINE_string(output_filename, "", "Target graph data filename.");
std::string OutputFilename() {
  return static_cast<std::string>(FLAG_output_filename);
}

DEFINE_string(graph_title,
              "",
              "If empty, title will be generated automatically.");
std::string GraphTitle() {
  return static_cast<std::string>(FLAG_graph_title);
}

DEFINE_int(loss_percent, 0, "Percentage of packets randomly lost.");
int LossPercent() {
  return static_cast<int>(FLAG_loss_percent);
}

DEFINE_int(avg_burst_loss_length, -1, "Average burst length of lost packets.");
int AvgBurstLossLength() {
  return static_cast<int>(FLAG_avg_burst_loss_length);
}

DEFINE_int(link_capacity,
           0,
           "Capacity (kbps) of the fake link. 0 means infinite.");
int LinkCapacityKbps() {
  return static_cast<int>(FLAG_link_capacity);
}

DEFINE_int(queue_size, 0, "Size of the bottleneck link queue in packets.");
int QueueSize() {
  return static_cast<int>(FLAG_queue_size);
}

DEFINE_int(avg_propagation_delay_ms,
           0,
           "Average link propagation delay in ms.");
int AvgPropagationDelayMs() {
  return static_cast<int>(FLAG_avg_propagation_delay_ms);
}

DEFINE_string(rtc_event_log_name,
              "",
              "Filename for rtc event log. Two files "
              "with \"_send\" and \"_recv\" suffixes will be created. "
              "Works only when --duration is set.");
std::string RtcEventLogName() {
  return static_cast<std::string>(FLAG_rtc_event_log_name);
}

DEFINE_string(rtp_dump_name, "", "Filename for dumped received RTP stream.");
std::string RtpDumpName() {
  return static_cast<std::string>(FLAG_rtp_dump_name);
}

DEFINE_int(std_propagation_delay_ms,
           0,
           "Link propagation delay standard deviation in ms.");
int StdPropagationDelayMs() {
  return static_cast<int>(FLAG_std_propagation_delay_ms);
}

DEFINE_string(encoded_frame_path,
              "",
              "The base path for encoded frame logs. Created files will have "
              "the form <encoded_frame_path>.<n>.(recv|send.<m>).ivf");
std::string EncodedFramePath() {
  return static_cast<std::string>(FLAG_encoded_frame_path);
}

DEFINE_bool(logs, false, "print logs to stderr");

DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");

DEFINE_bool(allow_reordering, false, "Allow packet reordering to occur");

DEFINE_bool(use_ulpfec, false, "Use RED+ULPFEC forward error correction.");

DEFINE_bool(use_flexfec, false, "Use FlexFEC forward error correction.");

DEFINE_bool(audio, false, "Add audio stream");

DEFINE_bool(audio_video_sync,
            false,
            "Sync audio and video stream (no effect if"
            " audio is false)");

DEFINE_bool(audio_dtx, false, "Enable audio DTX (no effect if audio is false)");

DEFINE_bool(video, true, "Add video stream");

DEFINE_string(
    force_fieldtrials,
    "",
    "Field trials control experimental feature code which can be forced. "
    "E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
    " will assign the group Enable to field trial WebRTC-FooFeature. Multiple "
    "trials are separated by \"/\"");

// Video-specific flags.
DEFINE_string(vclip,
              "",
              "Name of the clip to show. If empty, the camera is used. Use "
              "\"Generator\" for chroma generator.");
std::string VideoClip() {
  return static_cast<std::string>(FLAG_vclip);
}

DEFINE_bool(help, false, "prints this message");

}  // namespace flags

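// Runs a dual-stream loopback call with one camera ("video") stream and one
// screenshare stream. With --duration set the selected stream is analyzed;
// otherwise the received streams are rendered.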
void Loopback() {
  int camera_idx, screenshare_idx;
  RTC_CHECK(!(flags::AnalyzeScreenshare() && flags::AnalyzeVideo()))
      << "Select only one of video or screenshare.";
  RTC_CHECK(!flags::DurationSecs() || flags::AnalyzeScreenshare() ||
            flags::AnalyzeVideo())
      << "If duration is set, exactly one of analyze_* flags should be set.";
  // Default: camera feed first, if nothing selected.
  if (flags::AnalyzeVideo() || !flags::AnalyzeScreenshare()) {
    camera_idx = 0;
    screenshare_idx = 1;
  } else {
    camera_idx = 1;
    screenshare_idx = 0;
  }

  FakeNetworkPipe::Config pipe_config;
  pipe_config.loss_percent = flags::LossPercent();
  pipe_config.avg_burst_loss_length = flags::AvgBurstLossLength();
  pipe_config.link_capacity_kbps = flags::LinkCapacityKbps();
  pipe_config.queue_length_packets = flags::QueueSize();
  pipe_config.queue_delay_ms = flags::AvgPropagationDelayMs();
  pipe_config.delay_standard_deviation_ms = flags::StdPropagationDelayMs();
  pipe_config.allow_reordering = flags::FLAG_allow_reordering;

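  // The call-level bitrate constraints cover both streams, so the min and max
  // are the sums of the per-stream screenshare and video limits.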
  BitrateConstraints call_bitrate_config;
  call_bitrate_config.min_bitrate_bps =
      (flags::ScreenshareMinBitrateKbps() + flags::VideoMinBitrateKbps()) *
      1000;
  call_bitrate_config.start_bitrate_bps = flags::StartBitrateKbps() * 1000;
  call_bitrate_config.max_bitrate_bps =
      (flags::ScreenshareMaxBitrateKbps() + flags::VideoMaxBitrateKbps()) *
      1000;

  VideoQualityTest::Params params, camera_params, screenshare_params;
  params.call = {flags::FLAG_send_side_bwe, call_bitrate_config, 0};
  params.call.dual_video = true;
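  // The screenshare stream is configured from the s* flags and the camera
  // stream from the v* flags; the indices were picked above depending on
  // which stream (if any) is analyzed.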
  params.video[screenshare_idx] = {
      true,
      flags::ScreenshareWidth(),
      flags::ScreenshareHeight(),
      flags::ScreenshareFps(),
      flags::ScreenshareMinBitrateKbps() * 1000,
      flags::ScreenshareTargetBitrateKbps() * 1000,
      flags::ScreenshareMaxBitrateKbps() * 1000,
      false,
      flags::Codec(),
      flags::ScreenshareNumTemporalLayers(),
      flags::ScreenshareSelectedTL(),
      flags::ScreenshareMinTransmitBitrateKbps() * 1000,
      false,  // ULPFEC disabled.
      false,  // FlexFEC disabled.
      false,  // Automatic scaling disabled.
      ""};
  params.video[camera_idx] = {flags::FLAG_video,
                              flags::VideoWidth(),
                              flags::VideoHeight(),
                              flags::VideoFps(),
                              flags::VideoMinBitrateKbps() * 1000,
                              flags::VideoTargetBitrateKbps() * 1000,
                              flags::VideoMaxBitrateKbps() * 1000,
                              flags::FLAG_suspend_below_min_bitrate,
                              flags::Codec(),
                              flags::VideoNumTemporalLayers(),
                              flags::VideoSelectedTL(),
                              0,  // No min transmit bitrate.
                              flags::FLAG_use_ulpfec,
                              flags::FLAG_use_flexfec,
                              false,
                              flags::VideoClip(),
                              flags::GetCaptureDevice()};
  params.audio = {flags::FLAG_audio, flags::FLAG_audio_video_sync,
                  flags::FLAG_audio_dtx};
  params.logging = {flags::FLAG_logs, flags::FLAG_rtc_event_log_name,
                    flags::FLAG_rtp_dump_name, flags::FLAG_encoded_frame_path};
  params.analyzer = {"dual_streams",
                     0.0,
                     0.0,
                     flags::DurationSecs(),
                     flags::OutputFilename(),
                     flags::GraphTitle()};
  params.pipe = pipe_config;

  params.screenshare[camera_idx].enabled = false;
  params.screenshare[screenshare_idx] = {
      true, flags::GenerateSlides(), flags::SlideChangeInterval(),
      flags::ScrollDuration(), flags::Slides()};

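  // When more than one stream is requested but no explicit stream descriptors
  // are given, let the quality test infer the per-stream settings.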
  if (flags::VideoNumStreams() > 1 && flags::VideoStream0().empty() &&
      flags::VideoStream1().empty()) {
    params.ss[camera_idx].infer_streams = true;
  }

  if (flags::ScreenshareNumStreams() > 1 &&
      flags::ScreenshareStream0().empty() &&
      flags::ScreenshareStream1().empty()) {
    params.ss[screenshare_idx].infer_streams = true;
  }

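  // Scalability settings (simulcast stream and spatial layer descriptors) are
  // parsed from the comma-separated flag values, first for the screenshare
  // stream and then for the camera stream.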
  std::vector<std::string> stream_descriptors;
  stream_descriptors.push_back(flags::ScreenshareStream0());
  stream_descriptors.push_back(flags::ScreenshareStream1());
  std::vector<std::string> SL_descriptors;
  SL_descriptors.push_back(flags::ScreenshareSL0());
  SL_descriptors.push_back(flags::ScreenshareSL1());
  VideoQualityTest::FillScalabilitySettings(
      &params, screenshare_idx, stream_descriptors,
      flags::ScreenshareNumStreams(), flags::ScreenshareSelectedStream(),
      flags::ScreenshareNumSpatialLayers(), flags::ScreenshareSelectedSL(),
      flags::ScreenshareInterLayerPred(), SL_descriptors);

  stream_descriptors.clear();
  stream_descriptors.push_back(flags::VideoStream0());
  stream_descriptors.push_back(flags::VideoStream1());
  SL_descriptors.clear();
  SL_descriptors.push_back(flags::VideoSL0());
  SL_descriptors.push_back(flags::VideoSL1());
  VideoQualityTest::FillScalabilitySettings(
      &params, camera_idx, stream_descriptors, flags::VideoNumStreams(),
      flags::VideoSelectedStream(), flags::VideoNumSpatialLayers(),
      flags::VideoSelectedSL(), flags::VideoInterLayerPred(), SL_descriptors);

  auto fixture = rtc::MakeUnique<VideoQualityTest>(nullptr);
  if (flags::DurationSecs()) {
    fixture->RunWithAnalyzer(params);
  } else {
    fixture->RunWithRenderers(params);
  }
}
}  // namespace webrtc

int main(int argc, char* argv[]) {
  ::testing::InitGoogleTest(&argc, argv);
  if (rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true) != 0) {
    // Fail on unrecognized flags.
    return 1;
  }
  if (webrtc::flags::FLAG_help) {
    rtc::FlagList::Print(nullptr, false);
    return 0;
  }

  webrtc::test::ValidateFieldTrialsStringOrDie(
      webrtc::flags::FLAG_force_fieldtrials);
  // InitFieldTrialsFromString stores the char*, so the char array must outlive
  // the application.
  webrtc::field_trial::InitFieldTrialsFromString(
      webrtc::flags::FLAG_force_fieldtrials);

  webrtc::test::RunTest(webrtc::Loopback);
  return 0;
}