blob: 24d3bedf32868f7128ac993f1c8f3e01c7e27f44 [file] [log] [blame]
/*
 *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
10
11#include <stdio.h>
12
13#include "rtc_base/flags.h"
14#include "rtc_base/stringencode.h"
15#include "test/field_trial.h"
16#include "test/gtest.h"
17#include "test/run_test.h"
18#include "video/video_quality_test.h"
19
20namespace webrtc {
21namespace flags {
22
23// Flags for video.
24DEFINE_int(vwidth, 640, "Video width.");
25size_t VideoWidth() {
26 return static_cast<size_t>(FLAG_vwidth);
27}
28
29DEFINE_int(vheight, 480, "Video height.");
30size_t VideoHeight() {
31 return static_cast<size_t>(FLAG_vheight);
32}
33
34DEFINE_int(vfps, 30, "Video frames per second.");
35int VideoFps() {
36 return static_cast<int>(FLAG_vfps);
37}
38
39DEFINE_int(capture_device_index,
40 0,
41 "Capture device to select for video stream");
42size_t GetCaptureDevice() {
43 return static_cast<size_t>(FLAG_capture_device_index);
44}
45
46DEFINE_int(vtarget_bitrate, 400, "Video stream target bitrate in kbps.");
47int VideoTargetBitrateKbps() {
48 return static_cast<int>(FLAG_vtarget_bitrate);
49}
50
51DEFINE_int(vmin_bitrate, 100, "Video stream min bitrate in kbps.");
52int VideoMinBitrateKbps() {
53 return static_cast<int>(FLAG_vmin_bitrate);
54}
55
56DEFINE_int(vmax_bitrate, 2000, "Video stream max bitrate in kbps.");
57int VideoMaxBitrateKbps() {
58 return static_cast<int>(FLAG_vmax_bitrate);
59}
60
61DEFINE_bool(suspend_below_min_bitrate,
62 false,
63 "Suspends video below the configured min bitrate.");
64
65DEFINE_int(vnum_temporal_layers,
66 1,
67 "Number of temporal layers for video. Set to 1-4 to override.");
68int VideoNumTemporalLayers() {
69 return static_cast<int>(FLAG_vnum_temporal_layers);
70}
71
72DEFINE_int(vnum_streams, 0, "Number of video streams to show or analyze.");
73int VideoNumStreams() {
74 return static_cast<int>(FLAG_vnum_streams);
75}
76
77DEFINE_int(vnum_spatial_layers, 1, "Number of video spatial layers to use.");
78int VideoNumSpatialLayers() {
79 return static_cast<int>(FLAG_vnum_spatial_layers);
80}
81
82DEFINE_string(
83 vstream0,
84 "",
85 "Comma separated values describing VideoStream for video stream #0.");
86std::string VideoStream0() {
87 return static_cast<std::string>(FLAG_vstream0);
88}
89
90DEFINE_string(
91 vstream1,
92 "",
93 "Comma separated values describing VideoStream for video stream #1.");
94std::string VideoStream1() {
95 return static_cast<std::string>(FLAG_vstream1);
96}
97
98DEFINE_string(
99 vsl0,
100 "",
101 "Comma separated values describing SpatialLayer for video layer #0.");
102std::string VideoSL0() {
103 return static_cast<std::string>(FLAG_vsl0);
104}
105
106DEFINE_string(
107 vsl1,
108 "",
109 "Comma separated values describing SpatialLayer for video layer #1.");
110std::string VideoSL1() {
111 return static_cast<std::string>(FLAG_vsl1);
112}
113
114DEFINE_int(vselected_tl,
115 -1,
116 "Temporal layer to show or analyze for screenshare. -1 to disable "
117 "filtering.");
118int VideoSelectedTL() {
119 return static_cast<int>(FLAG_vselected_tl);
120}
121
122DEFINE_int(vselected_stream,
123 0,
124 "ID of the stream to show or analyze for screenshare."
125 "Set to the number of streams to show them all.");
126int VideoSelectedStream() {
127 return static_cast<int>(FLAG_vselected_stream);
128}
129
130DEFINE_int(vselected_sl,
131 -1,
132 "Spatial layer to show or analyze for screenshare. -1 to disable "
133 "filtering.");
134int VideoSelectedSL() {
135 return static_cast<int>(FLAG_vselected_sl);
136}
137
138// Flags for screenshare.
139DEFINE_int(min_transmit_bitrate,
140 400,
141 "Min transmit bitrate incl. padding for screenshare.");
142int ScreenshareMinTransmitBitrateKbps() {
143 return FLAG_min_transmit_bitrate;
144}
145
146DEFINE_int(swidth, 1850, "Screenshare width (crops source).");
147size_t ScreenshareWidth() {
148 return static_cast<size_t>(FLAG_swidth);
149}
150
151DEFINE_int(sheight, 1110, "Screenshare height (crops source).");
152size_t ScreenshareHeight() {
153 return static_cast<size_t>(FLAG_sheight);
154}
155
156DEFINE_int(sfps, 5, "Frames per second for screenshare.");
157int ScreenshareFps() {
158 return static_cast<int>(FLAG_sfps);
159}
160
161DEFINE_int(starget_bitrate, 100, "Screenshare stream target bitrate in kbps.");
162int ScreenshareTargetBitrateKbps() {
163 return static_cast<int>(FLAG_starget_bitrate);
164}
165
166DEFINE_int(smin_bitrate, 100, "Screenshare stream min bitrate in kbps.");
167int ScreenshareMinBitrateKbps() {
168 return static_cast<int>(FLAG_smin_bitrate);
169}
170
171DEFINE_int(smax_bitrate, 2000, "Screenshare stream max bitrate in kbps.");
172int ScreenshareMaxBitrateKbps() {
173 return static_cast<int>(FLAG_smax_bitrate);
174}
175
176DEFINE_int(snum_temporal_layers,
177 2,
178 "Number of temporal layers to use in screenshare.");
179int ScreenshareNumTemporalLayers() {
180 return static_cast<int>(FLAG_snum_temporal_layers);
181}
182
183DEFINE_int(snum_streams,
184 0,
185 "Number of screenshare streams to show or analyze.");
186int ScreenshareNumStreams() {
187 return static_cast<int>(FLAG_snum_streams);
188}
189
190DEFINE_int(snum_spatial_layers,
191 1,
192 "Number of screemshare spatial layers to use.");
193int ScreenshareNumSpatialLayers() {
194 return static_cast<int>(FLAG_snum_spatial_layers);
195}
196
197DEFINE_string(
198 sstream0,
199 "",
200 "Comma separated values describing VideoStream for screenshare stream #0.");
201std::string ScreenshareStream0() {
202 return static_cast<std::string>(FLAG_sstream0);
203}
204
205DEFINE_string(
206 sstream1,
207 "",
208 "Comma separated values describing VideoStream for screenshare stream #1.");
209std::string ScreenshareStream1() {
210 return static_cast<std::string>(FLAG_sstream1);
211}
212
213DEFINE_string(
214 ssl0,
215 "",
216 "Comma separated values describing SpatialLayer for screenshare layer #0.");
217std::string ScreenshareSL0() {
218 return static_cast<std::string>(FLAG_ssl0);
219}
220
221DEFINE_string(
222 ssl1,
223 "",
224 "Comma separated values describing SpatialLayer for screenshare layer #1.");
225std::string ScreenshareSL1() {
226 return static_cast<std::string>(FLAG_ssl1);
227}
228
229DEFINE_int(sselected_tl,
230 -1,
231 "Temporal layer to show or analyze for screenshare. -1 to disable "
232 "filtering.");
233int ScreenshareSelectedTL() {
234 return static_cast<int>(FLAG_sselected_tl);
235}
236
237DEFINE_int(sselected_stream,
238 0,
239 "ID of the stream to show or analyze for screenshare."
240 "Set to the number of streams to show them all.");
241int ScreenshareSelectedStream() {
242 return static_cast<int>(FLAG_sselected_stream);
243}
244
245DEFINE_int(sselected_sl,
246 -1,
247 "Spatial layer to show or analyze for screenshare. -1 to disable "
248 "filtering.");
249int ScreenshareSelectedSL() {
250 return static_cast<int>(FLAG_sselected_sl);
251}
252
253DEFINE_bool(
254 generate_slides,
255 false,
256 "Whether to use randomly generated slides or read them from files.");
257bool GenerateSlides() {
258 return static_cast<int>(FLAG_generate_slides);
259}
260
261DEFINE_int(slide_change_interval,
262 10,
263 "Interval (in seconds) between simulated slide changes.");
264int SlideChangeInterval() {
265 return static_cast<int>(FLAG_slide_change_interval);
266}
267
268DEFINE_int(
269 scroll_duration,
270 0,
271 "Duration (in seconds) during which a slide will be scrolled into place.");
272int ScrollDuration() {
273 return static_cast<int>(FLAG_scroll_duration);
274}
275
276DEFINE_string(slides,
277 "",
278 "Comma-separated list of *.yuv files to display as slides.");
279std::vector<std::string> Slides() {
280 std::vector<std::string> slides;
281 std::string slides_list = FLAG_slides;
282 rtc::tokenize(slides_list, ',', &slides);
283 return slides;
284}
285
286// Flags common with screenshare and video loopback, with equal default values.
287DEFINE_int(start_bitrate, 600, "Call start bitrate in kbps.");
288int StartBitrateKbps() {
289 return static_cast<int>(FLAG_start_bitrate);
290}
291
292DEFINE_string(codec, "VP8", "Video codec to use.");
293std::string Codec() {
294 return static_cast<std::string>(FLAG_codec);
295}
296
297DEFINE_bool(analyze_video,
298 false,
299 "Analyze video stream (if --duration is present)");
300bool AnalyzeVideo() {
301 return static_cast<bool>(FLAG_analyze_video);
302}
303
304DEFINE_bool(analyze_screenshare,
305 false,
306 "Analyze screenshare stream (if --duration is present)");
307bool AnalyzeScreenshare() {
308 return static_cast<bool>(FLAG_analyze_screenshare);
309}
310
311DEFINE_int(
312 duration,
313 0,
314 "Duration of the test in seconds. If 0, rendered will be shown instead.");
315int DurationSecs() {
316 return static_cast<int>(FLAG_duration);
317}
318
319DEFINE_string(output_filename, "", "Target graph data filename.");
320std::string OutputFilename() {
321 return static_cast<std::string>(FLAG_output_filename);
322}
323
324DEFINE_string(graph_title,
325 "",
326 "If empty, title will be generated automatically.");
327std::string GraphTitle() {
328 return static_cast<std::string>(FLAG_graph_title);
329}
330
331DEFINE_int(loss_percent, 0, "Percentage of packets randomly lost.");
332int LossPercent() {
333 return static_cast<int>(FLAG_loss_percent);
334}
335
336DEFINE_int(avg_burst_loss_length, -1, "Average burst length of lost packets.");
337int AvgBurstLossLength() {
338 return static_cast<int>(FLAG_avg_burst_loss_length);
339}
340
341DEFINE_int(link_capacity,
342 0,
343 "Capacity (kbps) of the fake link. 0 means infinite.");
344int LinkCapacityKbps() {
345 return static_cast<int>(FLAG_link_capacity);
346}
347
348DEFINE_int(queue_size, 0, "Size of the bottleneck link queue in packets.");
349int QueueSize() {
350 return static_cast<int>(FLAG_queue_size);
351}
352
353DEFINE_int(avg_propagation_delay_ms,
354 0,
355 "Average link propagation delay in ms.");
356int AvgPropagationDelayMs() {
357 return static_cast<int>(FLAG_avg_propagation_delay_ms);
358}
359
360DEFINE_string(rtc_event_log_name,
361 "",
362 "Filename for rtc event log. Two files "
363 "with \"_send\" and \"_recv\" suffixes will be created. "
364 "Works only when --duration is set.");
365std::string RtcEventLogName() {
366 return static_cast<std::string>(FLAG_rtc_event_log_name);
367}
368
369DEFINE_string(rtp_dump_name, "", "Filename for dumped received RTP stream.");
370std::string RtpDumpName() {
371 return static_cast<std::string>(FLAG_rtp_dump_name);
372}
373
374DEFINE_int(std_propagation_delay_ms,
375 0,
376 "Link propagation delay standard deviation in ms.");
377int StdPropagationDelayMs() {
378 return static_cast<int>(FLAG_std_propagation_delay_ms);
379}
380
381DEFINE_string(encoded_frame_path,
382 "",
383 "The base path for encoded frame logs. Created files will have "
384 "the form <encoded_frame_path>.<n>.(recv|send.<m>).ivf");
385std::string EncodedFramePath() {
386 return static_cast<std::string>(FLAG_encoded_frame_path);
387}
388
389DEFINE_bool(logs, false, "print logs to stderr");
390
391DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");
392
393DEFINE_bool(allow_reordering, false, "Allow packet reordering to occur");
394
395DEFINE_bool(use_ulpfec, false, "Use RED+ULPFEC forward error correction.");
396
397DEFINE_bool(use_flexfec, false, "Use FlexFEC forward error correction.");
398
399DEFINE_bool(audio, false, "Add audio stream");
400
401DEFINE_bool(audio_video_sync,
402 false,
403 "Sync audio and video stream (no effect if"
404 " audio is false)");
405
406DEFINE_bool(audio_dtx, false, "Enable audio DTX (no effect if audio is false)");
407
408DEFINE_bool(video, true, "Add video stream");
409
410DEFINE_string(
411 force_fieldtrials,
412 "",
413 "Field trials control experimental feature code which can be forced. "
414 "E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
415 " will assign the group Enable to field trial WebRTC-FooFeature. Multiple "
416 "trials are separated by \"/\"");
417
418// Video-specific flags.
419DEFINE_string(vclip,
420 "",
421 "Name of the clip to show. If empty, the camera is used. Use "
422 "\"Generator\" for chroma generator.");
423std::string VideoClip() {
424 return static_cast<std::string>(FLAG_vclip);
425}
426
427DEFINE_bool(help, false, "prints this message");
428
429} // namespace flags
430
431void Loopback() {
432 int camera_idx, screenshare_idx;
433 RTC_CHECK(!(flags::AnalyzeScreenshare() && flags::AnalyzeVideo()))
434 << "Select only one of video or screenshare.";
435 RTC_CHECK(!flags::DurationSecs() || flags::AnalyzeScreenshare() ||
436 flags::AnalyzeVideo())
437 << "If duration is set, exactly one of analyze_* flags should be set.";
438 // Default: camera feed first, if nothing selected.
439 if (flags::AnalyzeVideo() || !flags::AnalyzeScreenshare()) {
440 camera_idx = 0;
441 screenshare_idx = 1;
442 } else {
443 camera_idx = 1;
444 screenshare_idx = 0;
445 }
446
447 FakeNetworkPipe::Config pipe_config;
448 pipe_config.loss_percent = flags::LossPercent();
449 pipe_config.avg_burst_loss_length = flags::AvgBurstLossLength();
450 pipe_config.link_capacity_kbps = flags::LinkCapacityKbps();
451 pipe_config.queue_length_packets = flags::QueueSize();
452 pipe_config.queue_delay_ms = flags::AvgPropagationDelayMs();
453 pipe_config.delay_standard_deviation_ms = flags::StdPropagationDelayMs();
454 pipe_config.allow_reordering = flags::FLAG_allow_reordering;
455
456 Call::Config::BitrateConfig call_bitrate_config;
457 call_bitrate_config.min_bitrate_bps =
458 (flags::ScreenshareMinBitrateKbps() + flags::VideoMinBitrateKbps()) *
459 1000;
460 call_bitrate_config.start_bitrate_bps = flags::StartBitrateKbps() * 1000;
461 call_bitrate_config.max_bitrate_bps =
462 (flags::ScreenshareMaxBitrateKbps() + flags::VideoMaxBitrateKbps()) *
463 1000;
464
465 VideoQualityTest::Params params, camera_params, screenshare_params;
466 params.call = {flags::FLAG_send_side_bwe, call_bitrate_config, 0};
467 params.call.dual_video = true;
468 params.video[screenshare_idx] = {
469 true,
470 flags::ScreenshareWidth(),
471 flags::ScreenshareHeight(),
472 flags::ScreenshareFps(),
473 flags::ScreenshareMinBitrateKbps() * 1000,
474 flags::ScreenshareTargetBitrateKbps() * 1000,
475 flags::ScreenshareMaxBitrateKbps() * 1000,
476 false,
477 flags::Codec(),
478 flags::ScreenshareNumTemporalLayers(),
479 flags::ScreenshareSelectedTL(),
480 flags::ScreenshareMinTransmitBitrateKbps() * 1000,
481 false, // ULPFEC disabled.
482 false, // FlexFEC disabled.
483 ""};
484 params.video[camera_idx] = {flags::FLAG_video,
485 flags::VideoWidth(),
486 flags::VideoHeight(),
487 flags::VideoFps(),
488 flags::VideoMinBitrateKbps() * 1000,
489 flags::VideoTargetBitrateKbps() * 1000,
490 flags::VideoMaxBitrateKbps() * 1000,
491 flags::FLAG_suspend_below_min_bitrate,
492 flags::Codec(),
493 flags::VideoNumTemporalLayers(),
494 flags::VideoSelectedTL(),
495 0, // No min transmit bitrate.
496 flags::FLAG_use_ulpfec,
497 flags::FLAG_use_flexfec,
498 flags::VideoClip(),
499 flags::GetCaptureDevice()};
500 params.audio = {flags::FLAG_audio, flags::FLAG_audio_video_sync,
501 flags::FLAG_audio_dtx};
502 params.logging = {flags::FLAG_logs, flags::FLAG_rtc_event_log_name,
503 flags::FLAG_rtp_dump_name, flags::FLAG_encoded_frame_path};
504 params.analyzer = {"dual_streams",
505 0.0,
506 0.0,
507 flags::DurationSecs(),
508 flags::OutputFilename(),
509 flags::GraphTitle()};
510 params.pipe = pipe_config;
511
512 params.screenshare[camera_idx].enabled = false;
513 params.screenshare[screenshare_idx] = {
514 true, flags::GenerateSlides(), flags::SlideChangeInterval(),
515 flags::ScrollDuration(), flags::Slides()};
516
517 if (flags::VideoNumStreams() > 1 && flags::VideoStream0().empty() &&
518 flags::VideoStream1().empty()) {
519 params.ss[camera_idx].infer_streams = true;
520 }
521
522 if (flags::ScreenshareNumStreams() > 1 &&
523 flags::ScreenshareStream0().empty() &&
524 flags::ScreenshareStream1().empty()) {
525 params.ss[screenshare_idx].infer_streams = true;
526 }
527
528 std::vector<std::string> stream_descriptors;
529 stream_descriptors.push_back(flags::ScreenshareStream0());
530 stream_descriptors.push_back(flags::ScreenshareStream1());
531 std::vector<std::string> SL_descriptors;
532 SL_descriptors.push_back(flags::ScreenshareSL0());
533 SL_descriptors.push_back(flags::ScreenshareSL1());
534 VideoQualityTest::FillScalabilitySettings(
535 &params, screenshare_idx, stream_descriptors,
536 flags::ScreenshareNumStreams(), flags::ScreenshareSelectedStream(),
537 flags::ScreenshareNumSpatialLayers(), flags::ScreenshareSelectedSL(),
538 SL_descriptors);
539
540 stream_descriptors.clear();
541 stream_descriptors.push_back(flags::VideoStream0());
542 stream_descriptors.push_back(flags::VideoStream1());
543 SL_descriptors.clear();
544 SL_descriptors.push_back(flags::VideoSL0());
545 SL_descriptors.push_back(flags::VideoSL1());
546 VideoQualityTest::FillScalabilitySettings(
547 &params, camera_idx, stream_descriptors, flags::VideoNumStreams(),
548 flags::VideoSelectedStream(), flags::VideoNumSpatialLayers(),
549 flags::VideoSelectedSL(), SL_descriptors);
550
551 VideoQualityTest test;
552 if (flags::DurationSecs()) {
553 test.RunWithAnalyzer(params);
554 } else {
555 test.RunWithRenderers(params);
556 }
557}
558} // namespace webrtc
559
560int main(int argc, char* argv[]) {
561 ::testing::InitGoogleTest(&argc, argv);
562 if (rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true) != 0) {
563 // Fail on unrecognized flags.
564 return 1;
565 }
566 if (webrtc::flags::FLAG_help) {
567 rtc::FlagList::Print(nullptr, false);
568 return 0;
569 }
570
571 // InitFieldTrialsFromString needs a reference to an std::string instance,
572 // with a scope that outlives the test.
573 std::string field_trials = webrtc::flags::FLAG_force_fieldtrials;
574 webrtc::test::InitFieldTrialsFromString(field_trials);
575
576 webrtc::test::RunTest(webrtc::Loopback);
577 return 0;
578}