Add initial support for RtpEncodingParameters max_framerate.
Add support for setting the framerate to the maximum |max_framerate| configured across the encodings.
Different per-stream framerates are currently not supported for video.
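
A minimal usage sketch for context (not part of this CL); the helper name and
the chosen framerate value are illustrative only. With this change the encoder
framerate is capped at the maximum |max_framerate| across the encodings:

  #include "api/rtpparameters.h"
  #include "api/rtpsenderinterface.h"
  #include "rtc_base/checks.h"

  void SetEncodingMaxFramerates(webrtc::RtpSenderInterface* sender) {
    webrtc::RtpParameters parameters = sender->GetParameters();
    for (webrtc::RtpEncodingParameters& encoding : parameters.encodings) {
      // Encodings without a value fall back to the default max framerate; the
      // encoder is configured with the maximum value across all encodings.
      encoding.max_framerate = 20;
    }
    webrtc::RTCError result = sender->SetParameters(parameters);
    RTC_DCHECK(result.ok());
  }
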
Bug: webrtc:9597
Change-Id: Ie326617b66bd97be387f809a7f82b97b8f3ff5fe
Reviewed-on: https://webrtc-review.googlesource.com/92392
Reviewed-by: Sebastian Jansson <srte@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Steve Anton <steveanton@webrtc.org>
Commit-Queue: Åsa Persson <asapersson@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#24270}
diff --git a/api/rtpparameters.h b/api/rtpparameters.h
index 9a29c08..858342e 100644
--- a/api/rtpparameters.h
+++ b/api/rtpparameters.h
@@ -417,7 +417,10 @@
// TODO(asapersson): Not implemented for ORTC API.
absl::optional<int> min_bitrate_bps;
- // TODO(deadbeef): Not implemented.
+ // Specifies the maximum framerate in fps for video.
+ // TODO(asapersson): Different framerates per stream are not supported.
+ // If set, the maximum |max_framerate| across the encodings is currently used.
+ // Not supported for screencast.
absl::optional<int> max_framerate;
// For video, scale the resolution down by this factor.
@@ -451,6 +454,7 @@
fec == o.fec && rtx == o.rtx && dtx == o.dtx &&
bitrate_priority == o.bitrate_priority && ptime == o.ptime &&
max_bitrate_bps == o.max_bitrate_bps &&
+ min_bitrate_bps == o.min_bitrate_bps &&
max_framerate == o.max_framerate &&
scale_resolution_down_by == o.scale_resolution_down_by &&
scale_framerate_down_by == o.scale_framerate_down_by &&
diff --git a/call/call_perf_tests.cc b/call/call_perf_tests.cc
index 54e01b1..418854f 100644
--- a/call/call_perf_tests.cc
+++ b/call/call_perf_tests.cc
@@ -493,7 +493,7 @@
class LoadObserver : public test::SendTest,
public test::FrameGeneratorCapturer::SinkWantsObserver {
public:
- LoadObserver() : SendTest(kLongTimeoutMs), test_phase_(TestPhase::kStart) {}
+ LoadObserver() : SendTest(kLongTimeoutMs), test_phase_(TestPhase::kInit) {}
void OnFrameGeneratorCapturerCreated(
test::FrameGeneratorCapturer* frame_generator_capturer) override {
@@ -507,9 +507,21 @@
// TODO(sprang): Add integration test for maintain-framerate mode?
void OnSinkWantsChanged(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {
- // First expect CPU overuse. Then expect CPU underuse when the encoder
+ // At kStart expect CPU overuse. Then expect CPU underuse when the encoder
// delay has been decreased.
switch (test_phase_) {
+ case TestPhase::kInit:
+ // Max framerate should be set initially.
+ if (wants.max_framerate_fps != std::numeric_limits<int>::max() &&
+ wants.max_pixel_count == std::numeric_limits<int>::max()) {
+ test_phase_ = TestPhase::kStart;
+ } else {
+ ADD_FAILURE() << "Got unexpected adaptation request, max res = "
+ << wants.max_pixel_count << ", target res = "
+ << wants.target_pixel_count.value_or(-1)
+ << ", max fps = " << wants.max_framerate_fps;
+ }
+ break;
case TestPhase::kStart:
if (wants.max_pixel_count < std::numeric_limits<int>::max()) {
// On adapting down, VideoStreamEncoder::VideoSourceProxy will set
@@ -553,7 +565,12 @@
EXPECT_TRUE(Wait()) << "Timed out before receiving an overuse callback.";
}
- enum class TestPhase { kStart, kAdaptedDown, kAdaptedUp } test_phase_;
+ enum class TestPhase {
+ kInit,
+ kStart,
+ kAdaptedDown,
+ kAdaptedUp
+ } test_phase_;
} test;
RunBaseTest(&test);
diff --git a/media/engine/simulcast.cc b/media/engine/simulcast.cc
index dc05081..1809c32 100644
--- a/media/engine/simulcast.cc
+++ b/media/engine/simulcast.cc
@@ -12,6 +12,7 @@
#include <algorithm>
#include <string>
+#include "media/base/mediaconstants.h"
#include "media/base/streamparams.h"
#include "media/engine/constants.h"
#include "media/engine/simulcast.h"
@@ -189,17 +190,15 @@
int /*max_bitrate_bps*/,
double bitrate_priority,
int max_qp,
- int max_framerate,
+ int /*max_framerate*/,
bool is_screenshare,
bool temporal_layers_supported) {
if (is_screenshare) {
return GetScreenshareLayers(max_layers, width, height, bitrate_priority,
- max_qp, max_framerate,
- temporal_layers_supported);
+ max_qp, temporal_layers_supported);
} else {
return GetNormalSimulcastLayers(max_layers, width, height, bitrate_priority,
- max_qp, max_framerate,
- temporal_layers_supported);
+ max_qp, temporal_layers_supported);
}
}
@@ -209,7 +208,6 @@
int height,
double bitrate_priority,
int max_qp,
- int max_framerate,
bool temporal_layers_supported) {
// TODO(bugs.webrtc.org/8785): Currently if the resolution isn't large enough
// (defined in kSimulcastFormats) we scale down the number of simulcast
@@ -270,7 +268,7 @@
static_cast<int>(layers[s].target_bitrate_bps * rate_factor);
}
layers[s].min_bitrate_bps = FindSimulcastMinBitrateBps(width, height);
- layers[s].max_framerate = max_framerate;
+ layers[s].max_framerate = kDefaultVideoMaxFramerate;
width /= 2;
height /= 2;
@@ -294,7 +292,6 @@
int height,
double bitrate_priority,
int max_qp,
- int max_framerate,
bool temporal_layers_supported) {
size_t num_simulcast_layers =
std::min<int>(max_layers, kMaxScreenshareSimulcastLayers);
@@ -346,7 +343,7 @@
layers[1].width = width;
layers[1].height = height;
layers[1].max_qp = max_qp;
- layers[1].max_framerate = max_framerate;
+ layers[1].max_framerate = kDefaultVideoMaxFramerate;
layers[1].num_temporal_layers =
temporal_layers_supported ? DefaultNumberOfTemporalLayers(1) : 0;
layers[1].min_bitrate_bps = layers[0].target_bitrate_bps * 2;
diff --git a/media/engine/simulcast.h b/media/engine/simulcast.h
index 84583bc..1eaed6e 100644
--- a/media/engine/simulcast.h
+++ b/media/engine/simulcast.h
@@ -26,7 +26,7 @@
std::vector<webrtc::VideoStream>* layers);
// Gets simulcast settings.
-// TODO(asapersson): Remove max_bitrate_bps.
+// TODO(asapersson): Remove max_bitrate_bps and max_framerate.
std::vector<webrtc::VideoStream> GetSimulcastConfig(
size_t max_layers,
int width,
@@ -34,7 +34,7 @@
int /*max_bitrate_bps*/,
double bitrate_priority,
int max_qp,
- int max_framerate,
+ int /*max_framerate*/,
bool is_screenshare,
bool temporal_layers_supported = true);
@@ -45,7 +45,6 @@
int height,
double bitrate_priority,
int max_qp,
- int max_framerate,
bool temporal_layers_supported = true);
// Gets simulcast config layers for screenshare settings.
@@ -55,7 +54,6 @@
int height,
double bitrate_priority,
int max_qp,
- int max_framerate,
bool temporal_layers_supported = true);
} // namespace cricket
diff --git a/media/engine/simulcast_unittest.cc b/media/engine/simulcast_unittest.cc
index 29226d1..60231b4 100644
--- a/media/engine/simulcast_unittest.cc
+++ b/media/engine/simulcast_unittest.cc
@@ -10,6 +10,7 @@
#include "media/engine/simulcast.h"
+#include "media/base/mediaconstants.h"
#include "media/engine/constants.h"
#include "test/field_trial.h"
#include "test/gtest.h"
@@ -94,7 +95,7 @@
for (size_t i = 0; i < streams.size(); ++i) {
EXPECT_EQ(size_t{kDefaultTemporalLayers}, streams[i].num_temporal_layers);
- EXPECT_EQ(kMaxFps, streams[i].max_framerate);
+ EXPECT_EQ(cricket::kDefaultVideoMaxFramerate, streams[i].max_framerate);
EXPECT_EQ(kQpMax, streams[i].max_qp);
EXPECT_EQ(kExpected[i].min_bitrate_bps, streams[i].min_bitrate_bps);
EXPECT_EQ(kExpected[i].target_bitrate_bps, streams[i].target_bitrate_bps);
diff --git a/media/engine/webrtcvideoengine.cc b/media/engine/webrtcvideoengine.cc
index 3a72cde..245c86b 100644
--- a/media/engine/webrtcvideoengine.cc
+++ b/media/engine/webrtcvideoengine.cc
@@ -250,6 +250,18 @@
: std::vector<VideoCodec>();
}
+int GetMaxFramerate(const webrtc::VideoEncoderConfig& encoder_config,
+ size_t num_layers) {
+ int max_fps = -1;
+ for (size_t i = 0; i < num_layers; ++i) {
+ int fps = (encoder_config.simulcast_layers[i].max_framerate > 0)
+ ? encoder_config.simulcast_layers[i].max_framerate
+ : kDefaultVideoMaxFramerate;
+ max_fps = std::max(fps, max_fps);
+ }
+ return max_fps;
+}
+
static std::string CodecVectorToString(const std::vector<VideoCodec>& codecs) {
std::stringstream out;
out << '{';
@@ -1802,13 +1814,16 @@
return error;
}
- bool new_bitrate = false;
+ bool new_param = false;
for (size_t i = 0; i < rtp_parameters_.encodings.size(); ++i) {
if ((new_parameters.encodings[i].min_bitrate_bps !=
rtp_parameters_.encodings[i].min_bitrate_bps) ||
(new_parameters.encodings[i].max_bitrate_bps !=
- rtp_parameters_.encodings[i].max_bitrate_bps)) {
- new_bitrate = true;
+ rtp_parameters_.encodings[i].max_bitrate_bps) ||
+ (new_parameters.encodings[i].max_framerate !=
+ rtp_parameters_.encodings[i].max_framerate)) {
+ new_param = true;
+ break;
}
}
@@ -1822,8 +1837,8 @@
// entire encoder reconfiguration, it just needs to update the bitrate
// allocator.
bool reconfigure_encoder =
- new_bitrate || (new_parameters.encodings[0].bitrate_priority !=
- rtp_parameters_.encodings[0].bitrate_priority);
+ new_param || (new_parameters.encodings[0].bitrate_priority !=
+ rtp_parameters_.encodings[0].bitrate_priority);
// TODO(bugs.webrtc.org/8807): The active field as well should not require
// a full encoder reconfiguration, but it needs to update both the bitrate
@@ -1985,7 +2000,7 @@
// Application-controlled state is held in the encoder_config's
// simulcast_layers. Currently this is used to control which simulcast layers
- // are active and for configuring the min/max bitrate.
+ // are active and for configuring the min/max bitrate and max framerate.
// The encoder_config's simulcast_layers is also used for non-simulcast (when
// there is a single layer).
RTC_DCHECK_GE(rtp_parameters_.encodings.size(),
@@ -2003,14 +2018,17 @@
encoder_config.simulcast_layers[i].max_bitrate_bps =
*rtp_parameters_.encodings[i].max_bitrate_bps;
}
+ if (rtp_parameters_.encodings[i].max_framerate) {
+ encoder_config.simulcast_layers[i].max_framerate =
+ *rtp_parameters_.encodings[i].max_framerate;
+ }
}
int max_qp = kDefaultQpMax;
codec.GetParam(kCodecParamMaxQuantization, &max_qp);
encoder_config.video_stream_factory =
new rtc::RefCountedObject<EncoderStreamFactory>(
- codec.name, max_qp, kDefaultVideoMaxFramerate, is_screencast,
- parameters_.conference_mode);
+ codec.name, max_qp, is_screencast, parameters_.conference_mode);
return encoder_config;
}
@@ -2702,19 +2720,17 @@
return video_codecs;
}
-// TODO(bugs.webrtc.org/8785): Consider removing max_qp and max_framerate
-// as members of EncoderStreamFactory and instead set these values individually
-// for each stream in the VideoEncoderConfig.simulcast_layers.
+// TODO(bugs.webrtc.org/8785): Consider removing max_qp as a member of
+// EncoderStreamFactory and instead set this value individually for each stream
+// in the VideoEncoderConfig.simulcast_layers.
EncoderStreamFactory::EncoderStreamFactory(
std::string codec_name,
int max_qp,
- int max_framerate,
bool is_screenshare,
bool screenshare_config_explicitly_enabled)
: codec_name_(codec_name),
max_qp_(max_qp),
- max_framerate_(max_framerate),
is_screenshare_(is_screenshare),
screenshare_config_explicitly_enabled_(
screenshare_config_explicitly_enabled) {}
@@ -2738,12 +2754,18 @@
bool temporal_layers_supported = CodecNamesEq(codec_name_, kVp8CodecName);
layers = GetSimulcastConfig(encoder_config.number_of_streams, width, height,
0 /*not used*/, encoder_config.bitrate_priority,
- max_qp_, max_framerate_, is_screenshare_,
+ max_qp_, 0 /*not_used*/, is_screenshare_,
temporal_layers_supported);
+ // The maximum |max_framerate| across the layers is currently used for video.
+ int max_framerate = GetMaxFramerate(encoder_config, layers.size());
// Update the active simulcast layers and configured bitrates.
bool is_highest_layer_max_bitrate_configured = false;
for (size_t i = 0; i < layers.size(); ++i) {
layers[i].active = encoder_config.simulcast_layers[i].active;
+ if (!is_screenshare_) {
+ // Update simulcast layer framerates with the maximum configured framerate.
+ layers[i].max_framerate = max_framerate;
+ }
// Update simulcast bitrates with configured min and max bitrate.
if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0) {
layers[i].min_bitrate_bps =
@@ -2800,11 +2822,14 @@
if (encoder_config.max_bitrate_bps <= 0)
max_bitrate_bps = std::max(min_bitrate_bps, max_bitrate_bps);
}
+ int max_framerate = (encoder_config.simulcast_layers[0].max_framerate > 0)
+ ? encoder_config.simulcast_layers[0].max_framerate
+ : kDefaultVideoMaxFramerate;
webrtc::VideoStream layer;
layer.width = width;
layer.height = height;
- layer.max_framerate = max_framerate_;
+ layer.max_framerate = max_framerate;
// In the case that the application sets a max bitrate that's lower than the
// min bitrate, we adjust it down (see bugs.webrtc.org/9141).
diff --git a/media/engine/webrtcvideoengine.h b/media/engine/webrtcvideoengine.h
index f00ebee..06c1cfd 100644
--- a/media/engine/webrtcvideoengine.h
+++ b/media/engine/webrtcvideoengine.h
@@ -505,7 +505,6 @@
public:
EncoderStreamFactory(std::string codec_name,
int max_qp,
- int max_framerate,
bool is_screenshare,
bool screenshare_config_explicitly_enabled);
@@ -517,7 +516,6 @@
const std::string codec_name_;
const int max_qp_;
- const int max_framerate_;
const bool is_screenshare_;
// Allows a screenshare specific configuration, which enables temporal
// layering and allows simulcast.
diff --git a/media/engine/webrtcvideoengine_unittest.cc b/media/engine/webrtcvideoengine_unittest.cc
index 5361a95..825b883 100644
--- a/media/engine/webrtcvideoengine_unittest.cc
+++ b/media/engine/webrtcvideoengine_unittest.cc
@@ -5349,6 +5349,32 @@
stream->GetVideoStreams()[0].max_bitrate_bps);
}
+TEST_F(WebRtcVideoChannelTest, SetMaxFramerateOneStream) {
+ FakeVideoSendStream* stream = AddSendStream();
+
+ webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_);
+ EXPECT_EQ(1UL, parameters.encodings.size());
+ EXPECT_FALSE(parameters.encodings[0].max_framerate.has_value());
+ EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok());
+
+ // Note that this is testing the behavior of the FakeVideoSendStream, which
+ // also calls CreateEncoderStreams to get the VideoStreams, so essentially
+ // we are just testing the behavior of
+ // EncoderStreamFactory::CreateEncoderStreams.
+ ASSERT_EQ(1UL, stream->GetVideoStreams().size());
+ EXPECT_EQ(kDefaultVideoMaxFramerate,
+ stream->GetVideoStreams()[0].max_framerate);
+
+ // Set max framerate and check that VideoStream.max_framerate is set.
+ const int kNewMaxFramerate = kDefaultVideoMaxFramerate - 1;
+ parameters = channel_->GetRtpSendParameters(last_ssrc_);
+ parameters.encodings[0].max_framerate = kNewMaxFramerate;
+ EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok());
+
+ ASSERT_EQ(1UL, stream->GetVideoStreams().size());
+ EXPECT_EQ(kNewMaxFramerate, stream->GetVideoStreams()[0].max_framerate);
+}
+
TEST_F(WebRtcVideoChannelTest,
CannotSetRtpSendParametersWithIncorrectNumberOfEncodings) {
AddSendStream();
@@ -5501,6 +5527,120 @@
EXPECT_TRUE(channel_->SetVideoSend(primary_ssrc, nullptr, nullptr));
}
+TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMaxFramerate) {
+ const size_t kNumSimulcastStreams = 3;
+ SetUpSimulcast(true, false);
+
+ // Get and set the rtp encoding parameters.
+ webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_);
+ EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size());
+ for (const auto& encoding : parameters.encodings) {
+ EXPECT_FALSE(encoding.max_framerate);
+ }
+
+ // Change the value and set it on the VideoChannel.
+ parameters.encodings[0].max_framerate = 10;
+ parameters.encodings[1].max_framerate = 20;
+ parameters.encodings[2].max_framerate = 25;
+ EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok());
+
+ // Verify that the max framerates are set on the VideoChannel.
+ parameters = channel_->GetRtpSendParameters(last_ssrc_);
+ EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size());
+ EXPECT_EQ(10, parameters.encodings[0].max_framerate);
+ EXPECT_EQ(20, parameters.encodings[1].max_framerate);
+ EXPECT_EQ(25, parameters.encodings[2].max_framerate);
+}
+
+TEST_F(WebRtcVideoChannelTest, MaxSimulcastFrameratePropagatedToEncoder) {
+ const size_t kNumSimulcastStreams = 3;
+ FakeVideoSendStream* stream = SetUpSimulcast(true, false);
+
+ // Send a full size frame so all simulcast layers are used when reconfiguring.
+ FakeVideoCapturerWithTaskQueue capturer;
+ VideoOptions options;
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &capturer));
+ EXPECT_EQ(cricket::CS_RUNNING,
+ capturer.Start(capturer.GetSupportedFormats()->front()));
+ channel_->SetSend(true);
+ EXPECT_TRUE(capturer.CaptureFrame());
+
+ // Get and set the rtp encoding parameters.
+ // Change the value and set it on the VideoChannel.
+ webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_);
+ EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size());
+ parameters.encodings[0].max_framerate = 15;
+ parameters.encodings[1].max_framerate = 25;
+ parameters.encodings[2].max_framerate = 20;
+ EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok());
+
+ // Verify that the new value propagated down to the encoder.
+ // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly.
+ EXPECT_EQ(2, stream->num_encoder_reconfigurations());
+ webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy();
+ EXPECT_EQ(kNumSimulcastStreams, encoder_config.number_of_streams);
+ EXPECT_EQ(kNumSimulcastStreams, encoder_config.simulcast_layers.size());
+ EXPECT_EQ(15, encoder_config.simulcast_layers[0].max_framerate);
+ EXPECT_EQ(25, encoder_config.simulcast_layers[1].max_framerate);
+ EXPECT_EQ(20, encoder_config.simulcast_layers[2].max_framerate);
+
+ // FakeVideoSendStream calls CreateEncoderStreams; test that the vector of
+ // VideoStreams is created appropriately for the simulcast case.
+ // Currently the maximum |max_framerate| is used.
+ EXPECT_EQ(kNumSimulcastStreams, stream->GetVideoStreams().size());
+ EXPECT_EQ(25, stream->GetVideoStreams()[0].max_framerate);
+ EXPECT_EQ(25, stream->GetVideoStreams()[1].max_framerate);
+ EXPECT_EQ(25, stream->GetVideoStreams()[2].max_framerate);
+
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr));
+}
+
+TEST_F(WebRtcVideoChannelTest,
+ DefaultValuePropagatedToEncoderForUnsetFramerate) {
+ const size_t kNumSimulcastStreams = 3;
+ const std::vector<webrtc::VideoStream> kDefault = GetSimulcastBitrates720p();
+ FakeVideoSendStream* stream = SetUpSimulcast(true, false);
+
+ // Send a full size frame so all simulcast layers are used when reconfiguring.
+ FakeVideoCapturerWithTaskQueue capturer;
+ VideoOptions options;
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &capturer));
+ EXPECT_EQ(cricket::CS_RUNNING,
+ capturer.Start(capturer.GetSupportedFormats()->front()));
+ channel_->SetSend(true);
+ EXPECT_TRUE(capturer.CaptureFrame());
+
+ // Get and set the rtp encoding parameters.
+ // Change the value and set it on the VideoChannel.
+ webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_);
+ EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size());
+ parameters.encodings[0].max_framerate = 15;
+ parameters.encodings[2].max_framerate = 20;
+ EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok());
+
+ // Verify that the new value propagated down to the encoder.
+ // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly.
+ webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy();
+ EXPECT_EQ(kNumSimulcastStreams, encoder_config.number_of_streams);
+ EXPECT_EQ(kNumSimulcastStreams, encoder_config.simulcast_layers.size());
+ EXPECT_EQ(15, encoder_config.simulcast_layers[0].max_framerate);
+ EXPECT_EQ(-1, encoder_config.simulcast_layers[1].max_framerate);
+ EXPECT_EQ(20, encoder_config.simulcast_layers[2].max_framerate);
+
+ // FakeVideoSendStream calls CreateEncoderStreams; test that the vector of
+ // VideoStreams is created appropriately for the simulcast case. The unset
+ // layer defaults to kDefaultVideoMaxFramerate (60), which becomes the maximum.
+ EXPECT_EQ(kNumSimulcastStreams, stream->GetVideoStreams().size());
+ EXPECT_EQ(kDefaultVideoMaxFramerate,
+ stream->GetVideoStreams()[0].max_framerate);
+ EXPECT_EQ(kDefaultVideoMaxFramerate,
+ stream->GetVideoStreams()[1].max_framerate);
+ EXPECT_EQ(kDefaultVideoMaxFramerate,
+ stream->GetVideoStreams()[2].max_framerate);
+
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr));
+}
+
TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMinAndMaxBitrate) {
const size_t kNumSimulcastStreams = 3;
SetUpSimulcast(true, false);
diff --git a/pc/rtpsender.cc b/pc/rtpsender.cc
index 91708fa..fa17056 100644
--- a/pc/rtpsender.cc
+++ b/pc/rtpsender.cc
@@ -37,7 +37,6 @@
if (encoding_params.codec_payload_type.has_value() ||
encoding_params.fec.has_value() || encoding_params.rtx.has_value() ||
encoding_params.dtx.has_value() || encoding_params.ptime.has_value() ||
- encoding_params.max_framerate.has_value() ||
!encoding_params.rid.empty() ||
encoding_params.scale_resolution_down_by.has_value() ||
encoding_params.scale_framerate_down_by.has_value() ||
diff --git a/pc/rtpsenderreceiver_unittest.cc b/pc/rtpsenderreceiver_unittest.cc
index 4014df5..837f6b7 100644
--- a/pc/rtpsenderreceiver_unittest.cc
+++ b/pc/rtpsenderreceiver_unittest.cc
@@ -691,8 +691,7 @@
EXPECT_EQ(1u, params.encodings.size());
// Unimplemented RtpParameters: codec_payload_type, fec, rtx, dtx, ptime,
- // max_framerate, scale_resolution_down_by, scale_framerate_down_by, rid,
- // dependency_rids.
+ // scale_resolution_down_by, scale_framerate_down_by, rid, dependency_rids.
params.encodings[0].codec_payload_type = 1;
EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
audio_rtp_sender_->SetParameters(params).type());
@@ -718,11 +717,6 @@
audio_rtp_sender_->SetParameters(params).type());
params = audio_rtp_sender_->GetParameters();
- params.encodings[0].max_framerate = 1;
- EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
- audio_rtp_sender_->SetParameters(params).type());
- params = audio_rtp_sender_->GetParameters();
-
params.encodings[0].scale_resolution_down_by = 2.0;
EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
audio_rtp_sender_->SetParameters(params).type());
@@ -878,8 +872,7 @@
EXPECT_EQ(1u, params.encodings.size());
// Unimplemented RtpParameters: codec_payload_type, fec, rtx, dtx, ptime,
- // max_framerate, scale_resolution_down_by, scale_framerate_down_by, rid,
- // dependency_rids.
+ // scale_resolution_down_by, scale_framerate_down_by, rid, dependency_rids.
params.encodings[0].codec_payload_type = 1;
EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
video_rtp_sender_->SetParameters(params).type());
@@ -905,11 +898,6 @@
video_rtp_sender_->SetParameters(params).type());
params = video_rtp_sender_->GetParameters();
- params.encodings[0].max_framerate = 1;
- EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
- video_rtp_sender_->SetParameters(params).type());
- params = video_rtp_sender_->GetParameters();
-
params.encodings[0].scale_resolution_down_by = 2.0;
EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
video_rtp_sender_->SetParameters(params).type());
diff --git a/video/quality_scaling_tests.cc b/video/quality_scaling_tests.cc
index bb12edc..9b1d0fa 100644
--- a/video/quality_scaling_tests.cc
+++ b/video/quality_scaling_tests.cc
@@ -100,8 +100,8 @@
// Called when FrameGeneratorCapturer::AddOrUpdateSink is called.
void OnSinkWantsChanged(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {
- EXPECT_LT(wants.max_pixel_count, kWidth * kHeight) << "Not a downscale.";
- observation_complete_.Set();
+ if (wants.max_pixel_count < kWidth * kHeight)
+ observation_complete_.Set();
}
void ModifySenderCallConfig(Call::Config* config) override {
config->bitrate_config.start_bitrate_bps = start_bps_;
diff --git a/video/video_quality_test.cc b/video/video_quality_test.cc
index c5bd57e..e313719 100644
--- a/video/video_quality_test.cc
+++ b/video/video_quality_test.cc
@@ -290,7 +290,6 @@
encoder_config.video_stream_factory =
new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
params->video[video_idx].codec, kDefaultMaxQp,
- params->video[video_idx].fps,
params->screenshare[video_idx].enabled, true);
params->ss[video_idx].streams =
encoder_config.video_stream_factory->CreateEncoderStreams(
@@ -446,7 +445,6 @@
new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
params_.video[video_idx].codec,
params_.ss[video_idx].streams[0].max_qp,
- params_.video[video_idx].fps,
params_.screenshare[video_idx].enabled, true);
} else {
video_encoder_configs_[video_idx].video_stream_factory =
@@ -600,7 +598,7 @@
thumbnail_encoder_config.video_stream_factory =
new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
params_.video[0].codec, params_.ss[0].streams[0].max_qp,
- params_.video[0].fps, params_.screenshare[0].enabled, true);
+ params_.screenshare[0].enabled, true);
}
thumbnail_encoder_config.spatial_layers = params_.ss[0].spatial_layers;
diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc
index b0d450f..4d15a52 100644
--- a/video/video_stream_encoder.cc
+++ b/video/video_stream_encoder.cc
@@ -36,7 +36,6 @@
// Time interval for logging frame counts.
const int64_t kFrameLogIntervalMs = 60000;
const int kMinFramerateFps = 2;
-const int kMaxFramerateFps = 120;
// Time to keep a single cached pending frame in paused state.
const int64_t kPendingFrameTimeoutMs = 1000;
@@ -109,7 +108,8 @@
explicit VideoSourceProxy(VideoStreamEncoder* video_stream_encoder)
: video_stream_encoder_(video_stream_encoder),
degradation_preference_(DegradationPreference::DISABLED),
- source_(nullptr) {}
+ source_(nullptr),
+ max_framerate_(std::numeric_limits<int>::max()) {}
void SetSource(rtc::VideoSourceInterface<VideoFrame>* source,
const DegradationPreference& degradation_preference) {
@@ -136,11 +136,27 @@
source->AddOrUpdateSink(video_stream_encoder_, wants);
}
+ void SetMaxFramerate(int max_framerate) {
+ RTC_DCHECK_GT(max_framerate, 0);
+ rtc::CritScope lock(&crit_);
+ if (max_framerate == max_framerate_)
+ return;
+
+ RTC_LOG(LS_INFO) << "Set max framerate: " << max_framerate;
+ max_framerate_ = max_framerate;
+ if (source_) {
+ source_->AddOrUpdateSink(video_stream_encoder_,
+ GetActiveSinkWantsInternal());
+ }
+ }
+
void SetWantsRotationApplied(bool rotation_applied) {
rtc::CritScope lock(&crit_);
sink_wants_.rotation_applied = rotation_applied;
- if (source_)
- source_->AddOrUpdateSink(video_stream_encoder_, sink_wants_);
+ if (source_) {
+ source_->AddOrUpdateSink(video_stream_encoder_,
+ GetActiveSinkWantsInternal());
+ }
}
rtc::VideoSinkWants GetActiveSinkWants() {
@@ -154,7 +170,8 @@
sink_wants_.target_pixel_count.reset();
sink_wants_.max_framerate_fps = std::numeric_limits<int>::max();
if (source_)
- source_->AddOrUpdateSink(video_stream_encoder_, sink_wants_);
+ source_->AddOrUpdateSink(video_stream_encoder_,
+ GetActiveSinkWantsInternal());
}
bool RequestResolutionLowerThan(int pixel_count,
@@ -297,6 +314,8 @@
wants.target_pixel_count.reset();
wants.max_framerate_fps = std::numeric_limits<int>::max();
}
+ // Limit to configured max framerate.
+ wants.max_framerate_fps = std::min(max_framerate_, wants.max_framerate_fps);
return wants;
}
@@ -306,6 +325,7 @@
rtc::VideoSinkWants sink_wants_ RTC_GUARDED_BY(&crit_);
DegradationPreference degradation_preference_ RTC_GUARDED_BY(&crit_);
rtc::VideoSourceInterface<VideoFrame>* source_ RTC_GUARDED_BY(&crit_);
+ int max_framerate_ RTC_GUARDED_BY(&crit_);
RTC_DISALLOW_COPY_AND_ASSIGN(VideoSourceProxy);
};
@@ -532,7 +552,13 @@
codec.startBitrate = std::min(codec.startBitrate, codec.maxBitrate);
codec.expect_encode_from_texture = last_frame_info_->is_texture;
max_framerate_ = codec.maxFramerate;
- RTC_DCHECK_LE(max_framerate_, kMaxFramerateFps);
+
+ // Inform source about max configured framerate.
+ int max_framerate = 0;
+ for (const auto& stream : streams) {
+ max_framerate = std::max(stream.max_framerate, max_framerate);
+ }
+ source_proxy_->SetMaxFramerate(max_framerate);
// Keep the same encoder, as long as the video_format is unchanged.
if (pending_encoder_creation_) {
diff --git a/video/video_stream_encoder_unittest.cc b/video/video_stream_encoder_unittest.cc
index 32053ab..58d140f 100644
--- a/video/video_stream_encoder_unittest.cc
+++ b/video/video_stream_encoder_unittest.cc
@@ -31,13 +31,6 @@
#include "video/send_statistics_proxy.h"
#include "video/video_stream_encoder.h"
-namespace {
-const int kMinPixelsPerFrame = 320 * 180;
-const int kMinFramerateFps = 2;
-const int kMinBalancedFramerateFps = 7;
-const int64_t kFrameTimeoutMs = 100;
-} // namespace
-
namespace webrtc {
using ScaleReason = AdaptationObserverInterface::AdaptReason;
@@ -45,11 +38,16 @@
using ::testing::Return;
namespace {
+const int kMinPixelsPerFrame = 320 * 180;
+const int kMinFramerateFps = 2;
+const int kMinBalancedFramerateFps = 7;
+const int64_t kFrameTimeoutMs = 100;
const size_t kMaxPayloadLength = 1440;
const int kTargetBitrateBps = 1000000;
const int kLowTargetBitrateBps = kTargetBitrateBps / 10;
const int kMaxInitialFramedrop = 4;
const int kDefaultFramerate = 30;
+const int64_t kFrameIntervalMs = rtc::kNumMillisecsPerSec / kDefaultFramerate;
class TestBuffer : public webrtc::I420Buffer {
public:
@@ -275,7 +273,7 @@
: video_send_config_(VideoSendStream::Config(nullptr)),
codec_width_(320),
codec_height_(240),
- max_framerate_(30),
+ max_framerate_(kDefaultFramerate),
fake_encoder_(),
encoder_factory_(&fake_encoder_),
stats_proxy_(new MockableSendStatisticsProxy(
@@ -378,22 +376,28 @@
EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count);
}
+ void VerifyFpsMaxResolutionMax(const rtc::VideoSinkWants& wants) {
+ EXPECT_EQ(kDefaultFramerate, wants.max_framerate_fps);
+ EXPECT_EQ(std::numeric_limits<int>::max(), wants.max_pixel_count);
+ EXPECT_FALSE(wants.target_pixel_count);
+ }
+
void VerifyFpsMaxResolutionLt(const rtc::VideoSinkWants& wants1,
const rtc::VideoSinkWants& wants2) {
- EXPECT_EQ(std::numeric_limits<int>::max(), wants1.max_framerate_fps);
+ EXPECT_EQ(kDefaultFramerate, wants1.max_framerate_fps);
EXPECT_LT(wants1.max_pixel_count, wants2.max_pixel_count);
EXPECT_GT(wants1.max_pixel_count, 0);
}
void VerifyFpsMaxResolutionGt(const rtc::VideoSinkWants& wants1,
const rtc::VideoSinkWants& wants2) {
- EXPECT_EQ(std::numeric_limits<int>::max(), wants1.max_framerate_fps);
+ EXPECT_EQ(kDefaultFramerate, wants1.max_framerate_fps);
EXPECT_GT(wants1.max_pixel_count, wants2.max_pixel_count);
}
void VerifyFpsMaxResolutionEq(const rtc::VideoSinkWants& wants1,
const rtc::VideoSinkWants& wants2) {
- EXPECT_EQ(std::numeric_limits<int>::max(), wants1.max_framerate_fps);
+ EXPECT_EQ(kDefaultFramerate, wants1.max_framerate_fps);
EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count);
}
@@ -424,7 +428,7 @@
void VerifyFpsMaxResolutionLt(const rtc::VideoSinkWants& wants,
int pixel_count) {
- EXPECT_EQ(std::numeric_limits<int>::max(), wants.max_framerate_fps);
+ EXPECT_EQ(kDefaultFramerate, wants.max_framerate_fps);
EXPECT_LT(wants.max_pixel_count, pixel_count);
EXPECT_GT(wants.max_pixel_count, 0);
}
@@ -454,7 +458,7 @@
} else if (last_frame_pixels <= 640 * 480) {
EXPECT_LE(15, fps_limit);
} else {
- EXPECT_EQ(std::numeric_limits<int>::max(), fps_limit);
+ EXPECT_EQ(kDefaultFramerate, fps_limit);
}
}
@@ -935,7 +939,7 @@
last_wants.max_framerate_fps);
}
- VerifyNoLimitation(video_source_.sink_wants());
+ VerifyFpsMaxResolutionMax(video_source_.sink_wants());
stats_proxy_->ResetMockStats();
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
@@ -950,9 +954,8 @@
const int kFrameWidth = 1280;
const int kFrameHeight = 720;
- const int kFrameIntervalMs = 1000 / 30;
- int frame_timestamp = 1;
+ int64_t frame_timestamp = 1;
video_source_.IncomingCapturedFrame(
CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
@@ -971,8 +974,7 @@
EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
EXPECT_LT(video_source_.sink_wants().max_pixel_count,
kFrameWidth * kFrameHeight);
- EXPECT_EQ(std::numeric_limits<int>::max(),
- video_source_.sink_wants().max_framerate_fps);
+ EXPECT_EQ(kDefaultFramerate, video_source_.sink_wants().max_framerate_fps);
// Set new source, switch to maintain-resolution.
test::FrameForwarder new_video_source;
@@ -980,7 +982,7 @@
&new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
// Initially no degradation registered.
- VerifyNoLimitation(new_video_source.sink_wants());
+ VerifyFpsMaxResolutionMax(new_video_source.sink_wants());
// Force an input frame rate to be available, or the adaptation call won't
// know what framerate to adapt from.
@@ -1004,7 +1006,7 @@
// Turn off degradation completely.
video_stream_encoder_->SetSource(&new_video_source,
webrtc::DegradationPreference::DISABLED);
- VerifyNoLimitation(new_video_source.sink_wants());
+ VerifyFpsMaxResolutionMax(new_video_source.sink_wants());
video_stream_encoder_->TriggerCpuOveruse();
new_video_source.IncomingCapturedFrame(
@@ -1013,7 +1015,7 @@
frame_timestamp += kFrameIntervalMs;
// Still no degradation.
- VerifyNoLimitation(new_video_source.sink_wants());
+ VerifyFpsMaxResolutionMax(new_video_source.sink_wants());
// Calling SetSource with resolution scaling enabled apply the old SinkWants.
video_stream_encoder_->SetSource(
@@ -1021,8 +1023,7 @@
EXPECT_LT(new_video_source.sink_wants().max_pixel_count,
kFrameWidth * kFrameHeight);
EXPECT_FALSE(new_video_source.sink_wants().target_pixel_count);
- EXPECT_EQ(std::numeric_limits<int>::max(),
- new_video_source.sink_wants().max_framerate_fps);
+ EXPECT_EQ(kDefaultFramerate, new_video_source.sink_wants().max_framerate_fps);
// Calling SetSource with framerate scaling enabled apply the old SinkWants.
video_stream_encoder_->SetSource(
@@ -1234,25 +1235,31 @@
const int kWidth = 1280;
const int kHeight = 720;
+ int64_t timestamp_ms = kFrameIntervalMs;
video_source_.set_adaptation_enabled(true);
- video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
- WaitForEncodedFrame(1);
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt down.
video_stream_encoder_->TriggerQualityLow();
- video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
- WaitForEncodedFrame(2);
+ timestamp_ms += kFrameIntervalMs;
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger overuse.
video_stream_encoder_->TriggerCpuOveruse();
- video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
- WaitForEncodedFrame(3);
+ timestamp_ms += kFrameIntervalMs;
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1266,9 +1273,10 @@
video_encoder_config.video_format.parameters["foo"] = "foo";
video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config),
kMaxPayloadLength);
-
- video_source_.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
- WaitForEncodedFrame(4);
+ timestamp_ms += kFrameIntervalMs;
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1471,7 +1479,7 @@
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(1);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1502,7 +1510,7 @@
webrtc::DegradationPreference::BALANCED);
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(1);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerQualityLow();
@@ -1543,13 +1551,13 @@
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerCpuNormalUsage();
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1569,13 +1577,13 @@
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerCpuNormalUsage();
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1594,14 +1602,14 @@
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -1621,14 +1629,14 @@
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -1650,7 +1658,7 @@
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(1);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -1664,7 +1672,7 @@
// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerQualityHigh();
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1686,7 +1694,7 @@
// Expect no scaling to begin with (preference: MAINTAIN_FRAMERATE).
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(1);
- VerifyNoLimitation(video_source_.sink_wants());
+ VerifyFpsMaxResolutionMax(video_source_.sink_wants());
// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerQualityLow();
@@ -1698,7 +1706,7 @@
test::FrameForwarder new_video_source;
video_stream_encoder_->SetSource(
&new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
- VerifyNoLimitation(new_video_source.sink_wants());
+ VerifyFpsMaxResolutionMax(new_video_source.sink_wants());
// Trigger adapt down, expect reduced framerate.
video_stream_encoder_->TriggerQualityLow();
@@ -1708,7 +1716,7 @@
// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerQualityHigh();
- VerifyNoLimitation(new_video_source.sink_wants());
+ VerifyFpsMaxResolutionMax(new_video_source.sink_wants());
video_stream_encoder_->Stop();
}
@@ -1729,8 +1737,9 @@
int downscales = 0;
for (size_t i = 1; i <= kNumFrames; i++) {
- video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
- WaitForEncodedFrame(i);
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(i * kFrameIntervalMs, kWidth, kHeight));
+ WaitForEncodedFrame(i * kFrameIntervalMs);
// Trigger scale down.
rtc::VideoSinkWants last_wants = video_source_.sink_wants();
@@ -1760,41 +1769,46 @@
video_stream_encoder_->SetSource(
&source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
- source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
+ int64_t timestamp_ms = kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerCpuOveruse();
- source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
- WaitForEncodedFrame(2);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerCpuNormalUsage();
- source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerCpuOveruse();
- source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
- WaitForEncodedFrame(4);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerCpuNormalUsage();
- source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight));
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1813,41 +1827,46 @@
video_stream_encoder_->SetSource(&source,
webrtc::DegradationPreference::BALANCED);
- source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
+ int64_t timestamp_ms = kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerQualityLow();
- source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
- sink_.WaitForEncodedFrame(2);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ sink_.WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerQualityHigh();
- source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerQualityLow();
- source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
- sink_.WaitForEncodedFrame(4);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ sink_.WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerQualityHigh();
- source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight));
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -1866,9 +1885,10 @@
video_stream_encoder_->SetSource(
&source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
- source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
+ int64_t timestamp_ms = kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1876,8 +1896,9 @@
// Trigger cpu adapt down, expect scaled down resolution (960x540).
video_stream_encoder_->TriggerCpuOveruse();
- source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
- WaitForEncodedFrame(2);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1886,8 +1907,9 @@
// Trigger cpu adapt down, expect scaled down resolution (640x360).
video_stream_encoder_->TriggerCpuOveruse();
- source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
- WaitForEncodedFrame(3);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1896,8 +1918,9 @@
// Trigger cpu adapt down, expect scaled down resolution (480x270).
video_stream_encoder_->TriggerCpuOveruse();
- source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
- WaitForEncodedFrame(4);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1906,8 +1929,9 @@
// Trigger quality adapt down, expect scaled down resolution (320x180).
video_stream_encoder_->TriggerQualityLow();
- source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight));
- WaitForEncodedFrame(5);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
rtc::VideoSinkWants last_wants = source.sink_wants();
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -1917,8 +1941,9 @@
// Trigger quality adapt down, expect no change (min resolution reached).
video_stream_encoder_->TriggerQualityLow();
- source.IncomingCapturedFrame(CreateFrame(6, kWidth, kHeight));
- WaitForEncodedFrame(6);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionEq(source.sink_wants(), last_wants);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1927,8 +1952,9 @@
// Trigger cpu adapt up, expect upscaled resolution (480x270).
video_stream_encoder_->TriggerCpuNormalUsage();
- source.IncomingCapturedFrame(CreateFrame(7, kWidth, kHeight));
- WaitForEncodedFrame(7);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1937,8 +1963,9 @@
// Trigger cpu adapt up, expect upscaled resolution (640x360).
video_stream_encoder_->TriggerCpuNormalUsage();
- source.IncomingCapturedFrame(CreateFrame(8, kWidth, kHeight));
- WaitForEncodedFrame(8);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1947,8 +1974,9 @@
// Trigger cpu adapt up, expect upscaled resolution (960x540).
video_stream_encoder_->TriggerCpuNormalUsage();
- source.IncomingCapturedFrame(CreateFrame(9, kWidth, kHeight));
- WaitForEncodedFrame(9);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
last_wants = source.sink_wants();
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -1958,8 +1986,9 @@
// Trigger cpu adapt up, no cpu downgrades, expect no change (960x540).
video_stream_encoder_->TriggerCpuNormalUsage();
- source.IncomingCapturedFrame(CreateFrame(10, kWidth, kHeight));
- WaitForEncodedFrame(10);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
VerifyFpsEqResolutionEq(source.sink_wants(), last_wants);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1968,10 +1997,11 @@
// Trigger quality adapt up, expect no restriction (1280x720).
video_stream_encoder_->TriggerQualityHigh();
- source.IncomingCapturedFrame(CreateFrame(11, kWidth, kHeight));
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(6, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -2351,7 +2381,7 @@
source.IncomingCapturedFrame(CreateFrame(1, kTooSmallWidth, kTooSmallHeight));
WaitForEncodedFrame(1);
video_stream_encoder_->TriggerCpuOveruse();
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -2419,19 +2449,19 @@
video_source_.set_adaptation_enabled(true);
video_source_.IncomingCapturedFrame(
- CreateFrame(1, kFrameWidth, kFrameHeight));
+ CreateFrame(1 * kFrameIntervalMs, kFrameWidth, kFrameHeight));
WaitForEncodedFrame(kFrameWidth, kFrameHeight);
// Trigger CPU overuse, downscale by 3/4.
video_stream_encoder_->TriggerCpuOveruse();
video_source_.IncomingCapturedFrame(
- CreateFrame(2, kFrameWidth, kFrameHeight));
+ CreateFrame(2 * kFrameIntervalMs, kFrameWidth, kFrameHeight));
WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4);
// Trigger CPU normal use, return to original resolution.
video_stream_encoder_->TriggerCpuNormalUsage();
video_source_.IncomingCapturedFrame(
- CreateFrame(3, kFrameWidth, kFrameHeight));
+ CreateFrame(3 * kFrameIntervalMs, kFrameWidth, kFrameHeight));
WaitForEncodedFrame(kFrameWidth, kFrameHeight);
video_stream_encoder_->Stop();
@@ -2441,7 +2471,6 @@
AdaptsFramerateOnOveruse_MaintainResolutionMode) {
const int kFrameWidth = 1280;
const int kFrameHeight = 720;
- int kFrameIntervalMs = rtc::kNumMillisecsPerSec / max_framerate_;
video_stream_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
video_stream_encoder_->SetSource(
@@ -2487,7 +2516,7 @@
// Trigger CPU overuse, reduce framerate by 2/3 again.
video_stream_encoder_->TriggerCpuOveruse();
num_frames_dropped = 0;
- for (int i = 0; i < max_framerate_; ++i) {
+ for (int i = 0; i <= max_framerate_; ++i) {
timestamp_ms += kFrameIntervalMs;
video_source_.IncomingCapturedFrame(
CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
@@ -2593,7 +2622,7 @@
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -2745,14 +2774,14 @@
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(14, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_EQ(14, stats_proxy_->GetStats().number_of_quality_adapt_changes);
video_stream_encoder_->Stop();
@@ -2773,7 +2802,7 @@
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -2852,7 +2881,7 @@
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -2862,7 +2891,7 @@
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -2886,7 +2915,7 @@
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -2938,7 +2967,7 @@
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -2948,7 +2977,7 @@
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
- VerifyNoLimitation(source.sink_wants());
+ VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);