Reland of Periodically update codec bit/frame rate settings. (patchset #1 id:1 of https://codereview.webrtc.org/2923993002/ )
Reason for revert:
Create a reland CL that we can patch with the fix.
Original issue's description:
> Revert of Periodically update codec bit/frame rate settings. (patchset #8 id:140001 of https://codereview.webrtc.org/2883963002/ )
>
> Reason for revert:
> Breaks some Call perf tests that are not run by the try bots....
>
> Original issue's description:
> > Fix a bug in vie_encoder.cc which caused channel parameters not to be updated at regular intervals, as intended.
> >
> > That, however, exposes a bunch of failing tests, so this CL also fixes a few other things:
> > * FakeEncoder should trust the configured FPS value rather than guesstimating it from the realtime clock, so as not to completely undershoot targets in offline mode. Also, compensate for key-frame overshoots when outputting delta frames.
> > * FrameDropper should not assume the incoming frame rate is 0 if no frames have been seen.
> > * Fix a bunch of test cases that started failing because they were relying on the fake encoder undershooting.
> > * Fix test
> >
> > BUG=7664
> >
> > Review-Url: https://codereview.webrtc.org/2883963002
> > Cr-Commit-Position: refs/heads/master@{#18473}
> > Committed: https://chromium.googlesource.com/external/webrtc/+/6431e21da672a5f3bbf166d3d4d98b171d015706
>
> TBR=stefan@webrtc.org,holmer@google.com
> # Skipping CQ checks because original CL landed less than 1 day ago.
> NOPRESUBMIT=true
> NOTREECHECKS=true
> NOTRY=true
> BUG=7664
>
> Review-Url: https://codereview.webrtc.org/2923993002
> Cr-Commit-Position: refs/heads/master@{#18475}
> Committed: https://chromium.googlesource.com/external/webrtc/+/5390c4814d7880ea79edcd55596ea25e0d9b97ad
TBR=stefan@webrtc.org,holmer@google.com
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=7664
Review-Url: https://codereview.webrtc.org/2924023002
Cr-Commit-Position: refs/heads/master@{#18497}
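
For readers new to bug 7664: the vie_encoder.cc hunk below fixes the periodic update by stamping the last-update timestamp only when channel parameters are actually pushed (on reconfiguration, or once the process interval has elapsed), instead of on every frame. A minimal sketch of that pattern follows; the names and the 1000 ms interval are assumptions for illustration, not the actual ViEEncoder members.

#include <cstdint>

// Sketch only: periodic rate/parameter updates driven from the frame path.
class PeriodicRateUpdater {
 public:
  void OnFrame(int64_t now_ms, bool pending_reconfiguration) {
    if (pending_reconfiguration) {
      Reconfigure();   // stands in for ReconfigureEncoder()
      last_update_ms_ = now_ms;
      has_updated_ = true;
    } else if (!has_updated_ ||
               now_ms - last_update_ms_ >= kUpdateIntervalMs) {
      UpdateRates();   // stands in for the channel-parameter update
      last_update_ms_ = now_ms;
      has_updated_ = true;
    }
    // Stamping last_update_ms_ unconditionally here (as the pre-fix code
    // effectively did) keeps the elapsed time below kUpdateIntervalMs
    // forever, so the periodic branch never fires.
  }

 private:
  static constexpr int64_t kUpdateIntervalMs = 1000;  // assumed interval
  void Reconfigure() {}
  void UpdateRates() {}
  int64_t last_update_ms_ = 0;
  bool has_updated_ = false;
};
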
diff --git a/webrtc/media/engine/simulcast.cc b/webrtc/media/engine/simulcast.cc
index 4dd8c31..f1cd2ce 100644
--- a/webrtc/media/engine/simulcast.cc
+++ b/webrtc/media/engine/simulcast.cc
@@ -49,7 +49,7 @@
{0, 0, 1, 200, 150, 30}
};
-const int kMaxScreenshareSimulcastStreams = 2;
+const int kDefaultScreenshareSimulcastStreams = 2;
// Multiway: Number of temporal layers for each simulcast stream, for maximum
// possible number of simulcast streams |kMaxSimulcastStreams|. The array
@@ -176,12 +176,8 @@
bool is_screencast) {
size_t num_simulcast_layers;
if (is_screencast) {
- if (UseSimulcastScreenshare()) {
- num_simulcast_layers =
- std::min<int>(max_streams, kMaxScreenshareSimulcastStreams);
- } else {
- num_simulcast_layers = 1;
- }
+ num_simulcast_layers =
+ UseSimulcastScreenshare() ? kDefaultScreenshareSimulcastStreams : 1;
} else {
num_simulcast_layers = FindSimulcastMaxLayers(width, height);
}
@@ -198,60 +194,33 @@
std::vector<webrtc::VideoStream> streams;
streams.resize(num_simulcast_layers);
- if (is_screencast) {
- ScreenshareLayerConfig config = ScreenshareLayerConfig::GetDefault();
- // For legacy screenshare in conference mode, tl0 and tl1 bitrates are
- // piggybacked on the VideoCodec struct as target and max bitrates,
- // respectively. See eg. webrtc::VP8EncoderImpl::SetRates().
- streams[0].width = width;
- streams[0].height = height;
- streams[0].max_qp = max_qp;
- streams[0].max_framerate = 5;
- streams[0].min_bitrate_bps = kMinVideoBitrateKbps * 1000;
- streams[0].target_bitrate_bps = config.tl0_bitrate_kbps * 1000;
- streams[0].max_bitrate_bps = config.tl1_bitrate_kbps * 1000;
- streams[0].temporal_layer_thresholds_bps.clear();
- streams[0].temporal_layer_thresholds_bps.push_back(config.tl0_bitrate_kbps *
- 1000);
-
- // With simulcast enabled, add another spatial layer. This one will have a
- // more normal layout, with the regular 3 temporal layer pattern and no fps
- // restrictions. The base simulcast stream will still use legacy setup.
- if (num_simulcast_layers == kMaxScreenshareSimulcastStreams) {
- // Add optional upper simulcast layer.
- // Lowest temporal layers of a 3 layer setup will have 40% of the total
- // bitrate allocation for that stream. Make sure the gap between the
- // target of the lower stream and first temporal layer of the higher one
- // is at most 2x the bitrate, so that upswitching is not hampered by
- // stalled bitrate estimates.
- int max_bitrate_bps = 2 * ((streams[0].target_bitrate_bps * 10) / 4);
- // Cap max bitrate so it isn't overly high for the given resolution.
- max_bitrate_bps = std::min<int>(
- max_bitrate_bps, FindSimulcastMaxBitrateBps(width, height));
-
- streams[1].width = width;
- streams[1].height = height;
- streams[1].max_qp = max_qp;
- streams[1].max_framerate = max_framerate;
- // Three temporal layers means two thresholds.
- streams[1].temporal_layer_thresholds_bps.resize(2);
- streams[1].min_bitrate_bps = streams[0].target_bitrate_bps * 2;
- streams[1].target_bitrate_bps = max_bitrate_bps;
- streams[1].max_bitrate_bps = max_bitrate_bps;
- }
- } else {
+ if (!is_screencast) {
// Format width and height has to be divisible by |2 ^ number_streams - 1|.
width = NormalizeSimulcastSize(width, num_simulcast_layers);
height = NormalizeSimulcastSize(height, num_simulcast_layers);
+ }
- // Add simulcast sub-streams from lower resolution to higher resolutions.
- // Add simulcast streams, from highest resolution (|s| = number_streams -1)
- // to lowest resolution at |s| = 0.
- for (size_t s = num_simulcast_layers - 1;; --s) {
- streams[s].width = width;
- streams[s].height = height;
- // TODO(pbos): Fill actual temporal-layer bitrate thresholds.
- streams[s].max_qp = max_qp;
+ // Add simulcast sub-streams from lower resolution to higher resolutions.
+ // Add simulcast streams, from highest resolution (|s| = number_streams -1)
+ // to lowest resolution at |s| = 0.
+ for (size_t s = num_simulcast_layers - 1;; --s) {
+ streams[s].width = width;
+ streams[s].height = height;
+ // TODO(pbos): Fill actual temporal-layer bitrate thresholds.
+ streams[s].max_qp = max_qp;
+ if (is_screencast && s == 0) {
+ ScreenshareLayerConfig config = ScreenshareLayerConfig::GetDefault();
+ // For legacy screenshare in conference mode, tl0 and tl1 bitrates are
+ // piggybacked on the VideoCodec struct as target and max bitrates,
+ // respectively. See eg. webrtc::VP8EncoderImpl::SetRates().
+ streams[s].min_bitrate_bps = kMinVideoBitrateKbps * 1000;
+ streams[s].target_bitrate_bps = config.tl0_bitrate_kbps * 1000;
+ streams[s].max_bitrate_bps = config.tl1_bitrate_kbps * 1000;
+ streams[s].temporal_layer_thresholds_bps.clear();
+ streams[s].temporal_layer_thresholds_bps.push_back(
+ config.tl0_bitrate_kbps * 1000);
+ streams[s].max_framerate = 5;
+ } else {
streams[s].temporal_layer_thresholds_bps.resize(
kDefaultConferenceNumberOfTemporalLayers[s] - 1);
streams[s].max_bitrate_bps = FindSimulcastMaxBitrateBps(width, height);
@@ -259,19 +228,20 @@
FindSimulcastTargetBitrateBps(width, height);
streams[s].min_bitrate_bps = FindSimulcastMinBitrateBps(width, height);
streams[s].max_framerate = max_framerate;
+ }
+ if (!is_screencast) {
width /= 2;
height /= 2;
-
- if (s == 0)
- break;
}
+ if (s == 0)
+ break;
+ }
- // Spend additional bits to boost the max stream.
- int bitrate_left_bps = max_bitrate_bps - GetTotalMaxBitrateBps(streams);
- if (bitrate_left_bps > 0) {
- streams.back().max_bitrate_bps += bitrate_left_bps;
- }
+ // Spend additional bits to boost the max stream.
+ int bitrate_left_bps = max_bitrate_bps - GetTotalMaxBitrateBps(streams);
+ if (bitrate_left_bps > 0) {
+ streams.back().max_bitrate_bps += bitrate_left_bps;
}
return streams;
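
To make the restructured screenshare path above concrete: the base stream (s == 0) keeps the legacy conference layout, so if ScreenshareLayerConfig::GetDefault() gives, say, tl0 = 200 kbps and tl1 = 1000 kbps (illustrative values only, check the current defaults), stream 0 ends up with target_bitrate_bps = 200000, max_bitrate_bps = 1000000, a single temporal-layer threshold at 200000 bps, and max_framerate = 5. When simulcast screenshare is enabled, the upper stream instead takes the regular min/target/max bitrates from the FindSimulcast*BitrateBps tables at the full (non-halved) resolution, kDefaultConferenceNumberOfTemporalLayers[s] - 1 temporal-layer thresholds, and the configured max_framerate.
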
diff --git a/webrtc/modules/video_coding/media_optimization.cc b/webrtc/modules/video_coding/media_optimization.cc
index ccd23f6..f0cbd62 100644
--- a/webrtc/modules/video_coding/media_optimization.cc
+++ b/webrtc/modules/video_coding/media_optimization.cc
@@ -118,7 +118,13 @@
// Update encoding rates following protection settings.
float target_video_bitrate_kbps =
static_cast<float>(video_target_bitrate_) / 1000.0f;
- frame_dropper_->SetRates(target_video_bitrate_kbps, incoming_frame_rate_);
+ float framerate = incoming_frame_rate_;
+ if (framerate == 0.0) {
+ // No framerate estimate available, use configured max framerate instead.
+ framerate = user_frame_rate_;
+ }
+
+ frame_dropper_->SetRates(target_video_bitrate_kbps, framerate);
return video_target_bitrate_;
}
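
The intent of the media_optimization.cc change above is that, before any frames have been observed, the frame dropper is fed the configured frame rate instead of a zero estimate. A one-line sketch of the fallback, with illustrative names rather than the real MediaOptimization members:

// Sketch only: prefer the measured rate when available, otherwise fall back
// to the configured maximum so rate computations have a sane denominator.
inline float EffectiveFramerate(float measured_fps, float configured_max_fps) {
  return measured_fps > 0.0f ? measured_fps : configured_max_fps;
}
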
diff --git a/webrtc/modules/video_coding/video_sender.cc b/webrtc/modules/video_coding/video_sender.cc
index 0b54d13..50d5620 100644
--- a/webrtc/modules/video_coding/video_sender.cc
+++ b/webrtc/modules/video_coding/video_sender.cc
@@ -103,6 +103,11 @@
numLayers = sendCodec->VP8().numberOfTemporalLayers;
} else if (sendCodec->codecType == kVideoCodecVP9) {
numLayers = sendCodec->VP9().numberOfTemporalLayers;
+ } else if (sendCodec->codecType == kVideoCodecGeneric &&
+ sendCodec->numberOfSimulcastStreams > 0) {
+ // This is mainly for unit testing, disabling frame dropping.
+ // TODO(sprang): Add a better way to disable frame dropping.
+ numLayers = sendCodec->simulcastStream[0].numberOfTemporalLayers;
} else {
numLayers = 1;
}
diff --git a/webrtc/test/fake_encoder.cc b/webrtc/test/fake_encoder.cc
index fce12c6..fdad4ca 100644
--- a/webrtc/test/fake_encoder.cc
+++ b/webrtc/test/fake_encoder.cc
@@ -24,11 +24,15 @@
namespace webrtc {
namespace test {
+const int kKeyframeSizeFactor = 10;
+
FakeEncoder::FakeEncoder(Clock* clock)
: clock_(clock),
callback_(nullptr),
+ configured_input_framerate_(-1),
max_target_bitrate_kbps_(-1),
- last_encode_time_ms_(0) {
+ pending_keyframe_(true),
+ debt_bytes_(0) {
// Generate some arbitrary not-all-zero data
for (size_t i = 0; i < sizeof(encoded_buffer_); ++i) {
encoded_buffer_[i] = static_cast<uint8_t>(i);
@@ -47,6 +51,8 @@
rtc::CritScope cs(&crit_sect_);
config_ = *config;
target_bitrate_.SetBitrate(0, 0, config_.startBitrate * 1000);
+ configured_input_framerate_ = config_.maxFramerate;
+ pending_keyframe_ = true;
return 0;
}
@@ -59,9 +65,10 @@
EncodedImageCallback* callback;
uint32_t target_bitrate_sum_kbps;
int max_target_bitrate_kbps;
- int64_t last_encode_time_ms;
size_t num_encoded_bytes;
+ int framerate;
VideoCodecMode mode;
+ bool keyframe;
{
rtc::CritScope cs(&crit_sect_);
max_framerate = config_.maxFramerate;
@@ -72,42 +79,32 @@
callback = callback_;
target_bitrate_sum_kbps = target_bitrate_.get_sum_kbps();
max_target_bitrate_kbps = max_target_bitrate_kbps_;
- last_encode_time_ms = last_encode_time_ms_;
num_encoded_bytes = sizeof(encoded_buffer_);
mode = config_.mode;
+ if (configured_input_framerate_ > 0) {
+ framerate = configured_input_framerate_;
+ } else {
+ framerate = max_framerate;
+ }
+ keyframe = pending_keyframe_;
+ pending_keyframe_ = false;
}
- int64_t time_now_ms = clock_->TimeInMilliseconds();
- const bool first_encode = (last_encode_time_ms == 0);
+ for (FrameType frame_type : *frame_types) {
+ if (frame_type == kVideoFrameKey) {
+ keyframe = true;
+ break;
+ }
+ }
+
RTC_DCHECK_GT(max_framerate, 0);
- int64_t time_since_last_encode_ms = 1000 / max_framerate;
- if (!first_encode) {
- // For all frames but the first we can estimate the display time by looking
- // at the display time of the previous frame.
- time_since_last_encode_ms = time_now_ms - last_encode_time_ms;
- }
- if (time_since_last_encode_ms > 3 * 1000 / max_framerate) {
- // Rudimentary check to make sure we don't widely overshoot bitrate target
- // when resuming encoding after a suspension.
- time_since_last_encode_ms = 3 * 1000 / max_framerate;
- }
- size_t bits_available =
- static_cast<size_t>(target_bitrate_sum_kbps * time_since_last_encode_ms);
- size_t min_bits = static_cast<size_t>(simulcast_streams[0].minBitrate *
- time_since_last_encode_ms);
+ size_t bitrate =
+ std::max(target_bitrate_sum_kbps, simulcast_streams[0].minBitrate);
+ if (max_target_bitrate_kbps > 0)
+ bitrate = std::min(bitrate, static_cast<size_t>(max_target_bitrate_kbps));
- if (bits_available < min_bits)
- bits_available = min_bits;
- size_t max_bits =
- static_cast<size_t>(max_target_bitrate_kbps * time_since_last_encode_ms);
- if (max_bits > 0 && max_bits < bits_available)
- bits_available = max_bits;
-
- {
- rtc::CritScope cs(&crit_sect_);
- last_encode_time_ms_ = time_now_ms;
- }
+ size_t bits_available = bitrate * 1000 / framerate;
RTC_DCHECK_GT(num_simulcast_streams, 0);
for (unsigned char i = 0; i < num_simulcast_streams; ++i) {
@@ -116,18 +113,27 @@
specifics.codecType = kVideoCodecGeneric;
specifics.codecSpecific.generic.simulcast_idx = i;
size_t min_stream_bits = static_cast<size_t>(
- simulcast_streams[i].minBitrate * time_since_last_encode_ms);
+ (simulcast_streams[i].minBitrate * 1000) / framerate);
size_t max_stream_bits = static_cast<size_t>(
- simulcast_streams[i].maxBitrate * time_since_last_encode_ms);
+ (simulcast_streams[i].maxBitrate * 1000) / framerate);
size_t stream_bits = (bits_available > max_stream_bits) ? max_stream_bits :
bits_available;
size_t stream_bytes = (stream_bits + 7) / 8;
- if (first_encode) {
+ if (keyframe) {
// The first frame is a key frame and should be larger.
- // TODO(holmer): The FakeEncoder should store the bits_available between
- // encodes so that it can compensate for oversized frames.
- stream_bytes *= 10;
+ // Store the overshoot bytes and distribute them over the coming frames,
+ // so that we on average meet the bitrate target.
+ debt_bytes_ += (kKeyframeSizeFactor - 1) * stream_bytes;
+ stream_bytes *= kKeyframeSizeFactor;
+ } else {
+ if (debt_bytes_ > 0) {
+ // Pay at most half of the frame size for old debts.
+ size_t payment_size = std::min(stream_bytes / 2, debt_bytes_);
+ debt_bytes_ -= payment_size;
+ stream_bytes -= payment_size;
+ }
}
+
if (stream_bytes > num_encoded_bytes)
stream_bytes = num_encoded_bytes;
@@ -175,6 +181,7 @@
uint32_t framerate) {
rtc::CritScope cs(&crit_sect_);
target_bitrate_ = rate_allocation;
+ configured_input_framerate_ = framerate;
return 0;
}
@@ -183,6 +190,11 @@
return kImplementationName;
}
+int FakeEncoder::GetConfiguredInputFramerate() const {
+ rtc::CritScope cs(&crit_sect_);
+ return configured_input_framerate_;
+}
+
FakeH264Encoder::FakeH264Encoder(Clock* clock)
: FakeEncoder(clock), callback_(nullptr), idr_counter_(0) {
FakeEncoder::RegisterEncodeCompleteCallback(this);
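
A worked example of the new FakeEncoder rate model (hypothetical numbers, ignoring the per-stream min/max clamping): with a 400 kbps target and a configured input frame rate of 20 fps, the per-frame budget is 400 * 1000 / 20 = 20000 bits, i.e. 2500 bytes. A key frame is emitted at kKeyframeSizeFactor (10) times that, 25000 bytes, and (10 - 1) * 2500 = 22500 bytes are recorded as debt. Each subsequent delta frame pays down at most half of its own size, so a 2500-byte delta frame goes out as 1250 bytes and retires 1250 bytes of debt; after 18 such frames (0.9 s at 20 fps) the debt is gone and the total output, 25000 + 18 * 1250 = 47500 bytes over 19 frames, matches the 19 * 2500 byte budget exactly.
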
diff --git a/webrtc/test/fake_encoder.h b/webrtc/test/fake_encoder.h
index e3878ec..4487c52 100644
--- a/webrtc/test/fake_encoder.h
+++ b/webrtc/test/fake_encoder.h
@@ -45,6 +45,7 @@
int32_t SetRateAllocation(const BitrateAllocation& rate_allocation,
uint32_t framerate) override;
const char* ImplementationName() const override;
+ int GetConfiguredInputFramerate() const;
static const char* kImplementationName;
@@ -53,11 +54,16 @@
VideoCodec config_ GUARDED_BY(crit_sect_);
EncodedImageCallback* callback_ GUARDED_BY(crit_sect_);
BitrateAllocation target_bitrate_ GUARDED_BY(crit_sect_);
+ int configured_input_framerate_ GUARDED_BY(crit_sect_);
int max_target_bitrate_kbps_ GUARDED_BY(crit_sect_);
- int64_t last_encode_time_ms_ GUARDED_BY(crit_sect_);
+ bool pending_keyframe_ GUARDED_BY(crit_sect_);
rtc::CriticalSection crit_sect_;
uint8_t encoded_buffer_[100000];
+
+ // Current byte debt to be payed over a number of frames.
+ // The debt is acquired by keyframes overshooting the bitrate target.
+ size_t debt_bytes_;
};
class FakeH264Encoder : public FakeEncoder, public EncodedImageCallback {
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index de3da25..97b2f6a 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -937,10 +937,12 @@
void TriggerLossReport(const RTPHeader& header) {
// Send lossy receive reports to trigger FEC enabling.
- if (packet_count_++ % 2 != 0) {
- // Receive statistics reporting having lost 50% of the packets.
+ const int kLossPercent = 5;
+ if (packet_count_++ % (100 / kLossPercent) != 0) {
FakeReceiveStatistics lossy_receive_stats(
- kVideoSendSsrcs[0], header.sequenceNumber, packet_count_ / 2, 127);
+ kVideoSendSsrcs[0], header.sequenceNumber,
+ (packet_count_ * (100 - kLossPercent)) / 100, // Cumulative lost.
+ static_cast<uint8_t>((255 * kLossPercent) / 100)); // Loss percent.
RTCPSender rtcp_sender(false, Clock::GetRealTimeClock(),
&lossy_receive_stats, nullptr, nullptr,
transport_adapter_.get());
@@ -993,6 +995,35 @@
// Make sure there is at least one extension header, to make the RTP
// header larger than the base length of 12 bytes.
EXPECT_FALSE(send_config->rtp.extensions.empty());
+
+ // Setup screen content disables frame dropping which makes this easier.
+ class VideoStreamFactory
+ : public VideoEncoderConfig::VideoStreamFactoryInterface {
+ public:
+ explicit VideoStreamFactory(size_t num_temporal_layers)
+ : num_temporal_layers_(num_temporal_layers) {
+ EXPECT_GT(num_temporal_layers, 0u);
+ }
+
+ private:
+ std::vector<VideoStream> CreateEncoderStreams(
+ int width,
+ int height,
+ const VideoEncoderConfig& encoder_config) override {
+ std::vector<VideoStream> streams =
+ test::CreateVideoStreams(width, height, encoder_config);
+ for (VideoStream& stream : streams) {
+ stream.temporal_layer_thresholds_bps.resize(num_temporal_layers_ -
+ 1);
+ }
+ return streams;
+ }
+ const size_t num_temporal_layers_;
+ };
+
+ encoder_config->video_stream_factory =
+ new rtc::RefCountedObject<VideoStreamFactory>(2);
+ encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
}
void PerformTest() override {
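
Quick arithmetic on the new loss-report constants above: with kLossPercent = 5, the condition packet_count_ % (100 / 5) != 0 sends a receiver report for 19 out of every 20 packets, and the fraction-lost byte becomes (255 * 5) / 100 = 12, i.e. roughly 4.7% reported loss, instead of the previous 127 (about 50%). That keeps the lossy reports frequent enough for the test to enable FEC while advertising a much milder loss rate than before.
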
diff --git a/webrtc/video/vie_encoder.cc b/webrtc/video/vie_encoder.cc
index 203d1c4..6e48f97 100644
--- a/webrtc/video/vie_encoder.cc
+++ b/webrtc/video/vie_encoder.cc
@@ -211,7 +211,6 @@
// the used degradation_preference.
switch (degradation_preference_) {
case VideoSendStream::DegradationPreference::kBalanced:
- FALLTHROUGH();
case VideoSendStream::DegradationPreference::kMaintainFramerate:
wants.max_framerate_fps = std::numeric_limits<int>::max();
break;
@@ -677,13 +676,14 @@
int64_t now_ms = clock_->TimeInMilliseconds();
if (pending_encoder_reconfiguration_) {
ReconfigureEncoder();
+ last_parameters_update_ms_.emplace(now_ms);
} else if (!last_parameters_update_ms_ ||
now_ms - *last_parameters_update_ms_ >=
vcm::VCMProcessTimer::kDefaultProcessIntervalMs) {
video_sender_.UpdateChannelParemeters(rate_allocator_.get(),
bitrate_observer_);
+ last_parameters_update_ms_.emplace(now_ms);
}
- last_parameters_update_ms_.emplace(now_ms);
if (EncoderPaused()) {
TraceFrameDropStart();
@@ -806,7 +806,6 @@
int max_downgrades = 0;
switch (degradation_preference_) {
case VideoSendStream::DegradationPreference::kBalanced:
- FALLTHROUGH();
case VideoSendStream::DegradationPreference::kMaintainFramerate:
max_downgrades = kMaxCpuResolutionDowngrades;
if (downgrade_requested &&
@@ -842,7 +841,6 @@
switch (degradation_preference_) {
case VideoSendStream::DegradationPreference::kBalanced:
- FALLTHROUGH();
case VideoSendStream::DegradationPreference::kMaintainFramerate:
// Scale down resolution.
if (!source_proxy_->RequestResolutionLowerThan(
@@ -890,7 +888,6 @@
switch (degradation_preference_) {
case VideoSendStream::DegradationPreference::kBalanced:
- FALLTHROUGH();
case VideoSendStream::DegradationPreference::kMaintainFramerate:
if (adapt_up_requested &&
adaptation_request.input_pixel_count_ <=
@@ -910,7 +907,6 @@
switch (degradation_preference_) {
case VideoSendStream::DegradationPreference::kBalanced:
- FALLTHROUGH();
case VideoSendStream::DegradationPreference::kMaintainFramerate: {
// Scale up resolution.
int pixel_count = adaptation_request.input_pixel_count_;
diff --git a/webrtc/video/vie_encoder_unittest.cc b/webrtc/video/vie_encoder_unittest.cc
index 7a54fbe..4d72a82 100644
--- a/webrtc/video/vie_encoder_unittest.cc
+++ b/webrtc/video/vie_encoder_unittest.cc
@@ -218,6 +218,7 @@
: video_send_config_(VideoSendStream::Config(nullptr)),
codec_width_(320),
codec_height_(240),
+ max_framerate_(30),
fake_encoder_(),
stats_proxy_(new MockableSendStatisticsProxy(
Clock::GetRealTimeClock(),
@@ -234,7 +235,17 @@
VideoEncoderConfig video_encoder_config;
test::FillEncoderConfiguration(1, &video_encoder_config);
+ video_encoder_config.video_stream_factory =
+ new rtc::RefCountedObject<VideoStreamFactory>(1);
video_encoder_config_ = video_encoder_config.Copy();
+
+ // Framerate limit is specified by the VideoStreamFactory.
+ std::vector<VideoStream> streams =
+ video_encoder_config.video_stream_factory->CreateEncoderStreams(
+ codec_width_, codec_height_, video_encoder_config);
+ max_framerate_ = streams[0].max_framerate;
+ fake_clock_.SetTimeMicros(1234);
+
ConfigureEncoder(std::move(video_encoder_config), true /* nack_enabled */);
}
@@ -257,7 +268,8 @@
void ResetEncoder(const std::string& payload_name,
size_t num_streams,
size_t num_temporal_layers,
- bool nack_enabled) {
+ bool nack_enabled,
+ bool screenshare) {
video_send_config_.encoder_settings.payload_name = payload_name;
VideoEncoderConfig video_encoder_config;
@@ -265,6 +277,9 @@
video_encoder_config.max_bitrate_bps = kTargetBitrateBps;
video_encoder_config.video_stream_factory =
new rtc::RefCountedObject<VideoStreamFactory>(num_temporal_layers);
+ video_encoder_config.content_type =
+ screenshare ? VideoEncoderConfig::ContentType::kScreen
+ : VideoEncoderConfig::ContentType::kRealtimeVideo;
ConfigureEncoder(std::move(video_encoder_config), nack_enabled);
}
@@ -324,6 +339,33 @@
EXPECT_FALSE(wants.target_pixel_count);
}
+ void WaitForEncodedFrame(int64_t expected_ntp_time) {
+ sink_.WaitForEncodedFrame(expected_ntp_time);
+ fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec / max_framerate_);
+ }
+
+ bool TimedWaitForEncodedFrame(int64_t expected_ntp_time, int64_t timeout_ms) {
+ bool ok = sink_.TimedWaitForEncodedFrame(expected_ntp_time, timeout_ms);
+ fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec / max_framerate_);
+ return ok;
+ }
+
+ void WaitForEncodedFrame(uint32_t expected_width, uint32_t expected_height) {
+ sink_.WaitForEncodedFrame(expected_width, expected_height);
+ fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec / max_framerate_);
+ }
+
+ void ExpectDroppedFrame() {
+ sink_.ExpectDroppedFrame();
+ fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec / max_framerate_);
+ }
+
+ bool WaitForFrame(int64_t timeout_ms) {
+ bool ok = sink_.WaitForFrame(timeout_ms);
+ fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec / max_framerate_);
+ return ok;
+ }
+
class TestEncoder : public test::FakeEncoder {
public:
TestEncoder()
@@ -432,13 +474,21 @@
: test_encoder_(test_encoder), encoded_frame_event_(false, false) {}
void WaitForEncodedFrame(int64_t expected_ntp_time) {
+ EXPECT_TRUE(
+ TimedWaitForEncodedFrame(expected_ntp_time, kDefaultTimeoutMs));
+ }
+
+ bool TimedWaitForEncodedFrame(int64_t expected_ntp_time,
+ int64_t timeout_ms) {
uint32_t timestamp = 0;
- EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs));
+ if (!encoded_frame_event_.Wait(timeout_ms))
+ return false;
{
rtc::CritScope lock(&crit_);
timestamp = last_timestamp_;
}
test_encoder_->CheckLastTimeStampsMatch(expected_ntp_time, timestamp);
+ return true;
}
void WaitForEncodedFrame(uint32_t expected_width,
@@ -517,18 +567,20 @@
VideoEncoderConfig video_encoder_config_;
int codec_width_;
int codec_height_;
+ int max_framerate_;
TestEncoder fake_encoder_;
std::unique_ptr<MockableSendStatisticsProxy> stats_proxy_;
TestSink sink_;
AdaptingFrameForwarder video_source_;
std::unique_ptr<ViEEncoderUnderTest> vie_encoder_;
+ rtc::ScopedFakeClock fake_clock_;
};
TEST_F(ViEEncoderTest, EncodeOneFrame) {
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
rtc::Event frame_destroyed_event(false, false);
video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeoutMs));
vie_encoder_->Stop();
}
@@ -542,14 +594,14 @@
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
- sink_.WaitForEncodedFrame(2);
+ WaitForEncodedFrame(2);
vie_encoder_->Stop();
}
TEST_F(ViEEncoderTest, DropsFramesWhenRateSetToZero) {
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
vie_encoder_->OnBitrateUpdated(0, 0, 0);
// Dropped since bitrate is zero.
@@ -557,20 +609,20 @@
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr));
- sink_.WaitForEncodedFrame(3);
+ WaitForEncodedFrame(3);
vie_encoder_->Stop();
}
TEST_F(ViEEncoderTest, DropsFramesWithSameOrOldNtpTimestamp) {
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
// This frame will be dropped since it has the same ntp timestamp.
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
- sink_.WaitForEncodedFrame(2);
+ WaitForEncodedFrame(2);
vie_encoder_->Stop();
}
@@ -578,7 +630,7 @@
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
vie_encoder_->Stop();
sink_.SetExpectNoFrames();
@@ -592,13 +644,13 @@
fake_encoder_.BlockNextEncode();
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
// Here, the encoder thread will be blocked in the TestEncoder waiting for a
// call to ContinueEncode.
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr));
fake_encoder_.ContinueEncode();
- sink_.WaitForEncodedFrame(3);
+ WaitForEncodedFrame(3);
vie_encoder_->Stop();
}
@@ -609,7 +661,7 @@
// Capture a frame and wait for it to synchronize with the encoder thread.
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
// The encoder will have been configured once when the first frame is
// received.
EXPECT_EQ(1, sink_.number_of_reconfigurations());
@@ -622,7 +674,7 @@
// Capture a frame and wait for it to synchronize with the encoder thread.
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
- sink_.WaitForEncodedFrame(2);
+ WaitForEncodedFrame(2);
EXPECT_EQ(2, sink_.number_of_reconfigurations());
EXPECT_EQ(9999, sink_.last_min_transmit_bitrate());
@@ -634,7 +686,7 @@
// Capture a frame and wait for it to synchronize with the encoder thread.
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
// The encoder will have been configured once.
EXPECT_EQ(1, sink_.number_of_reconfigurations());
EXPECT_EQ(codec_width_, fake_encoder_.codec_config().width);
@@ -645,7 +697,7 @@
// Capture a frame with a higher resolution and wait for it to synchronize
// with the encoder thread.
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
- sink_.WaitForEncodedFrame(2);
+ WaitForEncodedFrame(2);
EXPECT_EQ(codec_width_, fake_encoder_.codec_config().width);
EXPECT_EQ(codec_height_, fake_encoder_.codec_config().height);
EXPECT_EQ(2, sink_.number_of_reconfigurations());
@@ -657,12 +709,12 @@
const bool kNackEnabled = true;
const size_t kNumStreams = 1;
const size_t kNumTl = 1;
- ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled);
+ ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled, false);
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
// Capture a frame and wait for it to synchronize with the encoder thread.
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
// The encoder have been configured once when the first frame is received.
EXPECT_EQ(1, sink_.number_of_reconfigurations());
EXPECT_EQ(kVideoCodecVP8, fake_encoder_.codec_config().codecType);
@@ -677,12 +729,12 @@
const bool kNackEnabled = true;
const size_t kNumStreams = 2;
const size_t kNumTl = 1;
- ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled);
+ ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled, false);
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
// Capture a frame and wait for it to synchronize with the encoder thread.
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
// The encoder have been configured once when the first frame is received.
EXPECT_EQ(1, sink_.number_of_reconfigurations());
EXPECT_EQ(kVideoCodecVP8, fake_encoder_.codec_config().codecType);
@@ -697,12 +749,12 @@
const bool kNackEnabled = false;
const size_t kNumStreams = 1;
const size_t kNumTl = 1;
- ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled);
+ ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled, false);
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
// Capture a frame and wait for it to synchronize with the encoder thread.
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
// The encoder have been configured once when the first frame is received.
EXPECT_EQ(1, sink_.number_of_reconfigurations());
EXPECT_EQ(kVideoCodecVP8, fake_encoder_.codec_config().codecType);
@@ -717,12 +769,12 @@
const bool kNackEnabled = true;
const size_t kNumStreams = 1;
const size_t kNumTl = 2;
- ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled);
+ ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled, false);
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
// Capture a frame and wait for it to synchronize with the encoder thread.
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
// The encoder have been configured once when the first frame is received.
EXPECT_EQ(1, sink_.number_of_reconfigurations());
EXPECT_EQ(kVideoCodecVP8, fake_encoder_.codec_config().codecType);
@@ -766,7 +818,7 @@
for (int i = 1; i <= kMaxDowngrades; ++i) {
video_source_.IncomingCapturedFrame(
CreateFrame(i, frame_width, frame_height));
- sink_.WaitForEncodedFrame(i);
+ WaitForEncodedFrame(i);
vie_encoder_->TriggerCpuOveruse();
@@ -785,7 +837,7 @@
rtc::VideoSinkWants current_wants = video_source_.sink_wants();
video_source_.IncomingCapturedFrame(
CreateFrame(kMaxDowngrades + 1, frame_width, frame_height));
- sink_.WaitForEncodedFrame(kMaxDowngrades + 1);
+ WaitForEncodedFrame(kMaxDowngrades + 1);
vie_encoder_->TriggerCpuOveruse();
EXPECT_EQ(video_source_.sink_wants().target_pixel_count,
current_wants.target_pixel_count);
@@ -820,14 +872,14 @@
video_source_.IncomingCapturedFrame(
CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
- sink_.WaitForEncodedFrame(frame_timestamp);
+ WaitForEncodedFrame(frame_timestamp);
frame_timestamp += kFrameIntervalMs;
// Trigger CPU overuse.
vie_encoder_->TriggerCpuOveruse();
video_source_.IncomingCapturedFrame(
CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
- sink_.WaitForEncodedFrame(frame_timestamp);
+ WaitForEncodedFrame(frame_timestamp);
frame_timestamp += kFrameIntervalMs;
// Default degradation preference is maintain-framerate, so will lower max
@@ -857,7 +909,7 @@
vie_encoder_->TriggerCpuOveruse();
new_video_source.IncomingCapturedFrame(
CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
- sink_.WaitForEncodedFrame(frame_timestamp);
+ WaitForEncodedFrame(frame_timestamp);
frame_timestamp += kFrameIntervalMs;
// Some framerate constraint should be set.
@@ -875,7 +927,7 @@
vie_encoder_->TriggerCpuOveruse();
new_video_source.IncomingCapturedFrame(
CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
- sink_.WaitForEncodedFrame(frame_timestamp);
+ WaitForEncodedFrame(frame_timestamp);
frame_timestamp += kFrameIntervalMs;
// Still no degradation.
@@ -909,7 +961,7 @@
const int kWidth = 1280;
const int kHeight = 720;
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
VideoSendStream::Stats stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.bw_limited_resolution);
EXPECT_EQ(0, stats.number_of_quality_adapt_changes);
@@ -917,7 +969,7 @@
// Trigger adapt down.
vie_encoder_->TriggerQualityLow();
video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
- sink_.WaitForEncodedFrame(2);
+ WaitForEncodedFrame(2);
stats = stats_proxy_->GetStats();
EXPECT_TRUE(stats.bw_limited_resolution);
@@ -926,7 +978,7 @@
// Trigger adapt up.
vie_encoder_->TriggerQualityHigh();
video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
- sink_.WaitForEncodedFrame(3);
+ WaitForEncodedFrame(3);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.bw_limited_resolution);
@@ -942,7 +994,7 @@
const int kWidth = 1280;
const int kHeight = 720;
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
VideoSendStream::Stats stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.cpu_limited_resolution);
EXPECT_EQ(0, stats.number_of_cpu_adapt_changes);
@@ -950,7 +1002,7 @@
// Trigger CPU overuse.
vie_encoder_->TriggerCpuOveruse();
video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
- sink_.WaitForEncodedFrame(2);
+ WaitForEncodedFrame(2);
stats = stats_proxy_->GetStats();
EXPECT_TRUE(stats.cpu_limited_resolution);
@@ -959,7 +1011,7 @@
// Trigger CPU normal use.
vie_encoder_->TriggerCpuNormalUsage();
video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
- sink_.WaitForEncodedFrame(3);
+ WaitForEncodedFrame(3);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.cpu_limited_resolution);
@@ -975,7 +1027,7 @@
const int kWidth = 1280;
const int kHeight = 720;
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
VideoSendStream::Stats stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.bw_limited_resolution);
EXPECT_FALSE(stats.cpu_limited_resolution);
@@ -984,7 +1036,7 @@
// Trigger CPU overuse.
vie_encoder_->TriggerCpuOveruse();
video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
- sink_.WaitForEncodedFrame(2);
+ WaitForEncodedFrame(2);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.bw_limited_resolution);
EXPECT_TRUE(stats.cpu_limited_resolution);
@@ -997,7 +1049,7 @@
VideoSendStream::DegradationPreference::kMaintainFramerate);
new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
- sink_.WaitForEncodedFrame(3);
+ WaitForEncodedFrame(3);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.bw_limited_resolution);
EXPECT_TRUE(stats.cpu_limited_resolution);
@@ -1009,7 +1061,7 @@
VideoSendStream::DegradationPreference::kDegradationDisabled);
new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
- sink_.WaitForEncodedFrame(4);
+ WaitForEncodedFrame(4);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.bw_limited_resolution);
EXPECT_FALSE(stats.cpu_limited_resolution);
@@ -1021,7 +1073,7 @@
VideoSendStream::DegradationPreference::kMaintainFramerate);
new_video_source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight));
- sink_.WaitForEncodedFrame(5);
+ WaitForEncodedFrame(5);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.bw_limited_resolution);
EXPECT_TRUE(stats.cpu_limited_resolution);
@@ -1030,7 +1082,7 @@
// Trigger CPU normal use.
vie_encoder_->TriggerCpuNormalUsage();
new_video_source.IncomingCapturedFrame(CreateFrame(6, kWidth, kHeight));
- sink_.WaitForEncodedFrame(6);
+ WaitForEncodedFrame(6);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.bw_limited_resolution);
EXPECT_FALSE(stats.cpu_limited_resolution);
@@ -1046,7 +1098,7 @@
const int kWidth = 1280;
const int kHeight = 720;
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
VideoSendStream::Stats stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.bw_limited_resolution);
EXPECT_FALSE(stats.bw_limited_framerate);
@@ -1058,7 +1110,7 @@
VideoSendStream::DegradationPreference::kBalanced);
new_video_source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
- sink_.WaitForEncodedFrame(2);
+ WaitForEncodedFrame(2);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.bw_limited_resolution);
EXPECT_FALSE(stats.bw_limited_framerate);
@@ -1067,7 +1119,7 @@
// Trigger adapt down.
vie_encoder_->TriggerQualityLow();
new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
- sink_.WaitForEncodedFrame(3);
+ WaitForEncodedFrame(3);
stats = stats_proxy_->GetStats();
EXPECT_TRUE(stats.bw_limited_resolution);
EXPECT_FALSE(stats.bw_limited_framerate);
@@ -1078,7 +1130,7 @@
VideoSendStream::DegradationPreference::kBalanced);
new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
- sink_.WaitForEncodedFrame(4);
+ WaitForEncodedFrame(4);
stats = stats_proxy_->GetStats();
EXPECT_TRUE(stats.bw_limited_resolution);
EXPECT_FALSE(stats.bw_limited_framerate);
@@ -1090,7 +1142,7 @@
VideoSendStream::DegradationPreference::kMaintainResolution);
new_video_source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight));
- sink_.WaitForEncodedFrame(5);
+ WaitForEncodedFrame(5);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.bw_limited_resolution);
EXPECT_FALSE(stats.bw_limited_framerate);
@@ -1107,7 +1159,7 @@
const int kHeight = 720;
video_source_.set_adaptation_enabled(true);
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1115,7 +1167,7 @@
// Trigger adapt down.
vie_encoder_->TriggerQualityLow();
video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
- sink_.WaitForEncodedFrame(2);
+ WaitForEncodedFrame(2);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1123,7 +1175,7 @@
// Trigger overuse.
vie_encoder_->TriggerCpuOveruse();
video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
- sink_.WaitForEncodedFrame(3);
+ WaitForEncodedFrame(3);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1135,7 +1187,7 @@
VideoSendStream::DegradationPreference::kMaintainFramerate);
video_source_.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
- sink_.WaitForEncodedFrame(4);
+ WaitForEncodedFrame(4);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1151,7 +1203,7 @@
int sequence = 1;
video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
- sink_.WaitForEncodedFrame(sequence++);
+ WaitForEncodedFrame(sequence++);
VideoSendStream::Stats stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.cpu_limited_resolution);
EXPECT_FALSE(stats.cpu_limited_framerate);
@@ -1160,7 +1212,7 @@
// Trigger CPU overuse, should now adapt down.
vie_encoder_->TriggerCpuOveruse();
video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
- sink_.WaitForEncodedFrame(sequence++);
+ WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
EXPECT_TRUE(stats.cpu_limited_resolution);
EXPECT_FALSE(stats.cpu_limited_framerate);
@@ -1174,7 +1226,7 @@
new_video_source.IncomingCapturedFrame(
CreateFrame(sequence, kWidth, kHeight));
- sink_.WaitForEncodedFrame(sequence++);
+ WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
EXPECT_TRUE(stats.cpu_limited_resolution);
EXPECT_FALSE(stats.cpu_limited_framerate);
@@ -1186,7 +1238,7 @@
VideoSendStream::DegradationPreference::kMaintainResolution);
new_video_source.IncomingCapturedFrame(
CreateFrame(sequence, kWidth, kHeight));
- sink_.WaitForEncodedFrame(sequence++);
+ WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
// Not adapted at first.
EXPECT_FALSE(stats.cpu_limited_resolution);
@@ -1203,7 +1255,7 @@
new_video_source.IncomingCapturedFrame(
CreateFrame(sequence, kWidth, kHeight));
- sink_.WaitForEncodedFrame(sequence++);
+ WaitForEncodedFrame(sequence++);
// Framerate now adapted.
stats = stats_proxy_->GetStats();
@@ -1217,7 +1269,7 @@
VideoSendStream::DegradationPreference::kDegradationDisabled);
new_video_source.IncomingCapturedFrame(
CreateFrame(sequence, kWidth, kHeight));
- sink_.WaitForEncodedFrame(sequence++);
+ WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.cpu_limited_resolution);
@@ -1239,7 +1291,7 @@
&video_source_,
VideoSendStream::DegradationPreference::kMaintainFramerate);
video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
- sink_.WaitForEncodedFrame(sequence++);
+ WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
EXPECT_TRUE(stats.cpu_limited_resolution);
EXPECT_FALSE(stats.cpu_limited_framerate);
@@ -1248,7 +1300,7 @@
// Trigger CPU normal usage.
vie_encoder_->TriggerCpuNormalUsage();
video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
- sink_.WaitForEncodedFrame(sequence++);
+ WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.cpu_limited_resolution);
EXPECT_FALSE(stats.cpu_limited_framerate);
@@ -1260,7 +1312,7 @@
VideoSendStream::DegradationPreference::kMaintainResolution);
new_video_source.IncomingCapturedFrame(
CreateFrame(sequence, kWidth, kHeight));
- sink_.WaitForEncodedFrame(sequence++);
+ WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
// Disabled, since we previously switched the source to disabled.
EXPECT_FALSE(stats.cpu_limited_resolution);
@@ -1271,7 +1323,7 @@
vie_encoder_->TriggerCpuNormalUsage();
new_video_source.IncomingCapturedFrame(
CreateFrame(sequence, kWidth, kHeight));
- sink_.WaitForEncodedFrame(sequence++);
+ WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.cpu_limited_resolution);
EXPECT_FALSE(stats.cpu_limited_framerate);
@@ -1287,7 +1339,7 @@
const int kWidth = 1280;
const int kHeight = 720;
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
VideoSendStream::Stats stats = stats_proxy_->GetStats();
EXPECT_EQ(video_encoder_config_.max_bitrate_bps,
@@ -1305,13 +1357,13 @@
VerifyNoLimitation(video_source_.sink_wants());
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
// Trigger scale down.
vie_encoder_->TriggerQualityLow();
video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
- sink_.WaitForEncodedFrame(2);
+ WaitForEncodedFrame(2);
// Expect a scale down.
EXPECT_TRUE(video_source_.sink_wants().max_pixel_count);
@@ -1326,7 +1378,7 @@
// Trigger scale down.
vie_encoder_->TriggerQualityLow();
new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
- sink_.WaitForEncodedFrame(3);
+ WaitForEncodedFrame(3);
// Expect no scaling.
EXPECT_EQ(std::numeric_limits<int>::max(),
@@ -1335,7 +1387,7 @@
// Trigger scale up.
vie_encoder_->TriggerQualityHigh();
new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
- sink_.WaitForEncodedFrame(4);
+ WaitForEncodedFrame(4);
// Expect nothing to change, still no scaling.
EXPECT_EQ(std::numeric_limits<int>::max(),
@@ -1355,7 +1407,7 @@
&source, VideoSendStream::DegradationPreference::kMaintainFramerate);
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
VerifyNoLimitation(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1387,7 +1439,7 @@
&source, VideoSendStream::DegradationPreference::kMaintainFramerate);
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
- sink_.WaitForEncodedFrame(kWidth, kHeight);
+ WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1412,7 +1464,7 @@
&source, VideoSendStream::DegradationPreference::kMaintainResolution);
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
- sink_.WaitForEncodedFrame(kWidth, kHeight);
+ WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1465,7 +1517,7 @@
&source, VideoSendStream::DegradationPreference::kMaintainFramerate);
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
VerifyNoLimitation(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -1473,7 +1525,7 @@
// Trigger adapt down, expect scaled down resolution.
vie_encoder_->TriggerQualityLow();
source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
- sink_.WaitForEncodedFrame(2);
+ WaitForEncodedFrame(2);
VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -1546,7 +1598,7 @@
int downscales = 0;
for (size_t i = 1; i <= kNumFrames; i++) {
video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
- sink_.WaitForEncodedFrame(i);
+ WaitForEncodedFrame(i);
// Trigger scale down.
rtc::VideoSinkWants last_wants = video_source_.sink_wants();
@@ -1577,7 +1629,7 @@
&source, VideoSendStream::DegradationPreference::kMaintainFramerate);
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
- sink_.WaitForEncodedFrame(kWidth, kHeight);
+ WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1585,7 +1637,7 @@
// Trigger adapt down, expect scaled down resolution.
vie_encoder_->TriggerCpuOveruse();
source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
- sink_.WaitForEncodedFrame(2);
+ WaitForEncodedFrame(2);
VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1593,7 +1645,7 @@
// Trigger adapt up, expect no restriction.
vie_encoder_->TriggerCpuNormalUsage();
source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
- sink_.WaitForEncodedFrame(kWidth, kHeight);
+ WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1601,7 +1653,7 @@
// Trigger adapt down, expect scaled down resolution.
vie_encoder_->TriggerCpuOveruse();
source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
- sink_.WaitForEncodedFrame(4);
+ WaitForEncodedFrame(4);
VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1630,7 +1682,7 @@
&source, VideoSendStream::DegradationPreference::kMaintainFramerate);
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
- sink_.WaitForEncodedFrame(kWidth, kHeight);
+ WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1640,7 +1692,7 @@
// Trigger cpu adapt down, expect scaled down resolution (960x540).
vie_encoder_->TriggerCpuOveruse();
source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
- sink_.WaitForEncodedFrame(2);
+ WaitForEncodedFrame(2);
VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1650,7 +1702,7 @@
// Trigger cpu adapt down, expect scaled down resolution (640x360).
vie_encoder_->TriggerCpuOveruse();
source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
- sink_.WaitForEncodedFrame(3);
+ WaitForEncodedFrame(3);
VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
rtc::VideoSinkWants last_wants = source.sink_wants();
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -1661,7 +1713,7 @@
// Trigger cpu adapt down, max cpu downgrades reached, expect no change.
vie_encoder_->TriggerCpuOveruse();
source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
- sink_.WaitForEncodedFrame(4);
+ WaitForEncodedFrame(4);
VerifyFpsEqResolutionEq(source.sink_wants(), last_wants);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1671,7 +1723,7 @@
// Trigger quality adapt down, expect scaled down resolution (480x270).
vie_encoder_->TriggerQualityLow();
source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight));
- sink_.WaitForEncodedFrame(5);
+ WaitForEncodedFrame(5);
VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1681,7 +1733,7 @@
// Trigger cpu adapt up, expect upscaled resolution (640x360).
vie_encoder_->TriggerCpuNormalUsage();
source.IncomingCapturedFrame(CreateFrame(6, kWidth, kHeight));
- sink_.WaitForEncodedFrame(6);
+ WaitForEncodedFrame(6);
VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1691,7 +1743,7 @@
// Trigger cpu adapt up, expect upscaled resolution (960x540).
vie_encoder_->TriggerCpuNormalUsage();
source.IncomingCapturedFrame(CreateFrame(7, kWidth, kHeight));
- sink_.WaitForEncodedFrame(7);
+ WaitForEncodedFrame(7);
VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
last_wants = source.sink_wants();
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -1702,7 +1754,7 @@
// Trigger cpu adapt up, no cpu downgrades, expect no change (960x540).
vie_encoder_->TriggerCpuNormalUsage();
source.IncomingCapturedFrame(CreateFrame(8, kWidth, kHeight));
- sink_.WaitForEncodedFrame(8);
+ WaitForEncodedFrame(8);
VerifyFpsEqResolutionEq(source.sink_wants(), last_wants);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1712,7 +1764,7 @@
// Trigger quality adapt up, expect no restriction (1280x720).
vie_encoder_->TriggerQualityHigh();
source.IncomingCapturedFrame(CreateFrame(9, kWidth, kHeight));
- sink_.WaitForEncodedFrame(kWidth, kHeight);
+ WaitForEncodedFrame(kWidth, kHeight);
VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
VerifyNoLimitation(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -1724,21 +1776,21 @@
}
TEST_F(ViEEncoderTest, CpuLimitedHistogramIsReported) {
- vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
const int kWidth = 640;
const int kHeight = 360;
+ vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+
for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
- sink_.WaitForEncodedFrame(i);
+ WaitForEncodedFrame(i);
}
vie_encoder_->TriggerCpuOveruse();
for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
video_source_.IncomingCapturedFrame(CreateFrame(
SendStatisticsProxy::kMinRequiredMetricsSamples + i, kWidth, kHeight));
- sink_.WaitForEncodedFrame(SendStatisticsProxy::kMinRequiredMetricsSamples +
- i);
+ WaitForEncodedFrame(SendStatisticsProxy::kMinRequiredMetricsSamples + i);
}
vie_encoder_->Stop();
@@ -1762,7 +1814,7 @@
for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
- sink_.WaitForEncodedFrame(i);
+ WaitForEncodedFrame(i);
}
vie_encoder_->Stop();
@@ -1793,26 +1845,25 @@
const int64_t kStartTimeMs = 1;
video_source_.IncomingCapturedFrame(
CreateFrame(kStartTimeMs, codec_width_, codec_height_));
- sink_.WaitForEncodedFrame(kStartTimeMs);
+ WaitForEncodedFrame(kStartTimeMs);
// Not called on second frame.
EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(expected_bitrate))
.Times(0);
video_source_.IncomingCapturedFrame(
CreateFrame(kStartTimeMs + 1, codec_width_, codec_height_));
- sink_.WaitForEncodedFrame(kStartTimeMs + 1);
+ WaitForEncodedFrame(kStartTimeMs + 1);
// Called after a process interval.
const int64_t kProcessIntervalMs =
vcm::VCMProcessTimer::kDefaultProcessIntervalMs;
- // TODO(sprang): ViEEncoder should die and/or get injectable clock.
- // Sleep for one processing interval plus one frame to avoid flakiness.
- SleepMs(kProcessIntervalMs + 1000 / kDefaultFps);
+ fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerMillisec *
+ (kProcessIntervalMs + (1000 / kDefaultFps)));
EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(expected_bitrate))
.Times(1);
video_source_.IncomingCapturedFrame(CreateFrame(
kStartTimeMs + kProcessIntervalMs, codec_width_, codec_height_));
- sink_.WaitForEncodedFrame(kStartTimeMs + kProcessIntervalMs);
+ WaitForEncodedFrame(kStartTimeMs + kProcessIntervalMs);
vie_encoder_->Stop();
}
@@ -1826,7 +1877,7 @@
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
// Expect to drop this frame, the wait should time out.
- sink_.ExpectDroppedFrame();
+ ExpectDroppedFrame();
// Expect the sink_wants to specify a scaled frame.
EXPECT_LT(video_source_.sink_wants().max_pixel_count, kWidth * kHeight);
@@ -1838,7 +1889,7 @@
CreateFrame(2, kWidth * 3 / 4, kHeight * 3 / 4));
// Expect to drop this frame, the wait should time out.
- sink_.ExpectDroppedFrame();
+ ExpectDroppedFrame();
EXPECT_LT(video_source_.sink_wants().max_pixel_count, last_pixel_count);
@@ -1855,11 +1906,11 @@
int i;
for (i = 1; i <= kMaxInitialFramedrop; ++i) {
video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
- sink_.ExpectDroppedFrame();
+ ExpectDroppedFrame();
}
// The n+1th frame should not be dropped, even though it's size is too large.
video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
- sink_.WaitForEncodedFrame(i);
+ WaitForEncodedFrame(i);
// Expect the sink_wants to specify a scaled frame.
EXPECT_LT(video_source_.sink_wants().max_pixel_count, kWidth * kHeight);
@@ -1879,7 +1930,7 @@
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
// Frame should not be dropped, even if it's too large.
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
vie_encoder_->Stop();
}
@@ -1896,7 +1947,7 @@
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
// Frame should not be dropped, even if it's too large.
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
vie_encoder_->Stop();
fake_encoder_.SetQualityScaling(true);
@@ -1917,7 +1968,7 @@
// Trigger adapt down, too small frame, expect no change.
source.IncomingCapturedFrame(CreateFrame(1, kTooSmallWidth, kTooSmallHeight));
- sink_.WaitForEncodedFrame(1);
+ WaitForEncodedFrame(1);
vie_encoder_->TriggerCpuOveruse();
VerifyNoLimitation(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -1929,12 +1980,12 @@
TEST_F(ViEEncoderTest, FailingInitEncodeDoesntCauseCrash) {
fake_encoder_.ForceInitEncodeFailure(true);
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
- ResetEncoder("VP8", 2, 1, true);
+ ResetEncoder("VP8", 2, 1, true, false);
const int kFrameWidth = 1280;
const int kFrameHeight = 720;
video_source_.IncomingCapturedFrame(
CreateFrame(1, kFrameWidth, kFrameHeight));
- sink_.ExpectDroppedFrame();
+ ExpectDroppedFrame();
vie_encoder_->Stop();
}
@@ -1950,29 +2001,28 @@
video_source_.IncomingCapturedFrame(
CreateFrame(1, kFrameWidth, kFrameHeight));
- sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight);
+ WaitForEncodedFrame(kFrameWidth, kFrameHeight);
// Trigger CPU overuse, downscale by 3/4.
vie_encoder_->TriggerCpuOveruse();
video_source_.IncomingCapturedFrame(
CreateFrame(2, kFrameWidth, kFrameHeight));
- sink_.WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4);
+ WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4);
// Trigger CPU normal use, return to original resolution.
vie_encoder_->TriggerCpuNormalUsage();
video_source_.IncomingCapturedFrame(
CreateFrame(3, kFrameWidth, kFrameHeight));
- sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight);
+ WaitForEncodedFrame(kFrameWidth, kFrameHeight);
vie_encoder_->Stop();
}
TEST_F(ViEEncoderTest, AdaptsFramerateOnOveruse_MaintainResolutionMode) {
- const int kDefaultFramerateFps = 30;
- const int kFrameIntervalMs = rtc::kNumMillisecsPerSec / kDefaultFramerateFps;
const int kFrameWidth = 1280;
const int kFrameHeight = 720;
- rtc::ScopedFakeClock fake_clock;
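+ // Space captured frames according to the fixture's configured max framerate.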
+ const int kFrameIntervalMs = rtc::kNumMillisecsPerSec / max_framerate_;
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
vie_encoder_->SetSource(
@@ -1980,93 +2030,82 @@
VideoSendStream::DegradationPreference::kMaintainResolution);
video_source_.set_adaptation_enabled(true);
- fake_clock.SetTimeMicros(kFrameIntervalMs * 1000);
- int64_t timestamp_ms = kFrameIntervalMs;
+ int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec;
video_source_.IncomingCapturedFrame(
CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
- sink_.WaitForEncodedFrame(timestamp_ms);
+ WaitForEncodedFrame(timestamp_ms);
// Try to trigger overuse. No fps estimate available => no effect.
vie_encoder_->TriggerCpuOveruse();
// Insert frames for one second to get a stable estimate.
- for (int i = 0; i < kDefaultFramerateFps; ++i) {
+ for (int i = 0; i < max_framerate_; ++i) {
timestamp_ms += kFrameIntervalMs;
- fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
video_source_.IncomingCapturedFrame(
CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
- sink_.WaitForEncodedFrame(timestamp_ms);
+ WaitForEncodedFrame(timestamp_ms);
}
// Trigger CPU overuse, reduce framerate by 2/3.
vie_encoder_->TriggerCpuOveruse();
int num_frames_dropped = 0;
- for (int i = 0; i < kDefaultFramerateFps; ++i) {
+ for (int i = 0; i < max_framerate_; ++i) {
timestamp_ms += kFrameIntervalMs;
- fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
video_source_.IncomingCapturedFrame(
CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
- if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
+ if (!WaitForFrame(kFrameTimeoutMs)) {
++num_frames_dropped;
} else {
sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
}
}
- // TODO(sprang): Find where there's rounding errors or stuff causing the
- // margin here to be a little larger than we'd like (input fps estimate is
- // off) and the frame dropping is a little too aggressive.
- const int kErrorMargin = 5;
- EXPECT_NEAR(num_frames_dropped,
- kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3),
+ // Add some slack to account for frames dropped by the frame dropper.
+ const int kErrorMargin = 1;
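+ // With the framerate reduced to 2/3 of max_framerate_, roughly one third of
+ // the inserted frames should be dropped.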
+ EXPECT_NEAR(num_frames_dropped, max_framerate_ - (max_framerate_ * 2 / 3),
kErrorMargin);
// Trigger CPU overuse, reduce framerate by 2/3 again.
vie_encoder_->TriggerCpuOveruse();
num_frames_dropped = 0;
- for (int i = 0; i < kDefaultFramerateFps; ++i) {
+ for (int i = 0; i < max_framerate_; ++i) {
timestamp_ms += kFrameIntervalMs;
- fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
video_source_.IncomingCapturedFrame(
CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
- if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
+ if (!WaitForFrame(kFrameTimeoutMs)) {
++num_frames_dropped;
} else {
sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
}
}
- EXPECT_NEAR(num_frames_dropped,
- kDefaultFramerateFps - (kDefaultFramerateFps * 4 / 9),
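+ // A second overuse step reduces the framerate to 2/3 of 2/3, i.e. 4/9 of
+ // max_framerate_, so roughly 5/9 of the frames should now be dropped.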
+ EXPECT_NEAR(num_frames_dropped, max_framerate_ - (max_framerate_ * 4 / 9),
kErrorMargin);
// Go back up one step.
vie_encoder_->TriggerCpuNormalUsage();
num_frames_dropped = 0;
- for (int i = 0; i < kDefaultFramerateFps; ++i) {
+ for (int i = 0; i < max_framerate_; ++i) {
timestamp_ms += kFrameIntervalMs;
- fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
video_source_.IncomingCapturedFrame(
CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
- if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
+ if (!WaitForFrame(kFrameTimeoutMs)) {
++num_frames_dropped;
} else {
sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
}
}
- EXPECT_NEAR(num_frames_dropped,
- kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3),
+ EXPECT_NEAR(num_frames_dropped, max_framerate_ - (max_framerate_ * 2 / 3),
kErrorMargin);
// Go back up to original mode.
vie_encoder_->TriggerCpuNormalUsage();
num_frames_dropped = 0;
- for (int i = 0; i < kDefaultFramerateFps; ++i) {
+ for (int i = 0; i < max_framerate_; ++i) {
timestamp_ms += kFrameIntervalMs;
- fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
video_source_.IncomingCapturedFrame(
CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
- if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
+ if (!WaitForFrame(kFrameTimeoutMs)) {
++num_frames_dropped;
} else {
sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
@@ -2084,15 +2123,17 @@
const int kFrameWidth = 1280;
const int kFrameHeight = 720;
- rtc::ScopedFakeClock fake_clock;
+ // Reconfigure encoder with two temporal layers and screensharing, which will
+ // disable frame dropping and make testing easier.
+ ResetEncoder("VP8", 1, 2, true, true);
+
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
vie_encoder_->SetSource(
&video_source_,
VideoSendStream::DegradationPreference::kMaintainResolution);
video_source_.set_adaptation_enabled(true);
- fake_clock.SetTimeMicros(kFrameIntervalMs * 1000);
- int64_t timestamp_ms = kFrameIntervalMs;
+ int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec;
// Trigger overuse as much as we can.
for (int i = 0; i < ViEEncoder::kMaxCpuResolutionDowngrades; ++i) {
@@ -2101,23 +2142,72 @@
video_source_.IncomingCapturedFrame(
CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
timestamp_ms += kFrameIntervalMs;
- fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
}
// ...and then try to adapt again.
vie_encoder_->TriggerCpuOveruse();
}
// Drain any frame in the pipeline.
- sink_.WaitForFrame(kDefaultTimeoutMs);
+ WaitForFrame(kDefaultTimeoutMs);
// Insert frames at min fps, all should go through.
for (int i = 0; i < 10; ++i) {
timestamp_ms += kMinFpsFrameInterval;
- fake_clock.AdvanceTimeMicros(kMinFpsFrameInterval * 1000);
video_source_.IncomingCapturedFrame(
CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
- sink_.WaitForEncodedFrame(timestamp_ms);
+ WaitForEncodedFrame(timestamp_ms);
}
vie_encoder_->Stop();
}
+
+TEST_F(ViEEncoderTest, PeriodicallyUpdatesChannelParameters) {
+ const int kFrameWidth = 1280;
+ const int kFrameHeight = 720;
+ const int kLowFps = 2;
+ const int kHighFps = 30;
+
+ vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+
+ int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec;
+ max_framerate_ = kLowFps;
+
+ // Insert 2 seconds of 2fps video.
+ for (int i = 0; i < kLowFps * 2; ++i) {
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ timestamp_ms += 1000 / kLowFps;
+ }
+
+ // Make sure encoder is updated with new target.
+ vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ timestamp_ms += 1000 / kLowFps;
+
+ EXPECT_EQ(kLowFps, fake_encoder_.GetConfiguredInputFramerate());
+
+ // Insert 30fps frames for just a little more than the forced update period.
+ const int kVcmTimerIntervalFrames =
+ (vcm::VCMProcessTimer::kDefaultProcessIntervalMs * kHighFps) / 1000;
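+ // i.e. the number of frames at kHighFps that span one forced update period.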
+ const int kFrameIntervalMs = 1000 / kHighFps;
+ max_framerate_ = kHighFps;
+ for (int i = 0; i < kVcmTimerIntervalFrames + 2; ++i) {
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+ // Wait for encoded frame, but skip ahead if it doesn't arrive as it might
+ // be dropped if the encoder hasn't been updated with the new higher target
+ // framerate yet, causing it to overshoot the target bitrate and then
+ // suffer the wrath of the media optimizer.
+ TimedWaitForEncodedFrame(timestamp_ms, 2 * kFrameIntervalMs);
+ timestamp_ms += kFrameIntervalMs;
+ }
+
+ // Don't expect a correct measurement just yet, but it should be higher than
+ // before.
+ EXPECT_GT(fake_encoder_.GetConfiguredInputFramerate(), kLowFps);
+
+ vie_encoder_->Stop();
+}
} // namespace webrtc
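
(Aside, not part of the patch: the new test assumes the fake encoder simply records the most recently configured framerate and reports it from GetConfiguredInputFramerate(). The stand-in below is a minimal sketch of that bookkeeping under that assumption; the class name and the SetRates() signature are illustrative, not the real webrtc::test::FakeEncoder API.)

#include <cstdint>
#include <mutex>

// Minimal stand-in that records the last bitrate/framerate it was configured
// with, mirroring what the test polls via GetConfiguredInputFramerate().
class RateRecordingEncoderStub {
 public:
  // Called whenever the channel parameters are (re)applied to the encoder.
  void SetRates(uint32_t bitrate_bps, uint32_t framerate_fps) {
    std::lock_guard<std::mutex> lock(mutex_);
    configured_bitrate_bps_ = bitrate_bps;
    configured_framerate_fps_ = framerate_fps;
  }

  // The value the test inspects: the framerate most recently handed to the
  // encoder, which should rise once the periodic update has fired.
  int GetConfiguredInputFramerate() const {
    std::lock_guard<std::mutex> lock(mutex_);
    return static_cast<int>(configured_framerate_fps_);
  }

 private:
  mutable std::mutex mutex_;
  uint32_t configured_bitrate_bps_ = 0;
  uint32_t configured_framerate_fps_ = 0;
};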