Reland of Periodically update codec bit/frame rate settings.

Patch set 1 is a reland + trivial rebase.
Patch sets >= 2 contain bug fixes.

> Original issue's description:
> > Fix a bug in vie_encoder.cc that caused channel parameters not to be updated at regular intervals, as intended (see the sketch below).
> >
> > That, however, exposes a bunch of failing tests, so this CL also fixes a few other things:
> > * FakeEncoder should trust the configured FPS value rather than guesstimating it from the real-time clock, so as not to completely undershoot targets in offline mode. Also, compensate for key-frame overshoots when outputting delta frames.
> > * FrameDropper should not assume the incoming frame rate is 0 if no frames have been seen.
> > * Fix a bunch of test cases that started failing because they were relying on the fake encoder undershooting.
> > * Fix test
> >
> > BUG=7664
> >
> > Review-Url: https://codereview.webrtc.org/2883963002
> > Cr-Commit-Position: refs/heads/master@{#18473}
> > Committed: https://chromium.googlesource.com/external/webrtc/+/6431e21da672a5f3bbf166d3d4d98b171d015706

BUG=webrtc:7664

Review-Url: https://codereview.webrtc.org/2953053002
Cr-Commit-Position: refs/heads/master@{#18782}
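
The vie_encoder.cc hunk near the bottom shows the core fix: previously the
last-update timestamp was bumped on every frame, so the "at least one process
interval since the last update" check effectively never fired. Below is a
minimal, self-contained sketch of the intended cadence (names such as
UpdateTracker and the interval value are made up for illustration; this is not
WebRTC code):

  #include <cstdint>
  #include <cstdio>

  namespace {

  constexpr int64_t kProcessIntervalMs = 1000;  // Stand-in for the VCM default.

  struct UpdateTracker {
    bool pending_reconfiguration = false;
    int64_t last_update_ms = -1;  // -1 means "never updated yet".

    // Returns true if channel parameters should be (re)applied at |now_ms|.
    bool ShouldUpdate(int64_t now_ms) {
      if (pending_reconfiguration || last_update_ms < 0 ||
          now_ms - last_update_ms >= kProcessIntervalMs) {
        pending_reconfiguration = false;
        last_update_ms = now_ms;  // Only advance when an update happens.
        return true;
      }
      return false;
    }
  };

  }  // namespace

  int main() {
    UpdateTracker tracker;
    for (int64_t t = 0; t <= 3000; t += 250)
      std::printf("t=%4d ms -> update=%d\n", static_cast<int>(t),
                  tracker.ShouldUpdate(t));
    return 0;
  }

With the timestamp only advancing when an update actually happens, the check
fires once per interval instead of never.
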
diff --git a/webrtc/call/rampup_tests.cc b/webrtc/call/rampup_tests.cc
index f51ab31..5128d50 100644
--- a/webrtc/call/rampup_tests.cc
+++ b/webrtc/call/rampup_tests.cc
@@ -54,6 +54,7 @@
       report_perf_stats_(report_perf_stats),
       sender_call_(nullptr),
       send_stream_(nullptr),
+      send_transport_(nullptr),
       start_bitrate_bps_(start_bitrate_bps),
       min_run_time_ms_(min_run_time_ms),
       expected_bitrate_bps_(0),
diff --git a/webrtc/media/engine/simulcast.cc b/webrtc/media/engine/simulcast.cc
index 4dd8c31..f1cd2ce 100644
--- a/webrtc/media/engine/simulcast.cc
+++ b/webrtc/media/engine/simulcast.cc
@@ -49,7 +49,7 @@
   {0, 0, 1, 200, 150, 30}
 };
 
-const int kMaxScreenshareSimulcastStreams = 2;
+const int kDefaultScreenshareSimulcastStreams = 2;
 
 // Multiway: Number of temporal layers for each simulcast stream, for maximum
 // possible number of simulcast streams |kMaxSimulcastStreams|. The array
@@ -176,12 +176,8 @@
                                                     bool is_screencast) {
   size_t num_simulcast_layers;
   if (is_screencast) {
-    if (UseSimulcastScreenshare()) {
-      num_simulcast_layers =
-          std::min<int>(max_streams, kMaxScreenshareSimulcastStreams);
-    } else {
-      num_simulcast_layers = 1;
-    }
+    num_simulcast_layers =
+        UseSimulcastScreenshare() ? kDefaultScreenshareSimulcastStreams : 1;
   } else {
     num_simulcast_layers = FindSimulcastMaxLayers(width, height);
   }
@@ -198,60 +194,33 @@
   std::vector<webrtc::VideoStream> streams;
   streams.resize(num_simulcast_layers);
 
-  if (is_screencast) {
-    ScreenshareLayerConfig config = ScreenshareLayerConfig::GetDefault();
-    // For legacy screenshare in conference mode, tl0 and tl1 bitrates are
-    // piggybacked on the VideoCodec struct as target and max bitrates,
-    // respectively. See eg. webrtc::VP8EncoderImpl::SetRates().
-    streams[0].width = width;
-    streams[0].height = height;
-    streams[0].max_qp = max_qp;
-    streams[0].max_framerate = 5;
-    streams[0].min_bitrate_bps = kMinVideoBitrateKbps * 1000;
-    streams[0].target_bitrate_bps = config.tl0_bitrate_kbps * 1000;
-    streams[0].max_bitrate_bps = config.tl1_bitrate_kbps * 1000;
-    streams[0].temporal_layer_thresholds_bps.clear();
-    streams[0].temporal_layer_thresholds_bps.push_back(config.tl0_bitrate_kbps *
-                                                       1000);
-
-    // With simulcast enabled, add another spatial layer. This one will have a
-    // more normal layout, with the regular 3 temporal layer pattern and no fps
-    // restrictions. The base simulcast stream will still use legacy setup.
-    if (num_simulcast_layers == kMaxScreenshareSimulcastStreams) {
-      // Add optional upper simulcast layer.
-      // Lowest temporal layers of a 3 layer setup will have 40% of the total
-      // bitrate allocation for that stream. Make sure the gap between the
-      // target of the lower stream and first temporal layer of the higher one
-      // is at most 2x the bitrate, so that upswitching is not hampered by
-      // stalled bitrate estimates.
-      int max_bitrate_bps = 2 * ((streams[0].target_bitrate_bps * 10) / 4);
-      // Cap max bitrate so it isn't overly high for the given resolution.
-      max_bitrate_bps = std::min<int>(
-          max_bitrate_bps, FindSimulcastMaxBitrateBps(width, height));
-
-      streams[1].width = width;
-      streams[1].height = height;
-      streams[1].max_qp = max_qp;
-      streams[1].max_framerate = max_framerate;
-      // Three temporal layers means two thresholds.
-      streams[1].temporal_layer_thresholds_bps.resize(2);
-      streams[1].min_bitrate_bps = streams[0].target_bitrate_bps * 2;
-      streams[1].target_bitrate_bps = max_bitrate_bps;
-      streams[1].max_bitrate_bps = max_bitrate_bps;
-    }
-  } else {
+  if (!is_screencast) {
     // Format width and height have to be divisible by |2 ^ number_streams - 1|.
     width = NormalizeSimulcastSize(width, num_simulcast_layers);
     height = NormalizeSimulcastSize(height, num_simulcast_layers);
+  }
 
-    // Add simulcast sub-streams from lower resolution to higher resolutions.
-    // Add simulcast streams, from highest resolution (|s| = number_streams -1)
-    // to lowest resolution at |s| = 0.
-    for (size_t s = num_simulcast_layers - 1;; --s) {
-      streams[s].width = width;
-      streams[s].height = height;
-      // TODO(pbos): Fill actual temporal-layer bitrate thresholds.
-      streams[s].max_qp = max_qp;
+  // Add simulcast streams, from highest resolution (|s| = number_streams - 1)
+  // down to lowest resolution at |s| = 0.
+  for (size_t s = num_simulcast_layers - 1;; --s) {
+    streams[s].width = width;
+    streams[s].height = height;
+    // TODO(pbos): Fill actual temporal-layer bitrate thresholds.
+    streams[s].max_qp = max_qp;
+    if (is_screencast && s == 0) {
+      ScreenshareLayerConfig config = ScreenshareLayerConfig::GetDefault();
+      // For legacy screenshare in conference mode, tl0 and tl1 bitrates are
+      // piggybacked on the VideoCodec struct as target and max bitrates,
+      // respectively. See eg. webrtc::VP8EncoderImpl::SetRates().
+      streams[s].min_bitrate_bps = kMinVideoBitrateKbps * 1000;
+      streams[s].target_bitrate_bps = config.tl0_bitrate_kbps * 1000;
+      streams[s].max_bitrate_bps = config.tl1_bitrate_kbps * 1000;
+      streams[s].temporal_layer_thresholds_bps.clear();
+      streams[s].temporal_layer_thresholds_bps.push_back(
+          config.tl0_bitrate_kbps * 1000);
+      streams[s].max_framerate = 5;
+    } else {
       streams[s].temporal_layer_thresholds_bps.resize(
           kDefaultConferenceNumberOfTemporalLayers[s] - 1);
       streams[s].max_bitrate_bps = FindSimulcastMaxBitrateBps(width, height);
@@ -259,19 +228,20 @@
           FindSimulcastTargetBitrateBps(width, height);
       streams[s].min_bitrate_bps = FindSimulcastMinBitrateBps(width, height);
       streams[s].max_framerate = max_framerate;
+    }
 
+    if (!is_screencast) {
       width /= 2;
       height /= 2;
-
-      if (s == 0)
-        break;
     }
+    if (s == 0)
+      break;
+  }
 
-    // Spend additional bits to boost the max stream.
-    int bitrate_left_bps = max_bitrate_bps - GetTotalMaxBitrateBps(streams);
-    if (bitrate_left_bps > 0) {
-      streams.back().max_bitrate_bps += bitrate_left_bps;
-    }
+  // Spend additional bits to boost the max stream.
+  int bitrate_left_bps = max_bitrate_bps - GetTotalMaxBitrateBps(streams);
+  if (bitrate_left_bps > 0) {
+    streams.back().max_bitrate_bps += bitrate_left_bps;
   }
 
   return streams;
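
As an aside on the divisibility comment above: a rough standalone sketch of
rounding a dimension down to a multiple of 2^(number_streams - 1) so that
every lower simulcast layer can be produced by repeated halving. This is an
assumption about the behavior for illustration, not the actual
NormalizeSimulcastSize implementation.

  #include <cstdio>

  // Round |size| down so it is divisible by 2^(simulcast_layers - 1).
  int NormalizeSimulcastSizeSketch(int size, int simulcast_layers) {
    const int base2_exponent = simulcast_layers - 1;
    return (size >> base2_exponent) << base2_exponent;
  }

  int main() {
    // With 3 layers the dimensions must be divisible by 4: 1281 -> 1280,
    // 719 -> 716.
    std::printf("%d %d\n", NormalizeSimulcastSizeSketch(1281, 3),
                NormalizeSimulcastSizeSketch(719, 3));
    return 0;
  }
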
diff --git a/webrtc/modules/video_coding/media_optimization.cc b/webrtc/modules/video_coding/media_optimization.cc
index ccd23f6..f0cbd62 100644
--- a/webrtc/modules/video_coding/media_optimization.cc
+++ b/webrtc/modules/video_coding/media_optimization.cc
@@ -118,7 +118,13 @@
   // Update encoding rates following protection settings.
   float target_video_bitrate_kbps =
       static_cast<float>(video_target_bitrate_) / 1000.0f;
-  frame_dropper_->SetRates(target_video_bitrate_kbps, incoming_frame_rate_);
+  float framerate = incoming_frame_rate_;
+  if (framerate == 0.0) {
+    // No framerate estimate available, use configured max framerate instead.
+    framerate = user_frame_rate_;
+  }
+
+  frame_dropper_->SetRates(target_video_bitrate_kbps, framerate);
 
   return video_target_bitrate_;
 }
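
The change above falls back to the configured max framerate when no incoming
frame rate estimate exists yet. A toy illustration of why a 0 fps input is
unusable (made-up numbers; this is not how FrameDropper accounts internally):
the per-frame byte budget comes from spreading the target bitrate over the
frame rate, so the rate fed in must be positive.

  #include <cstdio>

  // Toy per-frame budget: target bitrate spread over the frame rate.
  double FrameBudgetBytes(double target_bitrate_kbps, double framerate_fps) {
    return target_bitrate_kbps * 1000.0 / framerate_fps / 8.0;
  }

  int main() {
    const double kTargetKbps = 300.0;
    const double kConfiguredMaxFps = 30.0;
    const double estimated_fps = 0.0;  // No frames seen yet.
    // Fall back to the configured max framerate instead of the 0 estimate.
    const double fps = estimated_fps > 0.0 ? estimated_fps : kConfiguredMaxFps;
    std::printf("per-frame budget: %.0f bytes\n",
                FrameBudgetBytes(kTargetKbps, fps));
    return 0;
  }
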
diff --git a/webrtc/modules/video_coding/video_sender.cc b/webrtc/modules/video_coding/video_sender.cc
index ab77297..3dabbfc 100644
--- a/webrtc/modules/video_coding/video_sender.cc
+++ b/webrtc/modules/video_coding/video_sender.cc
@@ -103,6 +103,11 @@
     numLayers = sendCodec->VP8().numberOfTemporalLayers;
   } else if (sendCodec->codecType == kVideoCodecVP9) {
     numLayers = sendCodec->VP9().numberOfTemporalLayers;
+  } else if (sendCodec->codecType == kVideoCodecGeneric &&
+             sendCodec->numberOfSimulcastStreams > 0) {
+    // This is mainly for unit testing; it disables frame dropping.
+    // TODO(sprang): Add a better way to disable frame dropping.
+    numLayers = sendCodec->simulcastStream[0].numberOfTemporalLayers;
   } else {
     numLayers = 1;
   }
@@ -197,13 +202,17 @@
     input_frame_rate = current_codec_.maxFramerate;
 
   BitrateAllocation bitrate_allocation;
-  if (bitrate_allocator) {
-    bitrate_allocation = bitrate_allocator->GetAllocation(video_target_rate_bps,
-                                                          input_frame_rate);
-  } else {
-    DefaultVideoBitrateAllocator default_allocator(current_codec_);
-    bitrate_allocation = default_allocator.GetAllocation(video_target_rate_bps,
-                                                         input_frame_rate);
+  // Only call allocators if bitrate > 0 (i.e. not suspended); otherwise they
+  // might cap the bitrate to the configured min bitrate.
+  if (target_bitrate_bps > 0) {
+    if (bitrate_allocator) {
+      bitrate_allocation = bitrate_allocator->GetAllocation(
+          video_target_rate_bps, input_frame_rate);
+    } else {
+      DefaultVideoBitrateAllocator default_allocator(current_codec_);
+      bitrate_allocation = default_allocator.GetAllocation(
+          video_target_rate_bps, input_frame_rate);
+    }
   }
   EncoderParameters new_encoder_params = {bitrate_allocation, params.loss_rate,
                                           params.rtt, input_frame_rate};
@@ -221,7 +230,7 @@
                                 encoder_params_.target_bitrate.get_sum_bps());
     target_rate = encoder_params_.target_bitrate;
   }
-  if (bitrate_updated_callback)
+  if (bitrate_updated_callback && target_rate.get_sum_bps() > 0)
     bitrate_updated_callback->OnBitrateAllocationUpdated(target_rate);
 }
 
@@ -236,7 +245,7 @@
   encoder_params.rtt = rtt;
   encoder_params = UpdateEncoderParameters(encoder_params, bitrate_allocator,
                                            target_bitrate_bps);
-  if (bitrate_updated_callback) {
+  if (bitrate_updated_callback && target_bitrate_bps > 0) {
     bitrate_updated_callback->OnBitrateAllocationUpdated(
         encoder_params.target_bitrate);
   }
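
The "bitrate > 0" guard above exists because an allocator may clamp the target
into the codec's configured bitrate range. A sketch under that assumption (not
the real DefaultVideoBitrateAllocator): pushing 0 through a clamping allocator
would resurrect a suspended stream at the min bitrate, so allocation is skipped
entirely while suspended.

  #include <algorithm>
  #include <cstdint>
  #include <cstdio>

  struct LimitsSketch {
    uint32_t min_bitrate_bps;
    uint32_t max_bitrate_bps;
  };

  // A naive allocator that clamps the target into [min, max].
  uint32_t AllocateSketch(const LimitsSketch& limits, uint32_t target_bps) {
    return std::min(std::max(target_bps, limits.min_bitrate_bps),
                    limits.max_bitrate_bps);
  }

  int main() {
    const LimitsSketch limits = {30000, 2500000};
    // A suspended stream (target == 0) would get bumped up to min_bitrate...
    std::printf("through allocator: %u bps\n", AllocateSketch(limits, 0));
    // ...so the caller skips allocation and keeps the zero allocation instead.
    std::printf("allocator skipped: %u bps\n", 0u);
    return 0;
  }
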
diff --git a/webrtc/test/fake_encoder.cc b/webrtc/test/fake_encoder.cc
index a76ac13..922ff56 100644
--- a/webrtc/test/fake_encoder.cc
+++ b/webrtc/test/fake_encoder.cc
@@ -24,11 +24,15 @@
 namespace webrtc {
 namespace test {
 
+const int kKeyframeSizeFactor = 10;
+
 FakeEncoder::FakeEncoder(Clock* clock)
     : clock_(clock),
       callback_(nullptr),
+      configured_input_framerate_(-1),
       max_target_bitrate_kbps_(-1),
-      last_encode_time_ms_(0) {
+      pending_keyframe_(true),
+      debt_bytes_(0) {
   // Generate some arbitrary not-all-zero data
   for (size_t i = 0; i < sizeof(encoded_buffer_); ++i) {
     encoded_buffer_[i] = static_cast<uint8_t>(i);
@@ -47,6 +51,8 @@
   rtc::CritScope cs(&crit_sect_);
   config_ = *config;
   target_bitrate_.SetBitrate(0, 0, config_.startBitrate * 1000);
+  configured_input_framerate_ = config_.maxFramerate;
+  pending_keyframe_ = true;
   return 0;
 }
 
@@ -59,9 +65,10 @@
   EncodedImageCallback* callback;
   uint32_t target_bitrate_sum_kbps;
   int max_target_bitrate_kbps;
-  int64_t last_encode_time_ms;
   size_t num_encoded_bytes;
+  int framerate;
   VideoCodecMode mode;
+  bool keyframe;
   {
     rtc::CritScope cs(&crit_sect_);
     max_framerate = config_.maxFramerate;
@@ -72,42 +79,32 @@
     callback = callback_;
     target_bitrate_sum_kbps = target_bitrate_.get_sum_kbps();
     max_target_bitrate_kbps = max_target_bitrate_kbps_;
-    last_encode_time_ms = last_encode_time_ms_;
     num_encoded_bytes = sizeof(encoded_buffer_);
     mode = config_.mode;
+    if (configured_input_framerate_ > 0) {
+      framerate = configured_input_framerate_;
+    } else {
+      framerate = max_framerate;
+    }
+    keyframe = pending_keyframe_;
+    pending_keyframe_ = false;
   }
 
-  int64_t time_now_ms = clock_->TimeInMilliseconds();
-  const bool first_encode = (last_encode_time_ms == 0);
+  for (FrameType frame_type : *frame_types) {
+    if (frame_type == kVideoFrameKey) {
+      keyframe = true;
+      break;
+    }
+  }
+
   RTC_DCHECK_GT(max_framerate, 0);
-  int64_t time_since_last_encode_ms = 1000 / max_framerate;
-  if (!first_encode) {
-    // For all frames but the first we can estimate the display time by looking
-    // at the display time of the previous frame.
-    time_since_last_encode_ms = time_now_ms - last_encode_time_ms;
-  }
-  if (time_since_last_encode_ms > 3 * 1000 / max_framerate) {
-    // Rudimentary check to make sure we don't widely overshoot bitrate target
-    // when resuming encoding after a suspension.
-    time_since_last_encode_ms = 3 * 1000 / max_framerate;
-  }
 
-  size_t bits_available =
-      static_cast<size_t>(target_bitrate_sum_kbps * time_since_last_encode_ms);
-  size_t min_bits = static_cast<size_t>(simulcast_streams[0].minBitrate *
-                                        time_since_last_encode_ms);
+  size_t bitrate =
+      std::max(target_bitrate_sum_kbps, simulcast_streams[0].minBitrate);
+  if (max_target_bitrate_kbps > 0)
+    bitrate = std::min(bitrate, static_cast<size_t>(max_target_bitrate_kbps));
 
-  if (bits_available < min_bits)
-    bits_available = min_bits;
-  size_t max_bits =
-      static_cast<size_t>(max_target_bitrate_kbps * time_since_last_encode_ms);
-  if (max_bits > 0 && max_bits < bits_available)
-    bits_available = max_bits;
-
-  {
-    rtc::CritScope cs(&crit_sect_);
-    last_encode_time_ms_ = time_now_ms;
-  }
+  size_t bits_available = bitrate * 1000 / framerate;
 
   RTC_DCHECK_GT(num_simulcast_streams, 0);
   for (unsigned char i = 0; i < num_simulcast_streams; ++i) {
@@ -116,18 +113,27 @@
     specifics.codecType = kVideoCodecGeneric;
     specifics.codecSpecific.generic.simulcast_idx = i;
     size_t min_stream_bits = static_cast<size_t>(
-        simulcast_streams[i].minBitrate * time_since_last_encode_ms);
+        (simulcast_streams[i].minBitrate * 1000) / framerate);
     size_t max_stream_bits = static_cast<size_t>(
-        simulcast_streams[i].maxBitrate * time_since_last_encode_ms);
+        (simulcast_streams[i].maxBitrate * 1000) / framerate);
     size_t stream_bits = (bits_available > max_stream_bits) ? max_stream_bits :
         bits_available;
     size_t stream_bytes = (stream_bits + 7) / 8;
-    if (first_encode) {
+    if (keyframe) {
       // Key frames should be larger.
-      // TODO(holmer): The FakeEncoder should store the bits_available between
-      // encodes so that it can compensate for oversized frames.
-      stream_bytes *= 10;
+      // Store the overshoot bytes and distribute them over the coming frames,
+      // so that we on average meet the bitrate target.
+      debt_bytes_ += (kKeyframeSizeFactor - 1) * stream_bytes;
+      stream_bytes *= kKeyframeSizeFactor;
+    } else {
+      if (debt_bytes_ > 0) {
+        // Pay at most half of the frame size for old debts.
+        size_t payment_size = std::min(stream_bytes / 2, debt_bytes_);
+        debt_bytes_ -= payment_size;
+        stream_bytes -= payment_size;
+      }
     }
+
     if (stream_bytes > num_encoded_bytes)
       stream_bytes = num_encoded_bytes;
 
@@ -176,6 +182,7 @@
                                        uint32_t framerate) {
   rtc::CritScope cs(&crit_sect_);
   target_bitrate_ = rate_allocation;
+  configured_input_framerate_ = framerate;
   return 0;
 }
 
@@ -184,6 +191,11 @@
   return kImplementationName;
 }
 
+int FakeEncoder::GetConfiguredInputFramerate() const {
+  rtc::CritScope cs(&crit_sect_);
+  return configured_input_framerate_;
+}
+
 FakeH264Encoder::FakeH264Encoder(Clock* clock)
     : FakeEncoder(clock), callback_(nullptr), idr_counter_(0) {
   FakeEncoder::RegisterEncodeCompleteCallback(this);
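
A standalone walk-through of the key-frame debt bookkeeping FakeEncoder now
does (the factor matches kKeyframeSizeFactor above; the per-frame budget and
frame count are made up): the key frame overshoots, the overshoot is recorded
as debt, and each delta frame repays at most half of its own budget until the
debt is gone, so the long-run average lands back on the target.

  #include <algorithm>
  #include <cstddef>
  #include <cstdio>

  int main() {
    const size_t kKeyframeSizeFactor = 10;
    const size_t kFrameBudgetBytes = 1000;  // ~ bitrate / framerate / 8.
    const int kNumFrames = 30;
    size_t debt_bytes = 0;
    size_t total_bytes = 0;
    for (int frame = 0; frame < kNumFrames; ++frame) {
      size_t frame_bytes = kFrameBudgetBytes;
      if (frame == 0) {
        // Key frame: oversized, and the overshoot becomes debt.
        debt_bytes += (kKeyframeSizeFactor - 1) * frame_bytes;
        frame_bytes *= kKeyframeSizeFactor;
      } else if (debt_bytes > 0) {
        // Delta frame: pay back at most half of the frame size.
        const size_t payment = std::min(frame_bytes / 2, debt_bytes);
        debt_bytes -= payment;
        frame_bytes -= payment;
      }
      total_bytes += frame_bytes;
    }
    // Prints an average of exactly 1000 bytes, i.e. the per-frame budget.
    std::printf("average frame size: %zu bytes (budget %zu)\n",
                total_bytes / kNumFrames, kFrameBudgetBytes);
    return 0;
  }
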
diff --git a/webrtc/test/fake_encoder.h b/webrtc/test/fake_encoder.h
index e3878ec..4487c52 100644
--- a/webrtc/test/fake_encoder.h
+++ b/webrtc/test/fake_encoder.h
@@ -45,6 +45,7 @@
   int32_t SetRateAllocation(const BitrateAllocation& rate_allocation,
                             uint32_t framerate) override;
   const char* ImplementationName() const override;
+  int GetConfiguredInputFramerate() const;
 
   static const char* kImplementationName;
 
@@ -53,11 +54,16 @@
   VideoCodec config_ GUARDED_BY(crit_sect_);
   EncodedImageCallback* callback_ GUARDED_BY(crit_sect_);
   BitrateAllocation target_bitrate_ GUARDED_BY(crit_sect_);
+  int configured_input_framerate_ GUARDED_BY(crit_sect_);
   int max_target_bitrate_kbps_ GUARDED_BY(crit_sect_);
-  int64_t last_encode_time_ms_ GUARDED_BY(crit_sect_);
+  bool pending_keyframe_ GUARDED_BY(crit_sect_);
   rtc::CriticalSection crit_sect_;
 
   uint8_t encoded_buffer_[100000];
+
+  // Current byte debt to be paid over a number of frames.
+  // The debt is acquired by keyframes overshooting the bitrate target.
+  size_t debt_bytes_;
 };
 
 class FakeH264Encoder : public FakeEncoder, public EncodedImageCallback {
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index d759ed6..37a0249 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -975,10 +975,12 @@
 
     void TriggerLossReport(const RTPHeader& header) {
       // Send lossy receive reports to trigger FEC enabling.
-      if (packet_count_++ % 2 != 0) {
-        // Receive statistics reporting having lost 50% of the packets.
+      const int kLossPercent = 5;
+      if (packet_count_++ % (100 / kLossPercent) != 0) {
         FakeReceiveStatistics lossy_receive_stats(
-            kVideoSendSsrcs[0], header.sequenceNumber, packet_count_ / 2, 127);
+            kVideoSendSsrcs[0], header.sequenceNumber,
+            (packet_count_ * (100 - kLossPercent)) / 100,  // Cumulative lost.
+            static_cast<uint8_t>((255 * kLossPercent) / 100));  // Loss percent.
         RTCPSender rtcp_sender(false, Clock::GetRealTimeClock(),
                                &lossy_receive_stats, nullptr, nullptr,
                                transport_adapter_.get());
@@ -1031,6 +1033,35 @@
       // Make sure there is at least one extension header, to make the RTP
       // header larger than the base length of 12 bytes.
       EXPECT_FALSE(send_config->rtp.extensions.empty());
+
+      // Set up screen content, since that disables frame dropping and makes
+      // this test easier.
+      class VideoStreamFactory
+          : public VideoEncoderConfig::VideoStreamFactoryInterface {
+       public:
+        explicit VideoStreamFactory(size_t num_temporal_layers)
+            : num_temporal_layers_(num_temporal_layers) {
+          EXPECT_GT(num_temporal_layers, 0u);
+        }
+
+       private:
+        std::vector<VideoStream> CreateEncoderStreams(
+            int width,
+            int height,
+            const VideoEncoderConfig& encoder_config) override {
+          std::vector<VideoStream> streams =
+              test::CreateVideoStreams(width, height, encoder_config);
+          for (VideoStream& stream : streams) {
+            stream.temporal_layer_thresholds_bps.resize(num_temporal_layers_ -
+                                                        1);
+          }
+          return streams;
+        }
+        const size_t num_temporal_layers_;
+      };
+
+      encoder_config->video_stream_factory =
+          new rtc::RefCountedObject<VideoStreamFactory>(2);
+      encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
     }
 
     void PerformTest() override {
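
For reference, the fraction-lost byte that the test above now derives from
kLossPercent is the same quantity the old code hard-coded as 127 for 50% loss.
A quick standalone check of that arithmetic (sketch only, not the RTCP packet
building code):

  #include <cstdint>
  #include <cstdio>

  // Loss percentage scaled into the single fraction-lost byte.
  uint8_t FractionLostByte(int loss_percent) {
    return static_cast<uint8_t>((255 * loss_percent) / 100);
  }

  int main() {
    std::printf("50%% loss -> %d (the old hard-coded value)\n",
                FractionLostByte(50));
    std::printf(" 5%% loss -> %d (the new kLossPercent case)\n",
                FractionLostByte(5));
    return 0;
  }
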
diff --git a/webrtc/video/vie_encoder.cc b/webrtc/video/vie_encoder.cc
index 4761c12..4522300 100644
--- a/webrtc/video/vie_encoder.cc
+++ b/webrtc/video/vie_encoder.cc
@@ -778,13 +778,14 @@
   int64_t now_ms = clock_->TimeInMilliseconds();
   if (pending_encoder_reconfiguration_) {
     ReconfigureEncoder();
+    last_parameters_update_ms_.emplace(now_ms);
   } else if (!last_parameters_update_ms_ ||
              now_ms - *last_parameters_update_ms_ >=
                  vcm::VCMProcessTimer::kDefaultProcessIntervalMs) {
     video_sender_.UpdateChannelParemeters(rate_allocator_.get(),
                                           bitrate_observer_);
+    last_parameters_update_ms_.emplace(now_ms);
   }
-  last_parameters_update_ms_.emplace(now_ms);
 
   if (EncoderPaused()) {
     TraceFrameDropStart();
diff --git a/webrtc/video/vie_encoder_unittest.cc b/webrtc/video/vie_encoder_unittest.cc
index 976cf47..3a9e95e 100644
--- a/webrtc/video/vie_encoder_unittest.cc
+++ b/webrtc/video/vie_encoder_unittest.cc
@@ -251,6 +251,11 @@
   rtc::Optional<VideoSendStream::Stats> mock_stats_ GUARDED_BY(lock_);
 };
 
+class MockBitrateObserver : public VideoBitrateAllocationObserver {
+ public:
+  MOCK_METHOD1(OnBitrateAllocationUpdated, void(const BitrateAllocation&));
+};
+
 }  // namespace
 
 class ViEEncoderTest : public ::testing::Test {
@@ -261,6 +266,7 @@
       : video_send_config_(VideoSendStream::Config(nullptr)),
         codec_width_(320),
         codec_height_(240),
+        max_framerate_(30),
         fake_encoder_(),
         stats_proxy_(new MockableSendStatisticsProxy(
             Clock::GetRealTimeClock(),
@@ -277,7 +283,17 @@
 
     VideoEncoderConfig video_encoder_config;
     test::FillEncoderConfiguration(1, &video_encoder_config);
+    video_encoder_config.video_stream_factory =
+        new rtc::RefCountedObject<VideoStreamFactory>(1, max_framerate_);
     video_encoder_config_ = video_encoder_config.Copy();
+
+    // Framerate limit is specified by the VideoStreamFactory.
+    std::vector<VideoStream> streams =
+        video_encoder_config.video_stream_factory->CreateEncoderStreams(
+            codec_width_, codec_height_, video_encoder_config);
+    max_framerate_ = streams[0].max_framerate;
+    fake_clock_.SetTimeMicros(1234);
+
     ConfigureEncoder(std::move(video_encoder_config), true /* nack_enabled */);
   }
 
@@ -300,7 +316,8 @@
   void ResetEncoder(const std::string& payload_name,
                     size_t num_streams,
                     size_t num_temporal_layers,
-                    bool nack_enabled) {
+                    bool nack_enabled,
+                    bool screenshare) {
     video_send_config_.encoder_settings.payload_name = payload_name;
 
     VideoEncoderConfig video_encoder_config;
@@ -309,6 +326,9 @@
     video_encoder_config.video_stream_factory =
         new rtc::RefCountedObject<VideoStreamFactory>(num_temporal_layers,
                                                       kDefaultFramerate);
+    video_encoder_config.content_type =
+        screenshare ? VideoEncoderConfig::ContentType::kScreen
+                    : VideoEncoderConfig::ContentType::kRealtimeVideo;
     ConfigureEncoder(std::move(video_encoder_config), nack_enabled);
   }
 
@@ -406,6 +426,33 @@
     EXPECT_FALSE(wants.target_pixel_count);
   }
 
+  void WaitForEncodedFrame(int64_t expected_ntp_time) {
+    sink_.WaitForEncodedFrame(expected_ntp_time);
+    fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec / max_framerate_);
+  }
+
+  bool TimedWaitForEncodedFrame(int64_t expected_ntp_time, int64_t timeout_ms) {
+    bool ok = sink_.TimedWaitForEncodedFrame(expected_ntp_time, timeout_ms);
+    fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec / max_framerate_);
+    return ok;
+  }
+
+  void WaitForEncodedFrame(uint32_t expected_width, uint32_t expected_height) {
+    sink_.WaitForEncodedFrame(expected_width, expected_height);
+    fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec / max_framerate_);
+  }
+
+  void ExpectDroppedFrame() {
+    sink_.ExpectDroppedFrame();
+    fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec / max_framerate_);
+  }
+
+  bool WaitForFrame(int64_t timeout_ms) {
+    bool ok = sink_.WaitForFrame(timeout_ms);
+    fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec / max_framerate_);
+    return ok;
+  }
+
   class TestEncoder : public test::FakeEncoder {
    public:
     TestEncoder()
@@ -514,13 +561,21 @@
         : test_encoder_(test_encoder), encoded_frame_event_(false, false) {}
 
     void WaitForEncodedFrame(int64_t expected_ntp_time) {
+      EXPECT_TRUE(
+          TimedWaitForEncodedFrame(expected_ntp_time, kDefaultTimeoutMs));
+    }
+
+    bool TimedWaitForEncodedFrame(int64_t expected_ntp_time,
+                                  int64_t timeout_ms) {
       uint32_t timestamp = 0;
-      EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs));
+      if (!encoded_frame_event_.Wait(timeout_ms))
+        return false;
       {
         rtc::CritScope lock(&crit_);
         timestamp = last_timestamp_;
       }
       test_encoder_->CheckLastTimeStampsMatch(expected_ntp_time, timestamp);
+      return true;
     }
 
     void WaitForEncodedFrame(uint32_t expected_width,
@@ -599,18 +654,20 @@
   VideoEncoderConfig video_encoder_config_;
   int codec_width_;
   int codec_height_;
+  int max_framerate_;
   TestEncoder fake_encoder_;
   std::unique_ptr<MockableSendStatisticsProxy> stats_proxy_;
   TestSink sink_;
   AdaptingFrameForwarder video_source_;
   std::unique_ptr<ViEEncoderUnderTest> vie_encoder_;
+  rtc::ScopedFakeClock fake_clock_;
 };
 
 TEST_F(ViEEncoderTest, EncodeOneFrame) {
   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
   rtc::Event frame_destroyed_event(false, false);
   video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeoutMs));
   vie_encoder_->Stop();
 }
@@ -624,14 +681,14 @@
   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
 
   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
-  sink_.WaitForEncodedFrame(2);
+  WaitForEncodedFrame(2);
   vie_encoder_->Stop();
 }
 
 TEST_F(ViEEncoderTest, DropsFramesWhenRateSetToZero) {
   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
 
   vie_encoder_->OnBitrateUpdated(0, 0, 0);
   // Dropped since bitrate is zero.
@@ -639,20 +696,20 @@
 
   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
   video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr));
-  sink_.WaitForEncodedFrame(3);
+  WaitForEncodedFrame(3);
   vie_encoder_->Stop();
 }
 
 TEST_F(ViEEncoderTest, DropsFramesWithSameOrOldNtpTimestamp) {
   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
 
   // This frame will be dropped since it has the same ntp timestamp.
   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
 
   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
-  sink_.WaitForEncodedFrame(2);
+  WaitForEncodedFrame(2);
   vie_encoder_->Stop();
 }
 
@@ -660,7 +717,7 @@
   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
 
   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
 
   vie_encoder_->Stop();
   sink_.SetExpectNoFrames();
@@ -674,13 +731,13 @@
 
   fake_encoder_.BlockNextEncode();
   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   // Here, the encoder thread will be blocked in the TestEncoder waiting for a
   // call to ContinueEncode.
   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
   video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr));
   fake_encoder_.ContinueEncode();
-  sink_.WaitForEncodedFrame(3);
+  WaitForEncodedFrame(3);
 
   vie_encoder_->Stop();
 }
@@ -691,7 +748,7 @@
 
   // Capture a frame and wait for it to synchronize with the encoder thread.
   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   // The encoder will have been configured once when the first frame is
   // received.
   EXPECT_EQ(1, sink_.number_of_reconfigurations());
@@ -704,7 +761,7 @@
 
   // Capture a frame and wait for it to synchronize with the encoder thread.
   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
-  sink_.WaitForEncodedFrame(2);
+  WaitForEncodedFrame(2);
   EXPECT_EQ(2, sink_.number_of_reconfigurations());
   EXPECT_EQ(9999, sink_.last_min_transmit_bitrate());
 
@@ -716,7 +773,7 @@
 
   // Capture a frame and wait for it to synchronize with the encoder thread.
   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   // The encoder will have been configured once.
   EXPECT_EQ(1, sink_.number_of_reconfigurations());
   EXPECT_EQ(codec_width_, fake_encoder_.codec_config().width);
@@ -727,7 +784,7 @@
   // Capture a frame with a higher resolution and wait for it to synchronize
   // with the encoder thread.
   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
-  sink_.WaitForEncodedFrame(2);
+  WaitForEncodedFrame(2);
   EXPECT_EQ(codec_width_, fake_encoder_.codec_config().width);
   EXPECT_EQ(codec_height_, fake_encoder_.codec_config().height);
   EXPECT_EQ(2, sink_.number_of_reconfigurations());
@@ -739,12 +796,12 @@
   const bool kNackEnabled = true;
   const size_t kNumStreams = 1;
   const size_t kNumTl = 1;
-  ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled);
+  ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled, false);
   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
 
   // Capture a frame and wait for it to synchronize with the encoder thread.
   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   // The encoder has been configured once when the first frame is received.
   EXPECT_EQ(1, sink_.number_of_reconfigurations());
   EXPECT_EQ(kVideoCodecVP8, fake_encoder_.codec_config().codecType);
@@ -759,12 +816,12 @@
   const bool kNackEnabled = true;
   const size_t kNumStreams = 2;
   const size_t kNumTl = 1;
-  ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled);
+  ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled, false);
   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
 
   // Capture a frame and wait for it to synchronize with the encoder thread.
   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   // The encoder has been configured once when the first frame is received.
   EXPECT_EQ(1, sink_.number_of_reconfigurations());
   EXPECT_EQ(kVideoCodecVP8, fake_encoder_.codec_config().codecType);
@@ -779,12 +836,12 @@
   const bool kNackEnabled = false;
   const size_t kNumStreams = 1;
   const size_t kNumTl = 1;
-  ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled);
+  ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled, false);
   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
 
   // Capture a frame and wait for it to synchronize with the encoder thread.
   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   // The encoder has been configured once when the first frame is received.
   EXPECT_EQ(1, sink_.number_of_reconfigurations());
   EXPECT_EQ(kVideoCodecVP8, fake_encoder_.codec_config().codecType);
@@ -799,12 +856,12 @@
   const bool kNackEnabled = true;
   const size_t kNumStreams = 1;
   const size_t kNumTl = 2;
-  ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled);
+  ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled, false);
   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
 
   // Capture a frame and wait for it to synchronize with the encoder thread.
   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   // The encoder has been configured once when the first frame is received.
   EXPECT_EQ(1, sink_.number_of_reconfigurations());
   EXPECT_EQ(kVideoCodecVP8, fake_encoder_.codec_config().codecType);
@@ -848,7 +905,7 @@
   for (int i = 1; i <= kMaxDowngrades; ++i) {
     video_source_.IncomingCapturedFrame(
         CreateFrame(i, frame_width, frame_height));
-    sink_.WaitForEncodedFrame(i);
+    WaitForEncodedFrame(i);
 
     vie_encoder_->TriggerCpuOveruse();
 
@@ -867,7 +924,7 @@
   rtc::VideoSinkWants current_wants = video_source_.sink_wants();
   video_source_.IncomingCapturedFrame(
       CreateFrame(kMaxDowngrades + 1, frame_width, frame_height));
-  sink_.WaitForEncodedFrame(kMaxDowngrades + 1);
+  WaitForEncodedFrame(kMaxDowngrades + 1);
   vie_encoder_->TriggerCpuOveruse();
   EXPECT_EQ(video_source_.sink_wants().target_pixel_count,
             current_wants.target_pixel_count);
@@ -955,14 +1012,14 @@
 
   video_source_.IncomingCapturedFrame(
       CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
-  sink_.WaitForEncodedFrame(frame_timestamp);
+  WaitForEncodedFrame(frame_timestamp);
   frame_timestamp += kFrameIntervalMs;
 
   // Trigger CPU overuse.
   vie_encoder_->TriggerCpuOveruse();
   video_source_.IncomingCapturedFrame(
       CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
-  sink_.WaitForEncodedFrame(frame_timestamp);
+  WaitForEncodedFrame(frame_timestamp);
   frame_timestamp += kFrameIntervalMs;
 
   // Default degradation preference is maintain-framerate, so will lower max
@@ -992,7 +1049,7 @@
   vie_encoder_->TriggerCpuOveruse();
   new_video_source.IncomingCapturedFrame(
       CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
-  sink_.WaitForEncodedFrame(frame_timestamp);
+  WaitForEncodedFrame(frame_timestamp);
   frame_timestamp += kFrameIntervalMs;
 
   // Some framerate constraint should be set.
@@ -1010,7 +1067,7 @@
   vie_encoder_->TriggerCpuOveruse();
   new_video_source.IncomingCapturedFrame(
       CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
-  sink_.WaitForEncodedFrame(frame_timestamp);
+  WaitForEncodedFrame(frame_timestamp);
   frame_timestamp += kFrameIntervalMs;
 
   // Still no degradation.
@@ -1044,7 +1101,7 @@
   const int kWidth = 1280;
   const int kHeight = 720;
   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   VideoSendStream::Stats stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.bw_limited_resolution);
   EXPECT_EQ(0, stats.number_of_quality_adapt_changes);
@@ -1052,7 +1109,7 @@
   // Trigger adapt down.
   vie_encoder_->TriggerQualityLow();
   video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(2);
+  WaitForEncodedFrame(2);
 
   stats = stats_proxy_->GetStats();
   EXPECT_TRUE(stats.bw_limited_resolution);
@@ -1061,7 +1118,7 @@
   // Trigger adapt up.
   vie_encoder_->TriggerQualityHigh();
   video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(3);
+  WaitForEncodedFrame(3);
 
   stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.bw_limited_resolution);
@@ -1077,7 +1134,7 @@
   const int kWidth = 1280;
   const int kHeight = 720;
   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   VideoSendStream::Stats stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.cpu_limited_resolution);
   EXPECT_EQ(0, stats.number_of_cpu_adapt_changes);
@@ -1085,7 +1142,7 @@
   // Trigger CPU overuse.
   vie_encoder_->TriggerCpuOveruse();
   video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(2);
+  WaitForEncodedFrame(2);
 
   stats = stats_proxy_->GetStats();
   EXPECT_TRUE(stats.cpu_limited_resolution);
@@ -1094,7 +1151,7 @@
   // Trigger CPU normal use.
   vie_encoder_->TriggerCpuNormalUsage();
   video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(3);
+  WaitForEncodedFrame(3);
 
   stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.cpu_limited_resolution);
@@ -1110,7 +1167,7 @@
   const int kWidth = 1280;
   const int kHeight = 720;
   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   VideoSendStream::Stats stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.bw_limited_resolution);
   EXPECT_FALSE(stats.cpu_limited_resolution);
@@ -1119,7 +1176,7 @@
   // Trigger CPU overuse.
   vie_encoder_->TriggerCpuOveruse();
   video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(2);
+  WaitForEncodedFrame(2);
   stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.bw_limited_resolution);
   EXPECT_TRUE(stats.cpu_limited_resolution);
@@ -1132,7 +1189,7 @@
       VideoSendStream::DegradationPreference::kMaintainFramerate);
 
   new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(3);
+  WaitForEncodedFrame(3);
   stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.bw_limited_resolution);
   EXPECT_TRUE(stats.cpu_limited_resolution);
@@ -1144,7 +1201,7 @@
       VideoSendStream::DegradationPreference::kDegradationDisabled);
 
   new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(4);
+  WaitForEncodedFrame(4);
   stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.bw_limited_resolution);
   EXPECT_FALSE(stats.cpu_limited_resolution);
@@ -1156,7 +1213,7 @@
       VideoSendStream::DegradationPreference::kMaintainFramerate);
 
   new_video_source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(5);
+  WaitForEncodedFrame(5);
   stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.bw_limited_resolution);
   EXPECT_TRUE(stats.cpu_limited_resolution);
@@ -1165,7 +1222,7 @@
   // Trigger CPU normal use.
   vie_encoder_->TriggerCpuNormalUsage();
   new_video_source.IncomingCapturedFrame(CreateFrame(6, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(6);
+  WaitForEncodedFrame(6);
   stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.bw_limited_resolution);
   EXPECT_FALSE(stats.cpu_limited_resolution);
@@ -1181,7 +1238,7 @@
   const int kWidth = 1280;
   const int kHeight = 720;
   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   VideoSendStream::Stats stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.bw_limited_resolution);
   EXPECT_FALSE(stats.bw_limited_framerate);
@@ -1193,7 +1250,7 @@
                           VideoSendStream::DegradationPreference::kBalanced);
 
   new_video_source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(2);
+  WaitForEncodedFrame(2);
   stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.bw_limited_resolution);
   EXPECT_FALSE(stats.bw_limited_framerate);
@@ -1202,7 +1259,7 @@
   // Trigger adapt down.
   vie_encoder_->TriggerQualityLow();
   new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(3);
+  WaitForEncodedFrame(3);
   stats = stats_proxy_->GetStats();
   EXPECT_TRUE(stats.bw_limited_resolution);
   EXPECT_FALSE(stats.bw_limited_framerate);
@@ -1213,7 +1270,7 @@
                           VideoSendStream::DegradationPreference::kBalanced);
 
   new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(4);
+  WaitForEncodedFrame(4);
   stats = stats_proxy_->GetStats();
   EXPECT_TRUE(stats.bw_limited_resolution);
   EXPECT_FALSE(stats.bw_limited_framerate);
@@ -1225,7 +1282,7 @@
       VideoSendStream::DegradationPreference::kMaintainResolution);
 
   new_video_source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(5);
+  WaitForEncodedFrame(5);
   stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.bw_limited_resolution);
   EXPECT_FALSE(stats.bw_limited_framerate);
@@ -1242,7 +1299,7 @@
   const int kHeight = 720;
   video_source_.set_adaptation_enabled(true);
   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1250,7 +1307,7 @@
   // Trigger adapt down.
   vie_encoder_->TriggerQualityLow();
   video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(2);
+  WaitForEncodedFrame(2);
   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1258,7 +1315,7 @@
   // Trigger overuse.
   vie_encoder_->TriggerCpuOveruse();
   video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(3);
+  WaitForEncodedFrame(3);
   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1270,7 +1327,7 @@
       VideoSendStream::DegradationPreference::kMaintainFramerate);
 
   video_source_.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(4);
+  WaitForEncodedFrame(4);
   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1286,7 +1343,7 @@
   int sequence = 1;
 
   video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(sequence++);
+  WaitForEncodedFrame(sequence++);
   VideoSendStream::Stats stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.cpu_limited_resolution);
   EXPECT_FALSE(stats.cpu_limited_framerate);
@@ -1295,7 +1352,7 @@
   // Trigger CPU overuse, should now adapt down.
   vie_encoder_->TriggerCpuOveruse();
   video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(sequence++);
+  WaitForEncodedFrame(sequence++);
   stats = stats_proxy_->GetStats();
   EXPECT_TRUE(stats.cpu_limited_resolution);
   EXPECT_FALSE(stats.cpu_limited_framerate);
@@ -1309,7 +1366,7 @@
 
   new_video_source.IncomingCapturedFrame(
       CreateFrame(sequence, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(sequence++);
+  WaitForEncodedFrame(sequence++);
   stats = stats_proxy_->GetStats();
   EXPECT_TRUE(stats.cpu_limited_resolution);
   EXPECT_FALSE(stats.cpu_limited_framerate);
@@ -1321,7 +1378,7 @@
       VideoSendStream::DegradationPreference::kMaintainResolution);
   new_video_source.IncomingCapturedFrame(
       CreateFrame(sequence, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(sequence++);
+  WaitForEncodedFrame(sequence++);
   stats = stats_proxy_->GetStats();
   // Not adapted at first.
   EXPECT_FALSE(stats.cpu_limited_resolution);
@@ -1338,7 +1395,7 @@
 
   new_video_source.IncomingCapturedFrame(
       CreateFrame(sequence, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(sequence++);
+  WaitForEncodedFrame(sequence++);
 
   // Framerate now adapted.
   stats = stats_proxy_->GetStats();
@@ -1352,7 +1409,7 @@
       VideoSendStream::DegradationPreference::kDegradationDisabled);
   new_video_source.IncomingCapturedFrame(
       CreateFrame(sequence, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(sequence++);
+  WaitForEncodedFrame(sequence++);
 
   stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.cpu_limited_resolution);
@@ -1374,7 +1431,7 @@
       &video_source_,
       VideoSendStream::DegradationPreference::kMaintainFramerate);
   video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(sequence++);
+  WaitForEncodedFrame(sequence++);
   stats = stats_proxy_->GetStats();
   EXPECT_TRUE(stats.cpu_limited_resolution);
   EXPECT_FALSE(stats.cpu_limited_framerate);
@@ -1383,7 +1440,7 @@
   // Trigger CPU normal usage.
   vie_encoder_->TriggerCpuNormalUsage();
   video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(sequence++);
+  WaitForEncodedFrame(sequence++);
   stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.cpu_limited_resolution);
   EXPECT_FALSE(stats.cpu_limited_framerate);
@@ -1395,7 +1452,7 @@
       VideoSendStream::DegradationPreference::kMaintainResolution);
   new_video_source.IncomingCapturedFrame(
       CreateFrame(sequence, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(sequence++);
+  WaitForEncodedFrame(sequence++);
   stats = stats_proxy_->GetStats();
   // Disabled, since we previously switched the source to disabled.
   EXPECT_FALSE(stats.cpu_limited_resolution);
@@ -1406,7 +1463,7 @@
   vie_encoder_->TriggerCpuNormalUsage();
   new_video_source.IncomingCapturedFrame(
       CreateFrame(sequence, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(sequence++);
+  WaitForEncodedFrame(sequence++);
   stats = stats_proxy_->GetStats();
   EXPECT_FALSE(stats.cpu_limited_resolution);
   EXPECT_FALSE(stats.cpu_limited_framerate);
@@ -1422,7 +1479,7 @@
   const int kWidth = 1280;
   const int kHeight = 720;
   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
 
   VideoSendStream::Stats stats = stats_proxy_->GetStats();
   EXPECT_EQ(video_encoder_config_.max_bitrate_bps,
@@ -1440,13 +1497,13 @@
   VerifyNoLimitation(video_source_.sink_wants());
 
   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
 
   // Trigger scale down.
   vie_encoder_->TriggerQualityLow();
 
   video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(2);
+  WaitForEncodedFrame(2);
 
   // Expect a scale down.
   EXPECT_TRUE(video_source_.sink_wants().max_pixel_count);
@@ -1461,7 +1518,7 @@
   // Trigger scale down.
   vie_encoder_->TriggerQualityLow();
   new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(3);
+  WaitForEncodedFrame(3);
 
   // Expect no scaling.
   EXPECT_EQ(std::numeric_limits<int>::max(),
@@ -1470,7 +1527,7 @@
   // Trigger scale up.
   vie_encoder_->TriggerQualityHigh();
   new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(4);
+  WaitForEncodedFrame(4);
 
   // Expect nothing to change, still no scaling.
   EXPECT_EQ(std::numeric_limits<int>::max(),
@@ -1490,7 +1547,7 @@
       &source, VideoSendStream::DegradationPreference::kMaintainFramerate);
 
   source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1561,7 +1618,7 @@
       &source, VideoSendStream::DegradationPreference::kMaintainFramerate);
 
   source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(kWidth, kHeight);
+  WaitForEncodedFrame(kWidth, kHeight);
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1586,7 +1643,7 @@
       &source, VideoSendStream::DegradationPreference::kMaintainResolution);
 
   source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(kWidth, kHeight);
+  WaitForEncodedFrame(kWidth, kHeight);
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1666,7 +1723,7 @@
       &source, VideoSendStream::DegradationPreference::kMaintainFramerate);
 
   source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -1674,7 +1731,7 @@
   // Trigger adapt down, expect scaled down resolution.
   vie_encoder_->TriggerQualityLow();
   source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(2);
+  WaitForEncodedFrame(2);
   VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -1747,7 +1804,7 @@
   int downscales = 0;
   for (size_t i = 1; i <= kNumFrames; i++) {
     video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
-    sink_.WaitForEncodedFrame(i);
+    WaitForEncodedFrame(i);
 
     // Trigger scale down.
     rtc::VideoSinkWants last_wants = video_source_.sink_wants();
@@ -1778,7 +1835,7 @@
       &source, VideoSendStream::DegradationPreference::kMaintainFramerate);
 
   source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(kWidth, kHeight);
+  WaitForEncodedFrame(kWidth, kHeight);
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1786,7 +1843,7 @@
   // Trigger adapt down, expect scaled down resolution.
   vie_encoder_->TriggerCpuOveruse();
   source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(2);
+  WaitForEncodedFrame(2);
   VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1794,7 +1851,7 @@
   // Trigger adapt up, expect no restriction.
   vie_encoder_->TriggerCpuNormalUsage();
   source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(kWidth, kHeight);
+  WaitForEncodedFrame(kWidth, kHeight);
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1802,7 +1859,7 @@
   // Trigger adapt down, expect scaled down resolution.
   vie_encoder_->TriggerCpuOveruse();
   source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(4);
+  WaitForEncodedFrame(4);
   VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1884,7 +1941,7 @@
       &source, VideoSendStream::DegradationPreference::kMaintainFramerate);
 
   source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(kWidth, kHeight);
+  WaitForEncodedFrame(kWidth, kHeight);
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1894,7 +1951,7 @@
   // Trigger cpu adapt down, expect scaled down resolution (960x540).
   vie_encoder_->TriggerCpuOveruse();
   source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(2);
+  WaitForEncodedFrame(2);
   VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1904,7 +1961,7 @@
   // Trigger cpu adapt down, expect scaled down resolution (640x360).
   vie_encoder_->TriggerCpuOveruse();
   source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(3);
+  WaitForEncodedFrame(3);
   VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
   rtc::VideoSinkWants last_wants = source.sink_wants();
   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -1915,7 +1972,7 @@
   // Trigger cpu adapt down, max cpu downgrades reached, expect no change.
   vie_encoder_->TriggerCpuOveruse();
   source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(4);
+  WaitForEncodedFrame(4);
   VerifyFpsEqResolutionEq(source.sink_wants(), last_wants);
   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1925,7 +1982,7 @@
   // Trigger quality adapt down, expect scaled down resolution (480x270).
   vie_encoder_->TriggerQualityLow();
   source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(5);
+  WaitForEncodedFrame(5);
   VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1935,7 +1992,7 @@
   // Trigger cpu adapt up, expect upscaled resolution (640x360).
   vie_encoder_->TriggerCpuNormalUsage();
   source.IncomingCapturedFrame(CreateFrame(6, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(6);
+  WaitForEncodedFrame(6);
   VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
   EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1945,7 +2002,7 @@
   // Trigger cpu adapt up, expect upscaled resolution (960x540).
   vie_encoder_->TriggerCpuNormalUsage();
   source.IncomingCapturedFrame(CreateFrame(7, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(7);
+  WaitForEncodedFrame(7);
   VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
   last_wants = source.sink_wants();
   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -1956,7 +2013,7 @@
   // Trigger cpu adapt up, no cpu downgrades, expect no change (960x540).
   vie_encoder_->TriggerCpuNormalUsage();
   source.IncomingCapturedFrame(CreateFrame(8, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(8);
+  WaitForEncodedFrame(8);
   VerifyFpsEqResolutionEq(source.sink_wants(), last_wants);
   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -1966,7 +2023,7 @@
   // Trigger quality adapt up, expect no restriction (1280x720).
   vie_encoder_->TriggerQualityHigh();
   source.IncomingCapturedFrame(CreateFrame(9, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(kWidth, kHeight);
+  WaitForEncodedFrame(kWidth, kHeight);
   VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -1978,21 +2035,21 @@
 }
 
 TEST_F(ViEEncoderTest, CpuLimitedHistogramIsReported) {
-  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
   const int kWidth = 640;
   const int kHeight = 360;
 
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+
   for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
     video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
-    sink_.WaitForEncodedFrame(i);
+    WaitForEncodedFrame(i);
   }
 
   vie_encoder_->TriggerCpuOveruse();
   for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
     video_source_.IncomingCapturedFrame(CreateFrame(
         SendStatisticsProxy::kMinRequiredMetricsSamples + i, kWidth, kHeight));
-    sink_.WaitForEncodedFrame(SendStatisticsProxy::kMinRequiredMetricsSamples +
-                              i);
+    WaitForEncodedFrame(SendStatisticsProxy::kMinRequiredMetricsSamples + i);
   }
 
   vie_encoder_->Stop();
@@ -2016,7 +2073,7 @@
 
   for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
     video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
-    sink_.WaitForEncodedFrame(i);
+    WaitForEncodedFrame(i);
   }
 
   vie_encoder_->Stop();
@@ -2028,10 +2085,7 @@
 }
 
 TEST_F(ViEEncoderTest, CallsBitrateObserver) {
-  class MockBitrateObserver : public VideoBitrateAllocationObserver {
-   public:
-    MOCK_METHOD1(OnBitrateAllocationUpdated, void(const BitrateAllocation&));
-  } bitrate_observer;
+  MockBitrateObserver bitrate_observer;
   vie_encoder_->SetBitrateObserver(&bitrate_observer);
 
   const int kDefaultFps = 30;
@@ -2047,26 +2101,25 @@
   const int64_t kStartTimeMs = 1;
   video_source_.IncomingCapturedFrame(
       CreateFrame(kStartTimeMs, codec_width_, codec_height_));
-  sink_.WaitForEncodedFrame(kStartTimeMs);
+  WaitForEncodedFrame(kStartTimeMs);
 
   // Not called on second frame.
   EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(expected_bitrate))
       .Times(0);
   video_source_.IncomingCapturedFrame(
       CreateFrame(kStartTimeMs + 1, codec_width_, codec_height_));
-  sink_.WaitForEncodedFrame(kStartTimeMs + 1);
+  WaitForEncodedFrame(kStartTimeMs + 1);
 
   // Called after a process interval.
   const int64_t kProcessIntervalMs =
       vcm::VCMProcessTimer::kDefaultProcessIntervalMs;
-  // TODO(sprang): ViEEncoder should die and/or get injectable clock.
-  // Sleep for one processing interval plus one frame to avoid flakiness.
-  SleepMs(kProcessIntervalMs + 1000 / kDefaultFps);
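+  // Advance the fake clock by one process interval plus one frame interval so
+  // the periodic channel parameter update is due when the next frame is
+  // encoded, without relying on a real-time sleep.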
+  fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerMillisec *
+                                (kProcessIntervalMs + (1000 / kDefaultFps)));
   EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(expected_bitrate))
       .Times(1);
   video_source_.IncomingCapturedFrame(CreateFrame(
       kStartTimeMs + kProcessIntervalMs, codec_width_, codec_height_));
-  sink_.WaitForEncodedFrame(kStartTimeMs + kProcessIntervalMs);
+  WaitForEncodedFrame(kStartTimeMs + kProcessIntervalMs);
 
   vie_encoder_->Stop();
 }
@@ -2238,7 +2291,7 @@
   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
 
   // Expect to drop this frame, the wait should time out.
-  sink_.ExpectDroppedFrame();
+  ExpectDroppedFrame();
 
   // Expect the sink_wants to specify a scaled frame.
   EXPECT_LT(video_source_.sink_wants().max_pixel_count, kWidth * kHeight);
@@ -2250,7 +2303,7 @@
       CreateFrame(2, kWidth * 3 / 4, kHeight * 3 / 4));
 
   // Expect to drop this frame, the wait should time out.
-  sink_.ExpectDroppedFrame();
+  ExpectDroppedFrame();
 
   EXPECT_LT(video_source_.sink_wants().max_pixel_count, last_pixel_count);
 
@@ -2267,11 +2320,11 @@
   int i;
   for (i = 1; i <= kMaxInitialFramedrop; ++i) {
     video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
-    sink_.ExpectDroppedFrame();
+    ExpectDroppedFrame();
   }
   // The n+1th frame should not be dropped, even though its size is too large.
   video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(i);
+  WaitForEncodedFrame(i);
 
   // Expect the sink_wants to specify a scaled frame.
   EXPECT_LT(video_source_.sink_wants().max_pixel_count, kWidth * kHeight);
@@ -2291,7 +2344,7 @@
 
   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
   // Frame should not be dropped, even if it's too large.
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
 
   vie_encoder_->Stop();
 }
@@ -2308,7 +2361,7 @@
 
   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
   // Frame should not be dropped, even if it's too large.
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
 
   vie_encoder_->Stop();
   fake_encoder_.SetQualityScaling(true);
@@ -2329,7 +2382,7 @@
 
   // Trigger adapt down, too small frame, expect no change.
   source.IncomingCapturedFrame(CreateFrame(1, kTooSmallWidth, kTooSmallHeight));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   vie_encoder_->TriggerCpuOveruse();
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -2354,7 +2407,7 @@
 
   // Trigger adapt down, expect limited framerate.
   source.IncomingCapturedFrame(CreateFrame(1, kTooSmallWidth, kTooSmallHeight));
-  sink_.WaitForEncodedFrame(1);
+  WaitForEncodedFrame(1);
   vie_encoder_->TriggerQualityLow();
   VerifyFpsEqResolutionMax(source.sink_wants(), kFpsLimit);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -2363,7 +2416,7 @@
 
   // Trigger adapt down, too small frame, expect no change.
   source.IncomingCapturedFrame(CreateFrame(2, kTooSmallWidth, kTooSmallHeight));
-  sink_.WaitForEncodedFrame(2);
+  WaitForEncodedFrame(2);
   vie_encoder_->TriggerQualityLow();
   VerifyFpsEqResolutionMax(source.sink_wants(), kFpsLimit);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -2376,12 +2429,12 @@
 TEST_F(ViEEncoderTest, FailingInitEncodeDoesntCauseCrash) {
   fake_encoder_.ForceInitEncodeFailure(true);
   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
-  ResetEncoder("VP8", 2, 1, true);
+  ResetEncoder("VP8", 2, 1, true, false);
   const int kFrameWidth = 1280;
   const int kFrameHeight = 720;
   video_source_.IncomingCapturedFrame(
       CreateFrame(1, kFrameWidth, kFrameHeight));
-  sink_.ExpectDroppedFrame();
+  ExpectDroppedFrame();
   vie_encoder_->Stop();
 }
 
@@ -2397,29 +2450,27 @@
 
   video_source_.IncomingCapturedFrame(
       CreateFrame(1, kFrameWidth, kFrameHeight));
-  sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight);
+  WaitForEncodedFrame(kFrameWidth, kFrameHeight);
 
   // Trigger CPU overuse, downscale by 3/4.
   vie_encoder_->TriggerCpuOveruse();
   video_source_.IncomingCapturedFrame(
       CreateFrame(2, kFrameWidth, kFrameHeight));
-  sink_.WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4);
+  WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4);
 
   // Trigger CPU normal use, return to original resolution.
   vie_encoder_->TriggerCpuNormalUsage();
   video_source_.IncomingCapturedFrame(
       CreateFrame(3, kFrameWidth, kFrameHeight));
-  sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight);
+  WaitForEncodedFrame(kFrameWidth, kFrameHeight);
 
   vie_encoder_->Stop();
 }
 
 TEST_F(ViEEncoderTest, AdaptsFramerateOnOveruse_MaintainResolutionMode) {
-  const int kDefaultFramerateFps = 30;
-  const int kFrameIntervalMs = rtc::kNumMillisecsPerSec / kDefaultFramerateFps;
   const int kFrameWidth = 1280;
   const int kFrameHeight = 720;
-  rtc::ScopedFakeClock fake_clock;
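+  // Derive the frame interval from the fixture's configured max framerate
+  // instead of a local constant, so the test follows the encoder settings.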
+  const int kFrameIntervalMs = rtc::kNumMillisecsPerSec / max_framerate_;
 
   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
   vie_encoder_->SetSource(
@@ -2427,93 +2478,82 @@
       VideoSendStream::DegradationPreference::kMaintainResolution);
   video_source_.set_adaptation_enabled(true);
 
-  fake_clock.SetTimeMicros(kFrameIntervalMs * 1000);
-  int64_t timestamp_ms = kFrameIntervalMs;
+  int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec;
 
   video_source_.IncomingCapturedFrame(
       CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
 
   // Try to trigger overuse. No fps estimate available => no effect.
   vie_encoder_->TriggerCpuOveruse();
 
   // Insert frames for one second to get a stable estimate.
-  for (int i = 0; i < kDefaultFramerateFps; ++i) {
+  for (int i = 0; i < max_framerate_; ++i) {
     timestamp_ms += kFrameIntervalMs;
-    fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
     video_source_.IncomingCapturedFrame(
         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-    sink_.WaitForEncodedFrame(timestamp_ms);
+    WaitForEncodedFrame(timestamp_ms);
   }
 
   // Trigger CPU overuse, reduce framerate by 2/3.
   vie_encoder_->TriggerCpuOveruse();
   int num_frames_dropped = 0;
-  for (int i = 0; i < kDefaultFramerateFps; ++i) {
+  for (int i = 0; i < max_framerate_; ++i) {
     timestamp_ms += kFrameIntervalMs;
-    fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
     video_source_.IncomingCapturedFrame(
         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-    if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
+    if (!WaitForFrame(kFrameTimeoutMs)) {
       ++num_frames_dropped;
     } else {
       sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
     }
   }
 
-  // TODO(sprang): Find where there's rounding errors or stuff causing the
-  // margin here to be a little larger than we'd like (input fps estimate is
-  // off) and the frame dropping is a little too aggressive.
-  const int kErrorMargin = 5;
-  EXPECT_NEAR(num_frames_dropped,
-              kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3),
+  // Add some slack to account for frames dropped by the frame dropper.
+  const int kErrorMargin = 1;
+  EXPECT_NEAR(num_frames_dropped, max_framerate_ - (max_framerate_ * 2 / 3),
               kErrorMargin);
 
   // Trigger CPU overuse, reduce framerate by 2/3 again.
   vie_encoder_->TriggerCpuOveruse();
   num_frames_dropped = 0;
-  for (int i = 0; i < kDefaultFramerateFps; ++i) {
+  for (int i = 0; i < max_framerate_; ++i) {
     timestamp_ms += kFrameIntervalMs;
-    fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
     video_source_.IncomingCapturedFrame(
         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-    if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
+    if (!WaitForFrame(kFrameTimeoutMs)) {
       ++num_frames_dropped;
     } else {
       sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
     }
   }
-  EXPECT_NEAR(num_frames_dropped,
-              kDefaultFramerateFps - (kDefaultFramerateFps * 4 / 9),
+  EXPECT_NEAR(num_frames_dropped, max_framerate_ - (max_framerate_ * 4 / 9),
               kErrorMargin);
 
   // Go back up one step.
   vie_encoder_->TriggerCpuNormalUsage();
   num_frames_dropped = 0;
-  for (int i = 0; i < kDefaultFramerateFps; ++i) {
+  for (int i = 0; i < max_framerate_; ++i) {
     timestamp_ms += kFrameIntervalMs;
-    fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
     video_source_.IncomingCapturedFrame(
         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-    if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
+    if (!WaitForFrame(kFrameTimeoutMs)) {
       ++num_frames_dropped;
     } else {
       sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
     }
   }
-  EXPECT_NEAR(num_frames_dropped,
-              kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3),
+  EXPECT_NEAR(num_frames_dropped, max_framerate_ - (max_framerate_ * 2 / 3),
               kErrorMargin);
 
   // Go back up to original mode.
   vie_encoder_->TriggerCpuNormalUsage();
   num_frames_dropped = 0;
-  for (int i = 0; i < kDefaultFramerateFps; ++i) {
+  for (int i = 0; i < max_framerate_; ++i) {
     timestamp_ms += kFrameIntervalMs;
-    fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
     video_source_.IncomingCapturedFrame(
         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-    if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
+    if (!WaitForFrame(kFrameTimeoutMs)) {
       ++num_frames_dropped;
     } else {
       sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
@@ -2531,15 +2571,17 @@
   const int kFrameWidth = 1280;
   const int kFrameHeight = 720;
 
-  rtc::ScopedFakeClock fake_clock;
+  // Reconfigure encoder with two temporal layers and screensharing, which will
+  // disable frame dropping and make testing easier.
+  ResetEncoder("VP8", 1, 2, true, true);
+
   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
   vie_encoder_->SetSource(
       &video_source_,
       VideoSendStream::DegradationPreference::kMaintainResolution);
   video_source_.set_adaptation_enabled(true);
 
-  fake_clock.SetTimeMicros(kFrameIntervalMs * 1000);
-  int64_t timestamp_ms = kFrameIntervalMs;
+  int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec;
 
   // Trigger overuse as much as we can.
   for (int i = 0; i < ViEEncoder::kMaxCpuResolutionDowngrades; ++i) {
@@ -2548,22 +2590,20 @@
       video_source_.IncomingCapturedFrame(
           CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
       timestamp_ms += kFrameIntervalMs;
-      fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
     }
     // ...and then try to adapt again.
     vie_encoder_->TriggerCpuOveruse();
   }
 
   // Drain any frame in the pipeline.
-  sink_.WaitForFrame(kDefaultTimeoutMs);
+  WaitForFrame(kDefaultTimeoutMs);
 
   // Insert frames at min fps, all should go through.
   for (int i = 0; i < 10; ++i) {
     timestamp_ms += kMinFpsFrameInterval;
-    fake_clock.AdvanceTimeMicros(kMinFpsFrameInterval * 1000);
     video_source_.IncomingCapturedFrame(
         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-    sink_.WaitForEncodedFrame(timestamp_ms);
+    WaitForEncodedFrame(timestamp_ms);
   }
 
   vie_encoder_->Stop();
@@ -2583,7 +2623,7 @@
                           VideoSendStream::DegradationPreference::kBalanced);
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(kWidth, kHeight);
+  WaitForEncodedFrame(kWidth, kHeight);
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2593,7 +2633,7 @@
   vie_encoder_->TriggerQualityLow();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2603,7 +2643,7 @@
   vie_encoder_->TriggerQualityLow();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2613,7 +2653,7 @@
   vie_encoder_->TriggerQualityLow();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants());
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2623,7 +2663,7 @@
   vie_encoder_->TriggerQualityLow();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsEqResolutionLt(source.sink_wants(), source.last_wants());
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2633,7 +2673,7 @@
   vie_encoder_->TriggerQualityLow();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants());
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2643,7 +2683,7 @@
   vie_encoder_->TriggerQualityLow();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsEqResolutionLt(source.sink_wants(), source.last_wants());
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2653,7 +2693,7 @@
   vie_encoder_->TriggerQualityLow();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants());
   rtc::VideoSinkWants last_wants = source.sink_wants();
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -2664,7 +2704,7 @@
   vie_encoder_->TriggerQualityLow();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsEqResolutionEq(source.sink_wants(), last_wants);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2674,7 +2714,7 @@
   vie_encoder_->TriggerQualityHigh();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsGtResolutionEq(source.sink_wants(), source.last_wants());
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2684,7 +2724,7 @@
   vie_encoder_->TriggerQualityHigh();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsEqResolutionGt(source.sink_wants(), source.last_wants());
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2694,7 +2734,7 @@
   vie_encoder_->TriggerQualityHigh();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsGtResolutionEq(source.sink_wants(), source.last_wants());
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2704,7 +2744,7 @@
   vie_encoder_->TriggerQualityHigh();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsEqResolutionGt(source.sink_wants(), source.last_wants());
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2714,7 +2754,7 @@
   vie_encoder_->TriggerQualityHigh();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsMaxResolutionEq(source.sink_wants(), source.last_wants());
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2724,7 +2764,7 @@
   vie_encoder_->TriggerQualityHigh();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2734,7 +2774,7 @@
   vie_encoder_->TriggerQualityHigh();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(kWidth, kHeight);
+  WaitForEncodedFrame(kWidth, kHeight);
   VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -2763,7 +2803,7 @@
                           VideoSendStream::DegradationPreference::kBalanced);
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(kWidth, kHeight);
+  WaitForEncodedFrame(kWidth, kHeight);
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2776,7 +2816,7 @@
   vie_encoder_->TriggerCpuOveruse();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2789,7 +2829,7 @@
   vie_encoder_->TriggerCpuOveruse();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2802,7 +2842,7 @@
   vie_encoder_->TriggerQualityLow();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2815,7 +2855,7 @@
   vie_encoder_->TriggerCpuNormalUsage();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsMaxResolutionEq(source.sink_wants(), source.last_wants());
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2828,7 +2868,7 @@
   vie_encoder_->TriggerQualityHigh();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2841,7 +2881,7 @@
   vie_encoder_->TriggerCpuNormalUsage();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(kWidth, kHeight);
+  WaitForEncodedFrame(kWidth, kHeight);
   VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -2875,7 +2915,7 @@
                           VideoSendStream::DegradationPreference::kBalanced);
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(kWidth, kHeight);
+  WaitForEncodedFrame(kWidth, kHeight);
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2888,7 +2928,7 @@
   vie_encoder_->TriggerCpuOveruse();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsEqResolutionMax(source.sink_wants(), kFpsLimit);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2901,7 +2941,7 @@
   vie_encoder_->TriggerQualityLow();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsEqResolutionLt(source.sink_wants(), source.last_wants());
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2914,7 +2954,7 @@
   vie_encoder_->TriggerCpuNormalUsage();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyFpsEqResolutionGt(source.sink_wants(), source.last_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -2927,7 +2967,7 @@
   vie_encoder_->TriggerQualityHigh();
   timestamp_ms += kFrameIntervalMs;
   source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
-  sink_.WaitForEncodedFrame(timestamp_ms);
+  WaitForEncodedFrame(timestamp_ms);
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
   EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -3000,16 +3040,104 @@
 
   video_source_.IncomingCapturedFrame(
       CreateFrame(1, kFrameWidth, kFrameHeight));
-  sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight);
+  WaitForEncodedFrame(kFrameWidth, kFrameHeight);
 
   // Trigger CPU overuse, downscale by 3/4.
   vie_encoder_->TriggerCpuOveruse();
   video_source_.IncomingCapturedFrame(
       CreateFrame(2, kFrameWidth, kFrameHeight));
-  sink_.WaitForEncodedFrame(kAdaptedFrameWidth, kAdaptedFrameHeight);
+  WaitForEncodedFrame(kAdaptedFrameWidth, kAdaptedFrameHeight);
 
   vie_encoder_->Stop();
 }
 
+TEST_F(ViEEncoderTest, PeriodicallyUpdatesChannelParameters) {
+  const int kFrameWidth = 1280;
+  const int kFrameHeight = 720;
+  const int kLowFps = 2;
+  const int kHighFps = 30;
+
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+
+  int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec;
+  max_framerate_ = kLowFps;
+
+  // Insert 2 seconds of 2fps video.
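+  // This should make the encoder converge on a configured input framerate of
+  // kLowFps, which is verified below.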
+  for (int i = 0; i < kLowFps * 2; ++i) {
+    video_source_.IncomingCapturedFrame(
+        CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+    WaitForEncodedFrame(timestamp_ms);
+    timestamp_ms += 1000 / kLowFps;
+  }
+
+  // Make sure the encoder is updated with the new target bitrate.
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+  video_source_.IncomingCapturedFrame(
+      CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+  WaitForEncodedFrame(timestamp_ms);
+  timestamp_ms += 1000 / kLowFps;
+
+  EXPECT_EQ(kLowFps, fake_encoder_.GetConfiguredInputFramerate());
+
+  // Insert 30fps frames for just a little more than the forced update period.
+  const int kVcmTimerIntervalFrames =
+      (vcm::VCMProcessTimer::kDefaultProcessIntervalMs * kHighFps) / 1000;
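+  // With the default process interval (nominally 1000 ms) this is roughly
+  // kHighFps frames; the loop below adds two extra frames as margin.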
+  const int kFrameIntervalMs = 1000 / kHighFps;
+  max_framerate_ = kHighFps;
+  for (int i = 0; i < kVcmTimerIntervalFrames + 2; ++i) {
+    video_source_.IncomingCapturedFrame(
+        CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+    // Wait for the encoded frame, but skip ahead if it doesn't arrive, as it
+    // might be dropped if the encoder hasn't been updated with the new,
+    // higher target framerate yet, causing it to overshoot the target
+    // bitrate and then suffer the wrath of the media optimizer.
+    TimedWaitForEncodedFrame(timestamp_ms, 2 * kFrameIntervalMs);
+    timestamp_ms += kFrameIntervalMs;
+  }
+
+  // Don't expect a correct measurement just yet, but it should be higher
+  // than before.
+  EXPECT_GT(fake_encoder_.GetConfiguredInputFramerate(), kLowFps);
+
+  vie_encoder_->Stop();
+}
+
+TEST_F(ViEEncoderTest, DoesNotUpdateBitrateAllocationWhenSuspended) {
+  const int kFrameWidth = 1280;
+  const int kFrameHeight = 720;
+  const int kTargetBitrateBps = 1000000;
+
+  MockBitrateObserver bitrate_observer;
+  vie_encoder_->SetBitrateObserver(&bitrate_observer);
+
+  EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(_)).Times(1);
+  // Initial bitrate update.
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+  vie_encoder_->WaitUntilTaskQueueIsIdle();
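+  // The encoder task queue is now idle, so the initial allocation callback
+  // should have been delivered before the next expectation is set.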
+
+  // Insert a first video frame, causes another bitrate update.
+  int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec;
+  EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(_)).Times(1);
+  video_source_.IncomingCapturedFrame(
+      CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+  WaitForEncodedFrame(timestamp_ms);
+
+  // Next, simulate video suspension due to pacer queue overrun.
+  vie_encoder_->OnBitrateUpdated(0, 0, 1);
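+  // A zero target bitrate puts the encoder in the suspended state, so frames
+  // are expected to be dropped rather than encoded.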
+
+  // Skip ahead until a new periodic parameter update should have occurred.
+  timestamp_ms += vcm::VCMProcessTimer::kDefaultProcessIntervalMs;
+  fake_clock_.AdvanceTimeMicros(
+      vcm::VCMProcessTimer::kDefaultProcessIntervalMs *
+      rtc::kNumMicrosecsPerMillisec);
+
+  // Bitrate observer should not be called.
+  EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(_)).Times(0);
+  video_source_.IncomingCapturedFrame(
+      CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+  ExpectDroppedFrame();
+
+  vie_encoder_->Stop();
+}
 
 }  // namespace webrtc