Lint fix for webrtc/modules/video_coding PART 1!

Trying to submit all changes at once proved impossible since there were
too many changes in too many files. The changes to PRESUBMIT.py
will be uploaded in the last CL.
(original CL: https://codereview.webrtc.org/1528503003/)

BUG=webrtc:5309
TBR=mflodman@webrtc.org

Review URL: https://codereview.webrtc.org/1541803002

Cr-Commit-Position: refs/heads/master@{#11100}
diff --git a/webrtc/modules/video_coding/codec_database.cc b/webrtc/modules/video_coding/codec_database.cc
index 20a1143..1fae435 100644
--- a/webrtc/modules/video_coding/codec_database.cc
+++ b/webrtc/modules/video_coding/codec_database.cc
@@ -218,8 +218,9 @@
   if (new_send_codec.maxBitrate == 0) {
     // max is one bit per pixel
     new_send_codec.maxBitrate = (static_cast<int>(send_codec->height) *
-        static_cast<int>(send_codec->width) *
-        static_cast<int>(send_codec->maxFramerate)) / 1000;
+                                 static_cast<int>(send_codec->width) *
+                                 static_cast<int>(send_codec->maxFramerate)) /
+                                1000;
     if (send_codec->startBitrate > new_send_codec.maxBitrate) {
       // But if the user tries to set a higher start bit rate we will
       // increase the max accordingly.
@@ -282,8 +283,8 @@
   return send_codec_.codecType;
 }
 
-bool VCMCodecDataBase::DeregisterExternalEncoder(
-    uint8_t payload_type, bool* was_send_codec) {
+bool VCMCodecDataBase::DeregisterExternalEncoder(uint8_t payload_type,
+                                                 bool* was_send_codec) {
   assert(was_send_codec);
   *was_send_codec = false;
   if (encoder_payload_type_ != payload_type) {
@@ -301,10 +302,9 @@
   return true;
 }
 
-void VCMCodecDataBase::RegisterExternalEncoder(
-    VideoEncoder* external_encoder,
-    uint8_t payload_type,
-    bool internal_source) {
+void VCMCodecDataBase::RegisterExternalEncoder(VideoEncoder* external_encoder,
+                                               uint8_t payload_type,
+                                               bool internal_source) {
   // Since only one encoder can be used at a given time, only one external
   // encoder can be registered/used.
   external_encoder_ = external_encoder;
@@ -372,8 +372,7 @@
          ++i) {
       if (memcmp(&new_send_codec.simulcastStream[i],
                  &send_codec_.simulcastStream[i],
-                 sizeof(new_send_codec.simulcastStream[i])) !=
-          0) {
+                 sizeof(new_send_codec.simulcastStream[i])) != 0) {
         return true;
       }
     }
@@ -429,10 +428,9 @@
   return !dec_map_.empty();
 }
 
-bool VCMCodecDataBase::RegisterReceiveCodec(
-    const VideoCodec* receive_codec,
-    int number_of_cores,
-    bool require_key_frame) {
+bool VCMCodecDataBase::RegisterReceiveCodec(const VideoCodec* receive_codec,
+                                            int number_of_cores,
+                                            bool require_key_frame) {
   if (number_of_cores < 0) {
     return false;
   }
@@ -442,14 +440,12 @@
     return false;
   }
   VideoCodec* new_receive_codec = new VideoCodec(*receive_codec);
-  dec_map_[receive_codec->plType] = new VCMDecoderMapItem(new_receive_codec,
-                                                          number_of_cores,
-                                                          require_key_frame);
+  dec_map_[receive_codec->plType] = new VCMDecoderMapItem(
+      new_receive_codec, number_of_cores, require_key_frame);
   return true;
 }
 
-bool VCMCodecDataBase::DeregisterReceiveCodec(
-    uint8_t payload_type) {
+bool VCMCodecDataBase::DeregisterReceiveCodec(uint8_t payload_type) {
   DecoderMap::iterator it = dec_map_.find(payload_type);
   if (it == dec_map_.end()) {
     return false;
@@ -497,9 +493,10 @@
     return nullptr;
   }
   VCMReceiveCallback* callback = decoded_frame_callback->UserReceiveCallback();
-  if (callback) callback->OnIncomingPayloadType(receive_codec_.plType);
-  if (ptr_decoder_->RegisterDecodeCompleteCallback(decoded_frame_callback)
-      < 0) {
+  if (callback)
+    callback->OnIncomingPayloadType(receive_codec_.plType);
+  if (ptr_decoder_->RegisterDecodeCompleteCallback(decoded_frame_callback) <
+      0) {
     ReleaseDecoder(ptr_decoder_);
     ptr_decoder_ = nullptr;
     memset(&receive_codec_, 0, sizeof(VideoCodec));
diff --git a/webrtc/modules/video_coding/codec_timer.cc b/webrtc/modules/video_coding/codec_timer.cc
index e987594..60add8f 100644
--- a/webrtc/modules/video_coding/codec_timer.cc
+++ b/webrtc/modules/video_coding/codec_timer.cc
@@ -12,118 +12,85 @@
 
 #include <assert.h>
 
-namespace webrtc
-{
+namespace webrtc {
 
 // The first kIgnoredSampleCount samples will be ignored.
 static const int32_t kIgnoredSampleCount = 5;
 
 VCMCodecTimer::VCMCodecTimer()
-:
-_filteredMax(0),
-_ignoredSampleCount(0),
-_shortMax(0),
-_history()
-{
-    Reset();
+    : _filteredMax(0), _ignoredSampleCount(0), _shortMax(0), _history() {
+  Reset();
 }
 
-void VCMCodecTimer::Reset()
-{
-    _filteredMax = 0;
-    _ignoredSampleCount = 0;
-    _shortMax = 0;
-    for (int i=0; i < MAX_HISTORY_SIZE; i++)
-    {
-        _history[i].shortMax = 0;
-        _history[i].timeMs = -1;
-    }
+void VCMCodecTimer::Reset() {
+  _filteredMax = 0;
+  _ignoredSampleCount = 0;
+  _shortMax = 0;
+  for (int i = 0; i < MAX_HISTORY_SIZE; i++) {
+    _history[i].shortMax = 0;
+    _history[i].timeMs = -1;
+  }
 }
 
 // Update the max-value filter
-void VCMCodecTimer::MaxFilter(int32_t decodeTime, int64_t nowMs)
-{
-    if (_ignoredSampleCount >= kIgnoredSampleCount)
-    {
-        UpdateMaxHistory(decodeTime, nowMs);
-        ProcessHistory(nowMs);
-    }
-    else
-    {
-        _ignoredSampleCount++;
-    }
+void VCMCodecTimer::MaxFilter(int32_t decodeTime, int64_t nowMs) {
+  if (_ignoredSampleCount >= kIgnoredSampleCount) {
+    UpdateMaxHistory(decodeTime, nowMs);
+    ProcessHistory(nowMs);
+  } else {
+    _ignoredSampleCount++;
+  }
 }
 
-void
-VCMCodecTimer::UpdateMaxHistory(int32_t decodeTime, int64_t now)
-{
-    if (_history[0].timeMs >= 0 &&
-        now - _history[0].timeMs < SHORT_FILTER_MS)
-    {
-        if (decodeTime > _shortMax)
-        {
-            _shortMax = decodeTime;
-        }
+void VCMCodecTimer::UpdateMaxHistory(int32_t decodeTime, int64_t now) {
+  if (_history[0].timeMs >= 0 && now - _history[0].timeMs < SHORT_FILTER_MS) {
+    if (decodeTime > _shortMax) {
+      _shortMax = decodeTime;
     }
-    else
-    {
-        // Only add a new value to the history once a second
-        if(_history[0].timeMs == -1)
-        {
-            // First, no shift
-            _shortMax = decodeTime;
-        }
-        else
-        {
-            // Shift
-            for(int i = (MAX_HISTORY_SIZE - 2); i >= 0 ; i--)
-            {
-                _history[i+1].shortMax = _history[i].shortMax;
-                _history[i+1].timeMs = _history[i].timeMs;
-            }
-        }
-        if (_shortMax == 0)
-        {
-            _shortMax = decodeTime;
-        }
+  } else {
+    // Only add a new value to the history once a second
+    if (_history[0].timeMs == -1) {
+      // First, no shift
+      _shortMax = decodeTime;
+    } else {
+      // Shift
+      for (int i = (MAX_HISTORY_SIZE - 2); i >= 0; i--) {
+        _history[i + 1].shortMax = _history[i].shortMax;
+        _history[i + 1].timeMs = _history[i].timeMs;
+      }
+    }
+    if (_shortMax == 0) {
+      _shortMax = decodeTime;
+    }
 
-        _history[0].shortMax = _shortMax;
-        _history[0].timeMs = now;
-        _shortMax = 0;
-    }
+    _history[0].shortMax = _shortMax;
+    _history[0].timeMs = now;
+    _shortMax = 0;
+  }
 }
 
-void
-VCMCodecTimer::ProcessHistory(int64_t nowMs)
-{
-    _filteredMax = _shortMax;
-    if (_history[0].timeMs == -1)
-    {
-        return;
+void VCMCodecTimer::ProcessHistory(int64_t nowMs) {
+  _filteredMax = _shortMax;
+  if (_history[0].timeMs == -1) {
+    return;
+  }
+  for (int i = 0; i < MAX_HISTORY_SIZE; i++) {
+    if (_history[i].timeMs == -1) {
+      break;
     }
-    for (int i=0; i < MAX_HISTORY_SIZE; i++)
-    {
-        if (_history[i].timeMs == -1)
-        {
-            break;
-        }
-        if (nowMs - _history[i].timeMs > MAX_HISTORY_SIZE * SHORT_FILTER_MS)
-        {
-            // This sample (and all samples after this) is too old
-            break;
-        }
-        if (_history[i].shortMax > _filteredMax)
-        {
-            // This sample is the largest one this far into the history
-            _filteredMax = _history[i].shortMax;
-        }
+    if (nowMs - _history[i].timeMs > MAX_HISTORY_SIZE * SHORT_FILTER_MS) {
+      // This sample (and all samples after this) is too old
+      break;
     }
+    if (_history[i].shortMax > _filteredMax) {
+      // This sample is the largest one this far into the history
+      _filteredMax = _history[i].shortMax;
+    }
+  }
 }
 
 // Get the maximum observed time within a time window
-int32_t VCMCodecTimer::RequiredDecodeTimeMs(FrameType /*frameType*/) const
-{
-    return _filteredMax;
+int32_t VCMCodecTimer::RequiredDecodeTimeMs(FrameType /*frameType*/) const {
+  return _filteredMax;
 }
-
-}
+}  // namespace webrtc
diff --git a/webrtc/modules/video_coding/codec_timer.h b/webrtc/modules/video_coding/codec_timer.h
index a7abeb8..8ebd82a 100644
--- a/webrtc/modules/video_coding/codec_timer.h
+++ b/webrtc/modules/video_coding/codec_timer.h
@@ -14,48 +14,44 @@
 #include "webrtc/modules/include/module_common_types.h"
 #include "webrtc/typedefs.h"
 
-namespace webrtc
-{
+namespace webrtc {
 
 // MAX_HISTORY_SIZE * SHORT_FILTER_MS defines the window size in milliseconds
 #define MAX_HISTORY_SIZE 10
 #define SHORT_FILTER_MS 1000
 
-class VCMShortMaxSample
-{
-public:
-    VCMShortMaxSample() : shortMax(0), timeMs(-1) {};
+class VCMShortMaxSample {
+ public:
+  VCMShortMaxSample() : shortMax(0), timeMs(-1) {}
 
-    int32_t     shortMax;
-    int64_t     timeMs;
+  int32_t shortMax;
+  int64_t timeMs;
 };
 
-class VCMCodecTimer
-{
-public:
-    VCMCodecTimer();
+class VCMCodecTimer {
+ public:
+  VCMCodecTimer();
 
-    // Updates the max filtered decode time.
-    void MaxFilter(int32_t newDecodeTimeMs, int64_t nowMs);
+  // Updates the max filtered decode time.
+  void MaxFilter(int32_t newDecodeTimeMs, int64_t nowMs);
 
-    // Empty the list of timers.
-    void Reset();
+  // Empty the list of timers.
+  void Reset();
 
-    // Get the required decode time in ms.
-    int32_t RequiredDecodeTimeMs(FrameType frameType) const;
+  // Get the required decode time in ms.
+  int32_t RequiredDecodeTimeMs(FrameType frameType) const;
 
-private:
-    void UpdateMaxHistory(int32_t decodeTime, int64_t now);
-    void ProcessHistory(int64_t nowMs);
+ private:
+  void UpdateMaxHistory(int32_t decodeTime, int64_t now);
+  void ProcessHistory(int64_t nowMs);
 
-    int32_t                     _filteredMax;
-    // The number of samples ignored so far.
-    int32_t                     _ignoredSampleCount;
-    int32_t                     _shortMax;
-    VCMShortMaxSample           _history[MAX_HISTORY_SIZE];
-
+  int32_t _filteredMax;
+  // The number of samples ignored so far.
+  int32_t _ignoredSampleCount;
+  int32_t _shortMax;
+  VCMShortMaxSample _history[MAX_HISTORY_SIZE];
 };
 
 }  // namespace webrtc
 
-#endif // WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc
index a0bbb9e..6fee2e6 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc
@@ -106,8 +106,7 @@
 H264VideoToolboxDecoder::H264VideoToolboxDecoder()
     : callback_(nullptr),
       video_format_(nullptr),
-      decompression_session_(nullptr) {
-}
+      decompression_session_(nullptr) {}
 
 H264VideoToolboxDecoder::~H264VideoToolboxDecoder() {
   DestroyDecompressionSession();
@@ -129,8 +128,7 @@
 
   CMSampleBufferRef sample_buffer = nullptr;
   if (!H264AnnexBBufferToCMSampleBuffer(input_image._buffer,
-                                        input_image._length,
-                                        video_format_,
+                                        input_image._length, video_format_,
                                         &sample_buffer)) {
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
@@ -206,11 +204,8 @@
   int64_t nv12type = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
   CFNumberRef pixel_format =
       CFNumberCreate(nullptr, kCFNumberLongType, &nv12type);
-  CFTypeRef values[attributes_size] = {
-    kCFBooleanTrue,
-    io_surface_value,
-    pixel_format
-  };
+  CFTypeRef values[attributes_size] = {kCFBooleanTrue, io_surface_value,
+                                       pixel_format};
   CFDictionaryRef attributes =
       internal::CreateCFDictionary(keys, values, attributes_size);
   if (io_surface_value) {
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc
index f47f39c..7df4ec7 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc
@@ -99,11 +99,7 @@
                     int32_t h,
                     int64_t rtms,
                     uint32_t ts)
-      : callback(cb),
-        width(w),
-        height(h),
-        render_time_ms(rtms),
-        timestamp(ts) {
+      : callback(cb), width(w), height(h), render_time_ms(rtms), timestamp(ts) {
     if (csi) {
       codec_specific_info = *csi;
     } else {
@@ -146,9 +142,8 @@
   int ret = libyuv::I420ToNV12(
       frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
       frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
-      frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
-      dst_y, dst_stride_y, dst_uv, dst_stride_uv,
-      frame.width(), frame.height());
+      frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane), dst_y,
+      dst_stride_y, dst_uv, dst_stride_uv, frame.width(), frame.height());
   CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
   if (ret) {
     LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12 :" << ret;
@@ -188,10 +183,8 @@
   // TODO(tkchin): Allocate buffers through a pool.
   rtc::scoped_ptr<rtc::Buffer> buffer(new rtc::Buffer());
   rtc::scoped_ptr<webrtc::RTPFragmentationHeader> header;
-  if (!H264CMSampleBufferToAnnexBBuffer(sample_buffer,
-                                        is_keyframe,
-                                        buffer.get(),
-                                        header.accept())) {
+  if (!H264CMSampleBufferToAnnexBBuffer(sample_buffer, is_keyframe,
+                                        buffer.get(), header.accept())) {
     return;
   }
   webrtc::EncodedImage frame(buffer->data(), buffer->size(), buffer->size());
@@ -215,8 +208,7 @@
 namespace webrtc {
 
 H264VideoToolboxEncoder::H264VideoToolboxEncoder()
-    : callback_(nullptr), compression_session_(nullptr) {
-}
+    : callback_(nullptr), compression_session_(nullptr) {}
 
 H264VideoToolboxEncoder::~H264VideoToolboxEncoder() {
   DestroyCompressionSession();
@@ -289,8 +281,8 @@
       CMTimeMake(input_image.render_time_ms(), 1000);
   CFDictionaryRef frame_properties = nullptr;
   if (is_keyframe_required) {
-    CFTypeRef keys[] = { kVTEncodeFrameOptionKey_ForceKeyFrame };
-    CFTypeRef values[] = { kCFBooleanTrue };
+    CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame};
+    CFTypeRef values[] = {kCFBooleanTrue};
     frame_properties = internal::CreateCFDictionary(keys, values, 1);
   }
   rtc::scoped_ptr<internal::FrameEncodeParams> encode_params;
@@ -359,11 +351,8 @@
   int64_t nv12type = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
   CFNumberRef pixel_format =
       CFNumberCreate(nullptr, kCFNumberLongType, &nv12type);
-  CFTypeRef values[attributes_size] = {
-    kCFBooleanTrue,
-    io_surface_value,
-    pixel_format
-  };
+  CFTypeRef values[attributes_size] = {kCFBooleanTrue, io_surface_value,
+                                       pixel_format};
   CFDictionaryRef source_attributes =
       internal::CreateCFDictionary(keys, values, attributes_size);
   if (io_surface_value) {
@@ -376,15 +365,11 @@
   }
   OSStatus status = VTCompressionSessionCreate(
       nullptr,  // use default allocator
-      width_,
-      height_,
-      kCMVideoCodecType_H264,
+      width_, height_, kCMVideoCodecType_H264,
       nullptr,  // use default encoder
       source_attributes,
       nullptr,  // use default compressed data allocator
-      internal::VTCompressionOutputCallback,
-      this,
-      &compression_session_);
+      internal::VTCompressionOutputCallback, this, &compression_session_);
   if (source_attributes) {
     CFRelease(source_attributes);
     source_attributes = nullptr;
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.cc b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.cc
index caca96d..322c213 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.cc
@@ -154,11 +154,10 @@
   return true;
 }
 
-bool H264AnnexBBufferToCMSampleBuffer(
-    const uint8_t* annexb_buffer,
-    size_t annexb_buffer_size,
-    CMVideoFormatDescriptionRef video_format,
-    CMSampleBufferRef* out_sample_buffer) {
+bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer,
+                                      size_t annexb_buffer_size,
+                                      CMVideoFormatDescriptionRef video_format,
+                                      CMSampleBufferRef* out_sample_buffer) {
   RTC_DCHECK(annexb_buffer);
   RTC_DCHECK(out_sample_buffer);
   *out_sample_buffer = nullptr;
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h
index bdb079b..31ef525 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h
+++ b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h
@@ -9,8 +9,8 @@
  *
  */
 
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_NALU_H
-#define WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_NALU_H
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_NALU_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_NALU_H_
 
 #include "webrtc/modules/video_coding/codecs/h264/include/h264.h"
 
@@ -39,11 +39,10 @@
 // If |is_keyframe| is true then |video_format| is ignored since the format will
 // be read from the buffer. Otherwise |video_format| must be provided.
 // Caller is responsible for releasing the created sample buffer.
-bool H264AnnexBBufferToCMSampleBuffer(
-    const uint8_t* annexb_buffer,
-    size_t annexb_buffer_size,
-    CMVideoFormatDescriptionRef video_format,
-    CMSampleBufferRef* out_sample_buffer);
+bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer,
+                                      size_t annexb_buffer_size,
+                                      CMVideoFormatDescriptionRef video_format,
+                                      CMSampleBufferRef* out_sample_buffer);
 
 // Helper class for reading NALUs from an RTP Annex B buffer.
 class AnnexBBufferReader final {
@@ -97,4 +96,4 @@
 }  // namespace webrtc
 
 #endif  // defined(WEBRTC_VIDEO_TOOLBOX_SUPPORTED)
-#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_NALU_H
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_NALU_H_
diff --git a/webrtc/modules/video_coding/codecs/i420/i420.cc b/webrtc/modules/video_coding/codecs/i420/i420.cc
index cf546a0..7f06b4c 100644
--- a/webrtc/modules/video_coding/codecs/i420/i420.cc
+++ b/webrtc/modules/video_coding/codecs/i420/i420.cc
@@ -21,20 +21,19 @@
 
 namespace webrtc {
 
-I420Encoder::I420Encoder() : _inited(false), _encodedImage(),
-    _encodedCompleteCallback(NULL) {
-}
+I420Encoder::I420Encoder()
+    : _inited(false), _encodedImage(), _encodedCompleteCallback(NULL) {}
 
 I420Encoder::~I420Encoder() {
   _inited = false;
-  delete [] _encodedImage._buffer;
+  delete[] _encodedImage._buffer;
 }
 
 int I420Encoder::Release() {
   // Should allocate an encoded frame and then release it here, for that we
   // actually need an init flag.
   if (_encodedImage._buffer != NULL) {
-    delete [] _encodedImage._buffer;
+    delete[] _encodedImage._buffer;
     _encodedImage._buffer = NULL;
   }
   _inited = false;
@@ -53,7 +52,7 @@
 
   // Allocating encoded memory.
   if (_encodedImage._buffer != NULL) {
-    delete [] _encodedImage._buffer;
+    delete[] _encodedImage._buffer;
     _encodedImage._buffer = NULL;
     _encodedImage._size = 0;
   }
@@ -101,18 +100,18 @@
       kI420HeaderSize;
   if (_encodedImage._size > req_length) {
     // Reallocate buffer.
-    delete [] _encodedImage._buffer;
+    delete[] _encodedImage._buffer;
 
     _encodedImage._buffer = new uint8_t[req_length];
     _encodedImage._size = req_length;
   }
 
-  uint8_t *buffer = _encodedImage._buffer;
+  uint8_t* buffer = _encodedImage._buffer;
 
   buffer = InsertHeader(buffer, width, height);
 
-  int ret_length = ExtractBuffer(inputImage, req_length - kI420HeaderSize,
-                                 buffer);
+  int ret_length =
+      ExtractBuffer(inputImage, req_length - kI420HeaderSize, buffer);
   if (ret_length < 0)
     return WEBRTC_VIDEO_CODEC_MEMORY;
   _encodedImage._length = ret_length + kI420HeaderSize;
@@ -121,7 +120,8 @@
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-uint8_t* I420Encoder::InsertHeader(uint8_t *buffer, uint16_t width,
+uint8_t* I420Encoder::InsertHeader(uint8_t* buffer,
+                                   uint16_t width,
                                    uint16_t height) {
   *buffer++ = static_cast<uint8_t>(width >> 8);
   *buffer++ = static_cast<uint8_t>(width & 0xFF);
@@ -130,30 +130,29 @@
   return buffer;
 }
 
-int
-I420Encoder::RegisterEncodeCompleteCallback(EncodedImageCallback* callback) {
+int I420Encoder::RegisterEncodeCompleteCallback(
+    EncodedImageCallback* callback) {
   _encodedCompleteCallback = callback;
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-
-I420Decoder::I420Decoder() : _decodedImage(), _width(0), _height(0),
-    _inited(false), _decodeCompleteCallback(NULL) {
-}
+I420Decoder::I420Decoder()
+    : _decodedImage(),
+      _width(0),
+      _height(0),
+      _inited(false),
+      _decodeCompleteCallback(NULL) {}
 
 I420Decoder::~I420Decoder() {
   Release();
 }
 
-int
-I420Decoder::Reset() {
+int I420Decoder::Reset() {
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-
-int
-I420Decoder::InitDecode(const VideoCodec* codecSettings,
-                        int /*numberOfCores */) {
+int I420Decoder::InitDecode(const VideoCodec* codecSettings,
+                            int /*numberOfCores */) {
   if (codecSettings == NULL) {
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   } else if (codecSettings->width < 1 || codecSettings->height < 1) {
@@ -165,7 +164,8 @@
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-int I420Decoder::Decode(const EncodedImage& inputImage, bool /*missingFrames*/,
+int I420Decoder::Decode(const EncodedImage& inputImage,
+                        bool /*missingFrames*/,
                         const RTPFragmentationHeader* /*fragmentation*/,
                         const CodecSpecificInfo* /*codecSpecificInfo*/,
                         int64_t /*renderTimeMs*/) {
@@ -203,8 +203,8 @@
   }
   // Set decoded image parameters.
   int half_width = (_width + 1) / 2;
-  _decodedImage.CreateEmptyFrame(_width, _height,
-                                 _width, half_width, half_width);
+  _decodedImage.CreateEmptyFrame(_width, _height, _width, half_width,
+                                 half_width);
   // Converting from buffer to plane representation.
   int ret = ConvertToI420(kI420, buffer, 0, 0, _width, _height, 0,
                           kVideoRotation_0, &_decodedImage);
@@ -218,7 +218,8 @@
 }
 
 const uint8_t* I420Decoder::ExtractHeader(const uint8_t* buffer,
-                                          uint16_t* width, uint16_t* height) {
+                                          uint16_t* width,
+                                          uint16_t* height) {
   *width = static_cast<uint16_t>(*buffer++) << 8;
   *width |= *buffer++;
   *height = static_cast<uint16_t>(*buffer++) << 8;
diff --git a/webrtc/modules/video_coding/codecs/i420/include/i420.h b/webrtc/modules/video_coding/codecs/i420/include/i420.h
index 165eff6..9f77845 100644
--- a/webrtc/modules/video_coding/codecs/i420/include/i420.h
+++ b/webrtc/modules/video_coding/codecs/i420/include/i420.h
@@ -8,8 +8,8 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_MAIN_INTERFACE_I420_H_
-#define WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_MAIN_INTERFACE_I420_H_
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_INCLUDE_I420_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_INCLUDE_I420_H_
 
 #include <vector>
 
@@ -24,45 +24,45 @@
 
   virtual ~I420Encoder();
 
-// Initialize the encoder with the information from the VideoCodec.
-//
-// Input:
-//          - codecSettings     : Codec settings.
-//          - numberOfCores     : Number of cores available for the encoder.
-//          - maxPayloadSize    : The maximum size each payload is allowed
-//                                to have. Usually MTU - overhead.
-//
-// Return value                 : WEBRTC_VIDEO_CODEC_OK if OK.
-//                                <0 - Error
+  // Initialize the encoder with the information from the VideoCodec.
+  //
+  // Input:
+  //          - codecSettings     : Codec settings.
+  //          - numberOfCores     : Number of cores available for the encoder.
+  //          - maxPayloadSize    : The maximum size each payload is allowed
+  //                                to have. Usually MTU - overhead.
+  //
+  // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK.
+  //                                <0 - Error
   int InitEncode(const VideoCodec* codecSettings,
                  int /*numberOfCores*/,
                  size_t /*maxPayloadSize*/) override;
 
-// "Encode" an I420 image (as a part of a video stream). The encoded image
-// will be returned to the user via the encode complete callback.
-//
-// Input:
-//          - inputImage        : Image to be encoded.
-//          - codecSpecificInfo : Pointer to codec specific data.
-//          - frameType         : Frame type to be sent (Key /Delta).
-//
-// Return value                 : WEBRTC_VIDEO_CODEC_OK if OK.
-//                                <0 - Error
+  // "Encode" an I420 image (as a part of a video stream). The encoded image
+  // will be returned to the user via the encode complete callback.
+  //
+  // Input:
+  //          - inputImage        : Image to be encoded.
+  //          - codecSpecificInfo : Pointer to codec specific data.
+  //          - frameType         : Frame type to be sent (Key /Delta).
+  //
+  // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK.
+  //                                <0 - Error
   int Encode(const VideoFrame& inputImage,
              const CodecSpecificInfo* /*codecSpecificInfo*/,
              const std::vector<FrameType>* /*frame_types*/) override;
 
-// Register an encode complete callback object.
-//
-// Input:
-//          - callback         : Callback object which handles encoded images.
-//
-// Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+  // Register an encode complete callback object.
+  //
+  // Input:
+  //          - callback         : Callback object which handles encoded images.
+  //
+  // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
   int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;
 
-// Free encoder memory.
-//
-// Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+  // Free encoder memory.
+  //
+  // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
   int Release() override;
 
   int SetRates(uint32_t /*newBitRate*/, uint32_t /*frameRate*/) override {
@@ -76,12 +76,13 @@
   void OnDroppedFrame() override {}
 
  private:
-  static uint8_t* InsertHeader(uint8_t* buffer, uint16_t width,
+  static uint8_t* InsertHeader(uint8_t* buffer,
+                               uint16_t width,
                                uint16_t height);
 
-  bool                     _inited;
-  EncodedImage             _encodedImage;
-  EncodedImageCallback*    _encodedCompleteCallback;
+  bool _inited;
+  EncodedImage _encodedImage;
+  EncodedImageCallback* _encodedCompleteCallback;
 };  // class I420Encoder
 
 class I420Decoder : public VideoDecoder {
@@ -90,50 +91,50 @@
 
   virtual ~I420Decoder();
 
-// Initialize the decoder.
-// The user must notify the codec of width and height values.
-//
-// Return value         :  WEBRTC_VIDEO_CODEC_OK.
-//                        <0 - Errors
+  // Initialize the decoder.
+  // The user must notify the codec of width and height values.
+  //
+  // Return value         :  WEBRTC_VIDEO_CODEC_OK.
+  //                        <0 - Errors
   int InitDecode(const VideoCodec* codecSettings,
                  int /*numberOfCores*/) override;
 
-// Decode encoded image (as a part of a video stream). The decoded image
-// will be returned to the user through the decode complete callback.
-//
-// Input:
-//          - inputImage        : Encoded image to be decoded
-//          - missingFrames     : True if one or more frames have been lost
-//                                since the previous decode call.
-//          - codecSpecificInfo : pointer to specific codec data
-//          - renderTimeMs      : Render time in Ms
-//
-// Return value                 : WEBRTC_VIDEO_CODEC_OK if OK
-//                                 <0 - Error
+  // Decode encoded image (as a part of a video stream). The decoded image
+  // will be returned to the user through the decode complete callback.
+  //
+  // Input:
+  //          - inputImage        : Encoded image to be decoded
+  //          - missingFrames     : True if one or more frames have been lost
+  //                                since the previous decode call.
+  //          - codecSpecificInfo : pointer to specific codec data
+  //          - renderTimeMs      : Render time in Ms
+  //
+  // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK
+  //                                 <0 - Error
   int Decode(const EncodedImage& inputImage,
              bool missingFrames,
              const RTPFragmentationHeader* /*fragmentation*/,
              const CodecSpecificInfo* /*codecSpecificInfo*/,
              int64_t /*renderTimeMs*/) override;
 
-// Register a decode complete callback object.
-//
-// Input:
-//          - callback         : Callback object which handles decoded images.
-//
-// Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+  // Register a decode complete callback object.
+  //
+  // Input:
+  //          - callback         : Callback object which handles decoded images.
+  //
+  // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
   int RegisterDecodeCompleteCallback(DecodedImageCallback* callback) override;
 
-// Free decoder memory.
-//
-// Return value                : WEBRTC_VIDEO_CODEC_OK if OK.
-//                                  <0 - Error
+  // Free decoder memory.
+  //
+  // Return value                : WEBRTC_VIDEO_CODEC_OK if OK.
+  //                                  <0 - Error
   int Release() override;
 
-// Reset decoder state and prepare for a new call.
-//
-// Return value         :  WEBRTC_VIDEO_CODEC_OK.
-//                          <0 - Error
+  // Reset decoder state and prepare for a new call.
+  //
+  // Return value         :  WEBRTC_VIDEO_CODEC_OK.
+  //                          <0 - Error
   int Reset() override;
 
  private:
@@ -142,12 +143,12 @@
                                       uint16_t* height);
 
   VideoFrame _decodedImage;
-  int                         _width;
-  int                         _height;
-  bool                        _inited;
-  DecodedImageCallback*       _decodeCompleteCallback;
+  int _width;
+  int _height;
+  bool _inited;
+  DecodedImageCallback* _decodeCompleteCallback;
 };  // class I420Decoder
 
 }  // namespace webrtc
 
-#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_MAIN_INTERFACE_I420_H_
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_INCLUDE_I420_H_
diff --git a/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h b/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
index 69a9375..d727e89 100644
--- a/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
+++ b/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
@@ -8,12 +8,13 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_MODULES_VIDEO_CODING_INCLUDE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
-#define WEBRTC_MODULES_VIDEO_CODING_INCLUDE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
 
-#pragma message("WARNING: video_coding/codecs/interface is DEPRECATED; use video_coding/include")
-
+#pragma message("WARNING: video_coding/codecs/interface is DEPRECATED; " \
+    "use video_coding/include")
 #include <string>
+#include <vector>
 
 #include "testing/gmock/include/gmock/gmock.h"
 #include "webrtc/modules/video_coding/include/video_codec_interface.h"
@@ -23,17 +24,19 @@
 
 class MockEncodedImageCallback : public EncodedImageCallback {
  public:
-  MOCK_METHOD3(Encoded, int32_t(const EncodedImage& encodedImage,
-                                const CodecSpecificInfo* codecSpecificInfo,
-                                const RTPFragmentationHeader* fragmentation));
+  MOCK_METHOD3(Encoded,
+               int32_t(const EncodedImage& encodedImage,
+                       const CodecSpecificInfo* codecSpecificInfo,
+                       const RTPFragmentationHeader* fragmentation));
 };
 
 class MockVideoEncoder : public VideoEncoder {
  public:
-  MOCK_CONST_METHOD2(Version, int32_t(int8_t *version, int32_t length));
-  MOCK_METHOD3(InitEncode, int32_t(const VideoCodec* codecSettings,
-                                   int32_t numberOfCores,
-                                   size_t maxPayloadSize));
+  MOCK_CONST_METHOD2(Version, int32_t(int8_t* version, int32_t length));
+  MOCK_METHOD3(InitEncode,
+               int32_t(const VideoCodec* codecSettings,
+                       int32_t numberOfCores,
+                       size_t maxPayloadSize));
   MOCK_METHOD3(Encode,
                int32_t(const VideoFrame& inputImage,
                        const CodecSpecificInfo* codecSpecificInfo,
@@ -49,24 +52,24 @@
 
 class MockDecodedImageCallback : public DecodedImageCallback {
  public:
-  MOCK_METHOD1(Decoded, int32_t(VideoFrame& decodedImage));
-  MOCK_METHOD2(Decoded, int32_t(VideoFrame& decodedImage,
-                                int64_t decode_time_ms));
+  MOCK_METHOD1(Decoded, int32_t(const VideoFrame& decodedImage));
+  MOCK_METHOD2(Decoded,
+               int32_t(const VideoFrame& decodedImage, int64_t decode_time_ms));
   MOCK_METHOD1(ReceivedDecodedReferenceFrame,
                int32_t(const uint64_t pictureId));
-  MOCK_METHOD1(ReceivedDecodedFrame,
-               int32_t(const uint64_t pictureId));
+  MOCK_METHOD1(ReceivedDecodedFrame, int32_t(const uint64_t pictureId));
 };
 
 class MockVideoDecoder : public VideoDecoder {
  public:
-  MOCK_METHOD2(InitDecode, int32_t(const VideoCodec* codecSettings,
-                                   int32_t numberOfCores));
-  MOCK_METHOD5(Decode, int32_t(const EncodedImage& inputImage,
-                               bool missingFrames,
-                               const RTPFragmentationHeader* fragmentation,
-                               const CodecSpecificInfo* codecSpecificInfo,
-                               int64_t renderTimeMs));
+  MOCK_METHOD2(InitDecode,
+               int32_t(const VideoCodec* codecSettings, int32_t numberOfCores));
+  MOCK_METHOD5(Decode,
+               int32_t(const EncodedImage& inputImage,
+                       bool missingFrames,
+                       const RTPFragmentationHeader* fragmentation,
+                       const CodecSpecificInfo* codecSpecificInfo,
+                       int64_t renderTimeMs));
   MOCK_METHOD1(RegisterDecodeCompleteCallback,
                int32_t(DecodedImageCallback* callback));
   MOCK_METHOD0(Release, int32_t());
@@ -76,4 +79,4 @@
 
 }  // namespace webrtc
 
-#endif  // WEBRTC_MODULES_VIDEO_CODING_INCLUDE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
diff --git a/webrtc/modules/video_coding/codecs/interface/video_codec_interface.h b/webrtc/modules/video_coding/codecs/interface/video_codec_interface.h
index a7bf62f..6bcfa90 100644
--- a/webrtc/modules/video_coding/codecs/interface/video_codec_interface.h
+++ b/webrtc/modules/video_coding/codecs/interface/video_codec_interface.h
@@ -8,11 +8,11 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODEC_INTERFACE_H_
-#define WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODEC_INTERFACE_H_
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H_
 
-#pragma message("WARNING: video_coding/codecs/interface is DEPRECATED; use video_coding/include")
-
+#pragma message("WARNING: video_coding/codecs/interface is DEPRECATED; " \
+    "use video_coding/include")
 #include <vector>
 
 #include "webrtc/common_types.h"
@@ -23,10 +23,9 @@
 #include "webrtc/video_encoder.h"
 #include "webrtc/video_frame.h"
 
-namespace webrtc
-{
+namespace webrtc {
 
-class RTPFragmentationHeader; // forward declaration
+class RTPFragmentationHeader;  // forward declaration
 
 // Note: if any pointers are added to this struct, it must be fitted
 // with a copy-constructor. See below.
@@ -92,12 +91,11 @@
 // Note: if any pointers are added to this struct or its sub-structs, it
 // must be fitted with a copy-constructor. This is because it is copied
 // in the copy-constructor of VCMEncodedFrame.
-struct CodecSpecificInfo
-{
-    VideoCodecType   codecType;
-    CodecSpecificInfoUnion codecSpecific;
+struct CodecSpecificInfo {
+  VideoCodecType codecType;
+  CodecSpecificInfoUnion codecSpecific;
 };
 
 }  // namespace webrtc
 
-#endif // WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODEC_INTERFACE_H_
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H_
diff --git a/webrtc/modules/video_coding/codecs/interface/video_error_codes.h b/webrtc/modules/video_coding/codecs/interface/video_error_codes.h
index 38924dc..ea8829d 100644
--- a/webrtc/modules/video_coding/codecs/interface/video_error_codes.h
+++ b/webrtc/modules/video_coding/codecs/interface/video_error_codes.h
@@ -8,10 +8,11 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_ERROR_CODES_H_
-#define WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_ERROR_CODES_H_
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H_
 
-#pragma message("WARNING: video_coding/codecs/interface is DEPRECATED; use video_coding/include")
+#pragma message("WARNING: video_coding/codecs/interface is DEPRECATED; " \
+    "use video_coding/include")
 
 // NOTE: in sync with video_coding_module_defines.h
 
@@ -31,4 +32,4 @@
 #define WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE -13
 #define WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT -14
 
-#endif // WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_ERROR_CODES_H_
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H_
diff --git a/webrtc/modules/video_coding/codecs/test/packet_manipulator.cc b/webrtc/modules/video_coding/codecs/test/packet_manipulator.cc
index 36ba0e8..b554b4e 100644
--- a/webrtc/modules/video_coding/codecs/test/packet_manipulator.cc
+++ b/webrtc/modules/video_coding/codecs/test/packet_manipulator.cc
@@ -57,7 +57,7 @@
       active_burst_packets_--;
       nbr_packets_dropped++;
     } else if (RandomUniform() < config_.packet_loss_probability ||
-        packet_loss_has_occurred) {
+               packet_loss_has_occurred) {
       packet_loss_has_occurred = true;
       nbr_packets_dropped++;
       if (config_.packet_loss_mode == kBurst) {
@@ -91,9 +91,9 @@
   // get the same behavior as long as we're using a fixed initial seed.
   critsect_->Enter();
   srand(random_seed_);
-  random_seed_ = rand();
+  random_seed_ = rand();  // NOLINT (rand_r instead of rand)
   critsect_->Leave();
-  return (random_seed_ + 1.0)/(RAND_MAX + 1.0);
+  return (random_seed_ + 1.0) / (RAND_MAX + 1.0);
 }
 
 const char* PacketLossModeToStr(PacketLossMode e) {
@@ -109,4 +109,4 @@
 }
 
 }  // namespace test
-}  // namespace webrtcc
+}  // namespace webrtc
diff --git a/webrtc/modules/video_coding/codecs/test/packet_manipulator.h b/webrtc/modules/video_coding/codecs/test/packet_manipulator.h
index dfad2e9..3334be0 100644
--- a/webrtc/modules/video_coding/codecs/test/packet_manipulator.h
+++ b/webrtc/modules/video_coding/codecs/test/packet_manipulator.h
@@ -36,10 +36,11 @@
 // scenarios caused by network interference.
 struct NetworkingConfig {
   NetworkingConfig()
-  : packet_size_in_bytes(1500), max_payload_size_in_bytes(1440),
-    packet_loss_mode(kUniform), packet_loss_probability(0.0),
-    packet_loss_burst_length(1) {
-  }
+      : packet_size_in_bytes(1500),
+        max_payload_size_in_bytes(1440),
+        packet_loss_mode(kUniform),
+        packet_loss_probability(0.0),
+        packet_loss_burst_length(1) {}
 
   // Packet size in bytes. Default: 1500 bytes.
   size_t packet_size_in_bytes;
@@ -93,9 +94,11 @@
   virtual ~PacketManipulatorImpl();
   int ManipulatePackets(webrtc::EncodedImage* encoded_image) override;
   virtual void InitializeRandomSeed(unsigned int seed);
+
  protected:
   // Returns a uniformly distributed random value between 0.0 and 1.0
   virtual double RandomUniform();
+
  private:
   PacketReader* packet_reader_;
   const NetworkingConfig& config_;
diff --git a/webrtc/modules/video_coding/codecs/test/packet_manipulator_unittest.cc b/webrtc/modules/video_coding/codecs/test/packet_manipulator_unittest.cc
index 2f9d30e..8c3d30d 100644
--- a/webrtc/modules/video_coding/codecs/test/packet_manipulator_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/test/packet_manipulator_unittest.cc
@@ -25,7 +25,7 @@
 const double kAlwaysDropProbability = 1.0;
 const int kBurstLength = 1;
 
-class PacketManipulatorTest: public PacketRelatedTest {
+class PacketManipulatorTest : public PacketRelatedTest {
  protected:
   PacketReader packet_reader_;
   EncodedImage image_;
@@ -50,19 +50,15 @@
 
   virtual ~PacketManipulatorTest() {}
 
-  void SetUp() {
-    PacketRelatedTest::SetUp();
-  }
+  void SetUp() { PacketRelatedTest::SetUp(); }
 
-  void TearDown() {
-    PacketRelatedTest::TearDown();
-  }
+  void TearDown() { PacketRelatedTest::TearDown(); }
 
   void VerifyPacketLoss(int expected_nbr_packets_dropped,
                         int actual_nbr_packets_dropped,
                         size_t expected_packet_data_length,
                         uint8_t* expected_packet_data,
-                        EncodedImage& actual_image) {
+                        const EncodedImage& actual_image) {
     EXPECT_EQ(expected_nbr_packets_dropped, actual_nbr_packets_dropped);
     EXPECT_EQ(expected_packet_data_length, image_._length);
     EXPECT_EQ(0, memcmp(expected_packet_data, actual_image._buffer,
@@ -75,10 +71,10 @@
 }
 
 TEST_F(PacketManipulatorTest, DropNone) {
-  PacketManipulatorImpl manipulator(&packet_reader_,  no_drop_config_, false);
+  PacketManipulatorImpl manipulator(&packet_reader_, no_drop_config_, false);
   int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
-  VerifyPacketLoss(0, nbr_packets_dropped, kPacketDataLength,
-                   packet_data_, image_);
+  VerifyPacketLoss(0, nbr_packets_dropped, kPacketDataLength, packet_data_,
+                   image_);
 }
 
 TEST_F(PacketManipulatorTest, UniformDropNoneSmallFrame) {
@@ -87,15 +83,14 @@
   PacketManipulatorImpl manipulator(&packet_reader_, no_drop_config_, false);
   int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
 
-  VerifyPacketLoss(0, nbr_packets_dropped, data_length,
-                     packet_data_, image_);
+  VerifyPacketLoss(0, nbr_packets_dropped, data_length, packet_data_, image_);
 }
 
 TEST_F(PacketManipulatorTest, UniformDropAll) {
   PacketManipulatorImpl manipulator(&packet_reader_, drop_config_, false);
   int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
-  VerifyPacketLoss(kPacketDataNumberOfPackets, nbr_packets_dropped,
-                   0, packet_data_, image_);
+  VerifyPacketLoss(kPacketDataNumberOfPackets, nbr_packets_dropped, 0,
+                   packet_data_, image_);
 }
 
 // Use our customized test class to make the second packet being lost
diff --git a/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.cc b/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.cc
index c92cfa4..9eba205 100644
--- a/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.cc
+++ b/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.cc
@@ -19,13 +19,11 @@
 namespace test {
 
 PredictivePacketManipulator::PredictivePacketManipulator(
-    PacketReader* packet_reader, const NetworkingConfig& config)
-    : PacketManipulatorImpl(packet_reader, config, false) {
-}
+    PacketReader* packet_reader,
+    const NetworkingConfig& config)
+    : PacketManipulatorImpl(packet_reader, config, false) {}
 
-PredictivePacketManipulator::~PredictivePacketManipulator() {
-}
-
+PredictivePacketManipulator::~PredictivePacketManipulator() {}
 
 void PredictivePacketManipulator::AddRandomResult(double result) {
   assert(result >= 0.0 && result <= 1.0);
@@ -33,8 +31,9 @@
 }
 
 double PredictivePacketManipulator::RandomUniform() {
-  if(random_results_.size() == 0u) {
-    fprintf(stderr, "No more stored results, please make sure AddRandomResult()"
+  if (random_results_.size() == 0u) {
+    fprintf(stderr,
+            "No more stored results, please make sure AddRandomResult() "
             "is called same amount of times you're going to invoke the "
             "RandomUniform() function, i.e. once per packet.\n");
     assert(false);
@@ -45,4 +44,4 @@
 }
 
 }  // namespace test
-}  // namespace webrtcc
+}  // namespace webrtc
diff --git a/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.h b/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.h
index 082712d..45c7848 100644
--- a/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.h
+++ b/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.h
@@ -31,6 +31,7 @@
   // FIFO queue so they will be returned in the same order they were added.
   // Result parameter must be 0.0 to 1.0.
   void AddRandomResult(double result);
+
  protected:
   // Returns a uniformly distributed random value between 0.0 and 1.0
   double RandomUniform() override;
diff --git a/webrtc/modules/video_coding/codecs/test/stats.cc b/webrtc/modules/video_coding/codecs/test/stats.cc
index f87407d..478b2f4 100644
--- a/webrtc/modules/video_coding/codecs/test/stats.cc
+++ b/webrtc/modules/video_coding/codecs/test/stats.cc
@@ -39,19 +39,19 @@
 Stats::~Stats() {}
 
 bool LessForEncodeTime(const FrameStatistic& s1, const FrameStatistic& s2) {
-    return s1.encode_time_in_us < s2.encode_time_in_us;
+  return s1.encode_time_in_us < s2.encode_time_in_us;
 }
 
 bool LessForDecodeTime(const FrameStatistic& s1, const FrameStatistic& s2) {
-    return s1.decode_time_in_us < s2.decode_time_in_us;
+  return s1.decode_time_in_us < s2.decode_time_in_us;
 }
 
 bool LessForEncodedSize(const FrameStatistic& s1, const FrameStatistic& s2) {
-    return s1.encoded_frame_length_in_bytes < s2.encoded_frame_length_in_bytes;
+  return s1.encoded_frame_length_in_bytes < s2.encoded_frame_length_in_bytes;
 }
 
 bool LessForBitRate(const FrameStatistic& s1, const FrameStatistic& s2) {
-    return s1.bit_rate_in_kbps < s2.bit_rate_in_kbps;
+  return s1.bit_rate_in_kbps < s2.bit_rate_in_kbps;
 }
 
 FrameStatistic& Stats::NewFrame(int frame_number) {
@@ -78,8 +78,7 @@
   size_t nbr_keyframes = 0;
   size_t nbr_nonkeyframes = 0;
 
-  for (FrameStatisticsIterator it = stats_.begin();
-      it != stats_.end(); ++it) {
+  for (FrameStatisticsIterator it = stats_.begin(); it != stats_.end(); ++it) {
     total_encoding_time_in_us += it->encode_time_in_us;
     total_decoding_time_in_us += it->decode_time_in_us;
     total_encoded_frames_lengths += it->encoded_frame_length_in_bytes;
@@ -96,15 +95,13 @@
 
   // ENCODING
   printf("Encoding time:\n");
-  frame = std::min_element(stats_.begin(),
-                      stats_.end(), LessForEncodeTime);
-  printf("  Min     : %7d us (frame %d)\n",
-         frame->encode_time_in_us, frame->frame_number);
+  frame = std::min_element(stats_.begin(), stats_.end(), LessForEncodeTime);
+  printf("  Min     : %7d us (frame %d)\n", frame->encode_time_in_us,
+         frame->frame_number);
 
-  frame = std::max_element(stats_.begin(),
-                      stats_.end(), LessForEncodeTime);
-  printf("  Max     : %7d us (frame %d)\n",
-         frame->encode_time_in_us, frame->frame_number);
+  frame = std::max_element(stats_.begin(), stats_.end(), LessForEncodeTime);
+  printf("  Max     : %7d us (frame %d)\n", frame->encode_time_in_us,
+         frame->frame_number);
 
   printf("  Average : %7d us\n",
          static_cast<int>(total_encoding_time_in_us / stats_.size()));
@@ -115,7 +112,7 @@
   // failures)
   std::vector<FrameStatistic> decoded_frames;
   for (std::vector<FrameStatistic>::iterator it = stats_.begin();
-      it != stats_.end(); ++it) {
+       it != stats_.end(); ++it) {
     if (it->decoding_successful) {
       decoded_frames.push_back(*it);
     }
@@ -123,15 +120,15 @@
   if (decoded_frames.size() == 0) {
     printf("No successfully decoded frames exist in this statistics.\n");
   } else {
-    frame = std::min_element(decoded_frames.begin(),
-                        decoded_frames.end(), LessForDecodeTime);
-    printf("  Min     : %7d us (frame %d)\n",
-           frame->decode_time_in_us, frame->frame_number);
+    frame = std::min_element(decoded_frames.begin(), decoded_frames.end(),
+                             LessForDecodeTime);
+    printf("  Min     : %7d us (frame %d)\n", frame->decode_time_in_us,
+           frame->frame_number);
 
-    frame = std::max_element(decoded_frames.begin(),
-                        decoded_frames.end(), LessForDecodeTime);
-    printf("  Max     : %7d us (frame %d)\n",
-           frame->decode_time_in_us, frame->frame_number);
+    frame = std::max_element(decoded_frames.begin(), decoded_frames.end(),
+                             LessForDecodeTime);
+    printf("  Max     : %7d us (frame %d)\n", frame->decode_time_in_us,
+           frame->frame_number);
 
     printf("  Average : %7d us\n",
            static_cast<int>(total_decoding_time_in_us / decoded_frames.size()));
@@ -141,13 +138,11 @@
 
   // SIZE
   printf("Frame sizes:\n");
-  frame = std::min_element(stats_.begin(),
-                      stats_.end(), LessForEncodedSize);
+  frame = std::min_element(stats_.begin(), stats_.end(), LessForEncodedSize);
   printf("  Min     : %7" PRIuS " bytes (frame %d)\n",
          frame->encoded_frame_length_in_bytes, frame->frame_number);
 
-  frame = std::max_element(stats_.begin(),
-                      stats_.end(), LessForEncodedSize);
+  frame = std::max_element(stats_.begin(), stats_.end(), LessForEncodedSize);
   printf("  Max     : %7" PRIuS " bytes (frame %d)\n",
          frame->encoded_frame_length_in_bytes, frame->frame_number);
 
@@ -167,21 +162,17 @@
 
   // BIT RATE
   printf("Bit rates:\n");
-  frame = std::min_element(stats_.begin(),
-                      stats_.end(), LessForBitRate);
-  printf("  Min bit rate: %7d kbps (frame %d)\n",
-         frame->bit_rate_in_kbps, frame->frame_number);
+  frame = std::min_element(stats_.begin(), stats_.end(), LessForBitRate);
+  printf("  Min bit rate: %7d kbps (frame %d)\n", frame->bit_rate_in_kbps,
+         frame->frame_number);
 
-  frame = std::max_element(stats_.begin(),
-                      stats_.end(), LessForBitRate);
-  printf("  Max bit rate: %7d kbps (frame %d)\n",
-         frame->bit_rate_in_kbps, frame->frame_number);
+  frame = std::max_element(stats_.begin(), stats_.end(), LessForBitRate);
+  printf("  Max bit rate: %7d kbps (frame %d)\n", frame->bit_rate_in_kbps,
+         frame->frame_number);
 
   printf("\n");
-  printf("Total encoding time  : %7d ms.\n",
-         total_encoding_time_in_us / 1000);
-  printf("Total decoding time  : %7d ms.\n",
-         total_decoding_time_in_us / 1000);
+  printf("Total encoding time  : %7d ms.\n", total_encoding_time_in_us / 1000);
+  printf("Total decoding time  : %7d ms.\n", total_decoding_time_in_us / 1000);
   printf("Total processing time: %7d ms.\n",
          (total_encoding_time_in_us + total_decoding_time_in_us) / 1000);
 }
diff --git a/webrtc/modules/video_coding/codecs/test/stats_unittest.cc b/webrtc/modules/video_coding/codecs/test/stats_unittest.cc
index a2d27e7..0403ccf 100644
--- a/webrtc/modules/video_coding/codecs/test/stats_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/test/stats_unittest.cc
@@ -16,21 +16,15 @@
 namespace webrtc {
 namespace test {
 
-class StatsTest: public testing::Test {
+class StatsTest : public testing::Test {
  protected:
-  StatsTest() {
-  }
+  StatsTest() {}
 
-  virtual ~StatsTest() {
-  }
+  virtual ~StatsTest() {}
 
-  void SetUp() {
-    stats_ = new Stats();
-  }
+  void SetUp() { stats_ = new Stats(); }
 
-  void TearDown() {
-    delete stats_;
-  }
+  void TearDown() { delete stats_; }
 
   Stats* stats_;
 };
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
index c814dfe..7376000 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
@@ -93,14 +93,18 @@
   int32_t register_result =
       encoder_->RegisterEncodeCompleteCallback(encode_callback_);
   if (register_result != WEBRTC_VIDEO_CODEC_OK) {
-    fprintf(stderr, "Failed to register encode complete callback, return code: "
-        "%d\n", register_result);
+    fprintf(stderr,
+            "Failed to register encode complete callback, return code: "
+            "%d\n",
+            register_result);
     return false;
   }
   register_result = decoder_->RegisterDecodeCompleteCallback(decode_callback_);
   if (register_result != WEBRTC_VIDEO_CODEC_OK) {
-    fprintf(stderr, "Failed to register decode complete callback, return code: "
-            "%d\n", register_result);
+    fprintf(stderr,
+            "Failed to register decode complete callback, return code: "
+            "%d\n",
+            register_result);
     return false;
   }
   // Init the encoder and decoder
@@ -146,13 +150,14 @@
   delete decode_callback_;
 }
 
-
 void VideoProcessorImpl::SetRates(int bit_rate, int frame_rate) {
   int set_rates_result = encoder_->SetRates(bit_rate, frame_rate);
   assert(set_rates_result >= 0);
   if (set_rates_result < 0) {
-    fprintf(stderr, "Failed to update encoder with new rate %d, "
-            "return code: %d\n", bit_rate, set_rates_result);
+    fprintf(stderr,
+            "Failed to update encoder with new rate %d, "
+            "return code: %d\n",
+            bit_rate, set_rates_result);
   }
   num_dropped_frames_ = 0;
   num_spatial_resizes_ = 0;
@@ -175,7 +180,7 @@
 }
 
 bool VideoProcessorImpl::ProcessFrame(int frame_number) {
-  assert(frame_number >=0);
+  assert(frame_number >= 0);
   if (!initialized_) {
     fprintf(stderr, "Attempting to use uninitialized VideoProcessor!\n");
     return false;
@@ -186,10 +191,8 @@
   }
   if (frame_reader_->ReadFrame(source_buffer_)) {
     // Copy the source frame to the newly read frame data.
-    source_frame_.CreateFrame(source_buffer_,
-                              config_.codec_settings->width,
-                              config_.codec_settings->height,
-                              kVideoRotation_0);
+    source_frame_.CreateFrame(source_buffer_, config_.codec_settings->width,
+                              config_.codec_settings->height, kVideoRotation_0);
 
     // Ensure we have a new statistics data object we can fill:
     FrameStatistic& stat = stats_->NewFrame(frame_number);
@@ -224,10 +227,10 @@
 
 void VideoProcessorImpl::FrameEncoded(const EncodedImage& encoded_image) {
   // Timestamp is frame number, so this gives us #dropped frames.
-  int num_dropped_from_prev_encode =  encoded_image._timeStamp -
-      prev_time_stamp_ - 1;
-  num_dropped_frames_ +=  num_dropped_from_prev_encode;
-  prev_time_stamp_ =  encoded_image._timeStamp;
+  int num_dropped_from_prev_encode =
+      encoded_image._timeStamp - prev_time_stamp_ - 1;
+  num_dropped_frames_ += num_dropped_from_prev_encode;
+  prev_time_stamp_ = encoded_image._timeStamp;
   if (num_dropped_from_prev_encode > 0) {
     // For dropped frames, we write out the last decoded frame to avoid getting
     // out of sync for the computation of PSNR and SSIM.
@@ -244,15 +247,16 @@
   TickTime encode_stop = TickTime::Now();
   int frame_number = encoded_image._timeStamp;
   FrameStatistic& stat = stats_->stats_[frame_number];
-  stat.encode_time_in_us = GetElapsedTimeMicroseconds(encode_start_,
-                                                      encode_stop);
+  stat.encode_time_in_us =
+      GetElapsedTimeMicroseconds(encode_start_, encode_stop);
   stat.encoding_successful = true;
   stat.encoded_frame_length_in_bytes = encoded_image._length;
   stat.frame_number = encoded_image._timeStamp;
   stat.frame_type = encoded_image._frameType;
   stat.bit_rate_in_kbps = encoded_image._length * bit_rate_factor_;
-  stat.total_packets = encoded_image._length /
-      config_.networking_config.packet_size_in_bytes + 1;
+  stat.total_packets =
+      encoded_image._length / config_.networking_config.packet_size_in_bytes +
+      1;
 
   // Perform packet loss if criteria is fullfilled:
   bool exclude_this_frame = false;
@@ -280,7 +284,7 @@
   copied_image._buffer = copied_buffer.get();
   if (!exclude_this_frame) {
     stat.packets_dropped =
-          packet_manipulator_->ManipulatePackets(&copied_image);
+        packet_manipulator_->ManipulatePackets(&copied_image);
   }
 
   // Keep track of if frames are lost due to packet loss so we can tell
@@ -305,26 +309,25 @@
   int frame_number = image.timestamp();
   // Report stats
   FrameStatistic& stat = stats_->stats_[frame_number];
-  stat.decode_time_in_us = GetElapsedTimeMicroseconds(decode_start_,
-                                                      decode_stop);
+  stat.decode_time_in_us =
+      GetElapsedTimeMicroseconds(decode_start_, decode_stop);
   stat.decoding_successful = true;
 
   // Check for resize action (either down or up):
   if (static_cast<int>(image.width()) != last_encoder_frame_width_ ||
-      static_cast<int>(image.height()) != last_encoder_frame_height_ ) {
+      static_cast<int>(image.height()) != last_encoder_frame_height_) {
     ++num_spatial_resizes_;
     last_encoder_frame_width_ = image.width();
     last_encoder_frame_height_ = image.height();
   }
   // Check if codec size is different from native/original size, and if so,
   // upsample back to original size: needed for PSNR and SSIM computations.
-  if (image.width() !=  config_.codec_settings->width ||
+  if (image.width() != config_.codec_settings->width ||
       image.height() != config_.codec_settings->height) {
     VideoFrame up_image;
-    int ret_val = scaler_.Set(image.width(), image.height(),
-                              config_.codec_settings->width,
-                              config_.codec_settings->height,
-                              kI420, kI420, kScaleBilinear);
+    int ret_val = scaler_.Set(
+        image.width(), image.height(), config_.codec_settings->width,
+        config_.codec_settings->height, kI420, kI420, kScaleBilinear);
     assert(ret_val >= 0);
     if (ret_val < 0) {
       fprintf(stderr, "Failed to set scalar for frame: %d, return code: %d\n",
@@ -366,7 +369,8 @@
 }
 
 int VideoProcessorImpl::GetElapsedTimeMicroseconds(
-    const webrtc::TickTime& start, const webrtc::TickTime& stop) {
+    const webrtc::TickTime& start,
+    const webrtc::TickTime& stop) {
   uint64_t encode_time = (stop - start).Microseconds();
   assert(encode_time <
          static_cast<unsigned int>(std::numeric_limits<int>::max()));
@@ -404,8 +408,7 @@
 }
 
 // Callbacks
-int32_t
-VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
+int32_t VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
     const EncodedImage& encoded_image,
     const webrtc::CodecSpecificInfo* codec_specific_info,
     const webrtc::RTPFragmentationHeader* fragmentation) {
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor.h b/webrtc/modules/video_coding/codecs/test/videoprocessor.h
index 6b2180c..3ee08fd 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor.h
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor.h
@@ -243,17 +243,16 @@
 
   // Callback class required to implement according to the VideoDecoder API.
   class VideoProcessorDecodeCompleteCallback
-    : public webrtc::DecodedImageCallback {
+      : public webrtc::DecodedImageCallback {
    public:
-      explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp)
-      : video_processor_(vp) {
+    explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp)
+        : video_processor_(vp) {}
+    int32_t Decoded(webrtc::VideoFrame& image) override;
+    int32_t Decoded(webrtc::VideoFrame& image,
+                    int64_t decode_time_ms) override {
+      RTC_NOTREACHED();
+      return -1;
     }
-      int32_t Decoded(webrtc::VideoFrame& image) override;
-      int32_t Decoded(
-          webrtc::VideoFrame& image, int64_t decode_time_ms) override {
-        RTC_NOTREACHED();
-        return -1;
-      }
 
    private:
     VideoProcessorImpl* video_processor_;
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc b/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
index 70fe987..d4e1e6e 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
@@ -81,7 +81,6 @@
   int num_key_frames;
 };
 
-
 // Sequence used is foreman (CIF): may be better to use VGA for resize test.
 const int kCIFWidth = 352;
 const int kCIFHeight = 288;
@@ -101,7 +100,7 @@
 // dropping/spatial resize, and temporal layers. The limits for the rate
 // control metrics are set to be fairly conservative, so failure should only
 // happen when some significant regression or breakdown occurs.
-class VideoProcessorIntegrationTest: public testing::Test {
+class VideoProcessorIntegrationTest : public testing::Test {
  protected:
   VideoEncoder* encoder_;
   VideoDecoder* decoder_;
@@ -148,7 +147,6 @@
   bool frame_dropper_on_;
   bool spatial_resize_on_;
 
-
   VideoProcessorIntegrationTest() {}
   virtual ~VideoProcessorIntegrationTest() {}
 
@@ -165,14 +163,13 @@
 
     // CIF is currently used for all tests below.
     // Setup the TestConfig struct for processing of a clip in CIF resolution.
-    config_.input_filename =
-        webrtc::test::ResourcePath("foreman_cif", "yuv");
+    config_.input_filename = webrtc::test::ResourcePath("foreman_cif", "yuv");
 
     // Generate an output filename in a safe way.
     config_.output_filename = webrtc::test::TempFilename(
         webrtc::test::OutputPath(), "videoprocessor_integrationtest");
-    config_.frame_length_in_bytes = CalcBufferSize(kI420,
-                                                   kCIFWidth, kCIFHeight);
+    config_.frame_length_in_bytes =
+        CalcBufferSize(kI420, kCIFWidth, kCIFHeight);
     config_.verbose = false;
     // Only allow encoder/decoder to use single core, for predictability.
     config_.use_single_core = true;
@@ -188,52 +185,46 @@
 
     // These features may be set depending on the test.
     switch (config_.codec_settings->codecType) {
-     case kVideoCodecVP8:
-       config_.codec_settings->codecSpecific.VP8.errorConcealmentOn =
-           error_concealment_on_;
-       config_.codec_settings->codecSpecific.VP8.denoisingOn =
-           denoising_on_;
-       config_.codec_settings->codecSpecific.VP8.numberOfTemporalLayers =
-           num_temporal_layers_;
-       config_.codec_settings->codecSpecific.VP8.frameDroppingOn =
-           frame_dropper_on_;
-       config_.codec_settings->codecSpecific.VP8.automaticResizeOn =
-           spatial_resize_on_;
-       config_.codec_settings->codecSpecific.VP8.keyFrameInterval =
-           kBaseKeyFrameInterval;
-       break;
-     case kVideoCodecVP9:
-       config_.codec_settings->codecSpecific.VP9.denoisingOn =
-           denoising_on_;
-       config_.codec_settings->codecSpecific.VP9.numberOfTemporalLayers =
-           num_temporal_layers_;
-       config_.codec_settings->codecSpecific.VP9.frameDroppingOn =
-           frame_dropper_on_;
-       config_.codec_settings->codecSpecific.VP9.automaticResizeOn =
-           spatial_resize_on_;
-       config_.codec_settings->codecSpecific.VP9.keyFrameInterval =
-           kBaseKeyFrameInterval;
-       break;
-     default:
-       assert(false);
-       break;
-     }
-    frame_reader_ =
-        new webrtc::test::FrameReaderImpl(config_.input_filename,
-                                          config_.frame_length_in_bytes);
-    frame_writer_ =
-        new webrtc::test::FrameWriterImpl(config_.output_filename,
-                                          config_.frame_length_in_bytes);
+      case kVideoCodecVP8:
+        config_.codec_settings->codecSpecific.VP8.errorConcealmentOn =
+            error_concealment_on_;
+        config_.codec_settings->codecSpecific.VP8.denoisingOn = denoising_on_;
+        config_.codec_settings->codecSpecific.VP8.numberOfTemporalLayers =
+            num_temporal_layers_;
+        config_.codec_settings->codecSpecific.VP8.frameDroppingOn =
+            frame_dropper_on_;
+        config_.codec_settings->codecSpecific.VP8.automaticResizeOn =
+            spatial_resize_on_;
+        config_.codec_settings->codecSpecific.VP8.keyFrameInterval =
+            kBaseKeyFrameInterval;
+        break;
+      case kVideoCodecVP9:
+        config_.codec_settings->codecSpecific.VP9.denoisingOn = denoising_on_;
+        config_.codec_settings->codecSpecific.VP9.numberOfTemporalLayers =
+            num_temporal_layers_;
+        config_.codec_settings->codecSpecific.VP9.frameDroppingOn =
+            frame_dropper_on_;
+        config_.codec_settings->codecSpecific.VP9.automaticResizeOn =
+            spatial_resize_on_;
+        config_.codec_settings->codecSpecific.VP9.keyFrameInterval =
+            kBaseKeyFrameInterval;
+        break;
+      default:
+        assert(false);
+        break;
+    }
+    frame_reader_ = new webrtc::test::FrameReaderImpl(
+        config_.input_filename, config_.frame_length_in_bytes);
+    frame_writer_ = new webrtc::test::FrameWriterImpl(
+        config_.output_filename, config_.frame_length_in_bytes);
     ASSERT_TRUE(frame_reader_->Init());
     ASSERT_TRUE(frame_writer_->Init());
 
     packet_manipulator_ = new webrtc::test::PacketManipulatorImpl(
         &packet_reader_, config_.networking_config, config_.verbose);
-    processor_ = new webrtc::test::VideoProcessorImpl(encoder_, decoder_,
-                                                      frame_reader_,
-                                                      frame_writer_,
-                                                      packet_manipulator_,
-                                                      config_, &stats_);
+    processor_ = new webrtc::test::VideoProcessorImpl(
+        encoder_, decoder_, frame_reader_, frame_writer_, packet_manipulator_,
+        config_, &stats_);
     ASSERT_TRUE(processor_->Init());
   }
 
@@ -247,7 +238,7 @@
       encoding_bitrate_[i] = 0.0f;
       // Update layer per-frame-bandwidth.
       per_frame_bandwidth_[i] = static_cast<float>(bit_rate_layer_[i]) /
-             static_cast<float>(frame_rate_layer_[i]);
+                                static_cast<float>(frame_rate_layer_[i]);
     }
     // Set maximum size of key frames, following setting in the VP8 wrapper.
     float max_key_size = kScaleKeyFrameSize * kOptimalBufferSize * frame_rate_;
@@ -274,28 +265,28 @@
     // Update rate mismatch relative to per-frame bandwidth for delta frames.
     if (frame_type == kVideoFrameDelta) {
       // TODO(marpan): Should we count dropped (zero size) frames in mismatch?
-      sum_frame_size_mismatch_[layer_] += fabs(encoded_size_kbits -
-                                               per_frame_bandwidth_[layer_]) /
-                                               per_frame_bandwidth_[layer_];
+      sum_frame_size_mismatch_[layer_] +=
+          fabs(encoded_size_kbits - per_frame_bandwidth_[layer_]) /
+          per_frame_bandwidth_[layer_];
     } else {
-      float target_size = (frame_num == 1) ? target_size_key_frame_initial_ :
-          target_size_key_frame_;
-      sum_key_frame_size_mismatch_ += fabs(encoded_size_kbits - target_size) /
-          target_size;
+      float target_size = (frame_num == 1) ? target_size_key_frame_initial_
+                                           : target_size_key_frame_;
+      sum_key_frame_size_mismatch_ +=
+          fabs(encoded_size_kbits - target_size) / target_size;
       num_key_frames_ += 1;
     }
     sum_encoded_frame_size_[layer_] += encoded_size_kbits;
     // Encoding bitrate per layer: from the start of the update/run to the
     // current frame.
     encoding_bitrate_[layer_] = sum_encoded_frame_size_[layer_] *
-        frame_rate_layer_[layer_] /
-        num_frames_per_update_[layer_];
+                                frame_rate_layer_[layer_] /
+                                num_frames_per_update_[layer_];
     // Total encoding rate: from the start of the update/run to current frame.
     sum_encoded_frame_size_total_ += encoded_size_kbits;
-    encoding_bitrate_total_ = sum_encoded_frame_size_total_ * frame_rate_ /
-        num_frames_total_;
-    perc_encoding_rate_mismatch_ =  100 * fabs(encoding_bitrate_total_ -
-                                               bit_rate_) / bit_rate_;
+    encoding_bitrate_total_ =
+        sum_encoded_frame_size_total_ * frame_rate_ / num_frames_total_;
+    perc_encoding_rate_mismatch_ =
+        100 * fabs(encoding_bitrate_total_ - bit_rate_) / bit_rate_;
     if (perc_encoding_rate_mismatch_ < kPercTargetvsActualMismatch &&
         !encoding_rate_within_target_) {
       num_frames_to_hit_target_ = num_frames_total_;
@@ -314,34 +305,38 @@
                          int num_key_frames) {
     int num_dropped_frames = processor_->NumberDroppedFrames();
     int num_resize_actions = processor_->NumberSpatialResizes();
-    printf("For update #: %d,\n "
+    printf(
+        "For update #: %d,\n "
         " Target Bitrate: %d,\n"
         " Encoding bitrate: %f,\n"
         " Frame rate: %d \n",
         update_index, bit_rate_, encoding_bitrate_total_, frame_rate_);
-    printf(" Number of frames to approach target rate = %d, \n"
-           " Number of dropped frames = %d, \n"
-           " Number of spatial resizes = %d, \n",
-           num_frames_to_hit_target_, num_dropped_frames, num_resize_actions);
+    printf(
+        " Number of frames to approach target rate = %d, \n"
+        " Number of dropped frames = %d, \n"
+        " Number of spatial resizes = %d, \n",
+        num_frames_to_hit_target_, num_dropped_frames, num_resize_actions);
     EXPECT_LE(perc_encoding_rate_mismatch_, max_encoding_rate_mismatch);
     if (num_key_frames_ > 0) {
-      int perc_key_frame_size_mismatch = 100 * sum_key_frame_size_mismatch_ /
-              num_key_frames_;
-      printf(" Number of Key frames: %d \n"
-             " Key frame rate mismatch: %d \n",
-             num_key_frames_, perc_key_frame_size_mismatch);
+      int perc_key_frame_size_mismatch =
+          100 * sum_key_frame_size_mismatch_ / num_key_frames_;
+      printf(
+          " Number of Key frames: %d \n"
+          " Key frame rate mismatch: %d \n",
+          num_key_frames_, perc_key_frame_size_mismatch);
       EXPECT_LE(perc_key_frame_size_mismatch, max_key_frame_size_mismatch);
     }
     printf("\n");
     printf("Rates statistics for Layer data \n");
-    for (int i = 0; i < num_temporal_layers_ ; i++) {
+    for (int i = 0; i < num_temporal_layers_; i++) {
       printf("Layer #%d \n", i);
-      int perc_frame_size_mismatch = 100 * sum_frame_size_mismatch_[i] /
-        num_frames_per_update_[i];
-      int perc_encoding_rate_mismatch = 100 * fabs(encoding_bitrate_[i] -
-                                                   bit_rate_layer_[i]) /
-                                                   bit_rate_layer_[i];
-      printf(" Target Layer Bit rate: %f \n"
+      int perc_frame_size_mismatch =
+          100 * sum_frame_size_mismatch_[i] / num_frames_per_update_[i];
+      int perc_encoding_rate_mismatch =
+          100 * fabs(encoding_bitrate_[i] - bit_rate_layer_[i]) /
+          bit_rate_layer_[i];
+      printf(
+          " Target Layer Bit rate: %f \n"
           " Layer frame rate: %f, \n"
           " Layer per frame bandwidth: %f, \n"
           " Layer Encoding bit rate: %f, \n"
@@ -366,13 +361,13 @@
     if (num_temporal_layers_ == 1) {
       layer_ = 0;
     } else if (num_temporal_layers_ == 2) {
-        // layer 0:  0     2     4 ...
-        // layer 1:     1     3
-        if (frame_number % 2 == 0) {
-          layer_ = 0;
-        } else {
-          layer_ = 1;
-        }
+      // layer 0:  0     2     4 ...
+      // layer 1:     1     3
+      if (frame_number % 2 == 0) {
+        layer_ = 0;
+      } else {
+        layer_ = 1;
+      }
     } else if (num_temporal_layers_ == 3) {
       // layer 0:  0            4            8 ...
       // layer 1:        2            6
@@ -391,20 +386,20 @@
 
   // Set the bitrate and frame rate per layer, for up to 3 layers.
   void SetLayerRates() {
-    assert(num_temporal_layers_<= 3);
+    assert(num_temporal_layers_ <= 3);
     for (int i = 0; i < num_temporal_layers_; i++) {
       float bit_rate_ratio =
           kVp8LayerRateAlloction[num_temporal_layers_ - 1][i];
       if (i > 0) {
-        float bit_rate_delta_ratio = kVp8LayerRateAlloction
-            [num_temporal_layers_ - 1][i] -
+        float bit_rate_delta_ratio =
+            kVp8LayerRateAlloction[num_temporal_layers_ - 1][i] -
             kVp8LayerRateAlloction[num_temporal_layers_ - 1][i - 1];
         bit_rate_layer_[i] = bit_rate_ * bit_rate_delta_ratio;
       } else {
         bit_rate_layer_[i] = bit_rate_ * bit_rate_ratio;
       }
-      frame_rate_layer_[i] = frame_rate_ / static_cast<float>(
-          1 << (num_temporal_layers_ - 1));
+      frame_rate_layer_[i] =
+          frame_rate_ / static_cast<float>(1 << (num_temporal_layers_ - 1));
     }
     if (num_temporal_layers_ == 3) {
       frame_rate_layer_[2] = frame_rate_ / 2.0f;
@@ -437,12 +432,12 @@
     spatial_resize_on_ = process.spatial_resize_on;
     SetUpCodecConfig();
     // Update the layers and the codec with the initial rates.
-    bit_rate_ =  rate_profile.target_bit_rate[0];
+    bit_rate_ = rate_profile.target_bit_rate[0];
     frame_rate_ = rate_profile.input_frame_rate[0];
     SetLayerRates();
     // Set the initial target size for key frame.
-    target_size_key_frame_initial_ = 0.5 * kInitialBufferSize *
-        bit_rate_layer_[0];
+    target_size_key_frame_initial_ =
+        0.5 * kInitialBufferSize * bit_rate_layer_[0];
     processor_->SetRates(bit_rate_, frame_rate_);
     // Process each frame, up to |num_frames|.
     int num_frames = rate_profile.num_frames;
@@ -452,7 +447,7 @@
     int frame_number = 0;
     FrameType frame_type = kVideoFrameDelta;
     while (processor_->ProcessFrame(frame_number) &&
-        frame_number < num_frames) {
+           frame_number < num_frames) {
       // Get the layer index for the frame |frame_number|.
       LayerIndexForFrame(frame_number);
       // Get the frame_type.
@@ -468,8 +463,7 @@
       if (frame_number ==
           rate_profile.frame_index_rate_update[update_index + 1]) {
         VerifyRateControl(
-            update_index,
-            rc_metrics[update_index].max_key_frame_size_mismatch,
+            update_index, rc_metrics[update_index].max_key_frame_size_mismatch,
             rc_metrics[update_index].max_delta_frame_size_mismatch,
             rc_metrics[update_index].max_encoding_rate_mismatch,
             rc_metrics[update_index].max_time_hit_target,
@@ -478,23 +472,22 @@
             rc_metrics[update_index].num_key_frames);
         // Update layer rates and the codec with new rates.
         ++update_index;
-        bit_rate_ =  rate_profile.target_bit_rate[update_index];
+        bit_rate_ = rate_profile.target_bit_rate[update_index];
         frame_rate_ = rate_profile.input_frame_rate[update_index];
         SetLayerRates();
-        ResetRateControlMetrics(rate_profile.
-                                frame_index_rate_update[update_index + 1]);
+        ResetRateControlMetrics(
+            rate_profile.frame_index_rate_update[update_index + 1]);
         processor_->SetRates(bit_rate_, frame_rate_);
       }
     }
-    VerifyRateControl(
-        update_index,
-        rc_metrics[update_index].max_key_frame_size_mismatch,
-        rc_metrics[update_index].max_delta_frame_size_mismatch,
-        rc_metrics[update_index].max_encoding_rate_mismatch,
-        rc_metrics[update_index].max_time_hit_target,
-        rc_metrics[update_index].max_num_dropped_frames,
-        rc_metrics[update_index].num_spatial_resizes,
-        rc_metrics[update_index].num_key_frames);
+    VerifyRateControl(update_index,
+                      rc_metrics[update_index].max_key_frame_size_mismatch,
+                      rc_metrics[update_index].max_delta_frame_size_mismatch,
+                      rc_metrics[update_index].max_encoding_rate_mismatch,
+                      rc_metrics[update_index].max_time_hit_target,
+                      rc_metrics[update_index].max_num_dropped_frames,
+                      rc_metrics[update_index].num_spatial_resizes,
+                      rc_metrics[update_index].num_key_frames);
     EXPECT_EQ(num_frames, frame_number);
     EXPECT_EQ(num_frames + 1, static_cast<int>(stats_.stats_.size()));
 
@@ -507,16 +500,14 @@
 
     // TODO(marpan): should compute these quality metrics per SetRates update.
     webrtc::test::QualityMetricsResult psnr_result, ssim_result;
-    EXPECT_EQ(0, webrtc::test::I420MetricsFromFiles(
-        config_.input_filename.c_str(),
-        config_.output_filename.c_str(),
-        config_.codec_settings->width,
-        config_.codec_settings->height,
-        &psnr_result,
-        &ssim_result));
+    EXPECT_EQ(
+        0, webrtc::test::I420MetricsFromFiles(
+               config_.input_filename.c_str(), config_.output_filename.c_str(),
+               config_.codec_settings->width, config_.codec_settings->height,
+               &psnr_result, &ssim_result));
     printf("PSNR avg: %f, min: %f    SSIM avg: %f, min: %f\n",
-           psnr_result.average, psnr_result.min,
-           ssim_result.average, ssim_result.min);
+           psnr_result.average, psnr_result.min, ssim_result.average,
+           ssim_result.min);
     stats_.PrintSummary();
     EXPECT_GT(psnr_result.average, quality_metrics.minimum_avg_psnr);
     EXPECT_GT(psnr_result.min, quality_metrics.minimum_min_psnr);
@@ -549,7 +540,7 @@
                         bool spatial_resize_on) {
   process_settings->codec_type = codec_type;
   process_settings->packet_loss = packet_loss;
-  process_settings->key_frame_interval =  key_frame_interval;
+  process_settings->key_frame_interval = key_frame_interval;
   process_settings->num_temporal_layers = num_temporal_layers,
   process_settings->error_concealment_on = error_concealment_on;
   process_settings->denoising_on = denoising_on;
@@ -608,9 +599,7 @@
   // Metrics for rate control.
   RateControlMetrics rc_metrics[1];
   SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 20, 0, 1);
-  ProcessFramesAndVerify(quality_metrics,
-                         rate_profile,
-                         process_settings,
+  ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
                          rc_metrics);
 }
 
@@ -632,13 +621,10 @@
   // Metrics for rate control.
   RateControlMetrics rc_metrics[1];
   SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 20, 0, 1);
-  ProcessFramesAndVerify(quality_metrics,
-                         rate_profile,
-                         process_settings,
+  ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
                          rc_metrics);
 }
 
-
 // VP9: Run with no packet loss, with varying bitrate (3 rate updates):
 // low to high to medium. Check that quality and encoder response to the new
 // target rate/per-frame bandwidth (for each rate update) is within limits.
@@ -663,9 +649,7 @@
   SetRateControlMetrics(rc_metrics, 0, 0, 30, 20, 20, 30, 0, 1);
   SetRateControlMetrics(rc_metrics, 1, 2, 0, 20, 20, 60, 0, 0);
   SetRateControlMetrics(rc_metrics, 2, 0, 0, 25, 20, 40, 0, 0);
-  ProcessFramesAndVerify(quality_metrics,
-                         rate_profile,
-                         process_settings,
+  ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
                          rc_metrics);
 }
 
@@ -698,9 +682,7 @@
   SetRateControlMetrics(rc_metrics, 0, 35, 50, 75, 15, 45, 0, 1);
   SetRateControlMetrics(rc_metrics, 1, 10, 0, 40, 10, 30, 0, 0);
   SetRateControlMetrics(rc_metrics, 2, 5, 0, 30, 5, 20, 0, 0);
-  ProcessFramesAndVerify(quality_metrics,
-                         rate_profile,
-                         process_settings,
+  ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
                          rc_metrics);
 }
 
@@ -721,9 +703,7 @@
   // Metrics for rate control.
   RateControlMetrics rc_metrics[1];
   SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 20, 0, 1);
-  ProcessFramesAndVerify(quality_metrics,
-                         rate_profile,
-                         process_settings,
+  ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
                          rc_metrics);
 }
 
@@ -739,17 +719,15 @@
   rate_profile.num_frames = kNbrFramesLong;
   // Codec/network settings.
   CodecConfigPars process_settings;
-  SetCodecParameters(&process_settings, kVideoCodecVP9, 0.0f, -1,
-                     1, false, false, true, true);
+  SetCodecParameters(&process_settings, kVideoCodecVP9, 0.0f, -1, 1, false,
+                     false, true, true);
   // Metrics for expected quality.
   QualityMetrics quality_metrics;
   SetQualityMetrics(&quality_metrics, 25.0, 13.0, 0.70, 0.37);
   // Metrics for rate control.
   RateControlMetrics rc_metrics[1];
   SetRateControlMetrics(rc_metrics, 0, 225, 70, 160, 15, 80, 1, 1);
-  ProcessFramesAndVerify(quality_metrics,
-                         rate_profile,
-                         process_settings,
+  ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
                          rc_metrics);
 }
 
@@ -775,9 +753,7 @@
   // Metrics for rate control.
   RateControlMetrics rc_metrics[1];
   SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 15, 0, 1);
-  ProcessFramesAndVerify(quality_metrics,
-                         rate_profile,
-                         process_settings,
+  ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
                          rc_metrics);
 }
 
@@ -799,9 +775,7 @@
   // Metrics for rate control.
   RateControlMetrics rc_metrics[1];
   SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 15, 0, 1);
-  ProcessFramesAndVerify(quality_metrics,
-                         rate_profile,
-                         process_settings,
+  ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
                          rc_metrics);
 }
 
@@ -823,9 +797,7 @@
   // Metrics for rate control.
   RateControlMetrics rc_metrics[1];
   SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 15, 0, 1);
-  ProcessFramesAndVerify(quality_metrics,
-                         rate_profile,
-                         process_settings,
+  ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
                          rc_metrics);
 }
 
@@ -863,9 +835,7 @@
   SetRateControlMetrics(rc_metrics, 0, 0, 45, 20, 10, 15, 0, 1);
   SetRateControlMetrics(rc_metrics, 1, 0, 0, 25, 20, 10, 0, 0);
   SetRateControlMetrics(rc_metrics, 2, 0, 0, 25, 15, 10, 0, 0);
-  ProcessFramesAndVerify(quality_metrics,
-                         rate_profile,
-                         process_settings,
+  ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
                          rc_metrics);
 }
 
@@ -898,9 +868,7 @@
   SetRateControlMetrics(rc_metrics, 0, 40, 20, 75, 15, 60, 0, 1);
   SetRateControlMetrics(rc_metrics, 1, 10, 0, 25, 10, 35, 0, 0);
   SetRateControlMetrics(rc_metrics, 2, 0, 0, 20, 10, 15, 0, 0);
-  ProcessFramesAndVerify(quality_metrics,
-                         rate_profile,
-                         process_settings,
+  ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
                          rc_metrics);
 }
 
@@ -916,17 +884,15 @@
   rate_profile.num_frames = kNbrFramesLong;
   // Codec/network settings.
   CodecConfigPars process_settings;
-  SetCodecParameters(&process_settings, kVideoCodecVP8, 0.0f, -1,
-                     1, false, true, true, true);
+  SetCodecParameters(&process_settings, kVideoCodecVP8, 0.0f, -1, 1, false,
+                     true, true, true);
   // Metrics for expected quality.
   QualityMetrics quality_metrics;
   SetQualityMetrics(&quality_metrics, 25.0, 15.0, 0.70, 0.40);
   // Metrics for rate control.
   RateControlMetrics rc_metrics[1];
   SetRateControlMetrics(rc_metrics, 0, 160, 60, 120, 20, 70, 1, 2);
-  ProcessFramesAndVerify(quality_metrics,
-                         rate_profile,
-                         process_settings,
+  ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
                          rc_metrics);
 }
 
@@ -955,9 +921,7 @@
   RateControlMetrics rc_metrics[2];
   SetRateControlMetrics(rc_metrics, 0, 0, 20, 30, 10, 10, 0, 1);
   SetRateControlMetrics(rc_metrics, 1, 0, 0, 30, 15, 10, 0, 0);
-  ProcessFramesAndVerify(quality_metrics,
-                         rate_profile,
-                         process_settings,
+  ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
                          rc_metrics);
 }
 }  // namespace webrtc
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc b/webrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc
index 7dec809..148d8dc 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc
@@ -29,7 +29,7 @@
 
 // Very basic testing for VideoProcessor. It's mostly tested by running the
 // video_quality_measurement program.
-class VideoProcessorTest: public testing::Test {
+class VideoProcessorTest : public testing::Test {
  protected:
   MockVideoEncoder encoder_mock_;
   MockVideoDecoder decoder_mock_;
@@ -53,44 +53,34 @@
   void TearDown() {}
 
   void ExpectInit() {
-    EXPECT_CALL(encoder_mock_, InitEncode(_, _, _))
-      .Times(1);
+    EXPECT_CALL(encoder_mock_, InitEncode(_, _, _)).Times(1);
     EXPECT_CALL(encoder_mock_, RegisterEncodeCompleteCallback(_))
-      .Times(AtLeast(1));
-    EXPECT_CALL(decoder_mock_, InitDecode(_, _))
-      .Times(1);
+        .Times(AtLeast(1));
+    EXPECT_CALL(decoder_mock_, InitDecode(_, _)).Times(1);
     EXPECT_CALL(decoder_mock_, RegisterDecodeCompleteCallback(_))
-      .Times(AtLeast(1));
-    EXPECT_CALL(frame_reader_mock_, NumberOfFrames())
-      .WillOnce(Return(1));
-    EXPECT_CALL(frame_reader_mock_, FrameLength())
-      .WillOnce(Return(152064));
+        .Times(AtLeast(1));
+    EXPECT_CALL(frame_reader_mock_, NumberOfFrames()).WillOnce(Return(1));
+    EXPECT_CALL(frame_reader_mock_, FrameLength()).WillOnce(Return(152064));
   }
 };
 
 TEST_F(VideoProcessorTest, Init) {
   ExpectInit();
-  VideoProcessorImpl video_processor(&encoder_mock_, &decoder_mock_,
-                                     &frame_reader_mock_,
-                                     &frame_writer_mock_,
-                                     &packet_manipulator_mock_, config_,
-                                     &stats_);
+  VideoProcessorImpl video_processor(
+      &encoder_mock_, &decoder_mock_, &frame_reader_mock_, &frame_writer_mock_,
+      &packet_manipulator_mock_, config_, &stats_);
   ASSERT_TRUE(video_processor.Init());
 }
 
 TEST_F(VideoProcessorTest, ProcessFrame) {
   ExpectInit();
-  EXPECT_CALL(encoder_mock_, Encode(_, _, _))
-    .Times(1);
-  EXPECT_CALL(frame_reader_mock_, ReadFrame(_))
-    .WillOnce(Return(true));
+  EXPECT_CALL(encoder_mock_, Encode(_, _, _)).Times(1);
+  EXPECT_CALL(frame_reader_mock_, ReadFrame(_)).WillOnce(Return(true));
   // Since we don't return any callback from the mock, the decoder will not
   // be more than initialized...
-  VideoProcessorImpl video_processor(&encoder_mock_, &decoder_mock_,
-                                     &frame_reader_mock_,
-                                     &frame_writer_mock_,
-                                     &packet_manipulator_mock_, config_,
-                                     &stats_);
+  VideoProcessorImpl video_processor(
+      &encoder_mock_, &decoder_mock_, &frame_reader_mock_, &frame_writer_mock_,
+      &packet_manipulator_mock_, config_, &stats_);
   ASSERT_TRUE(video_processor.Init());
   video_processor.ProcessFrame(0);
 }
diff --git a/webrtc/modules/video_coding/codecs/tools/video_quality_measurement.cc b/webrtc/modules/video_coding/codecs/tools/video_quality_measurement.cc
index 8bc6e5f..37fad48 100644
--- a/webrtc/modules/video_coding/codecs/tools/video_quality_measurement.cc
+++ b/webrtc/modules/video_coding/codecs/tools/video_quality_measurement.cc
@@ -16,7 +16,7 @@
 #include <sys/stat.h>  // To check for directory existence.
 
 #ifndef S_ISDIR  // Not defined in stat.h on Windows.
-#define S_ISDIR(mode) (((mode) & S_IFMT) == S_IFDIR)
+#define S_ISDIR(mode) (((mode)&S_IFMT) == S_IFDIR)
 #endif
 
 #include "gflags/gflags.h"
@@ -34,68 +34,102 @@
 #include "webrtc/test/testsupport/packet_reader.h"
 
 DEFINE_string(test_name, "Quality test", "The name of the test to run. ");
-DEFINE_string(test_description, "", "A more detailed description about what "
+DEFINE_string(test_description,
+              "",
+              "A more detailed description about what "
               "the current test is about.");
-DEFINE_string(input_filename, "", "Input file. "
+DEFINE_string(input_filename,
+              "",
+              "Input file. "
               "The source video file to be encoded and decoded. Must be in "
               ".yuv format");
 DEFINE_int32(width, -1, "Width in pixels of the frames in the input file.");
 DEFINE_int32(height, -1, "Height in pixels of the frames in the input file.");
-DEFINE_int32(framerate, 30, "Frame rate of the input file, in FPS "
+DEFINE_int32(framerate,
+             30,
+             "Frame rate of the input file, in FPS "
              "(frames-per-second). ");
-DEFINE_string(output_dir, ".", "Output directory. "
+DEFINE_string(output_dir,
+              ".",
+              "Output directory. "
               "The directory where the output file will be put. Must already "
               "exist.");
-DEFINE_bool(use_single_core, false, "Force using a single core. If set to "
+DEFINE_bool(use_single_core,
+            false,
+            "Force using a single core. If set to "
             "true, only one core will be used for processing. Using a single "
             "core is necessary to get a deterministic behavior for the"
             "encoded frames - using multiple cores will produce different "
             "encoded frames since multiple cores are competing to consume the "
             "byte budget for each frame in parallel. If set to false, "
             "the maximum detected number of cores will be used. ");
-DEFINE_bool(disable_fixed_random_seed , false, "Set this flag to disable the"
+DEFINE_bool(disable_fixed_random_seed,
+            false,
+            "Set this flag to disable the"
             "usage of a fixed random seed for the random generator used "
             "for packet loss. Disabling this will cause consecutive runs "
             "loose packets at different locations, which is bad for "
             "reproducibility.");
-DEFINE_string(output_filename, "", "Output file. "
+DEFINE_string(output_filename,
+              "",
+              "Output file. "
               "The name of the output video file resulting of the processing "
               "of the source file. By default this is the same name as the "
               "input file with '_out' appended before the extension.");
 DEFINE_int32(bitrate, 500, "Bit rate in kilobits/second.");
-DEFINE_int32(keyframe_interval, 0, "Forces a keyframe every Nth frame. "
+DEFINE_int32(keyframe_interval,
+             0,
+             "Forces a keyframe every Nth frame. "
              "0 means the encoder decides when to insert keyframes.  Note that "
              "the encoder may create a keyframe in other locations in addition "
              "to the interval that is set using this parameter.");
-DEFINE_int32(temporal_layers, 0, "The number of temporal layers to use "
+DEFINE_int32(temporal_layers,
+             0,
+             "The number of temporal layers to use "
              "(VP8 specific codec setting). Must be 0-4.");
-DEFINE_int32(packet_size, 1500, "Simulated network packet size in bytes (MTU). "
+DEFINE_int32(packet_size,
+             1500,
+             "Simulated network packet size in bytes (MTU). "
              "Used for packet loss simulation.");
-DEFINE_int32(max_payload_size, 1440, "Max payload size in bytes for the "
+DEFINE_int32(max_payload_size,
+             1440,
+             "Max payload size in bytes for the "
              "encoder.");
-DEFINE_string(packet_loss_mode, "uniform", "Packet loss mode. Two different "
+DEFINE_string(packet_loss_mode,
+              "uniform",
+              "Packet loss mode. Two different "
               "packet loss models are supported: uniform or burst. This "
               "setting has no effect unless packet_loss_rate is >0. ");
-DEFINE_double(packet_loss_probability, 0.0, "Packet loss probability. A value "
+DEFINE_double(packet_loss_probability,
+              0.0,
+              "Packet loss probability. A value "
               "between 0.0 and 1.0 that defines the probability of a packet "
               "being lost. 0.1 means 10% and so on.");
-DEFINE_int32(packet_loss_burst_length, 1, "Packet loss burst length. Defines "
+DEFINE_int32(packet_loss_burst_length,
+             1,
+             "Packet loss burst length. Defines "
              "how many packets will be lost in a burst when a packet has been "
              "decided to be lost. Must be >=1.");
-DEFINE_bool(csv, false, "CSV output. Enabling this will output all frame "
+DEFINE_bool(csv,
+            false,
+            "CSV output. Enabling this will output all frame "
             "statistics at the end of execution. Recommended to run combined "
             "with --noverbose to avoid mixing output.");
-DEFINE_bool(python, false, "Python output. Enabling this will output all frame "
+DEFINE_bool(python,
+            false,
+            "Python output. Enabling this will output all frame "
             "statistics as a Python script at the end of execution. "
             "Recommended to run combine with --noverbose to avoid mixing "
             "output.");
-DEFINE_bool(verbose, true, "Verbose mode. Prints a lot of debugging info. "
+DEFINE_bool(verbose,
+            true,
+            "Verbose mode. Prints a lot of debugging info. "
             "Suitable for tracking progress but not for capturing output. "
             "Disable with --noverbose flag.");
 
 // Custom log method that only prints if the verbose flag is given.
 // Supports all the standard printf parameters and formatting (just forwarded).
-int Log(const char *format, ...) {
+int Log(const char* format, ...) {
   int result = 0;
   if (FLAGS_verbose) {
     va_list args;
@@ -132,9 +166,9 @@
   // Verify the output dir exists.
   struct stat dir_info;
   if (!(stat(FLAGS_output_dir.c_str(), &dir_info) == 0 &&
-      S_ISDIR(dir_info.st_mode))) {
+        S_ISDIR(dir_info.st_mode))) {
     fprintf(stderr, "Cannot find output directory: %s\n",
-              FLAGS_output_dir.c_str());
+            FLAGS_output_dir.c_str());
     return 3;
   }
   config->output_dir = FLAGS_output_dir;
@@ -148,16 +182,16 @@
       startIndex = 0;
     }
     FLAGS_output_filename =
-        FLAGS_input_filename.substr(startIndex,
-                                    FLAGS_input_filename.find_last_of(".")
-                                    - startIndex) + "_out.yuv";
+        FLAGS_input_filename.substr(
+            startIndex, FLAGS_input_filename.find_last_of(".") - startIndex) +
+        "_out.yuv";
   }
 
   // Verify output file can be written.
   if (FLAGS_output_dir == ".") {
     config->output_filename = FLAGS_output_filename;
   } else {
-    config->output_filename = FLAGS_output_dir + "/"+ FLAGS_output_filename;
+    config->output_filename = FLAGS_output_dir + "/" + FLAGS_output_filename;
   }
   test_file = fopen(config->output_filename.c_str(), "wb");
   if (test_file == NULL) {
@@ -232,27 +266,32 @@
   // Check packet loss settings
   if (FLAGS_packet_loss_mode != "uniform" &&
       FLAGS_packet_loss_mode != "burst") {
-    fprintf(stderr, "Unsupported packet loss mode, must be 'uniform' or "
+    fprintf(stderr,
+            "Unsupported packet loss mode, must be 'uniform' or "
             "'burst'\n.");
     return 10;
   }
   config->networking_config.packet_loss_mode = webrtc::test::kUniform;
   if (FLAGS_packet_loss_mode == "burst") {
-    config->networking_config.packet_loss_mode =  webrtc::test::kBurst;
+    config->networking_config.packet_loss_mode = webrtc::test::kBurst;
   }
 
   if (FLAGS_packet_loss_probability < 0.0 ||
       FLAGS_packet_loss_probability > 1.0) {
-    fprintf(stderr, "Invalid packet loss probability. Must be 0.0 - 1.0, "
-            "was: %f\n", FLAGS_packet_loss_probability);
+    fprintf(stderr,
+            "Invalid packet loss probability. Must be 0.0 - 1.0, "
+            "was: %f\n",
+            FLAGS_packet_loss_probability);
     return 11;
   }
   config->networking_config.packet_loss_probability =
       FLAGS_packet_loss_probability;
 
   if (FLAGS_packet_loss_burst_length < 1) {
-    fprintf(stderr, "Invalid packet loss burst length, must be >=1, "
-            "was: %d\n", FLAGS_packet_loss_burst_length);
+    fprintf(stderr,
+            "Invalid packet loss burst length, must be >=1, "
+            "was: %d\n",
+            FLAGS_packet_loss_burst_length);
     return 12;
   }
   config->networking_config.packet_loss_burst_length =
@@ -264,10 +303,9 @@
 void CalculateSsimVideoMetrics(webrtc::test::TestConfig* config,
                                webrtc::test::QualityMetricsResult* result) {
   Log("Calculating SSIM...\n");
-  I420SSIMFromFiles(config->input_filename.c_str(),
-                    config->output_filename.c_str(),
-                    config->codec_settings->width,
-                    config->codec_settings->height, result);
+  I420SSIMFromFiles(
+      config->input_filename.c_str(), config->output_filename.c_str(),
+      config->codec_settings->width, config->codec_settings->height, result);
   Log("  Average: %3.2f\n", result->average);
   Log("  Min    : %3.2f (frame %d)\n", result->min, result->min_frame_number);
   Log("  Max    : %3.2f (frame %d)\n", result->max, result->max_frame_number);
@@ -276,10 +314,9 @@
 void CalculatePsnrVideoMetrics(webrtc::test::TestConfig* config,
                                webrtc::test::QualityMetricsResult* result) {
   Log("Calculating PSNR...\n");
-  I420PSNRFromFiles(config->input_filename.c_str(),
-                    config->output_filename.c_str(),
-                    config->codec_settings->width,
-                    config->codec_settings->height, result);
+  I420PSNRFromFiles(
+      config->input_filename.c_str(), config->output_filename.c_str(),
+      config->codec_settings->width, config->codec_settings->height, result);
   Log("  Average: %3.2f\n", result->average);
   Log("  Min    : %3.2f (frame %d)\n", result->min, result->min_frame_number);
   Log("  Max    : %3.2f (frame %d)\n", result->max, result->max_frame_number);
@@ -309,9 +346,11 @@
 void PrintCsvOutput(const webrtc::test::Stats& stats,
                     const webrtc::test::QualityMetricsResult& ssim_result,
                     const webrtc::test::QualityMetricsResult& psnr_result) {
-  Log("\nCSV output (recommended to run with --noverbose to skip the "
-              "above output)\n");
-  printf("frame_number encoding_successful decoding_successful "
+  Log(
+      "\nCSV output (recommended to run with --noverbose to skip the "
+      "above output)\n");
+  printf(
+      "frame_number encoding_successful decoding_successful "
       "encode_return_code decode_return_code "
       "encode_time_in_us decode_time_in_us "
       "bit_rate_in_kbps encoded_frame_length_in_bytes frame_type "
@@ -322,22 +361,13 @@
     const webrtc::test::FrameStatistic& f = stats.stats_[i];
     const webrtc::test::FrameResult& ssim = ssim_result.frames[i];
     const webrtc::test::FrameResult& psnr = psnr_result.frames[i];
-    printf("%4d, %d, %d, %2d, %2d, %6d, %6d, %5d, %7" PRIuS ", %d, %2d, %2"
-           PRIuS ", %5.3f, %5.2f\n",
-           f.frame_number,
-           f.encoding_successful,
-           f.decoding_successful,
-           f.encode_return_code,
-           f.decode_return_code,
-           f.encode_time_in_us,
-           f.decode_time_in_us,
-           f.bit_rate_in_kbps,
-           f.encoded_frame_length_in_bytes,
-           f.frame_type,
-           f.packets_dropped,
-           f.total_packets,
-           ssim.value,
-           psnr.value);
+    printf("%4d, %d, %d, %2d, %2d, %6d, %6d, %5d, %7" PRIuS
+           ", %d, %2d, %2" PRIuS ", %5.3f, %5.2f\n",
+           f.frame_number, f.encoding_successful, f.decoding_successful,
+           f.encode_return_code, f.decode_return_code, f.encode_time_in_us,
+           f.decode_time_in_us, f.bit_rate_in_kbps,
+           f.encoded_frame_length_in_bytes, f.frame_type, f.packets_dropped,
+           f.total_packets, ssim.value, psnr.value);
   }
 }
 
@@ -345,91 +375,85 @@
                        const webrtc::test::Stats& stats,
                        const webrtc::test::QualityMetricsResult& ssim_result,
                        const webrtc::test::QualityMetricsResult& psnr_result) {
-  Log("\nPython output (recommended to run with --noverbose to skip the "
-               "above output)\n");
-  printf("test_configuration = ["
-         "{'name': 'name',                      'value': '%s'},\n"
-         "{'name': 'description',               'value': '%s'},\n"
-         "{'name': 'test_number',               'value': '%d'},\n"
-         "{'name': 'input_filename',            'value': '%s'},\n"
-         "{'name': 'output_filename',           'value': '%s'},\n"
-         "{'name': 'output_dir',                'value': '%s'},\n"
-         "{'name': 'packet_size_in_bytes',      'value': '%" PRIuS "'},\n"
-         "{'name': 'max_payload_size_in_bytes', 'value': '%" PRIuS "'},\n"
-         "{'name': 'packet_loss_mode',          'value': '%s'},\n"
-         "{'name': 'packet_loss_probability',   'value': '%f'},\n"
-         "{'name': 'packet_loss_burst_length',  'value': '%d'},\n"
-         "{'name': 'exclude_frame_types',       'value': '%s'},\n"
-         "{'name': 'frame_length_in_bytes',     'value': '%" PRIuS "'},\n"
-         "{'name': 'use_single_core',           'value': '%s'},\n"
-         "{'name': 'keyframe_interval;',        'value': '%d'},\n"
-         "{'name': 'video_codec_type',          'value': '%s'},\n"
-         "{'name': 'width',                     'value': '%d'},\n"
-         "{'name': 'height',                    'value': '%d'},\n"
-         "{'name': 'bit_rate_in_kbps',          'value': '%d'},\n"
-         "]\n",
-         config.name.c_str(),
-         config.description.c_str(),
-         config.test_number,
-         config.input_filename.c_str(),
-         config.output_filename.c_str(),
-         config.output_dir.c_str(),
-         config.networking_config.packet_size_in_bytes,
-         config.networking_config.max_payload_size_in_bytes,
-         PacketLossModeToStr(config.networking_config.packet_loss_mode),
-         config.networking_config.packet_loss_probability,
-         config.networking_config.packet_loss_burst_length,
-         ExcludeFrameTypesToStr(config.exclude_frame_types),
-         config.frame_length_in_bytes,
-         config.use_single_core ? "True " : "False",
-         config.keyframe_interval,
-         webrtc::test::VideoCodecTypeToStr(config.codec_settings->codecType),
-         config.codec_settings->width,
-         config.codec_settings->height,
-         config.codec_settings->startBitrate);
-  printf("frame_data_types = {"
-         "'frame_number': ('number', 'Frame number'),\n"
-         "'encoding_successful': ('boolean', 'Encoding successful?'),\n"
-         "'decoding_successful': ('boolean', 'Decoding successful?'),\n"
-         "'encode_time': ('number', 'Encode time (us)'),\n"
-         "'decode_time': ('number', 'Decode time (us)'),\n"
-         "'encode_return_code': ('number', 'Encode return code'),\n"
-         "'decode_return_code': ('number', 'Decode return code'),\n"
-         "'bit_rate': ('number', 'Bit rate (kbps)'),\n"
-         "'encoded_frame_length': "
-         "('number', 'Encoded frame length (bytes)'),\n"
-         "'frame_type': ('string', 'Frame type'),\n"
-         "'packets_dropped': ('number', 'Packets dropped'),\n"
-         "'total_packets': ('number', 'Total packets'),\n"
-         "'ssim': ('number', 'SSIM'),\n"
-         "'psnr': ('number', 'PSNR (dB)'),\n"
-         "}\n");
+  Log(
+      "\nPython output (recommended to run with --noverbose to skip the "
+      "above output)\n");
+  printf(
+      "test_configuration = ["
+      "{'name': 'name',                      'value': '%s'},\n"
+      "{'name': 'description',               'value': '%s'},\n"
+      "{'name': 'test_number',               'value': '%d'},\n"
+      "{'name': 'input_filename',            'value': '%s'},\n"
+      "{'name': 'output_filename',           'value': '%s'},\n"
+      "{'name': 'output_dir',                'value': '%s'},\n"
+      "{'name': 'packet_size_in_bytes',      'value': '%" PRIuS
+      "'},\n"
+      "{'name': 'max_payload_size_in_bytes', 'value': '%" PRIuS
+      "'},\n"
+      "{'name': 'packet_loss_mode',          'value': '%s'},\n"
+      "{'name': 'packet_loss_probability',   'value': '%f'},\n"
+      "{'name': 'packet_loss_burst_length',  'value': '%d'},\n"
+      "{'name': 'exclude_frame_types',       'value': '%s'},\n"
+      "{'name': 'frame_length_in_bytes',     'value': '%" PRIuS
+      "'},\n"
+      "{'name': 'use_single_core',           'value': '%s'},\n"
+      "{'name': 'keyframe_interval;',        'value': '%d'},\n"
+      "{'name': 'video_codec_type',          'value': '%s'},\n"
+      "{'name': 'width',                     'value': '%d'},\n"
+      "{'name': 'height',                    'value': '%d'},\n"
+      "{'name': 'bit_rate_in_kbps',          'value': '%d'},\n"
+      "]\n",
+      config.name.c_str(), config.description.c_str(), config.test_number,
+      config.input_filename.c_str(), config.output_filename.c_str(),
+      config.output_dir.c_str(), config.networking_config.packet_size_in_bytes,
+      config.networking_config.max_payload_size_in_bytes,
+      PacketLossModeToStr(config.networking_config.packet_loss_mode),
+      config.networking_config.packet_loss_probability,
+      config.networking_config.packet_loss_burst_length,
+      ExcludeFrameTypesToStr(config.exclude_frame_types),
+      config.frame_length_in_bytes, config.use_single_core ? "True " : "False",
+      config.keyframe_interval,
+      webrtc::test::VideoCodecTypeToStr(config.codec_settings->codecType),
+      config.codec_settings->width, config.codec_settings->height,
+      config.codec_settings->startBitrate);
+  printf(
+      "frame_data_types = {"
+      "'frame_number': ('number', 'Frame number'),\n"
+      "'encoding_successful': ('boolean', 'Encoding successful?'),\n"
+      "'decoding_successful': ('boolean', 'Decoding successful?'),\n"
+      "'encode_time': ('number', 'Encode time (us)'),\n"
+      "'decode_time': ('number', 'Decode time (us)'),\n"
+      "'encode_return_code': ('number', 'Encode return code'),\n"
+      "'decode_return_code': ('number', 'Decode return code'),\n"
+      "'bit_rate': ('number', 'Bit rate (kbps)'),\n"
+      "'encoded_frame_length': "
+      "('number', 'Encoded frame length (bytes)'),\n"
+      "'frame_type': ('string', 'Frame type'),\n"
+      "'packets_dropped': ('number', 'Packets dropped'),\n"
+      "'total_packets': ('number', 'Total packets'),\n"
+      "'ssim': ('number', 'SSIM'),\n"
+      "'psnr': ('number', 'PSNR (dB)'),\n"
+      "}\n");
   printf("frame_data = [");
   for (unsigned int i = 0; i < stats.stats_.size(); ++i) {
     const webrtc::test::FrameStatistic& f = stats.stats_[i];
     const webrtc::test::FrameResult& ssim = ssim_result.frames[i];
     const webrtc::test::FrameResult& psnr = psnr_result.frames[i];
-    printf("{'frame_number': %d, "
-           "'encoding_successful': %s, 'decoding_successful': %s, "
-           "'encode_time': %d, 'decode_time': %d, "
-           "'encode_return_code': %d, 'decode_return_code': %d, "
-           "'bit_rate': %d, 'encoded_frame_length': %" PRIuS ", "
-           "'frame_type': %s, 'packets_dropped': %d, "
-           "'total_packets': %" PRIuS ", 'ssim': %f, 'psnr': %f},\n",
-           f.frame_number,
-           f.encoding_successful ? "True " : "False",
-           f.decoding_successful ? "True " : "False",
-           f.encode_time_in_us,
-           f.decode_time_in_us,
-           f.encode_return_code,
-           f.decode_return_code,
-           f.bit_rate_in_kbps,
-           f.encoded_frame_length_in_bytes,
-           f.frame_type == webrtc::kVideoFrameDelta ? "'Delta'" : "'Other'",
-           f.packets_dropped,
-           f.total_packets,
-           ssim.value,
-           psnr.value);
+    printf(
+        "{'frame_number': %d, "
+        "'encoding_successful': %s, 'decoding_successful': %s, "
+        "'encode_time': %d, 'decode_time': %d, "
+        "'encode_return_code': %d, 'decode_return_code': %d, "
+        "'bit_rate': %d, 'encoded_frame_length': %" PRIuS
+        ", "
+        "'frame_type': %s, 'packets_dropped': %d, "
+        "'total_packets': %" PRIuS ", 'ssim': %f, 'psnr': %f},\n",
+        f.frame_number, f.encoding_successful ? "True " : "False",
+        f.decoding_successful ? "True " : "False", f.encode_time_in_us,
+        f.decode_time_in_us, f.encode_return_code, f.decode_return_code,
+        f.bit_rate_in_kbps, f.encoded_frame_length_in_bytes,
+        f.frame_type == webrtc::kVideoFrameDelta ? "'Delta'" : "'Other'",
+        f.packets_dropped, f.total_packets, ssim.value, psnr.value);
   }
   printf("]\n");
 }
@@ -438,10 +462,14 @@
 // The input file must be in YUV format.
 int main(int argc, char* argv[]) {
   std::string program_name = argv[0];
-  std::string usage = "Quality test application for video comparisons.\n"
-    "Run " + program_name + " --helpshort for usage.\n"
-    "Example usage:\n" + program_name +
-    " --input_filename=filename.yuv --width=352 --height=288\n";
+  std::string usage =
+      "Quality test application for video comparisons.\n"
+      "Run " +
+      program_name +
+      " --helpshort for usage.\n"
+      "Example usage:\n" +
+      program_name +
+      " --input_filename=filename.yuv --width=352 --height=288\n";
   google::SetUsageMessage(usage);
 
   google::ParseCommandLineFlags(&argc, &argv, true);
@@ -478,10 +506,8 @@
     packet_manipulator.InitializeRandomSeed(time(NULL));
   }
   webrtc::test::VideoProcessor* processor =
-      new webrtc::test::VideoProcessorImpl(encoder, decoder,
-                                           &frame_reader,
-                                           &frame_writer,
-                                           &packet_manipulator,
+      new webrtc::test::VideoProcessorImpl(encoder, decoder, &frame_reader,
+                                           &frame_writer, &packet_manipulator,
                                            config, &stats);
   processor->Init();
 
diff --git a/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc b/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc
index a47dae0..9226fa7 100644
--- a/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc
@@ -41,7 +41,7 @@
   int index = pattern_idx_ % temporal_ids_length_;
   assert(index >= 0);
   return temporal_ids_[index];
- }
+}
 
 bool DefaultTemporalLayers::ConfigureBitrates(int bitrateKbit,
                                               int max_bitrate_kbit,
@@ -56,8 +56,7 @@
       cfg->ts_periodicity = temporal_ids_length_;
       cfg->ts_target_bitrate[0] = bitrateKbit;
       cfg->ts_rate_decimator[0] = 1;
-      memcpy(cfg->ts_layer_id,
-             temporal_ids_,
+      memcpy(cfg->ts_layer_id, temporal_ids_,
              sizeof(unsigned int) * temporal_ids_length_);
       temporal_pattern_length_ = 1;
       temporal_pattern_[0] = kTemporalUpdateLastRefAll;
@@ -74,8 +73,7 @@
       cfg->ts_target_bitrate[1] = bitrateKbit;
       cfg->ts_rate_decimator[0] = 2;
       cfg->ts_rate_decimator[1] = 1;
-      memcpy(cfg->ts_layer_id,
-             temporal_ids_,
+      memcpy(cfg->ts_layer_id, temporal_ids_,
              sizeof(unsigned int) * temporal_ids_length_);
       temporal_pattern_length_ = 8;
       temporal_pattern_[0] = kTemporalUpdateLastAndGoldenRefAltRef;
@@ -103,8 +101,7 @@
       cfg->ts_rate_decimator[0] = 4;
       cfg->ts_rate_decimator[1] = 2;
       cfg->ts_rate_decimator[2] = 1;
-      memcpy(cfg->ts_layer_id,
-             temporal_ids_,
+      memcpy(cfg->ts_layer_id, temporal_ids_,
              sizeof(unsigned int) * temporal_ids_length_);
       temporal_pattern_length_ = 8;
       temporal_pattern_[0] = kTemporalUpdateLastAndGoldenRefAltRef;
@@ -138,8 +135,7 @@
       cfg->ts_rate_decimator[1] = 4;
       cfg->ts_rate_decimator[2] = 2;
       cfg->ts_rate_decimator[3] = 1;
-      memcpy(cfg->ts_layer_id,
-             temporal_ids_,
+      memcpy(cfg->ts_layer_id, temporal_ids_,
              sizeof(unsigned int) * temporal_ids_length_);
       temporal_pattern_length_ = 16;
       temporal_pattern_[0] = kTemporalUpdateLast;
@@ -243,7 +239,7 @@
 
 void DefaultTemporalLayers::PopulateCodecSpecific(
     bool base_layer_sync,
-    CodecSpecificInfoVP8 *vp8_info,
+    CodecSpecificInfoVP8* vp8_info,
     uint32_t timestamp) {
   assert(number_of_temporal_layers_ > 0);
   assert(0 < temporal_ids_length_);
@@ -254,8 +250,8 @@
     vp8_info->tl0PicIdx = kNoTl0PicIdx;
   } else {
     if (base_layer_sync) {
-    vp8_info->temporalIdx = 0;
-    vp8_info->layerSync = true;
+      vp8_info->temporalIdx = 0;
+      vp8_info->layerSync = true;
     } else {
       vp8_info->temporalIdx = CurrentLayerId();
       TemporalReferences temporal_reference =
@@ -267,7 +263,7 @@
               kTemporalUpdateGoldenWithoutDependencyRefAltRef ||
           temporal_reference == kTemporalUpdateNoneNoRefGoldenRefAltRef ||
           (temporal_reference == kTemporalUpdateNone &&
-              number_of_temporal_layers_ == 4)) {
+           number_of_temporal_layers_ == 4)) {
         vp8_info->layerSync = true;
       } else {
         vp8_info->layerSync = false;
diff --git a/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc
index b7a6b66..461ba69 100644
--- a/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc
@@ -8,7 +8,6 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-
 #include "testing/gtest/include/gtest/gtest.h"
 #include "webrtc/modules/video_coding/include/video_codec_interface.h"
 #include "webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.h"
@@ -19,47 +18,36 @@
 namespace webrtc {
 
 enum {
-  kTemporalUpdateLast = VP8_EFLAG_NO_UPD_GF |
-                        VP8_EFLAG_NO_UPD_ARF |
+  kTemporalUpdateLast = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
                         VP8_EFLAG_NO_REF_GF |
                         VP8_EFLAG_NO_REF_ARF,
-  kTemporalUpdateGoldenWithoutDependency = VP8_EFLAG_NO_REF_GF |
-                                           VP8_EFLAG_NO_REF_ARF |
-                                           VP8_EFLAG_NO_UPD_ARF |
-                                           VP8_EFLAG_NO_UPD_LAST,
-  kTemporalUpdateGolden = VP8_EFLAG_NO_REF_ARF |
-                          VP8_EFLAG_NO_UPD_ARF |
-                          VP8_EFLAG_NO_UPD_LAST,
-  kTemporalUpdateAltrefWithoutDependency = VP8_EFLAG_NO_REF_ARF |
-                                           VP8_EFLAG_NO_REF_GF |
-                                           VP8_EFLAG_NO_UPD_GF |
-                                           VP8_EFLAG_NO_UPD_LAST,
-  kTemporalUpdateAltref = VP8_EFLAG_NO_UPD_GF |
-                          VP8_EFLAG_NO_UPD_LAST,
-  kTemporalUpdateNone = VP8_EFLAG_NO_UPD_GF |
-                        VP8_EFLAG_NO_UPD_ARF |
+  kTemporalUpdateGoldenWithoutDependency =
+      VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_ARF |
+      VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateGolden =
+      VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateAltrefWithoutDependency =
+      VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF |
+      VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateAltref = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateNone = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
                         VP8_EFLAG_NO_UPD_LAST |
                         VP8_EFLAG_NO_UPD_ENTROPY,
-  kTemporalUpdateNoneNoRefAltRef = VP8_EFLAG_NO_REF_ARF |
-                                   VP8_EFLAG_NO_UPD_GF |
+  kTemporalUpdateNoneNoRefAltRef = VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF |
                                    VP8_EFLAG_NO_UPD_ARF |
                                    VP8_EFLAG_NO_UPD_LAST |
                                    VP8_EFLAG_NO_UPD_ENTROPY,
-  kTemporalUpdateNoneNoRefGolden = VP8_EFLAG_NO_REF_GF |
-                                   VP8_EFLAG_NO_UPD_GF |
+  kTemporalUpdateNoneNoRefGolden = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF |
                                    VP8_EFLAG_NO_UPD_ARF |
                                    VP8_EFLAG_NO_UPD_LAST |
                                    VP8_EFLAG_NO_UPD_ENTROPY,
-  kTemporalUpdateGoldenWithoutDependencyRefAltRef = VP8_EFLAG_NO_REF_GF |
-                                                    VP8_EFLAG_NO_UPD_ARF |
-                                                    VP8_EFLAG_NO_UPD_LAST,
-  kTemporalUpdateGoldenRefAltRef = VP8_EFLAG_NO_UPD_ARF |
-                                   VP8_EFLAG_NO_UPD_LAST,
-  kTemporalUpdateLastRefAltRef = VP8_EFLAG_NO_UPD_GF |
-                                 VP8_EFLAG_NO_UPD_ARF |
-                                 VP8_EFLAG_NO_REF_GF,
-  kTemporalUpdateLastAndGoldenRefAltRef = VP8_EFLAG_NO_UPD_ARF |
-                                          VP8_EFLAG_NO_REF_GF,
+  kTemporalUpdateGoldenWithoutDependencyRefAltRef =
+      VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateGoldenRefAltRef = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateLastRefAltRef =
+      VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF,
+  kTemporalUpdateLastAndGoldenRefAltRef =
+      VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF,
 };
 
 TEST(TemporalLayersTest, 2Layers) {
@@ -68,29 +56,30 @@
   CodecSpecificInfoVP8 vp8_info;
   tl.ConfigureBitrates(500, 500, 30, &cfg);
 
-  int expected_flags[16] = { kTemporalUpdateLastAndGoldenRefAltRef,
-                             kTemporalUpdateGoldenWithoutDependencyRefAltRef,
-                             kTemporalUpdateLastRefAltRef,
-                             kTemporalUpdateGoldenRefAltRef,
-                             kTemporalUpdateLastRefAltRef,
-                             kTemporalUpdateGoldenRefAltRef,
-                             kTemporalUpdateLastRefAltRef,
-                             kTemporalUpdateNone,
-                             kTemporalUpdateLastAndGoldenRefAltRef,
-                             kTemporalUpdateGoldenWithoutDependencyRefAltRef,
-                             kTemporalUpdateLastRefAltRef,
-                             kTemporalUpdateGoldenRefAltRef,
-                             kTemporalUpdateLastRefAltRef,
-                             kTemporalUpdateGoldenRefAltRef,
-                             kTemporalUpdateLastRefAltRef,
-                             kTemporalUpdateNone,
-   };
-  int expected_temporal_idx[16] =
-      { 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1 };
+  int expected_flags[16] = {
+      kTemporalUpdateLastAndGoldenRefAltRef,
+      kTemporalUpdateGoldenWithoutDependencyRefAltRef,
+      kTemporalUpdateLastRefAltRef,
+      kTemporalUpdateGoldenRefAltRef,
+      kTemporalUpdateLastRefAltRef,
+      kTemporalUpdateGoldenRefAltRef,
+      kTemporalUpdateLastRefAltRef,
+      kTemporalUpdateNone,
+      kTemporalUpdateLastAndGoldenRefAltRef,
+      kTemporalUpdateGoldenWithoutDependencyRefAltRef,
+      kTemporalUpdateLastRefAltRef,
+      kTemporalUpdateGoldenRefAltRef,
+      kTemporalUpdateLastRefAltRef,
+      kTemporalUpdateGoldenRefAltRef,
+      kTemporalUpdateLastRefAltRef,
+      kTemporalUpdateNone,
+  };
+  int expected_temporal_idx[16] = {0, 1, 0, 1, 0, 1, 0, 1,
+                                   0, 1, 0, 1, 0, 1, 0, 1};
 
-  bool expected_layer_sync[16] =
-      { false, true, false, false, false, false, false, false,
-        false, true, false, false, false, false, false, false };
+  bool expected_layer_sync[16] = {false, true,  false, false, false, false,
+                                  false, false, false, true,  false, false,
+                                  false, false, false, false};
 
   uint32_t timestamp = 0;
   for (int i = 0; i < 16; ++i) {
@@ -108,29 +97,30 @@
   CodecSpecificInfoVP8 vp8_info;
   tl.ConfigureBitrates(500, 500, 30, &cfg);
 
-  int expected_flags[16] = { kTemporalUpdateLastAndGoldenRefAltRef,
-                             kTemporalUpdateNoneNoRefGolden,
-                             kTemporalUpdateGoldenWithoutDependencyRefAltRef,
-                             kTemporalUpdateNone,
-                             kTemporalUpdateLastRefAltRef,
-                             kTemporalUpdateNone,
-                             kTemporalUpdateGoldenRefAltRef,
-                             kTemporalUpdateNone,
-                             kTemporalUpdateLastAndGoldenRefAltRef,
-                             kTemporalUpdateNoneNoRefGolden,
-                             kTemporalUpdateGoldenWithoutDependencyRefAltRef,
-                             kTemporalUpdateNone,
-                             kTemporalUpdateLastRefAltRef,
-                             kTemporalUpdateNone,
-                             kTemporalUpdateGoldenRefAltRef,
-                             kTemporalUpdateNone,
+  int expected_flags[16] = {
+      kTemporalUpdateLastAndGoldenRefAltRef,
+      kTemporalUpdateNoneNoRefGolden,
+      kTemporalUpdateGoldenWithoutDependencyRefAltRef,
+      kTemporalUpdateNone,
+      kTemporalUpdateLastRefAltRef,
+      kTemporalUpdateNone,
+      kTemporalUpdateGoldenRefAltRef,
+      kTemporalUpdateNone,
+      kTemporalUpdateLastAndGoldenRefAltRef,
+      kTemporalUpdateNoneNoRefGolden,
+      kTemporalUpdateGoldenWithoutDependencyRefAltRef,
+      kTemporalUpdateNone,
+      kTemporalUpdateLastRefAltRef,
+      kTemporalUpdateNone,
+      kTemporalUpdateGoldenRefAltRef,
+      kTemporalUpdateNone,
   };
-  int expected_temporal_idx[16] =
-      { 0, 2, 1, 2, 0, 2, 1, 2, 0, 2, 1, 2, 0, 2, 1, 2 };
+  int expected_temporal_idx[16] = {0, 2, 1, 2, 0, 2, 1, 2,
+                                   0, 2, 1, 2, 0, 2, 1, 2};
 
-  bool expected_layer_sync[16] =
-      { false, true, true, false, false, false, false, false,
-        false, true, true, false, false, false, false, false };
+  bool expected_layer_sync[16] = {false, true,  true,  false, false, false,
+                                  false, false, false, true,  true,  false,
+                                  false, false, false, false};
 
   unsigned int timestamp = 0;
   for (int i = 0; i < 16; ++i) {
@@ -165,12 +155,12 @@
       kTemporalUpdateAltref,
       kTemporalUpdateNone,
   };
-  int expected_temporal_idx[16] =
-      { 0, 3, 2, 3, 1, 3, 2, 3, 0, 3, 2, 3, 1, 3, 2, 3 };
+  int expected_temporal_idx[16] = {0, 3, 2, 3, 1, 3, 2, 3,
+                                   0, 3, 2, 3, 1, 3, 2, 3};
 
-  bool expected_layer_sync[16] =
-      { false, true, true, true, true, true, false, true,
-        false, true, false, true, false, true, false, true };
+  bool expected_layer_sync[16] = {false, true, true,  true, true,  true,
+                                  false, true, false, true, false, true,
+                                  false, true, false, true};
 
   uint32_t timestamp = 0;
   for (int i = 0; i < 16; ++i) {
@@ -198,8 +188,7 @@
       kTemporalUpdateGoldenRefAltRef,
       kTemporalUpdateNone,
   };
-  int expected_temporal_idx[8] =
-      { 0, 0, 0, 0, 0, 0, 0, 2};
+  int expected_temporal_idx[8] = {0, 0, 0, 0, 0, 0, 0, 2};
 
   uint32_t timestamp = 0;
   for (int i = 0; i < 7; ++i) {
diff --git a/webrtc/modules/video_coding/codecs/vp8/include/vp8.h b/webrtc/modules/video_coding/codecs/vp8/include/vp8.h
index ab22db3..dd35142 100644
--- a/webrtc/modules/video_coding/codecs/vp8/include/vp8.h
+++ b/webrtc/modules/video_coding/codecs/vp8/include/vp8.h
@@ -21,16 +21,15 @@
  public:
   static VP8Encoder* Create();
 
-  virtual ~VP8Encoder() {};
+  virtual ~VP8Encoder() {}
 };  // end of VP8Encoder class
 
-
 class VP8Decoder : public VideoDecoder {
  public:
   static VP8Decoder* Create();
 
-  virtual ~VP8Decoder() {};
+  virtual ~VP8Decoder() {}
 };  // end of VP8Decoder class
 }  // namespace webrtc
 
-#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_H_
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_H_
diff --git a/webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h b/webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h
index c2cefdd..7a27e44 100644
--- a/webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h
+++ b/webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h
@@ -8,8 +8,8 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_COMMON_TYPES_H_
-#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_COMMON_TYPES_H_
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_COMMON_TYPES_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_COMMON_TYPES_H_
 
 #include "webrtc/common_types.h"
 
@@ -19,11 +19,11 @@
 // Values as required for the VP8 codec (accumulating).
 static const float
     kVp8LayerRateAlloction[kMaxTemporalStreams][kMaxTemporalStreams] = {
-      {1.0f, 1.0f, 1.0f, 1.0f},  // 1 layer
-      {0.6f, 1.0f, 1.0f, 1.0f},  // 2 layers {60%, 40%}
-      {0.4f, 0.6f, 1.0f, 1.0f},  // 3 layers {40%, 20%, 40%}
-      {0.25f, 0.4f, 0.6f, 1.0f}  // 4 layers {25%, 15%, 20%, 40%}
+        {1.0f, 1.0f, 1.0f, 1.0f},  // 1 layer
+        {0.6f, 1.0f, 1.0f, 1.0f},  // 2 layers {60%, 40%}
+        {0.4f, 0.6f, 1.0f, 1.0f},  // 3 layers {40%, 20%, 40%}
+        {0.25f, 0.4f, 0.6f, 1.0f}  // 4 layers {25%, 15%, 20%, 40%}
 };
 
 }  // namespace webrtc
-#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_COMMON_TYPES_H_
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_COMMON_TYPES_H_
diff --git a/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc b/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc
index 7ecc07d..d226013 100644
--- a/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc
@@ -23,7 +23,8 @@
 namespace {
 enum {
   kTemporalUpdateLast = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
-                        VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF,
+                        VP8_EFLAG_NO_REF_GF |
+                        VP8_EFLAG_NO_REF_ARF,
 
   kTemporalUpdateGolden =
       VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST,
@@ -37,13 +38,15 @@
       kTemporalUpdateAltref | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_REF_GF,
 
   kTemporalUpdateNone = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
-                        VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ENTROPY,
+                        VP8_EFLAG_NO_UPD_LAST |
+                        VP8_EFLAG_NO_UPD_ENTROPY,
 
   kTemporalUpdateNoneNoRefAltref = kTemporalUpdateNone | VP8_EFLAG_NO_REF_ARF,
 
   kTemporalUpdateNoneNoRefGoldenRefAltRef =
       VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
-      VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ENTROPY,
+      VP8_EFLAG_NO_UPD_LAST |
+      VP8_EFLAG_NO_UPD_ENTROPY,
 
   kTemporalUpdateGoldenWithoutDependencyRefAltRef =
       VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST,
@@ -133,12 +136,14 @@
         layer_ids_length_ = sizeof(layer_ids) / sizeof(*layer_ids);
 
         static const int encode_flags[] = {
-          kTemporalUpdateLastAndGoldenRefAltRef,
-          kTemporalUpdateGoldenWithoutDependencyRefAltRef,
-          kTemporalUpdateLastRefAltRef, kTemporalUpdateGoldenRefAltRef,
-          kTemporalUpdateLastRefAltRef, kTemporalUpdateGoldenRefAltRef,
-          kTemporalUpdateLastRefAltRef, kTemporalUpdateNone
-        };
+            kTemporalUpdateLastAndGoldenRefAltRef,
+            kTemporalUpdateGoldenWithoutDependencyRefAltRef,
+            kTemporalUpdateLastRefAltRef,
+            kTemporalUpdateGoldenRefAltRef,
+            kTemporalUpdateLastRefAltRef,
+            kTemporalUpdateGoldenRefAltRef,
+            kTemporalUpdateLastRefAltRef,
+            kTemporalUpdateNone};
         encode_flags_length_ = sizeof(encode_flags) / sizeof(*layer_ids);
         encode_flags_ = encode_flags;
 
@@ -153,12 +158,14 @@
         layer_ids_length_ = sizeof(layer_ids) / sizeof(*layer_ids);
 
         static const int encode_flags[] = {
-          kTemporalUpdateLastAndGoldenRefAltRef,
-          kTemporalUpdateNoneNoRefGoldenRefAltRef,
-          kTemporalUpdateGoldenWithoutDependencyRefAltRef, kTemporalUpdateNone,
-          kTemporalUpdateLastRefAltRef, kTemporalUpdateNone,
-          kTemporalUpdateGoldenRefAltRef, kTemporalUpdateNone
-        };
+            kTemporalUpdateLastAndGoldenRefAltRef,
+            kTemporalUpdateNoneNoRefGoldenRefAltRef,
+            kTemporalUpdateGoldenWithoutDependencyRefAltRef,
+            kTemporalUpdateNone,
+            kTemporalUpdateLastRefAltRef,
+            kTemporalUpdateNone,
+            kTemporalUpdateGoldenRefAltRef,
+            kTemporalUpdateNone};
         encode_flags_length_ = sizeof(encode_flags) / sizeof(*layer_ids);
         encode_flags_ = encode_flags;
 
@@ -172,8 +179,8 @@
         assert(false);
         return false;
     }
-    memcpy(
-        cfg->ts_layer_id, layer_ids_, sizeof(unsigned int) * layer_ids_length_);
+    memcpy(cfg->ts_layer_id, layer_ids_,
+           sizeof(unsigned int) * layer_ids_length_);
     return true;
   }
 
diff --git a/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection.cc b/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection.cc
index a922e35..1838e32 100644
--- a/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection.cc
@@ -25,8 +25,7 @@
       last_sent_ref_update_time_(0),
       established_ref_picture_id_(0),
       last_refresh_time_(0),
-      rtt_(0) {
-}
+      rtt_(0) {}
 
 void ReferencePictureSelection::Init() {
   update_golden_next_ = true;
@@ -62,7 +61,8 @@
   return send_refresh;
 }
 
-int ReferencePictureSelection::EncodeFlags(int picture_id, bool send_refresh,
+int ReferencePictureSelection::EncodeFlags(int picture_id,
+                                           bool send_refresh,
                                            uint32_t now_ts) {
   int flags = 0;
   // We can't refresh the decoder until we have established the key frame.
@@ -87,12 +87,12 @@
       received_ack_) {
     flags |= VP8_EFLAG_NO_REF_LAST;  // Don't reference the last frame.
     if (update_golden_next_) {
-      flags |= VP8_EFLAG_FORCE_GF;  // Update the golden reference.
+      flags |= VP8_EFLAG_FORCE_GF;    // Update the golden reference.
       flags |= VP8_EFLAG_NO_UPD_ARF;  // Don't update alt-ref.
-      flags |= VP8_EFLAG_NO_REF_GF;  // Don't reference the golden frame.
+      flags |= VP8_EFLAG_NO_REF_GF;   // Don't reference the golden frame.
     } else {
-      flags |= VP8_EFLAG_FORCE_ARF;  // Update the alt-ref reference.
-      flags |= VP8_EFLAG_NO_UPD_GF;  // Don't update the golden frame.
+      flags |= VP8_EFLAG_FORCE_ARF;   // Update the alt-ref reference.
+      flags |= VP8_EFLAG_NO_UPD_GF;   // Don't update the golden frame.
       flags |= VP8_EFLAG_NO_REF_ARF;  // Don't reference the alt-ref frame.
     }
     last_sent_ref_picture_id_ = picture_id;
@@ -103,9 +103,9 @@
     if (established_golden_)
       flags |= VP8_EFLAG_NO_REF_ARF;  // Don't reference the alt-ref frame.
     else
-      flags |= VP8_EFLAG_NO_REF_GF;   // Don't reference the golden frame.
-    flags |= VP8_EFLAG_NO_UPD_GF;  // Don't update the golden frame.
-    flags |= VP8_EFLAG_NO_UPD_ARF;  // Don't update the alt-ref frame.
+      flags |= VP8_EFLAG_NO_REF_GF;  // Don't reference the golden frame.
+    flags |= VP8_EFLAG_NO_UPD_GF;    // Don't update the golden frame.
+    flags |= VP8_EFLAG_NO_UPD_ARF;   // Don't update the alt-ref frame.
   }
   return flags;
 }
diff --git a/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection_unittest.cc
index c6474e5..742bb96 100644
--- a/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection_unittest.cc
@@ -22,25 +22,19 @@
 // Should match the values set in reference_picture_selection.h
 static const int kRtt = 10;
 
-static const int kNoPropagationGolden    = VP8_EFLAG_NO_REF_ARF |
-                                           VP8_EFLAG_NO_UPD_GF |
-                                           VP8_EFLAG_NO_UPD_ARF;
-static const int kNoPropagationAltRef    = VP8_EFLAG_NO_REF_GF |
-                                           VP8_EFLAG_NO_UPD_GF |
-                                           VP8_EFLAG_NO_UPD_ARF;
-static const int kPropagateGolden        = VP8_EFLAG_FORCE_GF |
-                                           VP8_EFLAG_NO_UPD_ARF |
-                                           VP8_EFLAG_NO_REF_GF |
-                                           VP8_EFLAG_NO_REF_LAST;
-static const int kPropagateAltRef        = VP8_EFLAG_FORCE_ARF |
-                                           VP8_EFLAG_NO_UPD_GF |
-                                           VP8_EFLAG_NO_REF_ARF |
-                                           VP8_EFLAG_NO_REF_LAST;
-static const int kRefreshFromGolden      = VP8_EFLAG_NO_REF_LAST |
-                                           VP8_EFLAG_NO_REF_ARF;
-static const int kRefreshFromAltRef      = VP8_EFLAG_NO_REF_LAST |
-                                           VP8_EFLAG_NO_REF_GF;
-
+static const int kNoPropagationGolden =
+    VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
+static const int kNoPropagationAltRef =
+    VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
+static const int kPropagateGolden = VP8_EFLAG_FORCE_GF | VP8_EFLAG_NO_UPD_ARF |
+                                    VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_LAST;
+static const int kPropagateAltRef = VP8_EFLAG_FORCE_ARF | VP8_EFLAG_NO_UPD_GF |
+                                    VP8_EFLAG_NO_REF_ARF |
+                                    VP8_EFLAG_NO_REF_LAST;
+static const int kRefreshFromGolden =
+    VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_REF_ARF;
+static const int kRefreshFromAltRef =
+    VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_REF_GF;
 
 class TestRPS : public ::testing::Test {
  protected:
@@ -84,15 +78,15 @@
   EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
   // Enough time have elapsed since the previous reference propagation, we will
   // therefore get both a refresh from golden and a propagation of alt-ref.
-  EXPECT_EQ(rps_.EncodeFlags(5, true, 90 * time), kRefreshFromGolden |
-            kPropagateAltRef);
+  EXPECT_EQ(rps_.EncodeFlags(5, true, 90 * time),
+            kRefreshFromGolden | kPropagateAltRef);
   rps_.ReceivedRPSI(5);
   time += kRtt + 1;
   // Enough time for a new refresh, but not enough time for a reference
   // propagation.
   EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
-  EXPECT_EQ(rps_.EncodeFlags(6, true, 90 * time), kRefreshFromAltRef |
-            kNoPropagationAltRef);
+  EXPECT_EQ(rps_.EncodeFlags(6, true, 90 * time),
+            kRefreshFromAltRef | kNoPropagationAltRef);
 }
 
 TEST_F(TestRPS, TestWrap) {
diff --git a/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc b/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc
index 88380c6..536587a 100644
--- a/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc
@@ -11,6 +11,8 @@
 
 #include <stdlib.h>
 
+#include <algorithm>
+
 #include "webrtc/base/checks.h"
 #include "vpx/vpx_encoder.h"
 #include "vpx/vp8cx.h"
@@ -188,7 +190,7 @@
 }
 
 void ScreenshareLayers::PopulateCodecSpecific(bool base_layer_sync,
-                                              CodecSpecificInfoVP8 *vp8_info,
+                                              CodecSpecificInfoVP8* vp8_info,
                                               uint32_t timestamp) {
   int64_t unwrapped_timestamp = time_wrap_handler_.Unwrap(timestamp);
   if (number_of_temporal_layers_ == 1) {
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
index 5dc4ac7..6abb435 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
@@ -215,9 +215,7 @@
     }
 
     VideoEncoder* encoder = factory_->Create();
-    ret = encoder->InitEncode(&stream_codec,
-                              number_of_cores,
-                              max_payload_size);
+    ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size);
     if (ret < 0) {
       Release();
       return ret;
@@ -284,35 +282,25 @@
     // scale it to match what the encoder expects (below).
     if ((dst_width == src_width && dst_height == src_height) ||
         input_image.IsZeroSize()) {
-      streaminfos_[stream_idx].encoder->Encode(input_image,
-                                               codec_specific_info,
+      streaminfos_[stream_idx].encoder->Encode(input_image, codec_specific_info,
                                                &stream_frame_types);
     } else {
       VideoFrame dst_frame;
       // Making sure that destination frame is of sufficient size.
       // Aligning stride values based on width.
-      dst_frame.CreateEmptyFrame(dst_width, dst_height,
-                                 dst_width, (dst_width + 1) / 2,
-                                 (dst_width + 1) / 2);
-      libyuv::I420Scale(input_image.buffer(kYPlane),
-                        input_image.stride(kYPlane),
-                        input_image.buffer(kUPlane),
-                        input_image.stride(kUPlane),
-                        input_image.buffer(kVPlane),
-                        input_image.stride(kVPlane),
-                        src_width, src_height,
-                        dst_frame.buffer(kYPlane),
-                        dst_frame.stride(kYPlane),
-                        dst_frame.buffer(kUPlane),
-                        dst_frame.stride(kUPlane),
-                        dst_frame.buffer(kVPlane),
-                        dst_frame.stride(kVPlane),
-                        dst_width, dst_height,
-                        libyuv::kFilterBilinear);
+      dst_frame.CreateEmptyFrame(dst_width, dst_height, dst_width,
+                                 (dst_width + 1) / 2, (dst_width + 1) / 2);
+      libyuv::I420Scale(
+          input_image.buffer(kYPlane), input_image.stride(kYPlane),
+          input_image.buffer(kUPlane), input_image.stride(kUPlane),
+          input_image.buffer(kVPlane), input_image.stride(kVPlane), src_width,
+          src_height, dst_frame.buffer(kYPlane), dst_frame.stride(kYPlane),
+          dst_frame.buffer(kUPlane), dst_frame.stride(kUPlane),
+          dst_frame.buffer(kVPlane), dst_frame.stride(kVPlane), dst_width,
+          dst_height, libyuv::kFilterBilinear);
       dst_frame.set_timestamp(input_image.timestamp());
       dst_frame.set_render_time_ms(input_image.render_time_ms());
-      streaminfos_[stream_idx].encoder->Encode(dst_frame,
-                                               codec_specific_info,
+      streaminfos_[stream_idx].encoder->Encode(dst_frame, codec_specific_info,
                                                &stream_frame_types);
     }
   }
@@ -426,16 +414,17 @@
     // current stream's |targetBitrate|, otherwise it's capped by |maxBitrate|.
     if (stream_idx < codec_.numberOfSimulcastStreams - 1) {
       unsigned int max_rate = codec_.simulcastStream[stream_idx].maxBitrate;
-      if (new_bitrate_kbit >= SumStreamTargetBitrate(stream_idx + 1, codec_) +
-          codec_.simulcastStream[stream_idx + 1].minBitrate) {
+      if (new_bitrate_kbit >=
+          SumStreamTargetBitrate(stream_idx + 1, codec_) +
+              codec_.simulcastStream[stream_idx + 1].minBitrate) {
         max_rate = codec_.simulcastStream[stream_idx].targetBitrate;
       }
       return std::min(new_bitrate_kbit - sum_target_lower_streams, max_rate);
     } else {
-        // For the highest stream (highest resolution), the |targetBitRate| and
-        // |maxBitrate| are not used. Any excess bitrate (above the targets of
-        // all lower streams) is given to this (highest resolution) stream.
-        return new_bitrate_kbit - sum_target_lower_streams;
+      // For the highest stream (highest resolution), the |targetBitRate| and
+      // |maxBitrate| are not used. Any excess bitrate (above the targets of
+      // all lower streams) is given to this (highest resolution) stream.
+      return new_bitrate_kbit - sum_target_lower_streams;
     }
   } else {
     // Not enough bitrate for this stream.
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h
index afec024..c3bf4cf 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h
@@ -71,8 +71,8 @@
           send_stream(true) {}
     StreamInfo(VideoEncoder* encoder,
                EncodedImageCallback* callback,
-               unsigned short width,
-               unsigned short height,
+               uint16_t width,
+               uint16_t height,
                bool send_stream)
         : encoder(encoder),
           callback(callback),
@@ -83,8 +83,8 @@
     // Deleted by SimulcastEncoderAdapter::Release().
     VideoEncoder* encoder;
     EncodedImageCallback* callback;
-    unsigned short width;
-    unsigned short height;
+    uint16_t width;
+    uint16_t height;
     bool key_frame_request;
     bool send_stream;
   };
@@ -118,4 +118,3 @@
 }  // namespace webrtc
 
 #endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_ENCODER_ADAPTER_H_
-
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
index b88ac2e..86b8e0b 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
@@ -27,12 +27,10 @@
 class TestSimulcastEncoderAdapter : public TestVp8Simulcast {
  public:
   TestSimulcastEncoderAdapter()
-     : TestVp8Simulcast(CreateTestEncoderAdapter(),
-                        VP8Decoder::Create()) {}
+      : TestVp8Simulcast(CreateTestEncoderAdapter(), VP8Decoder::Create()) {}
+
  protected:
-  virtual void SetUp() {
-    TestVp8Simulcast::SetUp();
-  }
+  virtual void SetUp() { TestVp8Simulcast::SetUp(); }
   virtual void TearDown() {
     TestVp8Simulcast::TearDown();
     VP8EncoderFactoryConfig::set_use_simulcast_adapter(false);
@@ -97,8 +95,7 @@
 
 // TODO(ronghuawu): Enable this test when SkipEncodingUnusedStreams option is
 // implemented for SimulcastEncoderAdapter.
-TEST_F(TestSimulcastEncoderAdapter,
-    DISABLED_TestSkipEncodingUnusedStreams) {
+TEST_F(TestSimulcastEncoderAdapter, DISABLED_TestSkipEncodingUnusedStreams) {
   TestVp8Simulcast::TestSkipEncodingUnusedStreams();
 }
 
@@ -127,23 +124,17 @@
     return 0;
   }
 
-  int32_t Release() override {
-    return 0;
-  }
+  int32_t Release() override { return 0; }
 
   int32_t SetRates(uint32_t newBitRate, uint32_t frameRate) override {
     return 0;
   }
 
-  MOCK_METHOD2(SetChannelParameters,
-      int32_t(uint32_t packetLoss, int64_t rtt));
+  MOCK_METHOD2(SetChannelParameters, int32_t(uint32_t packetLoss, int64_t rtt));
 
-  bool SupportsNativeHandle() const override {
-    return supports_native_handle_;
-  }
+  bool SupportsNativeHandle() const override { return supports_native_handle_; }
 
-  virtual ~MockVideoEncoder() {
-  }
+  virtual ~MockVideoEncoder() {}
 
   const VideoCodec& codec() const { return codec_; }
 
@@ -200,7 +191,8 @@
     EXPECT_TRUE(!factory_->encoders().empty());
     for (size_t i = 0; i < factory_->encoders().size(); ++i) {
       EXPECT_CALL(*factory_->encoders()[i],
-                  SetChannelParameters(packetLoss, rtt)).Times(1);
+                  SetChannelParameters(packetLoss, rtt))
+          .Times(1);
     }
   }
 
@@ -249,8 +241,7 @@
 
   void SetupCodec() {
     TestVp8Simulcast::DefaultSettings(
-      &codec_,
-      static_cast<const int*>(kTestTemporalLayerProfile));
+        &codec_, static_cast<const int*>(kTestTemporalLayerProfile));
     EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
     adapter_->RegisterEncodeCompleteCallback(this);
   }
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.cc
index 373a552..f23affe 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.cc
@@ -13,18 +13,14 @@
 namespace webrtc {
 namespace testing {
 
-class TestVp8Impl
-    : public TestVp8Simulcast {
+class TestVp8Impl : public TestVp8Simulcast {
  public:
   TestVp8Impl()
-     : TestVp8Simulcast(VP8Encoder::Create(), VP8Decoder::Create()) {}
+      : TestVp8Simulcast(VP8Encoder::Create(), VP8Decoder::Create()) {}
+
  protected:
-  virtual void SetUp() {
-    TestVp8Simulcast::SetUp();
-  }
-  virtual void TearDown() {
-    TestVp8Simulcast::TearDown();
-  }
+  virtual void SetUp() { TestVp8Simulcast::SetUp(); }
+  virtual void TearDown() { TestVp8Simulcast::TearDown(); }
 };
 
 TEST_F(TestVp8Impl, TestKeyFrameRequestsOnAllStreams) {
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
index 90f6449..7a7a2c2 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
@@ -44,10 +44,8 @@
 const int kTargetBitrates[kNumberOfSimulcastStreams] = {100, 450, 1000};
 const int kDefaultTemporalLayerProfile[3] = {3, 3, 3};
 
-template<typename T> void SetExpectedValues3(T value0,
-                                             T value1,
-                                             T value2,
-                                             T* expected_values) {
+template <typename T>
+void SetExpectedValues3(T value0, T value1, T value2, T* expected_values) {
   expected_values[0] = value0;
   expected_values[1] = value1;
   expected_values[2] = value2;
@@ -55,15 +53,14 @@
 
 class Vp8TestEncodedImageCallback : public EncodedImageCallback {
  public:
-  Vp8TestEncodedImageCallback()
-       : picture_id_(-1) {
+  Vp8TestEncodedImageCallback() : picture_id_(-1) {
     memset(temporal_layer_, -1, sizeof(temporal_layer_));
     memset(layer_sync_, false, sizeof(layer_sync_));
   }
 
   ~Vp8TestEncodedImageCallback() {
-    delete [] encoded_key_frame_._buffer;
-    delete [] encoded_frame_._buffer;
+    delete[] encoded_key_frame_._buffer;
+    delete[] encoded_frame_._buffer;
   }
 
   virtual int32_t Encoded(const EncodedImage& encoded_image,
@@ -72,22 +69,20 @@
     // Only store the base layer.
     if (codec_specific_info->codecSpecific.VP8.simulcastIdx == 0) {
       if (encoded_image._frameType == kVideoFrameKey) {
-        delete [] encoded_key_frame_._buffer;
+        delete[] encoded_key_frame_._buffer;
         encoded_key_frame_._buffer = new uint8_t[encoded_image._size];
         encoded_key_frame_._size = encoded_image._size;
         encoded_key_frame_._length = encoded_image._length;
         encoded_key_frame_._frameType = kVideoFrameKey;
         encoded_key_frame_._completeFrame = encoded_image._completeFrame;
-        memcpy(encoded_key_frame_._buffer,
-               encoded_image._buffer,
+        memcpy(encoded_key_frame_._buffer, encoded_image._buffer,
                encoded_image._length);
       } else {
-        delete [] encoded_frame_._buffer;
+        delete[] encoded_frame_._buffer;
         encoded_frame_._buffer = new uint8_t[encoded_image._size];
         encoded_frame_._size = encoded_image._size;
         encoded_frame_._length = encoded_image._length;
-        memcpy(encoded_frame_._buffer,
-               encoded_image._buffer,
+        memcpy(encoded_frame_._buffer, encoded_image._buffer,
                encoded_image._length);
       }
     }
@@ -98,8 +93,10 @@
         codec_specific_info->codecSpecific.VP8.temporalIdx;
     return 0;
   }
-  void GetLastEncodedFrameInfo(int* picture_id, int* temporal_layer,
-                               bool* layer_sync, int stream) {
+  void GetLastEncodedFrameInfo(int* picture_id,
+                               int* temporal_layer,
+                               bool* layer_sync,
+                               int stream) {
     *picture_id = picture_id_;
     *temporal_layer = temporal_layer_[stream];
     *layer_sync = layer_sync_[stream];
@@ -121,9 +118,7 @@
 
 class Vp8TestDecodedImageCallback : public DecodedImageCallback {
  public:
-  Vp8TestDecodedImageCallback()
-      : decoded_frames_(0) {
-  }
+  Vp8TestDecodedImageCallback() : decoded_frames_(0) {}
   int32_t Decoded(VideoFrame& decoded_image) override {
     for (int i = 0; i < decoded_image.width(); ++i) {
       EXPECT_NEAR(kColorY, decoded_image.buffer(kYPlane)[i], 1);
@@ -141,9 +136,7 @@
     RTC_NOTREACHED();
     return -1;
   }
-  int DecodedFrames() {
-    return decoded_frames_;
-  }
+  int DecodedFrames() { return decoded_frames_; }
 
  private:
   int decoded_frames_;
@@ -166,8 +159,7 @@
     std::vector<unsigned int> configured_bitrates;
     for (std::vector<TemporalLayers*>::const_iterator it =
              spy_factory->spying_layers_.begin();
-         it != spy_factory->spying_layers_.end();
-         ++it) {
+         it != spy_factory->spying_layers_.end(); ++it) {
       configured_bitrates.push_back(
           static_cast<SpyingTemporalLayers*>(*it)->configured_bitrate_);
     }
@@ -190,8 +182,8 @@
                            int framerate,
                            vpx_codec_enc_cfg_t* cfg) override {
       configured_bitrate_ = bitrate_kbit;
-      return layers_->ConfigureBitrates(
-          bitrate_kbit, max_bitrate_kbit, framerate, cfg);
+      return layers_->ConfigureBitrates(bitrate_kbit, max_bitrate_kbit,
+                                        framerate, cfg);
     }
 
     void PopulateCodecSpecific(bool base_layer_sync,
@@ -233,16 +225,15 @@
 class TestVp8Simulcast : public ::testing::Test {
  public:
   TestVp8Simulcast(VP8Encoder* encoder, VP8Decoder* decoder)
-     : encoder_(encoder),
-       decoder_(decoder) {}
+      : encoder_(encoder), decoder_(decoder) {}
 
   // Creates an VideoFrame from |plane_colors|.
   static void CreateImage(VideoFrame* frame, int plane_colors[kNumOfPlanes]) {
     for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) {
-      int width = (plane_num != kYPlane ? (frame->width() + 1) / 2 :
-        frame->width());
-      int height = (plane_num != kYPlane ? (frame->height() + 1) / 2 :
-        frame->height());
+      int width =
+          (plane_num != kYPlane ? (frame->width() + 1) / 2 : frame->width());
+      int height =
+          (plane_num != kYPlane ? (frame->height() + 1) / 2 : frame->height());
       PlaneType plane_type = static_cast<PlaneType>(plane_num);
       uint8_t* data = frame->buffer(plane_type);
       // Setting allocated area to zero - setting only image size to
@@ -272,24 +263,15 @@
     settings->height = kDefaultHeight;
     settings->numberOfSimulcastStreams = kNumberOfSimulcastStreams;
     ASSERT_EQ(3, kNumberOfSimulcastStreams);
-    ConfigureStream(kDefaultWidth / 4, kDefaultHeight / 4,
-                    kMaxBitrates[0],
-                    kMinBitrates[0],
-                    kTargetBitrates[0],
-                    &settings->simulcastStream[0],
-                    temporal_layer_profile[0]);
-    ConfigureStream(kDefaultWidth / 2, kDefaultHeight / 2,
-                    kMaxBitrates[1],
-                    kMinBitrates[1],
-                    kTargetBitrates[1],
-                    &settings->simulcastStream[1],
-                    temporal_layer_profile[1]);
-    ConfigureStream(kDefaultWidth, kDefaultHeight,
-                    kMaxBitrates[2],
-                    kMinBitrates[2],
-                    kTargetBitrates[2],
-                    &settings->simulcastStream[2],
-                    temporal_layer_profile[2]);
+    ConfigureStream(kDefaultWidth / 4, kDefaultHeight / 4, kMaxBitrates[0],
+                    kMinBitrates[0], kTargetBitrates[0],
+                    &settings->simulcastStream[0], temporal_layer_profile[0]);
+    ConfigureStream(kDefaultWidth / 2, kDefaultHeight / 2, kMaxBitrates[1],
+                    kMinBitrates[1], kTargetBitrates[1],
+                    &settings->simulcastStream[1], temporal_layer_profile[1]);
+    ConfigureStream(kDefaultWidth, kDefaultHeight, kMaxBitrates[2],
+                    kMinBitrates[2], kTargetBitrates[2],
+                    &settings->simulcastStream[2], temporal_layer_profile[2]);
     settings->codecSpecific.VP8.resilience = kResilientStream;
     settings->codecSpecific.VP8.denoisingOn = true;
     settings->codecSpecific.VP8.errorConcealmentOn = false;
@@ -317,9 +299,7 @@
   }
 
  protected:
-  virtual void SetUp() {
-    SetUpCodec(kDefaultTemporalLayerProfile);
-  }
+  virtual void SetUp() { SetUpCodec(kDefaultTemporalLayerProfile); }
 
   virtual void SetUpCodec(const int* temporal_layer_profile) {
     encoder_->RegisterEncodeCompleteCallback(&encoder_callback_);
@@ -328,14 +308,14 @@
     EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
     EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1));
     int half_width = (kDefaultWidth + 1) / 2;
-    input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight,
-                                  kDefaultWidth, half_width, half_width);
+    input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth,
+                                  half_width, half_width);
     memset(input_frame_.buffer(kYPlane), 0,
-        input_frame_.allocated_size(kYPlane));
+           input_frame_.allocated_size(kYPlane));
     memset(input_frame_.buffer(kUPlane), 0,
-        input_frame_.allocated_size(kUPlane));
+           input_frame_.allocated_size(kUPlane));
     memset(input_frame_.buffer(kVPlane), 0,
-        input_frame_.allocated_size(kVPlane));
+           input_frame_.allocated_size(kVPlane));
   }
 
   virtual void TearDown() {
@@ -347,28 +327,34 @@
     ASSERT_GE(expected_video_streams, 0);
     ASSERT_LE(expected_video_streams, kNumberOfSimulcastStreams);
     if (expected_video_streams >= 1) {
-      EXPECT_CALL(encoder_callback_, Encoded(
-          AllOf(Field(&EncodedImage::_frameType, frame_type),
-                Field(&EncodedImage::_encodedWidth, kDefaultWidth / 4),
-                Field(&EncodedImage::_encodedHeight, kDefaultHeight / 4)), _, _)
-                  )
+      EXPECT_CALL(
+          encoder_callback_,
+          Encoded(
+              AllOf(Field(&EncodedImage::_frameType, frame_type),
+                    Field(&EncodedImage::_encodedWidth, kDefaultWidth / 4),
+                    Field(&EncodedImage::_encodedHeight, kDefaultHeight / 4)),
+              _, _))
           .Times(1)
           .WillRepeatedly(Return(0));
     }
     if (expected_video_streams >= 2) {
-      EXPECT_CALL(encoder_callback_, Encoded(
-          AllOf(Field(&EncodedImage::_frameType, frame_type),
-                Field(&EncodedImage::_encodedWidth, kDefaultWidth / 2),
-                Field(&EncodedImage::_encodedHeight, kDefaultHeight / 2)), _, _)
-                  )
+      EXPECT_CALL(
+          encoder_callback_,
+          Encoded(
+              AllOf(Field(&EncodedImage::_frameType, frame_type),
+                    Field(&EncodedImage::_encodedWidth, kDefaultWidth / 2),
+                    Field(&EncodedImage::_encodedHeight, kDefaultHeight / 2)),
+              _, _))
           .Times(1)
           .WillRepeatedly(Return(0));
     }
     if (expected_video_streams >= 3) {
-      EXPECT_CALL(encoder_callback_, Encoded(
-          AllOf(Field(&EncodedImage::_frameType, frame_type),
-                Field(&EncodedImage::_encodedWidth, kDefaultWidth),
-                Field(&EncodedImage::_encodedHeight, kDefaultHeight)), _, _))
+      EXPECT_CALL(
+          encoder_callback_,
+          Encoded(AllOf(Field(&EncodedImage::_frameType, frame_type),
+                        Field(&EncodedImage::_encodedWidth, kDefaultWidth),
+                        Field(&EncodedImage::_encodedHeight, kDefaultHeight)),
+                  _, _))
           .Times(1)
           .WillRepeatedly(Return(0));
     }
@@ -482,8 +468,8 @@
   void TestPaddingOneStreamTwoMaxedOut() {
     // We are just below limit of sending third stream, so we should get
     // first stream's rate maxed out at |targetBitrate|, second at |maxBitrate|.
-    encoder_->SetRates(kTargetBitrates[0] + kTargetBitrates[1] +
-                       kMinBitrates[2] - 1, 30);
+    encoder_->SetRates(
+        kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] - 1, 30);
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 2);
@@ -496,8 +482,8 @@
 
   void TestSendAllStreams() {
     // We have just enough to send all streams.
-    encoder_->SetRates(kTargetBitrates[0] + kTargetBitrates[1] +
-                       kMinBitrates[2], 30);
+    encoder_->SetRates(
+        kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2], 30);
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 3);
@@ -510,8 +496,7 @@
 
   void TestDisablingStreams() {
     // We should get three media streams.
-    encoder_->SetRates(kMaxBitrates[0] + kMaxBitrates[1] +
-                       kMaxBitrates[2], 30);
+    encoder_->SetRates(kMaxBitrates[0] + kMaxBitrates[1] + kMaxBitrates[2], 30);
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 3);
@@ -522,8 +507,8 @@
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     // We should only get two streams and padding for one.
-    encoder_->SetRates(kTargetBitrates[0] + kTargetBitrates[1] +
-                       kMinBitrates[2] / 2, 30);
+    encoder_->SetRates(
+        kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
     ExpectStreams(kVideoFrameDelta, 2);
     input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
@@ -542,16 +527,16 @@
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     // We should only get two streams and padding for one.
-    encoder_->SetRates(kTargetBitrates[0] + kTargetBitrates[1] +
-                       kMinBitrates[2] / 2, 30);
+    encoder_->SetRates(
+        kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
     // We get a key frame because a new stream is being enabled.
     ExpectStreams(kVideoFrameKey, 2);
     input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     // We should get all three streams.
-    encoder_->SetRates(kTargetBitrates[0] + kTargetBitrates[1] +
-                       kTargetBitrates[2], 30);
+    encoder_->SetRates(
+        kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30);
     // We get a key frame because a new stream is being enabled.
     ExpectStreams(kVideoFrameKey, 3);
     input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
@@ -576,20 +561,20 @@
     input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
                                   settings_.width, half_width, half_width);
     memset(input_frame_.buffer(kYPlane), 0,
-        input_frame_.allocated_size(kYPlane));
+           input_frame_.allocated_size(kYPlane));
     memset(input_frame_.buffer(kUPlane), 0,
-        input_frame_.allocated_size(kUPlane));
+           input_frame_.allocated_size(kUPlane));
     memset(input_frame_.buffer(kVPlane), 0,
-        input_frame_.allocated_size(kVPlane));
+           input_frame_.allocated_size(kVPlane));
 
     // The for loop above did not set the bitrate of the highest layer.
-    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].
-        maxBitrate = 0;
+    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1]
+        .maxBitrate = 0;
     // The highest layer has to correspond to the non-simulcast resolution.
-    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].
-        width = settings_.width;
-    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].
-        height = settings_.height;
+    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].width =
+        settings_.width;
+    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].height =
+        settings_.height;
     EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
 
     // Encode one frame and verify.
@@ -617,21 +602,17 @@
     input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
                                   settings_.width, half_width, half_width);
     memset(input_frame_.buffer(kYPlane), 0,
-        input_frame_.allocated_size(kYPlane));
+           input_frame_.allocated_size(kYPlane));
     memset(input_frame_.buffer(kUPlane), 0,
-        input_frame_.allocated_size(kUPlane));
+           input_frame_.allocated_size(kUPlane));
     memset(input_frame_.buffer(kVPlane), 0,
-        input_frame_.allocated_size(kVPlane));
+           input_frame_.allocated_size(kVPlane));
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
   }
 
-  void TestSwitchingToOneStream() {
-    SwitchingToOneStream(1024, 768);
-  }
+  void TestSwitchingToOneStream() { SwitchingToOneStream(1024, 768); }
 
-  void TestSwitchingToOneOddStream() {
-    SwitchingToOneStream(1023, 769);
-  }
+  void TestSwitchingToOneOddStream() { SwitchingToOneStream(1023, 769); }
 
   void TestRPSIEncoder() {
     Vp8TestEncodedImageCallback encoder_callback;
@@ -782,67 +763,55 @@
     encoder_->RegisterEncodeCompleteCallback(&encoder_callback);
     encoder_->SetRates(kMaxBitrates[2], 30);  // To get all three streams.
 
-    int expected_temporal_idx[3] = { -1, -1, -1};
+    int expected_temporal_idx[3] = {-1, -1, -1};
     bool expected_layer_sync[3] = {false, false, false};
 
     // First frame: #0.
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
     SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
-    VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
-                                                expected_temporal_idx,
-                                                expected_layer_sync,
-                                                3);
+    VerifyTemporalIdxAndSyncForAllSpatialLayers(
+        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #1.
     input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
     SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
-    VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
-                                                expected_temporal_idx,
-                                                expected_layer_sync,
-                                                3);
+    VerifyTemporalIdxAndSyncForAllSpatialLayers(
+        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #2.
     input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(1, 1, 1, expected_temporal_idx);
     SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
-    VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
-                                                expected_temporal_idx,
-                                                expected_layer_sync,
-                                                3);
+    VerifyTemporalIdxAndSyncForAllSpatialLayers(
+        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #3.
     input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
-    VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
-                                                expected_temporal_idx,
-                                                expected_layer_sync,
-                                                3);
+    VerifyTemporalIdxAndSyncForAllSpatialLayers(
+        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #4.
     input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
-    VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
-                                                expected_temporal_idx,
-                                                expected_layer_sync,
-                                                3);
+    VerifyTemporalIdxAndSyncForAllSpatialLayers(
+        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #5.
     input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
-    VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
-                                                expected_temporal_idx,
-                                                expected_layer_sync,
-                                                3);
+    VerifyTemporalIdxAndSyncForAllSpatialLayers(
+        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
   }
 
   // Test the layer pattern and sync flag for various spatial-temporal patterns.
@@ -863,67 +832,55 @@
     encoder_->RegisterEncodeCompleteCallback(&encoder_callback);
     encoder_->SetRates(kMaxBitrates[2], 30);  // To get all three streams.
 
-    int expected_temporal_idx[3] = { -1, -1, -1};
+    int expected_temporal_idx[3] = {-1, -1, -1};
     bool expected_layer_sync[3] = {false, false, false};
 
     // First frame: #0.
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
-    VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
-                                                expected_temporal_idx,
-                                                expected_layer_sync,
-                                                3);
+    VerifyTemporalIdxAndSyncForAllSpatialLayers(
+        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #1.
     input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
-    VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
-                                                expected_temporal_idx,
-                                                expected_layer_sync,
-                                                3);
+    VerifyTemporalIdxAndSyncForAllSpatialLayers(
+        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #2.
     input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(1, 0, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(true, false, false, expected_layer_sync);
-    VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
-                                                expected_temporal_idx,
-                                                expected_layer_sync,
-                                                3);
+    VerifyTemporalIdxAndSyncForAllSpatialLayers(
+        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #3.
     input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
-    VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
-                                                expected_temporal_idx,
-                                                expected_layer_sync,
-                                                3);
+    VerifyTemporalIdxAndSyncForAllSpatialLayers(
+        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #4.
     input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
-    VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
-                                                expected_temporal_idx,
-                                                expected_layer_sync,
-                                                3);
+    VerifyTemporalIdxAndSyncForAllSpatialLayers(
+        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #5.
     input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
     EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
-    VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
-                                                expected_temporal_idx,
-                                                expected_layer_sync,
-                                                3);
+    VerifyTemporalIdxAndSyncForAllSpatialLayers(
+        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
   }
 
   void TestStrideEncodeDecode() {
@@ -937,8 +894,8 @@
     // 1. stride > width 2. stride_y != stride_uv/2
     int stride_y = kDefaultWidth + 20;
     int stride_uv = ((kDefaultWidth + 1) / 2) + 5;
-    input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight,
-                                 stride_y, stride_uv, stride_uv);
+    input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, stride_y,
+                                  stride_uv, stride_uv);
     // Set color.
     int plane_offset[kNumOfPlanes];
     plane_offset[kYPlane] = kColorY;
@@ -968,10 +925,9 @@
   void TestSkipEncodingUnusedStreams() {
     SkipEncodingUnusedStreamsTest test;
     std::vector<unsigned int> configured_bitrate =
-        test.RunTest(encoder_.get(),
-                     &settings_,
-                     1);    // Target bit rate 1, to force all streams but the
-                            // base one to be exceeding bandwidth constraints.
+        test.RunTest(encoder_.get(), &settings_,
+                     1);  // Target bit rate 1, to force all streams but the
+                          // base one to be exceeding bandwidth constraints.
     EXPECT_EQ(static_cast<size_t>(kNumberOfSimulcastStreams),
               configured_bitrate.size());
 
@@ -980,8 +936,7 @@
     int stream = 0;
     for (std::vector<unsigned int>::const_iterator it =
              configured_bitrate.begin();
-         it != configured_bitrate.end();
-         ++it) {
+         it != configured_bitrate.end(); ++it) {
       if (stream == 0) {
         EXPECT_EQ(min_bitrate, *it);
       } else {
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_factory.h b/webrtc/modules/video_coding/codecs/vp8/vp8_factory.h
index 84745ea..52f8aa3 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_factory.h
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_factory.h
@@ -32,4 +32,3 @@
 }  // namespace webrtc
 
 #endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_VP8_FACTORY_H_
-
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index a608c10..5a04f6a 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -16,7 +16,7 @@
 #include <algorithm>
 
 // NOTE(ajm): Path provided by gyp.
-#include "libyuv/scale.h"  // NOLINT
+#include "libyuv/scale.h"    // NOLINT
 #include "libyuv/convert.h"  // NOLINT
 
 #include "webrtc/base/checks.h"
@@ -68,10 +68,9 @@
   std::vector<int> bitrates_kbps(codec.numberOfSimulcastStreams);
   // Allocate min -> target bitrates as long as we have bitrate to spend.
   size_t last_active_stream = 0;
-  for (size_t i = 0;
-       i < static_cast<size_t>(codec.numberOfSimulcastStreams) &&
-           bitrate_to_allocate_kbps >=
-               static_cast<int>(codec.simulcastStream[i].minBitrate);
+  for (size_t i = 0; i < static_cast<size_t>(codec.numberOfSimulcastStreams) &&
+                     bitrate_to_allocate_kbps >=
+                         static_cast<int>(codec.simulcastStream[i].minBitrate);
        ++i) {
     last_active_stream = i;
     int allocated_bitrate_kbps =
@@ -132,7 +131,7 @@
   return true;
 }
 
-int NumStreamsDisabled(std::vector<bool>& streams) {
+int NumStreamsDisabled(const std::vector<bool>& streams) {
   int num_disabled = 0;
   for (bool stream : streams) {
     if (!stream)
@@ -183,7 +182,7 @@
 
   while (!encoded_images_.empty()) {
     EncodedImage& image = encoded_images_.back();
-    delete [] image._buffer;
+    delete[] image._buffer;
     encoded_images_.pop_back();
   }
   while (!encoders_.empty()) {
@@ -289,10 +288,8 @@
       target_bitrate = tl0_bitrate;
     }
     configurations_[i].rc_target_bitrate = target_bitrate;
-    temporal_layers_[stream_idx]->ConfigureBitrates(target_bitrate,
-                                                    max_bitrate,
-                                                    framerate,
-                                                    &configurations_[i]);
+    temporal_layers_[stream_idx]->ConfigureBitrates(
+        target_bitrate, max_bitrate, framerate, &configurations_[i]);
     if (vpx_codec_enc_config_set(&encoders_[i], &configurations_[i])) {
       return WEBRTC_VIDEO_CODEC_ERROR;
     }
@@ -315,8 +312,8 @@
 }
 
 void VP8EncoderImpl::SetupTemporalLayers(int num_streams,
-                                                 int num_temporal_layers,
-                                                 const VideoCodec& codec) {
+                                         int num_temporal_layers,
+                                         const VideoCodec& codec) {
   const Config default_options;
   const TemporalLayers::Factory& tl_factory =
       (codec.extra_options ? codec.extra_options : &default_options)
@@ -334,15 +331,16 @@
     for (int i = 0; i < num_streams; ++i) {
       // TODO(andresp): crash if layers is invalid.
       int layers = codec.simulcastStream[i].numberOfTemporalLayers;
-      if (layers < 1) layers = 1;
+      if (layers < 1)
+        layers = 1;
       temporal_layers_.push_back(tl_factory.Create(layers, rand()));
     }
   }
 }
 
 int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
-                                       int number_of_cores,
-                                       size_t /*maxPayloadSize */) {
+                               int number_of_cores,
+                               size_t /*maxPayloadSize */) {
   if (inst == NULL) {
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
@@ -379,12 +377,13 @@
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
 
-  int num_temporal_layers = doing_simulcast ?
-      inst->simulcastStream[0].numberOfTemporalLayers :
-      inst->codecSpecific.VP8.numberOfTemporalLayers;
+  int num_temporal_layers =
+      doing_simulcast ? inst->simulcastStream[0].numberOfTemporalLayers
+                      : inst->codecSpecific.VP8.numberOfTemporalLayers;
 
   // TODO(andresp): crash if num temporal layers is bananas.
-  if (num_temporal_layers < 1) num_temporal_layers = 1;
+  if (num_temporal_layers < 1)
+    num_temporal_layers = 1;
   SetupTemporalLayers(number_of_streams, num_temporal_layers, *inst);
 
   feedback_mode_ = inst->codecSpecific.VP8.feedbackModeOn;
@@ -414,7 +413,7 @@
   int idx = number_of_streams - 1;
   for (int i = 0; i < (number_of_streams - 1); ++i, --idx) {
     int gcd = GCD(inst->simulcastStream[idx].width,
-                  inst->simulcastStream[idx-1].width);
+                  inst->simulcastStream[idx - 1].width);
     downsampling_factors_[i].num = inst->simulcastStream[idx].width / gcd;
     downsampling_factors_[i].den = inst->simulcastStream[idx - 1].width / gcd;
     send_stream_[i] = false;
@@ -426,20 +425,20 @@
   }
   for (int i = 0; i < number_of_streams; ++i) {
     // Random start, 16 bits is enough.
-    picture_id_[i] = static_cast<uint16_t>(rand()) & 0x7FFF;
+    picture_id_[i] = static_cast<uint16_t>(rand()) & 0x7FFF;  // NOLINT
     last_key_frame_picture_id_[i] = -1;
     // allocate memory for encoded image
     if (encoded_images_[i]._buffer != NULL) {
-      delete [] encoded_images_[i]._buffer;
+      delete[] encoded_images_[i]._buffer;
     }
-    encoded_images_[i]._size = CalcBufferSize(kI420,
-                                              codec_.width, codec_.height);
+    encoded_images_[i]._size =
+        CalcBufferSize(kI420, codec_.width, codec_.height);
     encoded_images_[i]._buffer = new uint8_t[encoded_images_[i]._size];
     encoded_images_[i]._completeFrame = true;
   }
   // populate encoder configuration with default values
-  if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(),
-                                   &configurations_[0], 0)) {
+  if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &configurations_[0],
+                                   0)) {
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
   // setting the time base of the codec
@@ -463,8 +462,8 @@
       break;
     case kResilientFrames:
 #ifdef INDEPENDENT_PARTITIONS
-      configurations_[0]-g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT |
-      VPX_ERROR_RESILIENT_PARTITIONS;
+      configurations_[0].g_error_resilient =
+          VPX_ERROR_RESILIENT_DEFAULT | VPX_ERROR_RESILIENT_PARTITIONS;
       break;
 #else
       return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;  // Not supported
@@ -540,20 +539,18 @@
 
   // Determine number of threads based on the image size and #cores.
   // TODO(fbarchard): Consider number of Simulcast layers.
-  configurations_[0].g_threads = NumberOfThreads(configurations_[0].g_w,
-                                                 configurations_[0].g_h,
-                                                 number_of_cores);
+  configurations_[0].g_threads = NumberOfThreads(
+      configurations_[0].g_w, configurations_[0].g_h, number_of_cores);
 
   // Creating a wrapper to the image - setting image data to NULL.
   // Actual pointer will be set in encode. Setting align to 1, as it
   // is meaningless (no memory allocation is done here).
-  vpx_img_wrap(&raw_images_[0], VPX_IMG_FMT_I420, inst->width, inst->height,
-               1, NULL);
+  vpx_img_wrap(&raw_images_[0], VPX_IMG_FMT_I420, inst->width, inst->height, 1,
+               NULL);
 
   if (encoders_.size() == 1) {
     configurations_[0].rc_target_bitrate = inst->startBitrate;
-    temporal_layers_[0]->ConfigureBitrates(inst->startBitrate,
-                                           inst->maxBitrate,
+    temporal_layers_[0]->ConfigureBitrates(inst->startBitrate, inst->maxBitrate,
                                            inst->maxFramerate,
                                            &configurations_[0]);
   } else {
@@ -645,20 +642,15 @@
   flags |= VPX_CODEC_USE_OUTPUT_PARTITION;
 
   if (encoders_.size() > 1) {
-    int error = vpx_codec_enc_init_multi(&encoders_[0],
-                                 vpx_codec_vp8_cx(),
-                                 &configurations_[0],
-                                 encoders_.size(),
-                                 flags,
-                                 &downsampling_factors_[0]);
+    int error = vpx_codec_enc_init_multi(&encoders_[0], vpx_codec_vp8_cx(),
+                                         &configurations_[0], encoders_.size(),
+                                         flags, &downsampling_factors_[0]);
     if (error) {
       return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
     }
   } else {
-    if (vpx_codec_enc_init(&encoders_[0],
-                           vpx_codec_vp8_cx(),
-                           &configurations_[0],
-                           flags)) {
+    if (vpx_codec_enc_init(&encoders_[0], vpx_codec_vp8_cx(),
+                           &configurations_[0], flags)) {
       return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
     }
   }
@@ -675,13 +667,13 @@
 #else
   denoiser_state = kDenoiserOnAdaptive;
 #endif
-  vpx_codec_control(&encoders_[0], VP8E_SET_NOISE_SENSITIVITY,
-                    codec_.codecSpecific.VP8.denoisingOn ?
-                    denoiser_state : kDenoiserOff);
+  vpx_codec_control(
+      &encoders_[0], VP8E_SET_NOISE_SENSITIVITY,
+      codec_.codecSpecific.VP8.denoisingOn ? denoiser_state : kDenoiserOff);
   if (encoders_.size() > 2) {
-    vpx_codec_control(&encoders_[1], VP8E_SET_NOISE_SENSITIVITY,
-                      codec_.codecSpecific.VP8.denoisingOn ?
-                      denoiser_state : kDenoiserOff);
+    vpx_codec_control(
+        &encoders_[1], VP8E_SET_NOISE_SENSITIVITY,
+        codec_.codecSpecific.VP8.denoisingOn ? denoiser_state : kDenoiserOff);
   }
   for (size_t i = 0; i < encoders_.size(); ++i) {
     // Allow more screen content to be detected as static.
@@ -714,7 +706,7 @@
 
   // Don't go below 3 times the per frame bandwidth.
   const uint32_t minIntraTh = 300;
-  return (targetPct < minIntraTh) ? minIntraTh: targetPct;
+  return (targetPct < minIntraTh) ? minIntraTh : targetPct;
 }
 
 int VP8EncoderImpl::Encode(const VideoFrame& frame,
@@ -733,7 +725,7 @@
       quality_scaler_enabled_ ? quality_scaler_.GetScaledFrame(frame) : frame;
 
   if (quality_scaler_enabled_ && (input_image.width() != codec_.width ||
-      input_image.height() != codec_.height)) {
+                                  input_image.height() != codec_.height)) {
     int ret = UpdateCodecFrameSize(input_image);
     if (ret < 0)
       return ret;
@@ -749,11 +741,11 @@
   // Image in vpx_image_t format.
   // Input image is const. VP8's raw image is not defined as const.
   raw_images_[0].planes[VPX_PLANE_Y] =
-     const_cast<uint8_t*>(input_image.buffer(kYPlane));
+      const_cast<uint8_t*>(input_image.buffer(kYPlane));
   raw_images_[0].planes[VPX_PLANE_U] =
-     const_cast<uint8_t*>(input_image.buffer(kUPlane));
+      const_cast<uint8_t*>(input_image.buffer(kUPlane));
   raw_images_[0].planes[VPX_PLANE_V] =
-     const_cast<uint8_t*>(input_image.buffer(kVPlane));
+      const_cast<uint8_t*>(input_image.buffer(kVPlane));
 
   raw_images_[0].stride[VPX_PLANE_Y] = input_image.stride(kYPlane);
   raw_images_[0].stride[VPX_PLANE_U] = input_image.stride(kUPlane);
@@ -762,17 +754,17 @@
   for (size_t i = 1; i < encoders_.size(); ++i) {
     // Scale the image down a number of times by downsampling factor
     libyuv::I420Scale(
-        raw_images_[i-1].planes[VPX_PLANE_Y],
-        raw_images_[i-1].stride[VPX_PLANE_Y],
-        raw_images_[i-1].planes[VPX_PLANE_U],
-        raw_images_[i-1].stride[VPX_PLANE_U],
-        raw_images_[i-1].planes[VPX_PLANE_V],
-        raw_images_[i-1].stride[VPX_PLANE_V],
-        raw_images_[i-1].d_w, raw_images_[i-1].d_h,
-        raw_images_[i].planes[VPX_PLANE_Y], raw_images_[i].stride[VPX_PLANE_Y],
-        raw_images_[i].planes[VPX_PLANE_U], raw_images_[i].stride[VPX_PLANE_U],
-        raw_images_[i].planes[VPX_PLANE_V], raw_images_[i].stride[VPX_PLANE_V],
-        raw_images_[i].d_w, raw_images_[i].d_h, libyuv::kFilterBilinear);
+        raw_images_[i - 1].planes[VPX_PLANE_Y],
+        raw_images_[i - 1].stride[VPX_PLANE_Y],
+        raw_images_[i - 1].planes[VPX_PLANE_U],
+        raw_images_[i - 1].stride[VPX_PLANE_U],
+        raw_images_[i - 1].planes[VPX_PLANE_V],
+        raw_images_[i - 1].stride[VPX_PLANE_V], raw_images_[i - 1].d_w,
+        raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y],
+        raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U],
+        raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].planes[VPX_PLANE_V],
+        raw_images_[i].stride[VPX_PLANE_V], raw_images_[i].d_w,
+        raw_images_[i].d_h, libyuv::kFilterBilinear);
   }
   vpx_enc_frame_flags_t flags[kMaxSimulcastStreams];
   for (size_t i = 0; i < encoders_.size(); ++i) {
@@ -807,8 +799,8 @@
   if (send_key_frame) {
     // Adapt the size of the key frame when in screenshare with 1 temporal
     // layer.
-    if (encoders_.size() == 1 && codec_.mode == kScreensharing
-        && codec_.codecSpecific.VP8.numberOfTemporalLayers <= 1) {
+    if (encoders_.size() == 1 && codec_.mode == kScreensharing &&
+        codec_.codecSpecific.VP8.numberOfTemporalLayers <= 1) {
       const uint32_t forceKeyFrameIntraTh = 100;
       vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
                         forceKeyFrameIntraTh);
@@ -820,13 +812,12 @@
     }
     std::fill(key_frame_request_.begin(), key_frame_request_.end(), false);
   } else if (codec_specific_info &&
-      codec_specific_info->codecType == kVideoCodecVP8) {
+             codec_specific_info->codecType == kVideoCodecVP8) {
     if (feedback_mode_) {
       // Handle RPSI and SLI messages and set up the appropriate encode flags.
       bool sendRefresh = false;
       if (codec_specific_info->codecSpecific.VP8.hasReceivedRPSI) {
-        rps_.ReceivedRPSI(
-            codec_specific_info->codecSpecific.VP8.pictureIdRPSI);
+        rps_.ReceivedRPSI(codec_specific_info->codecSpecific.VP8.pictureIdRPSI);
       }
       if (codec_specific_info->codecSpecific.VP8.hasReceivedSLI) {
         sendRefresh = rps_.ReceivedSLI(input_image.timestamp());
@@ -878,8 +869,7 @@
     }
 
     vpx_codec_control(&encoders_[i], VP8E_SET_FRAME_FLAGS, flags[stream_idx]);
-    vpx_codec_control(&encoders_[i],
-                      VP8E_SET_TEMPORAL_LAYER_ID,
+    vpx_codec_control(&encoders_[i], VP8E_SET_TEMPORAL_LAYER_ID,
                       temporal_layers_[stream_idx]->CurrentLayerId());
   }
   // TODO(holmer): Ideally the duration should be the timestamp diff of this
@@ -897,7 +887,7 @@
   // Reset specific intra frame thresholds, following the key frame.
   if (send_key_frame) {
     vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
-        rc_max_intra_target_);
+                      rc_max_intra_target_);
   }
   if (error)
     return WEBRTC_VIDEO_CODEC_ERROR;
@@ -915,8 +905,7 @@
     codec_.simulcastStream[0].height = input_image.height();
   }
   // Update the cpu_speed setting for resolution change.
-  vpx_codec_control(&(encoders_[0]),
-                    VP8E_SET_CPUUSED,
+  vpx_codec_control(&(encoders_[0]), VP8E_SET_CPUUSED,
                     SetCpuSpeed(codec_.width, codec_.height));
   raw_images_[0].w = codec_.width;
   raw_images_[0].h = codec_.height;
@@ -949,13 +938,12 @@
   }
   vp8Info->simulcastIdx = stream_idx;
   vp8Info->keyIdx = kNoKeyIdx;  // TODO(hlundin) populate this
-  vp8Info->nonReference = (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) ?
-      true : false;
+  vp8Info->nonReference =
+      (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) ? true : false;
   bool base_layer_sync_point = (pkt.data.frame.flags & VPX_FRAME_IS_KEY) ||
-                                only_predicting_from_key_frame;
+                               only_predicting_from_key_frame;
   temporal_layers_[stream_idx]->PopulateCodecSpecific(base_layer_sync_point,
-                                                      vp8Info,
-                                                      timestamp);
+                                                      vp8Info, timestamp);
   // Prepare next.
   picture_id_[stream_idx] = (picture_id_[stream_idx] + 1) & 0x7FFF;
 }
@@ -968,27 +956,26 @@
   int stream_idx = static_cast<int>(encoders_.size()) - 1;
   int result = WEBRTC_VIDEO_CODEC_OK;
   for (size_t encoder_idx = 0; encoder_idx < encoders_.size();
-      ++encoder_idx, --stream_idx) {
+       ++encoder_idx, --stream_idx) {
     vpx_codec_iter_t iter = NULL;
     int part_idx = 0;
     encoded_images_[encoder_idx]._length = 0;
     encoded_images_[encoder_idx]._frameType = kVideoFrameDelta;
     RTPFragmentationHeader frag_info;
     // token_partitions_ is number of bits used.
-    frag_info.VerifyAndAllocateFragmentationHeader((1 << token_partitions_)
-                                                   + 1);
+    frag_info.VerifyAndAllocateFragmentationHeader((1 << token_partitions_) +
+                                                   1);
     CodecSpecificInfo codec_specific;
-    const vpx_codec_cx_pkt_t *pkt = NULL;
-    while ((pkt = vpx_codec_get_cx_data(&encoders_[encoder_idx],
-                                        &iter)) != NULL) {
+    const vpx_codec_cx_pkt_t* pkt = NULL;
+    while ((pkt = vpx_codec_get_cx_data(&encoders_[encoder_idx], &iter)) !=
+           NULL) {
       switch (pkt->kind) {
         case VPX_CODEC_CX_FRAME_PKT: {
           uint32_t length = encoded_images_[encoder_idx]._length;
           memcpy(&encoded_images_[encoder_idx]._buffer[length],
-                 pkt->data.frame.buf,
-                 pkt->data.frame.sz);
+                 pkt->data.frame.buf, pkt->data.frame.sz);
           frag_info.fragmentationOffset[part_idx] = length;
-          frag_info.fragmentationLength[part_idx] =  pkt->data.frame.sz;
+          frag_info.fragmentationLength[part_idx] = pkt->data.frame.sz;
           frag_info.fragmentationPlType[part_idx] = 0;  // not known here
           frag_info.fragmentationTimeDiff[part_idx] = 0;
           encoded_images_[encoder_idx]._length += pkt->data.frame.sz;
@@ -1065,7 +1052,6 @@
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-
 VP8DecoderImpl::VP8DecoderImpl()
     : decode_complete_callback_(NULL),
       inited_(false),
@@ -1077,8 +1063,7 @@
       propagation_cnt_(-1),
       last_frame_width_(0),
       last_frame_height_(0),
-      key_frame_required_(true) {
-}
+      key_frame_required_(true) {}
 
 VP8DecoderImpl::~VP8DecoderImpl() {
   inited_ = true;  // in order to do the actual release
@@ -1094,8 +1079,7 @@
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-int VP8DecoderImpl::InitDecode(const VideoCodec* inst,
-                                       int number_of_cores) {
+int VP8DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) {
   int ret_val = Release();
   if (ret_val < 0) {
     return ret_val;
@@ -1106,12 +1090,12 @@
   if (inst && inst->codecType == kVideoCodecVP8) {
     feedback_mode_ = inst->codecSpecific.VP8.feedbackModeOn;
   }
-  vpx_codec_dec_cfg_t  cfg;
+  vpx_codec_dec_cfg_t cfg;
   // Setting number of threads to a constant value (1)
   cfg.threads = 1;
   cfg.h = cfg.w = 0;  // set after decode
 
-vpx_codec_flags_t flags = 0;
+  vpx_codec_flags_t flags = 0;
 #if !defined(WEBRTC_ARCH_ARM) && !defined(WEBRTC_ARCH_ARM64)
   flags = VPX_CODEC_USE_POSTPROC;
 #ifdef INDEPENDENT_PARTITIONS
@@ -1136,10 +1120,10 @@
 }
 
 int VP8DecoderImpl::Decode(const EncodedImage& input_image,
-                                   bool missing_frames,
-                                   const RTPFragmentationHeader* fragmentation,
-                                   const CodecSpecificInfo* codec_specific_info,
-                                   int64_t /*render_time_ms*/) {
+                           bool missing_frames,
+                           const RTPFragmentationHeader* fragmentation,
+                           const CodecSpecificInfo* codec_specific_info,
+                           int64_t /*render_time_ms*/) {
   if (!inited_) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
@@ -1190,9 +1174,9 @@
     if (input_image._frameType == kVideoFrameKey &&
         input_image._completeFrame) {
       propagation_cnt_ = -1;
-    // Start count on first loss.
+      // Start count on first loss.
     } else if ((!input_image._completeFrame || missing_frames) &&
-        propagation_cnt_ == -1) {
+               propagation_cnt_ == -1) {
       propagation_cnt_ = 0;
     }
     if (propagation_cnt_ >= 0) {
@@ -1244,15 +1228,15 @@
   if (input_image._frameType == kVideoFrameKey && input_image._buffer != NULL) {
     const uint32_t bytes_to_copy = input_image._length;
     if (last_keyframe_._size < bytes_to_copy) {
-      delete [] last_keyframe_._buffer;
+      delete[] last_keyframe_._buffer;
       last_keyframe_._buffer = NULL;
       last_keyframe_._size = 0;
     }
     uint8_t* temp_buffer = last_keyframe_._buffer;  // Save buffer ptr.
-    uint32_t temp_size = last_keyframe_._size;  // Save size.
-    last_keyframe_ = input_image;  // Shallow copy.
-    last_keyframe_._buffer = temp_buffer;  // Restore buffer ptr.
-    last_keyframe_._size = temp_size;  // Restore buffer size.
+    uint32_t temp_size = last_keyframe_._size;      // Save size.
+    last_keyframe_ = input_image;                   // Shallow copy.
+    last_keyframe_._buffer = temp_buffer;           // Restore buffer ptr.
+    last_keyframe_._size = temp_size;               // Restore buffer size.
     if (!last_keyframe_._buffer) {
       // Allocate memory.
       last_keyframe_._size = bytes_to_copy;
@@ -1302,7 +1286,8 @@
     }
     if (picture_id > -1) {
       if (((reference_updates & VP8_GOLD_FRAME) ||
-          (reference_updates & VP8_ALTR_FRAME)) && !corrupted) {
+           (reference_updates & VP8_ALTR_FRAME)) &&
+          !corrupted) {
         decode_complete_callback_->ReceivedDecodedReferenceFrame(picture_id);
       }
       decode_complete_callback_->ReceivedDecodedFrame(picture_id);
@@ -1325,14 +1310,10 @@
     const EncodedImage& input_image,
     const RTPFragmentationHeader* fragmentation) {
   for (int i = 0; i < fragmentation->fragmentationVectorSize; ++i) {
-    const uint8_t* partition = input_image._buffer +
-        fragmentation->fragmentationOffset[i];
-    const uint32_t partition_length =
-        fragmentation->fragmentationLength[i];
-    if (vpx_codec_decode(decoder_,
-                         partition,
-                         partition_length,
-                         0,
+    const uint8_t* partition =
+        input_image._buffer + fragmentation->fragmentationOffset[i];
+    const uint32_t partition_length = fragmentation->fragmentationLength[i];
+    if (vpx_codec_decode(decoder_, partition, partition_length, 0,
                          VPX_DL_REALTIME)) {
       return WEBRTC_VIDEO_CODEC_ERROR;
     }
@@ -1345,8 +1326,8 @@
 }
 
 int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
-                                        uint32_t timestamp,
-                                        int64_t ntp_time_ms) {
+                                uint32_t timestamp,
+                                int64_t ntp_time_ms) {
   if (img == NULL) {
     // Decoder OK and NULL image => No show frame
     return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
@@ -1356,14 +1337,13 @@
   // Allocate memory for decoded image.
   VideoFrame decoded_image(buffer_pool_.CreateBuffer(img->d_w, img->d_h),
                            timestamp, 0, kVideoRotation_0);
-  libyuv::I420Copy(
-      img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
-      img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
-      img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
-      decoded_image.buffer(kYPlane), decoded_image.stride(kYPlane),
-      decoded_image.buffer(kUPlane), decoded_image.stride(kUPlane),
-      decoded_image.buffer(kVPlane), decoded_image.stride(kVPlane),
-      img->d_w, img->d_h);
+  libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
+                   img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
+                   img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
+                   decoded_image.buffer(kYPlane), decoded_image.stride(kYPlane),
+                   decoded_image.buffer(kUPlane), decoded_image.stride(kUPlane),
+                   decoded_image.buffer(kVPlane), decoded_image.stride(kVPlane),
+                   img->d_w, img->d_h);
   decoded_image.set_ntp_time_ms(ntp_time_ms);
   int ret = decode_complete_callback_->Decoded(decoded_image);
   if (ret != 0)
@@ -1382,7 +1362,7 @@
 
 int VP8DecoderImpl::Release() {
   if (last_keyframe_._buffer != NULL) {
-    delete [] last_keyframe_._buffer;
+    delete[] last_keyframe_._buffer;
     last_keyframe_._buffer = NULL;
   }
   if (decoder_ != NULL) {
@@ -1409,12 +1389,12 @@
 int VP8DecoderImpl::CopyReference(VP8DecoderImpl* copy) {
   // The type of frame to copy should be set in ref_frame_->frame_type
   // before the call to this function.
-  if (vpx_codec_control(decoder_, VP8_COPY_REFERENCE, ref_frame_)
-      != VPX_CODEC_OK) {
+  if (vpx_codec_control(decoder_, VP8_COPY_REFERENCE, ref_frame_) !=
+      VPX_CODEC_OK) {
     return -1;
   }
-  if (vpx_codec_control(copy->decoder_, VP8_SET_REFERENCE, ref_frame_)
-      != VPX_CODEC_OK) {
+  if (vpx_codec_control(copy->decoder_, VP8_SET_REFERENCE, ref_frame_) !=
+      VPX_CODEC_OK) {
     return -1;
   }
   return 0;
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
index e673ad4..9d5fb71 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
@@ -61,7 +61,8 @@
   const char* ImplementationName() const override;
 
  private:
-  void SetupTemporalLayers(int num_streams, int num_temporal_layers,
+  void SetupTemporalLayers(int num_streams,
+                           int num_temporal_layers,
                            const VideoCodec& codec);
 
   // Set the cpu_speed setting for encoder based on resolution and/or platform.
@@ -128,10 +129,10 @@
   int InitDecode(const VideoCodec* inst, int number_of_cores) override;
 
   int Decode(const EncodedImage& input_image,
-                     bool missing_frames,
-                     const RTPFragmentationHeader* fragmentation,
-                     const CodecSpecificInfo* codec_specific_info,
-                     int64_t /*render_time_ms*/) override;
+             bool missing_frames,
+             const RTPFragmentationHeader* fragmentation,
+             const CodecSpecificInfo* codec_specific_info,
+             int64_t /*render_time_ms*/) override;
 
   int RegisterDecodeCompleteCallback(DecodedImageCallback* callback) override;
   int Release() override;
@@ -169,4 +170,3 @@
 }  // namespace webrtc
 
 #endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_VP8_IMPL_H_
-
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
index d2d3177..9e54665 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
@@ -1,4 +1,4 @@
- /*
+/*
  *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
@@ -23,8 +23,7 @@
 class Vp8SequenceCoderEncodeCallback : public webrtc::EncodedImageCallback {
  public:
   explicit Vp8SequenceCoderEncodeCallback(FILE* encoded_file)
-      : encoded_file_(encoded_file),
-        encoded_bytes_(0) {}
+      : encoded_file_(encoded_file), encoded_bytes_(0) {}
   ~Vp8SequenceCoderEncodeCallback();
   int Encoded(const webrtc::EncodedImage& encoded_image,
               const webrtc::CodecSpecificInfo* codecSpecificInfo,
@@ -32,6 +31,7 @@
   // Returns the encoded image.
   webrtc::EncodedImage encoded_image() { return encoded_image_; }
   size_t encoded_bytes() { return encoded_bytes_; }
+
  private:
   webrtc::EncodedImage encoded_image_;
   FILE* encoded_file_;
@@ -39,7 +39,7 @@
 };
 
 Vp8SequenceCoderEncodeCallback::~Vp8SequenceCoderEncodeCallback() {
-  delete [] encoded_image_._buffer;
+  delete[] encoded_image_._buffer;
   encoded_image_._buffer = NULL;
 }
 int Vp8SequenceCoderEncodeCallback::Encoded(
@@ -47,7 +47,7 @@
     const webrtc::CodecSpecificInfo* codecSpecificInfo,
     const webrtc::RTPFragmentationHeader* fragmentation) {
   if (encoded_image_._size < encoded_image._size) {
-    delete [] encoded_image_._buffer;
+    delete[] encoded_image_._buffer;
     encoded_image_._buffer = NULL;
     encoded_image_._buffer = new uint8_t[encoded_image._size];
     encoded_image_._size = encoded_image._size;
@@ -72,7 +72,7 @@
   int32_t Decoded(webrtc::VideoFrame& frame) override;
   int32_t Decoded(webrtc::VideoFrame& frame, int64_t decode_time_ms) override {
     RTC_NOTREACHED();
-    return -1;;
+    return -1;
   }
   bool DecodeComplete();
 
@@ -85,16 +85,16 @@
   return 0;
 }
 
-int SequenceCoder(webrtc::test::CommandLineParser& parser) {
-  int width = strtol((parser.GetFlag("w")).c_str(), NULL, 10);
-  int height = strtol((parser.GetFlag("h")).c_str(), NULL, 10);
-  int framerate = strtol((parser.GetFlag("f")).c_str(), NULL, 10);
+int SequenceCoder(webrtc::test::CommandLineParser* parser) {
+  int width = strtol((parser->GetFlag("w")).c_str(), NULL, 10);
+  int height = strtol((parser->GetFlag("h")).c_str(), NULL, 10);
+  int framerate = strtol((parser->GetFlag("f")).c_str(), NULL, 10);
 
   if (width <= 0 || height <= 0 || framerate <= 0) {
     fprintf(stderr, "Error: Resolution cannot be <= 0!\n");
     return -1;
   }
-  int target_bitrate = strtol((parser.GetFlag("b")).c_str(), NULL, 10);
+  int target_bitrate = strtol((parser->GetFlag("b")).c_str(), NULL, 10);
   if (target_bitrate <= 0) {
     fprintf(stderr, "Error: Bit-rate cannot be <= 0!\n");
     return -1;
@@ -102,20 +102,20 @@
 
   // SetUp
   // Open input file.
-  std::string encoded_file_name = parser.GetFlag("encoded_file");
+  std::string encoded_file_name = parser->GetFlag("encoded_file");
   FILE* encoded_file = fopen(encoded_file_name.c_str(), "wb");
   if (encoded_file == NULL) {
     fprintf(stderr, "Error: Cannot open encoded file\n");
     return -1;
   }
-  std::string input_file_name = parser.GetFlag("input_file");
+  std::string input_file_name = parser->GetFlag("input_file");
   FILE* input_file = fopen(input_file_name.c_str(), "rb");
   if (input_file == NULL) {
     fprintf(stderr, "Error: Cannot open input file\n");
     return -1;
   }
   // Open output file.
-  std::string output_file_name = parser.GetFlag("output_file");
+  std::string output_file_name = parser->GetFlag("output_file");
   FILE* output_file = fopen(output_file_name.c_str(), "wb");
   if (output_file == NULL) {
     fprintf(stderr, "Error: Cannot open output file\n");
@@ -123,8 +123,8 @@
   }
 
   // Get range of frames: will encode num_frames following start_frame).
-  int start_frame = strtol((parser.GetFlag("start_frame")).c_str(), NULL, 10);
-  int num_frames = strtol((parser.GetFlag("num_frames")).c_str(), NULL, 10);
+  int start_frame = strtol((parser->GetFlag("start_frame")).c_str(), NULL, 10);
+  int num_frames = strtol((parser->GetFlag("num_frames")).c_str(), NULL, 10);
 
   // Codec SetUp.
   webrtc::VideoCodec inst;
@@ -162,8 +162,8 @@
   int frames_processed = 0;
   input_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
   while (!feof(input_file) &&
-      (num_frames == -1 || frames_processed < num_frames)) {
-     if (fread(frame_buffer.get(), 1, length, input_file) != length)
+         (num_frames == -1 || frames_processed < num_frames)) {
+    if (fread(frame_buffer.get(), 1, length, input_file) != length)
       continue;
     if (frame_cnt >= start_frame) {
       webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0, width,
@@ -184,33 +184,35 @@
   printf("Actual bitrate: %f kbps\n", actual_bit_rate / 1000);
   webrtc::test::QualityMetricsResult psnr_result, ssim_result;
   EXPECT_EQ(0, webrtc::test::I420MetricsFromFiles(
-      input_file_name.c_str(), output_file_name.c_str(),
-      inst.width, inst.height,
-      &psnr_result, &ssim_result));
+                   input_file_name.c_str(), output_file_name.c_str(),
+                   inst.width, inst.height, &psnr_result, &ssim_result));
   printf("PSNR avg: %f[dB], min: %f[dB]\nSSIM avg: %f, min: %f\n",
-          psnr_result.average, psnr_result.min,
-          ssim_result.average, ssim_result.min);
+         psnr_result.average, psnr_result.min, ssim_result.average,
+         ssim_result.min);
   return frame_cnt;
 }
 
 int main(int argc, char** argv) {
   std::string program_name = argv[0];
-  std::string usage = "Encode and decodes a video sequence, and writes"
-  "results to a file.\n"
-  "Example usage:\n" + program_name + " functionality"
-  " --w=352 --h=288 --input_file=input.yuv --output_file=output.yuv "
-  " Command line flags:\n"
-  "  - width(int): The width of the input file. Default: 352\n"
-  "  - height(int): The height of the input file. Default: 288\n"
-  "  - input_file(string): The YUV file to encode."
-  "      Default: foreman.yuv\n"
-  "  - encoded_file(string): The vp8 encoded file (encoder output)."
-  "      Default: vp8_encoded.vp8\n"
-  "  - output_file(string): The yuv decoded file (decoder output)."
-  "      Default: vp8_decoded.yuv\n."
-  "  - start_frame - frame number in which encoding will begin. Default: 0"
-  "  - num_frames - Number of frames to be processed. "
-  "      Default: -1 (entire sequence).";
+  std::string usage =
+      "Encode and decodes a video sequence, and writes"
+      "results to a file.\n"
+      "Example usage:\n" +
+      program_name +
+      " functionality"
+      " --w=352 --h=288 --input_file=input.yuv --output_file=output.yuv "
+      " Command line flags:\n"
+      "  - width(int): The width of the input file. Default: 352\n"
+      "  - height(int): The height of the input file. Default: 288\n"
+      "  - input_file(string): The YUV file to encode."
+      "      Default: foreman.yuv\n"
+      "  - encoded_file(string): The vp8 encoded file (encoder output)."
+      "      Default: vp8_encoded.vp8\n"
+      "  - output_file(string): The yuv decoded file (decoder output)."
+      "      Default: vp8_decoded.yuv\n."
+      "  - start_frame - frame number in which encoding will begin. Default: 0"
+      "  - num_frames - Number of frames to be processed. "
+      "      Default: -1 (entire sequence).";
 
   webrtc::test::CommandLineParser parser;
 
@@ -228,8 +230,8 @@
   parser.SetFlag("output_file", webrtc::test::OutputPath() + "vp8_decoded.yuv");
   parser.SetFlag("encoded_file",
                  webrtc::test::OutputPath() + "vp8_encoded.vp8");
-  parser.SetFlag("input_file", webrtc::test::ResourcePath("foreman_cif",
-                                                          "yuv"));
+  parser.SetFlag("input_file",
+                 webrtc::test::ResourcePath("foreman_cif", "yuv"));
   parser.SetFlag("help", "false");
 
   parser.ProcessFlags();
@@ -239,5 +241,5 @@
   }
   parser.PrintEnteredFlags();
 
-  return SequenceCoder(parser);
+  return SequenceCoder(&parser);
 }
diff --git a/webrtc/modules/video_coding/codecs/vp9/include/vp9.h b/webrtc/modules/video_coding/codecs/vp9/include/vp9.h
index 1d241ed..3bcbe46 100644
--- a/webrtc/modules/video_coding/codecs/vp9/include/vp9.h
+++ b/webrtc/modules/video_coding/codecs/vp9/include/vp9.h
@@ -23,7 +23,6 @@
   virtual ~VP9Encoder() {}
 };
 
-
 class VP9Decoder : public VideoDecoder {
  public:
   static VP9Decoder* Create();
diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
index a00af64..e554795 100644
--- a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -54,7 +54,7 @@
 
 void VP9EncoderImpl::EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt,
                                                       void* user_data) {
-  VP9EncoderImpl* enc = (VP9EncoderImpl*)(user_data);
+  VP9EncoderImpl* enc = static_cast<VP9EncoderImpl*>(user_data);
   enc->GetEncodedLayerFrame(pkt);
 }
 
@@ -88,7 +88,7 @@
 
 int VP9EncoderImpl::Release() {
   if (encoded_image_._buffer != NULL) {
-    delete [] encoded_image_._buffer;
+    delete[] encoded_image_._buffer;
     encoded_image_._buffer = NULL;
   }
   if (encoder_ != NULL) {
@@ -267,10 +267,10 @@
     num_temporal_layers_ = 1;
 
   // Random start 16 bits is enough.
-  picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
+  picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;  // NOLINT
   // Allocate memory for encoded image
   if (encoded_image_._buffer != NULL) {
-    delete [] encoded_image_._buffer;
+    delete[] encoded_image_._buffer;
   }
   encoded_image_._size = CalcBufferSize(kI420, codec_.width, codec_.height);
   encoded_image_._buffer = new uint8_t[encoded_image_._size];
@@ -278,8 +278,8 @@
   // Creating a wrapper to the image - setting image data to NULL. Actual
   // pointer will be set in encode. Setting align to 1, as it is meaningless
   // (actual memory is not allocated).
-  raw_ = vpx_img_wrap(NULL, VPX_IMG_FMT_I420, codec_.width, codec_.height,
-                      1, NULL);
+  raw_ = vpx_img_wrap(NULL, VPX_IMG_FMT_I420, codec_.width, codec_.height, 1,
+                      NULL);
   // Populate encoder configuration with default values.
   if (vpx_codec_enc_config_default(vpx_codec_vp9_cx(), config_, 0)) {
     return WEBRTC_VIDEO_CODEC_ERROR;
@@ -294,8 +294,8 @@
   config_->g_lag_in_frames = 0;  // 0- no frame lagging
   config_->g_threads = 1;
   // Rate control settings.
-  config_->rc_dropframe_thresh = inst->codecSpecific.VP9.frameDroppingOn ?
-      30 : 0;
+  config_->rc_dropframe_thresh =
+      inst->codecSpecific.VP9.frameDroppingOn ? 30 : 0;
   config_->rc_end_usage = VPX_CBR;
   config_->g_pass = VPX_RC_ONE_PASS;
   config_->rc_min_quantizer = 2;
@@ -307,7 +307,7 @@
   config_->rc_buf_sz = 1000;
   // Set the maximum target size of any key-frame.
   rc_max_intra_target_ = MaxIntraTarget(config_->rc_buf_optimal_sz);
-  if (inst->codecSpecific.VP9.keyFrameInterval  > 0) {
+  if (inst->codecSpecific.VP9.keyFrameInterval > 0) {
     config_->kf_mode = VPX_KF_AUTO;
     config_->kf_max_dist = inst->codecSpecific.VP9.keyFrameInterval;
     // Needs to be set (in svc mode) to get correct periodic key frame interval
@@ -316,12 +316,11 @@
   } else {
     config_->kf_mode = VPX_KF_DISABLED;
   }
-  config_->rc_resize_allowed = inst->codecSpecific.VP9.automaticResizeOn ?
-      1 : 0;
+  config_->rc_resize_allowed =
+      inst->codecSpecific.VP9.automaticResizeOn ? 1 : 0;
   // Determine number of threads based on the image size and #cores.
-  config_->g_threads = NumberOfThreads(config_->g_w,
-                                       config_->g_h,
-                                       number_of_cores);
+  config_->g_threads =
+      NumberOfThreads(config_->g_w, config_->g_h, number_of_cores);
 
   cpu_speed_ = GetCpuSpeed(config_->g_w, config_->g_h);
 
@@ -365,7 +364,7 @@
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
 
-  tl0_pic_idx_ = static_cast<uint8_t>(rand());
+  tl0_pic_idx_ = static_cast<uint8_t>(rand());  // NOLINT
 
   return InitAndSetControlSettings(inst);
 }
@@ -432,8 +431,10 @@
   }
   // Register callback for getting each spatial layer.
   vpx_codec_priv_output_cx_pkt_cb_pair_t cbp = {
-      VP9EncoderImpl::EncoderOutputCodedPacketCallback, (void*)(this)};
-  vpx_codec_control(encoder_, VP9E_REGISTER_CX_CALLBACK, (void*)(&cbp));
+      VP9EncoderImpl::EncoderOutputCodedPacketCallback,
+      reinterpret_cast<void*>(this)};
+  vpx_codec_control(encoder_, VP9E_REGISTER_CX_CALLBACK,
+                    reinterpret_cast<void*>(&cbp));
 
   // Control function to set the number of column tiles in encoding a frame, in
   // log2 unit: e.g., 0 = 1 tile column, 1 = 2 tile columns, 2 = 4 tile columns.
@@ -468,7 +469,7 @@
       optimal_buffer_size * scale_par * codec_.maxFramerate / 10;
   // Don't go below 3 times the per frame bandwidth.
   const uint32_t min_intra_size = 300;
-  return (target_pct < min_intra_size) ? min_intra_size: target_pct;
+  return (target_pct < min_intra_size) ? min_intra_size : target_pct;
 }
 
 int VP9EncoderImpl::Encode(const VideoFrame& input_image,
@@ -547,11 +548,11 @@
 }
 
 void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
-                                       const vpx_codec_cx_pkt& pkt,
-                                       uint32_t timestamp) {
+                                           const vpx_codec_cx_pkt& pkt,
+                                           uint32_t timestamp) {
   assert(codec_specific != NULL);
   codec_specific->codecType = kVideoCodecVP9;
-  CodecSpecificInfoVP9 *vp9_info = &(codec_specific->codecSpecific.VP9);
+  CodecSpecificInfoVP9* vp9_info = &(codec_specific->codecSpecific.VP9);
   // TODO(asapersson): Set correct value.
   vp9_info->inter_pic_predicted =
       (pkt.data.frame.flags & VPX_FRAME_IS_KEY) ? false : true;
@@ -857,7 +858,7 @@
   if (decoder_ == NULL) {
     decoder_ = new vpx_codec_ctx_t;
   }
-  vpx_codec_dec_cfg_t  cfg;
+  vpx_codec_dec_cfg_t cfg;
   // Setting number of threads to a constant value (1)
   cfg.threads = 1;
   cfg.h = cfg.w = 0;  // set after decode
@@ -910,10 +911,8 @@
   }
   // During decode libvpx may get and release buffers from |frame_buffer_pool_|.
   // In practice libvpx keeps a few (~3-4) buffers alive at a time.
-  if (vpx_codec_decode(decoder_,
-                       buffer,
-                       static_cast<unsigned int>(input_image._length),
-                       0,
+  if (vpx_codec_decode(decoder_, buffer,
+                       static_cast<unsigned int>(input_image._length), 0,
                        VPX_DL_REALTIME)) {
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
@@ -943,10 +942,10 @@
   // using a WrappedI420Buffer.
   rtc::scoped_refptr<WrappedI420Buffer> img_wrapped_buffer(
       new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
-          img->d_w, img->d_h,
-          img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
-          img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
-          img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
+          img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
+          img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
+          img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
+          img->stride[VPX_PLANE_V],
           // WrappedI420Buffer's mechanism for allowing the release of its frame
           // buffer is through a callback function. This is where we should
           // release |img_buffer|.
diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
index 91475c9..bfa4540 100644
--- a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
+++ b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
@@ -9,8 +9,10 @@
  *
  */
 
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_IMPL_H_
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_
+
+#include <vector>
 
 #include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
 #include "webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h"
@@ -129,7 +131,6 @@
   rtc::scoped_ptr<ScreenshareLayersVP9> spatial_layer_;
 };
 
-
 class VP9DecoderImpl : public VP9Decoder {
  public:
   VP9DecoderImpl();
@@ -165,4 +166,4 @@
 };
 }  // namespace webrtc
 
-#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_IMPL_H_
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_