Rename EncodedImage::_length to size_, and make it private.

Use size() accessor function. Also replace most nearby uses of _buffer
with data().

Bug: webrtc:9378
Change-Id: I1ac3459612f7c6151bd057d05448da1c4e1c6e3d
Reviewed-on: https://webrtc-review.googlesource.com/c/116783
Commit-Queue: Niels Moller <nisse@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26273}
diff --git a/sdk/android/src/jni/android_media_decoder.cc b/sdk/android/src/jni/android_media_decoder.cc
index a73167d..35f415e 100644
--- a/sdk/android/src/jni/android_media_decoder.cc
+++ b/sdk/android/src/jni/android_media_decoder.cc
@@ -360,7 +360,7 @@
     ALOGE << "Decode() - callback_ is NULL";
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
-  if (inputImage._buffer == NULL && inputImage._length > 0) {
+  if (inputImage.data() == NULL && inputImage.size() > 0) {
     ALOGE << "Decode() - inputImage is incorrect";
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
@@ -408,7 +408,7 @@
     }
     key_frame_required_ = false;
   }
-  if (inputImage._length == 0) {
+  if (inputImage.size() == 0) {
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
 
@@ -476,35 +476,34 @@
   RTC_CHECK(buffer) << "Indirect buffer??";
   size_t buffer_capacity =
       rtc::dchecked_cast<size_t>(jni->GetDirectBufferCapacity(j_input_buffer));
-  if (CheckException(jni) || buffer_capacity < inputImage._length) {
-    ALOGE << "Input frame size " << inputImage._length
+  if (CheckException(jni) || buffer_capacity < inputImage.size()) {
+    ALOGE << "Input frame size " << inputImage.size()
           << " is bigger than buffer size " << buffer_capacity;
     return ProcessHWErrorOnCodecThread();
   }
   jlong presentation_timestamp_us = static_cast<jlong>(
       static_cast<int64_t>(frames_received_) * 1000000 / codec_.maxFramerate);
-  memcpy(buffer, inputImage._buffer, inputImage._length);
+  memcpy(buffer, inputImage.data(), inputImage.size());
 
   if (frames_decoded_ < frames_decoded_logged_) {
     ALOGD << "Decoder frame in # " << frames_received_
           << ". Type: " << inputImage._frameType << ". Buffer # "
           << j_input_buffer_index
           << ". TS: " << presentation_timestamp_us / 1000
-          << ". Size: " << inputImage._length;
+          << ". Size: " << inputImage.size();
   }
 
   // Save input image timestamps for later output.
   frames_received_++;
-  current_bytes_ += inputImage._length;
+  current_bytes_ += inputImage.size();
   absl::optional<uint8_t> qp;
   if (codecType_ == kVideoCodecVP8) {
     int qp_int;
-    if (vp8::GetQp(inputImage._buffer, inputImage._length, &qp_int)) {
+    if (vp8::GetQp(inputImage.data(), inputImage.size(), &qp_int)) {
       qp = qp_int;
     }
   } else if (codecType_ == kVideoCodecH264) {
-    h264_bitstream_parser_.ParseBitstream(inputImage._buffer,
-                                          inputImage._length);
+    h264_bitstream_parser_.ParseBitstream(inputImage.data(), inputImage.size());
     int qp_int;
     if (h264_bitstream_parser_.GetLastSliceQp(&qp_int)) {
       qp = qp_int;
@@ -515,7 +514,7 @@
   // Feed input to decoder.
   bool success = Java_MediaCodecVideoDecoder_queueInputBuffer(
       jni, j_media_codec_video_decoder_, j_input_buffer_index,
-      static_cast<int>(inputImage._length), presentation_timestamp_us,
+      static_cast<int>(inputImage.size()), presentation_timestamp_us,
       static_cast<int64_t>(inputImage.Timestamp()), inputImage.ntp_time_ms_);
   if (CheckException(jni) || !success) {
     ALOGE << "queueInputBuffer error";
diff --git a/sdk/android/src/jni/android_media_encoder.cc b/sdk/android/src/jni/android_media_encoder.cc
index aaa2d9a..69f0134 100644
--- a/sdk/android/src/jni/android_media_encoder.cc
+++ b/sdk/android/src/jni/android_media_encoder.cc
@@ -1047,7 +1047,7 @@
       if (codec_type == kVideoCodecVP8 || codec_type == kVideoCodecVP9) {
         header.VerifyAndAllocateFragmentationHeader(1);
         header.fragmentationOffset[0] = 0;
-        header.fragmentationLength[0] = image->_length;
+        header.fragmentationLength[0] = image->size();
         header.fragmentationPlType[0] = 0;
         header.fragmentationTimeDiff[0] = 0;
         if (codec_type == kVideoCodecVP8) {
diff --git a/sdk/android/src/jni/encoded_image.cc b/sdk/android/src/jni/encoded_image.cc
index daa7788..c801ce5f 100644
--- a/sdk/android/src/jni/encoded_image.cc
+++ b/sdk/android/src/jni/encoded_image.cc
@@ -27,8 +27,8 @@
 ScopedJavaLocalRef<jobject> NativeToJavaEncodedImage(
     JNIEnv* jni,
     const EncodedImage& image) {
-  ScopedJavaLocalRef<jobject> buffer =
-      NewDirectByteBuffer(jni, image._buffer, image._length);
+  ScopedJavaLocalRef<jobject> buffer = NewDirectByteBuffer(
+      jni, const_cast<uint8_t*>(image.data()), image.size());
   ScopedJavaLocalRef<jobject> frame_type =
       NativeToJavaFrameType(jni, image._frameType);
   ScopedJavaLocalRef<jobject> qp;
diff --git a/sdk/android/src/jni/video_decoder_wrapper.cc b/sdk/android/src/jni/video_decoder_wrapper.cc
index 26dc3d5..037aef1 100644
--- a/sdk/android/src/jni/video_decoder_wrapper.cc
+++ b/sdk/android/src/jni/video_decoder_wrapper.cc
@@ -237,21 +237,21 @@
   switch (codec_settings_.codecType) {
     case kVideoCodecVP8: {
       int qp_int;
-      if (vp8::GetQp(input_image._buffer, input_image._length, &qp_int)) {
+      if (vp8::GetQp(input_image.data(), input_image.size(), &qp_int)) {
         qp = qp_int;
       }
       break;
     }
     case kVideoCodecVP9: {
       int qp_int;
-      if (vp9::GetQp(input_image._buffer, input_image._length, &qp_int)) {
+      if (vp9::GetQp(input_image.data(), input_image.size(), &qp_int)) {
         qp = qp_int;
       }
       break;
     }
     case kVideoCodecH264: {
-      h264_bitstream_parser_.ParseBitstream(input_image._buffer,
-                                            input_image._length);
+      h264_bitstream_parser_.ParseBitstream(input_image.data(),
+                                            input_image.size());
       int qp_int;
       if (h264_bitstream_parser_.GetLastSliceQp(&qp_int)) {
         qp = qp_int;
diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
index 6f2d1f4..59962c8 100644
--- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
+++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
@@ -17,9 +17,9 @@
 - (instancetype)initWithNativeEncodedImage:(webrtc::EncodedImage)encodedImage {
   if (self = [super init]) {
     // Wrap the buffer in NSData without copying, do not take ownership.
-    self.buffer = [NSData dataWithBytesNoCopy:encodedImage._buffer
-                                   length:encodedImage._length
-                             freeWhenDone:NO];
+    self.buffer = [NSData dataWithBytesNoCopy:encodedImage.data()
+                                       length:encodedImage.size()
+                                 freeWhenDone:NO];
     self.encodedWidth = rtc::dchecked_cast<int32_t>(encodedImage._encodedWidth);
     self.encodedHeight = rtc::dchecked_cast<int32_t>(encodedImage._encodedHeight);
     self.timeStamp = encodedImage.Timestamp();