Delete AndroidVideoCapturer::FrameFactory.

Split VideoCapturer::OnFrameCaptured into helper methods, which makes
the video adaptation logic usable without a frame factory.

Refactor AndroidVideoCapturer to make the adaptation decision earlier,
so that frames can be cropped and rotated in a single pass using
libyuv::NV12ToI420Rotate.
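
In outline, both JNI capture callbacks now follow the same pattern (a
simplified sketch of the code in androidvideocapturer_jni.cc below;
locking and the closed-capturer check are omitted):

    int adapted_width, adapted_height;
    int crop_width, crop_height, crop_x, crop_y;
    // Ask the adapter up front whether to drop this frame, and how
    // much to crop and scale if it is kept.
    if (!capturer_->AdaptFrame(width, height, timestamp_ns,
                               &adapted_width, &adapted_height,
                               &crop_width, &crop_height,
                               &crop_x, &crop_y)) {
      return;  // Frame dropped by the adapter.
    }
    // Memory path: crop + rotate NV21 to I420 in one libyuv pass,
    // then scale only if the adapter requested a smaller size.
    // Texture path: fold crop and rotation into the sampling matrix.
    capturer_->OnFrame(frame, width, height);
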
BUG=webrtc:5682
Review-Url: https://codereview.webrtc.org/1973873003
Cr-Commit-Position: refs/heads/master@{#12895}
diff --git a/webrtc/api/java/jni/androidmediaencoder_jni.cc b/webrtc/api/java/jni/androidmediaencoder_jni.cc
index da0f2e6..0e36aa1 100644
--- a/webrtc/api/java/jni/androidmediaencoder_jni.cc
+++ b/webrtc/api/java/jni/androidmediaencoder_jni.cc
@@ -683,7 +683,7 @@
rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
static_cast<AndroidTextureBuffer*>(
frame.video_frame_buffer().get())->CropScaleAndRotate(
- frame.width(), frame.height(),
+          frame.width(), frame.height(), 0, 0,  // No crop offset.
scaled_resolution.width, scaled_resolution.height,
webrtc::kVideoRotation_0));
input_frame.set_video_frame_buffer(scaled_buffer);
@@ -824,9 +824,7 @@
RTC_CHECK(use_surface_);
NativeHandleImpl* handle = static_cast<NativeHandleImpl*>(
frame.video_frame_buffer()->native_handle());
- jfloatArray sampling_matrix = jni->NewFloatArray(16);
- jni->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
-
+ jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni);
bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
j_encode_texture_method_,
key_frame,
diff --git a/webrtc/api/java/jni/androidvideocapturer_jni.cc b/webrtc/api/java/jni/androidvideocapturer_jni.cc
index 5b4a92c..0e8e867 100644
--- a/webrtc/api/java/jni/androidvideocapturer_jni.cc
+++ b/webrtc/api/java/jni/androidvideocapturer_jni.cc
@@ -13,6 +13,7 @@
#include "webrtc/api/java/jni/native_handle_impl.h"
#include "webrtc/api/java/jni/surfacetexturehelper_jni.h"
#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
#include "webrtc/base/bind.h"
namespace webrtc_jni {
@@ -169,25 +170,79 @@
int height,
int rotation,
int64_t timestamp_ns) {
- const uint8_t* y_plane = static_cast<uint8_t*>(video_frame);
- const uint8_t* vu_plane = y_plane + width * height;
-
- rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
- buffer_pool_.CreateBuffer(width, height);
- libyuv::NV21ToI420(
- y_plane, width,
- vu_plane, width,
- buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
- buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
- buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
- width, height);
-
+ RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+ rotation == 270);
rtc::CritScope cs(&capturer_lock_);
   if (!capturer_) {
     LOG(LS_WARNING) << "OnMemoryBufferFrame() called for closed capturer.";
     return;
   }
+
+  int adapted_width;
+  int adapted_height;
+  int crop_width;
+  int crop_height;
+  int crop_x;
+  int crop_y;
+
+  if (!capturer_->AdaptFrame(width, height, timestamp_ns,
+                             &adapted_width, &adapted_height,
+                             &crop_width, &crop_height, &crop_x, &crop_y)) {
+    return;
+  }
- capturer_->OnIncomingFrame(buffer, rotation, timestamp_ns);
+
+ int rotated_width = crop_width;
+ int rotated_height = crop_height;
+
+ if (capturer_->apply_rotation() && (rotation == 90 || rotation == 270)) {
+ std::swap(adapted_width, adapted_height);
+ std::swap(rotated_width, rotated_height);
+ }
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+ pre_scale_pool_.CreateBuffer(rotated_width, rotated_height);
+
+ const uint8_t* y_plane = static_cast<const uint8_t*>(video_frame);
+ const uint8_t* uv_plane = y_plane + width * height;
+
+ // Can only crop at even pixels.
+ crop_x &= ~1;
+ crop_y &= ~1;
+ int uv_width = (width + 1) / 2;
+
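+  // NV21 layout: a full-size Y plane followed by a single interleaved
+  // VU plane at half vertical resolution, with width bytes per VU row.
+  // Since crop_x and crop_y are even, the cropped VU data starts
+  // crop_y / 2 rows and crop_x bytes into that plane.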
+ libyuv::NV12ToI420Rotate(
+ y_plane + width * crop_y + crop_x, width,
+ uv_plane + uv_width * crop_y + crop_x, width,
+ buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
+ // Swap U and V, since we have NV21, not NV12.
+ buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
+ buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
+ crop_width, crop_height, static_cast<libyuv::RotationMode>(
+ capturer_->apply_rotation() ? rotation : 0));
+
+ if (adapted_width != rotated_width || adapted_height != rotated_height) {
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled =
+ post_scale_pool_.CreateBuffer(adapted_width, adapted_height);
+ // TODO(nisse): This should be done by some Scale method in
+ // I420Buffer, but we can't do that right now, since
+ // I420BufferPool uses a wrapper object.
+ if (libyuv::I420Scale(buffer->DataY(), buffer->StrideY(),
+ buffer->DataU(), buffer->StrideU(),
+ buffer->DataV(), buffer->StrideV(),
+ rotated_width, rotated_height,
+ scaled->MutableDataY(), scaled->StrideY(),
+ scaled->MutableDataU(), scaled->StrideU(),
+ scaled->MutableDataV(), scaled->StrideV(),
+ adapted_width, adapted_height,
+ libyuv::kFilterBox) < 0) {
+ LOG(LS_WARNING) << "I420Scale failed";
+ return;
+ }
+ buffer = scaled;
+ }
+ // TODO(nisse): Use microsecond time instead.
+ capturer_->OnFrame(cricket::WebRtcVideoFrame(
+ buffer, timestamp_ns,
+ capturer_->apply_rotation()
+ ? webrtc::kVideoRotation_0
+ : static_cast<webrtc::VideoRotation>(rotation)),
+ width, height);
}
void AndroidVideoCapturerJni::OnTextureFrame(int width,
@@ -195,15 +250,48 @@
int rotation,
int64_t timestamp_ns,
const NativeHandleImpl& handle) {
- rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
- surface_texture_helper_->CreateTextureFrame(width, height, handle));
-
+ RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+ rotation == 270);
rtc::CritScope cs(&capturer_lock_);
   if (!capturer_) {
     LOG(LS_WARNING) << "OnTextureFrame() called for closed capturer.";
     return;
   }
+
+  int adapted_width;
+  int adapted_height;
+  int crop_width;
+  int crop_height;
+  int crop_x;
+  int crop_y;
+
+  if (!capturer_->AdaptFrame(width, height, timestamp_ns,
+                             &adapted_width, &adapted_height,
+                             &crop_width, &crop_height, &crop_x, &crop_y)) {
+    return;
+  }
- capturer_->OnIncomingFrame(buffer, rotation, timestamp_ns);
+
+ Matrix matrix = handle.sampling_matrix;
+
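+  // The sampling matrix maps normalized texture coordinates in [0, 1],
+  // so the pixel crop rectangle is converted to fractions of the full
+  // frame size.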
+ matrix.Crop(crop_width / static_cast<float>(width),
+ crop_height / static_cast<float>(height),
+ crop_x / static_cast<float>(width),
+ crop_y / static_cast<float>(height));
+
+ if (capturer_->apply_rotation()) {
+ if (rotation == webrtc::kVideoRotation_90 ||
+ rotation == webrtc::kVideoRotation_270) {
+ std::swap(adapted_width, adapted_height);
+ }
+ matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation));
+ }
+
+ // TODO(nisse): Use microsecond time instead.
+ capturer_->OnFrame(
+ cricket::WebRtcVideoFrame(
+ surface_texture_helper_->CreateTextureFrame(
+ adapted_width, adapted_height,
+ NativeHandleImpl(handle.oes_texture_id, matrix)),
+ timestamp_ns, capturer_->apply_rotation()
+ ? webrtc::kVideoRotation_0
+ : static_cast<webrtc::VideoRotation>(rotation)),
+ width, height);
}
void AndroidVideoCapturerJni::OnOutputFormatRequest(int width,
diff --git a/webrtc/api/java/jni/androidvideocapturer_jni.h b/webrtc/api/java/jni/androidvideocapturer_jni.h
index eea56ad..4a803d9 100644
--- a/webrtc/api/java/jni/androidvideocapturer_jni.h
+++ b/webrtc/api/java/jni/androidvideocapturer_jni.h
@@ -77,7 +77,8 @@
const ScopedGlobalRef<jclass> j_observer_class_;
// Used on the Java thread running the camera.
- webrtc::I420BufferPool buffer_pool_;
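+  // Separate pools, since the buffers before and after scaling have
+  // different sizes.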
+ webrtc::I420BufferPool pre_scale_pool_;
+ webrtc::I420BufferPool post_scale_pool_;
rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
rtc::ThreadChecker thread_checker_;
diff --git a/webrtc/api/java/jni/native_handle_impl.cc b/webrtc/api/java/jni/native_handle_impl.cc
index 1f180ad..eb71088 100644
--- a/webrtc/api/java/jni/native_handle_impl.cc
+++ b/webrtc/api/java/jni/native_handle_impl.cc
@@ -21,9 +21,24 @@
using webrtc::NativeHandleBuffer;
-namespace {
+namespace webrtc_jni {
-void RotateMatrix(float a[16], webrtc::VideoRotation rotation) {
+Matrix::Matrix(JNIEnv* jni, jfloatArray a) {
+ RTC_CHECK_EQ(16, jni->GetArrayLength(a));
+ jfloat* ptr = jni->GetFloatArrayElements(a, nullptr);
+ for (int i = 0; i < 16; ++i) {
+ elem_[i] = ptr[i];
+ }
+ jni->ReleaseFloatArrayElements(a, ptr, 0);
+}
+
+jfloatArray Matrix::ToJava(JNIEnv* jni) {
+ jfloatArray matrix = jni->NewFloatArray(16);
+ jni->SetFloatArrayRegion(matrix, 0, 16, elem_);
+ return matrix;
+}
+
+void Matrix::Rotate(webrtc::VideoRotation rotation) {
// Texture coordinates are in the range 0 to 1. The transformation of the last
// row in each rotation matrix is needed for proper translation, e.g, to
// mirror x, we don't replace x by -x, but by 1-x.
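+  // Each case below amounts to post-multiplying the current matrix by
+  // a fixed permutation/negation matrix; e.g. the 90-degree case
+  // composes it with the coordinate mapping (u, v) -> (1 - v, u).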
@@ -32,35 +47,36 @@
break;
case webrtc::kVideoRotation_90: {
const float ROTATE_90[16] =
- { a[4], a[5], a[6], a[7],
- -a[0], -a[1], -a[2], -a[3],
- a[8], a[9], a[10], a[11],
- a[0] + a[12], a[1] + a[13], a[2] + a[14], a[3] + a[15]};
- memcpy(a, ROTATE_90, sizeof(ROTATE_90));
+ { elem_[4], elem_[5], elem_[6], elem_[7],
+ -elem_[0], -elem_[1], -elem_[2], -elem_[3],
+ elem_[8], elem_[9], elem_[10], elem_[11],
+ elem_[0] + elem_[12], elem_[1] + elem_[13],
+ elem_[2] + elem_[14], elem_[3] + elem_[15]};
+ memcpy(elem_, ROTATE_90, sizeof(elem_));
} break;
case webrtc::kVideoRotation_180: {
const float ROTATE_180[16] =
- { -a[0], -a[1], -a[2], -a[3],
- -a[4], -a[5], -a[6], -a[7],
- a[8], a[9], a[10], a[11],
- a[0] + a[4] + a[12], a[1] +a[5] + a[13], a[2] + a[6] + a[14],
- a[3] + a[11]+ a[15]};
- memcpy(a, ROTATE_180, sizeof(ROTATE_180));
- }
- break;
+ { -elem_[0], -elem_[1], -elem_[2], -elem_[3],
+ -elem_[4], -elem_[5], -elem_[6], -elem_[7],
+ elem_[8], elem_[9], elem_[10], elem_[11],
+ elem_[0] + elem_[4] + elem_[12], elem_[1] + elem_[5] + elem_[13],
+        elem_[2] + elem_[6] + elem_[14], elem_[3] + elem_[7] + elem_[15]};
+ memcpy(elem_, ROTATE_180, sizeof(elem_));
+ } break;
case webrtc::kVideoRotation_270: {
const float ROTATE_270[16] =
- { -a[4], -a[5], -a[6], -a[7],
- a[0], a[1], a[2], a[3],
- a[8], a[9], a[10], a[11],
- a[4] + a[12], a[5] + a[13], a[6] + a[14], a[7] + a[15]};
- memcpy(a, ROTATE_270, sizeof(ROTATE_270));
+ { -elem_[4], -elem_[5], -elem_[6], -elem_[7],
+ elem_[0], elem_[1], elem_[2], elem_[3],
+ elem_[8], elem_[9], elem_[10], elem_[11],
+ elem_[4] + elem_[12], elem_[5] + elem_[13],
+ elem_[6] + elem_[14], elem_[7] + elem_[15]};
+ memcpy(elem_, ROTATE_270, sizeof(elem_));
} break;
}
}
// Calculates result = a * b, in column-major order.
-void MultiplyMatrix(const float a[16], const float b[16], float result[16]) {
+void Matrix::Multiply(const float a[16], const float b[16], float result[16]) {
for (int i = 0; i < 4; ++i) {
for (int j = 0; j < 4; ++j) {
float sum = 0;
@@ -75,40 +91,30 @@
-// Center crop by keeping xFraction of the width and yFraction of the height,
-// so e.g. cropping from 640x480 to 640x360 would use
-// xFraction=1, yFraction=360/480.
+// Crop by keeping xFraction of the width and yFraction of the height,
+// starting at the given offsets, all relative to the full frame size.
+// E.g. a center crop from 640x480 to 640x360 would use xFraction=1,
+// yFraction=360/480, xOffset=0, yOffset=(1 - 360/480) / 2.
-void CropMatrix(float a[16], float xFraction, float yFraction) {
- // Move cropped area to the center of the frame by offsetting half the
- // removed area.
- const float xOffset = (1 - xFraction) / 2;
- const float yOffset = (1 - yFraction) / 2;
- const float crop_matrix[16] = {
- xFraction, 0, 0, 0,
- 0, yFraction, 0, 0,
- 0, 0, 1, 0,
- xOffset, yOffset, 0, 1};
- float mul_result[16];
- MultiplyMatrix(crop_matrix, a, mul_result);
- memcpy(a, mul_result, sizeof(mul_result));
+void Matrix::Crop(float xFraction,
+ float yFraction,
+ float xOffset,
+ float yOffset) {
+ const float crop_matrix[16] =
+ {xFraction, 0, 0, 0,
+ 0, yFraction, 0, 0,
+ 0, 0, 1, 0,
+ xOffset, yOffset, 0, 1};
+ const Matrix old = *this;
+ Multiply(crop_matrix, old.elem_, this->elem_);
}
-} // anonymouse namespace
-
-namespace webrtc_jni {
-
// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
static const int kBufferAlignment = 64;
+NativeHandleImpl::NativeHandleImpl(int id, const Matrix& matrix)
+ : oes_texture_id(id), sampling_matrix(matrix) {}
+
NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
jint j_oes_texture_id,
jfloatArray j_transform_matrix)
- : oes_texture_id(j_oes_texture_id) {
- RTC_CHECK_EQ(16, jni->GetArrayLength(j_transform_matrix));
- jfloat* transform_matrix_ptr =
- jni->GetFloatArrayElements(j_transform_matrix, nullptr);
- for (int i = 0; i < 16; ++i) {
- sampling_matrix[i] = transform_matrix_ptr[i];
- }
- jni->ReleaseFloatArrayElements(j_transform_matrix, transform_matrix_ptr, 0);
-}
+ : oes_texture_id(j_oes_texture_id),
+ sampling_matrix(jni, j_transform_matrix) {}
AndroidTextureBuffer::AndroidTextureBuffer(
int width,
@@ -162,11 +168,7 @@
jobject byte_buffer = jni->NewDirectByteBuffer(y_data, size);
- // TODO(nisse): Keep java transform matrix around.
- jfloatArray sampling_matrix = jni->NewFloatArray(16);
- jni->SetFloatArrayRegion(sampling_matrix, 0, 16,
- native_handle_.sampling_matrix);
-
+ jfloatArray sampling_matrix = native_handle_.sampling_matrix.ToJava(jni);
jni->CallVoidMethod(surface_texture_helper_,
transform_mid,
byte_buffer, width(), height(), stride,
@@ -179,6 +181,8 @@
rtc::scoped_refptr<AndroidTextureBuffer>
AndroidTextureBuffer::CropScaleAndRotate(int cropped_width,
int cropped_height,
+ int crop_x,
+ int crop_y,
int dst_width,
int dst_height,
webrtc::VideoRotation rotation) {
@@ -198,11 +202,13 @@
surface_texture_helper_, rtc::KeepRefUntilDone(this)));
if (cropped_width != width() || cropped_height != height()) {
- CropMatrix(buffer->native_handle_.sampling_matrix,
- cropped_width / static_cast<float>(width()),
- cropped_height / static_cast<float>(height()));
+ buffer->native_handle_.sampling_matrix.Crop(
+ cropped_width / static_cast<float>(width()),
+ cropped_height / static_cast<float>(height()),
+ crop_x / static_cast<float>(width()),
+ crop_y / static_cast<float>(height()));
}
- RotateMatrix(buffer->native_handle_.sampling_matrix, rotation);
+ buffer->native_handle_.sampling_matrix.Rotate(rotation);
return buffer;
}
diff --git a/webrtc/api/java/jni/native_handle_impl.h b/webrtc/api/java/jni/native_handle_impl.h
index b781815..0d01532 100644
--- a/webrtc/api/java/jni/native_handle_impl.h
+++ b/webrtc/api/java/jni/native_handle_impl.h
@@ -18,14 +18,37 @@
namespace webrtc_jni {
+// OpenGL texture matrix, in column-major order. Operations are
+// in-place.
+class Matrix {
+ public:
+ Matrix(JNIEnv* jni, jfloatArray a);
+
+ jfloatArray ToJava(JNIEnv* jni);
+
+  // Crop arguments are fractions of the original size.
+  void Crop(float xFraction,
+            float yFraction,
+            float xOffset,
+            float yOffset);
+
+ void Rotate(webrtc::VideoRotation rotation);
+
+ private:
+ static void Multiply(const float a[16], const float b[16], float result[16]);
+ float elem_[16];
+};
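+
+// Example use, e.g. when cropping and rotating a texture frame
+// (cf. AndroidVideoCapturerJni::OnTextureFrame):
+//   Matrix matrix = handle.sampling_matrix;
+//   matrix.Crop(0.5f, 0.5f, 0.25f, 0.25f);  // Center crop to half size.
+//   matrix.Rotate(webrtc::kVideoRotation_90);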
+
// Wrapper for texture object.
struct NativeHandleImpl {
NativeHandleImpl(JNIEnv* jni,
jint j_oes_texture_id,
jfloatArray j_transform_matrix);
+ NativeHandleImpl(int id, const Matrix& matrix);
+
const int oes_texture_id;
- float sampling_matrix[16];
+ Matrix sampling_matrix;
};
class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
@@ -42,6 +65,8 @@
rtc::scoped_refptr<AndroidTextureBuffer> CropScaleAndRotate(
int cropped_width,
int cropped_height,
+ int crop_x,
+ int crop_y,
int dst_width,
int dst_height,
webrtc::VideoRotation rotation);
diff --git a/webrtc/api/java/jni/peerconnection_jni.cc b/webrtc/api/java/jni/peerconnection_jni.cc
index 522ae75..a075805 100644
--- a/webrtc/api/java/jni/peerconnection_jni.cc
+++ b/webrtc/api/java/jni/peerconnection_jni.cc
@@ -794,8 +794,8 @@
jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
NativeHandleImpl* handle = reinterpret_cast<NativeHandleImpl*>(
frame->video_frame_buffer()->native_handle());
- jfloatArray sampling_matrix = jni()->NewFloatArray(16);
- jni()->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
+ jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni());
+
return jni()->NewObject(
*j_frame_class_, j_texture_frame_ctor_id_,
frame->width(), frame->height(),