Patchset 1 is a pure revert of 'Revert of "Android MediaCodecVideoDecoder: Manage lifetime of texture frames"' (https://codereview.webrtc.org/1378033003/).

The following patchsets move the responsibility of calculating the decode time to Java.
TESTED=AppRTC loopback using H264 and VP8 on N5, N6, N7, S5
Review URL: https://codereview.webrtc.org/1422963003
Cr-Commit-Position: refs/heads/master@{#10597}
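
The substance of the follow-up patchsets is that per-frame decode time is now
measured on the Java side: a start timestamp is queued when an input buffer is
submitted and popped when the matching output buffer is dequeued. A minimal
sketch of that bookkeeping, assuming an Android runtime; the class and method
names are illustrative, not part of this patch:

  import android.media.MediaCodec;
  import android.os.SystemClock;
  import java.util.LinkedList;
  import java.util.Queue;

  class DecodeTimeTracker {
    // One entry per frame currently inside the codec, in submission order.
    private final Queue<Long> decodeStartTimeMs = new LinkedList<Long>();

    void queueInputBuffer(MediaCodec codec, int index, int size, long timestampUs) {
      decodeStartTimeMs.add(SystemClock.elapsedRealtime());
      codec.queueInputBuffer(index, 0, size, timestampUs, 0);
    }

    // Call when dequeueOutputBuffer() returns a real buffer index (>= 0).
    long onFrameDecoded() {
      // MediaCodec delivers output in order for a single stream, so the oldest
      // queued start time belongs to the frame that was just dequeued.
      return SystemClock.elapsedRealtime() - decodeStartTimeMs.remove();
    }
  }
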
diff --git a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
index 33ccff2..c999947 100644
--- a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
+++ b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
@@ -30,9 +30,7 @@
import android.content.Context;
import android.content.res.Resources.NotFoundException;
import android.graphics.Point;
-import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
-import android.opengl.Matrix;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.AttributeSet;
@@ -457,25 +455,10 @@
}
final long startTimeNs = System.nanoTime();
- final float[] samplingMatrix;
- if (frame.yuvFrame) {
- // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
- // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
- // bottom-left corner. We correct this discrepancy by setting a vertical flip as sampling
- // matrix.
- samplingMatrix = RendererCommon.verticalFlipMatrix();
- } else {
- // TODO(magjed): Move updateTexImage() to the video source instead.
- SurfaceTexture surfaceTexture = (SurfaceTexture) frame.textureObject;
- surfaceTexture.updateTexImage();
- samplingMatrix = new float[16];
- surfaceTexture.getTransformMatrix(samplingMatrix);
- }
-
final float[] texMatrix;
synchronized (layoutLock) {
final float[] rotatedSamplingMatrix =
- RendererCommon.rotateTextureMatrix(samplingMatrix, frame.rotationDegree);
+ RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
mirror, frameAspectRatio(), (float) layoutWidth / layoutHeight);
texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
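
With this change SurfaceViewRenderer no longer touches the SurfaceTexture at
all: updateTexImage() moves to the video source, which latches the newest frame,
snapshots the transform matrix, and attaches it to the frame as samplingMatrix.
A sketch of the producer side that replaces the code deleted above, assuming it
runs on the thread owning the texture's GL context; the wrapper class is
illustrative:

  import android.graphics.SurfaceTexture;

  class SamplingMatrixProducer {
    static float[] fetchSamplingMatrix(SurfaceTexture surfaceTexture) {
      surfaceTexture.updateTexImage();  // Latch the latest decoded/captured frame.
      final float[] samplingMatrix = new float[16];
      surfaceTexture.getTransformMatrix(samplingMatrix);  // Snapshot its transform.
      return samplingMatrix;
    }
  }
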
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
index bacd0cf..757c4d9 100644
--- a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
@@ -38,7 +38,6 @@
import android.annotation.SuppressLint;
import android.graphics.Point;
import android.graphics.Rect;
-import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
@@ -241,29 +240,15 @@
}
if (isNewFrame) {
+ rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
+ pendingFrame.samplingMatrix, pendingFrame.rotationDegree);
if (pendingFrame.yuvFrame) {
rendererType = RendererType.RENDERER_YUV;
drawer.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
- // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
- // top-left corner of the image, but in glTexImage2D() the first element corresponds to
- // the bottom-left corner. We correct this discrepancy by setting a vertical flip as
- // sampling matrix.
- final float[] samplingMatrix = RendererCommon.verticalFlipMatrix();
- rotatedSamplingMatrix =
- RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
} else {
rendererType = RendererType.RENDERER_TEXTURE;
- // External texture rendering. Update texture image to latest and make a deep copy of
- // the external texture.
- // TODO(magjed): Move updateTexImage() to the video source instead.
- final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
- surfaceTexture.updateTexImage();
- final float[] samplingMatrix = new float[16];
- surfaceTexture.getTransformMatrix(samplingMatrix);
- rotatedSamplingMatrix =
- RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
-
+ // External texture rendering. Make a deep copy of the external texture.
// Reallocate offscreen texture if necessary.
textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());
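
Both the YUV and the texture path above now funnel through
RendererCommon.rotateTextureMatrix(). For orientation, a sketch of what such a
helper has to do: rotate texture coordinates around the center of the unit
square and compose the result with the frame's sampling matrix. This is an
assumed implementation for illustration, not necessarily WebRTC's exact code:

  import android.opengl.Matrix;

  class MatrixRotation {
    static float[] rotateTextureMatrix(float[] samplingMatrix, float rotationDegree) {
      // Rotate around (0.5, 0.5), the center of the unit texture square.
      final float[] rotation = new float[16];
      Matrix.setIdentityM(rotation, 0);
      Matrix.translateM(rotation, 0, 0.5f, 0.5f, 0.0f);
      Matrix.rotateM(rotation, 0, rotationDegree, 0.0f, 0.0f, 1.0f);
      Matrix.translateM(rotation, 0, -0.5f, -0.5f, 0.0f);
      final float[] result = new float[16];
      Matrix.multiplyMM(result, 0, samplingMatrix, 0, rotation, 0);
      return result;
    }
  }
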
diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
index 3bfad68..e506bc3 100644
--- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
@@ -33,6 +33,7 @@
#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
@@ -112,7 +113,7 @@
bool use_surface_;
VideoCodec codec_;
webrtc::I420BufferPool decoded_frame_pool_;
- NativeHandleImpl native_handle_;
+ rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
DecodedImageCallback* callback_;
int frames_received_; // Number of frames received by decoder.
int frames_decoded_; // Number of frames decoded by decoder.
@@ -123,8 +124,6 @@
uint32_t max_pending_frames_; // Maximum number of pending input frames
std::vector<int32_t> timestamps_;
std::vector<int64_t> ntp_times_ms_;
- std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to
- // decoder input.
// State that is constant for the lifetime of this object once the ctor
// returns.
@@ -135,7 +134,8 @@
jmethodID j_release_method_;
jmethodID j_dequeue_input_buffer_method_;
jmethodID j_queue_input_buffer_method_;
- jmethodID j_dequeue_output_buffer_method_;
+ jmethodID j_dequeue_byte_buffer_method_;
+ jmethodID j_dequeue_texture_buffer_method_;
jmethodID j_return_decoded_byte_buffer_method_;
// MediaCodecVideoDecoder fields.
jfieldID j_input_buffers_field_;
@@ -145,20 +145,21 @@
jfieldID j_height_field_;
jfieldID j_stride_field_;
jfieldID j_slice_height_field_;
- jfieldID j_surface_texture_field_;
// MediaCodecVideoDecoder.DecodedTextureBuffer fields.
- jfieldID j_textureID_field_;
+ jfieldID j_texture_id_field_;
+ jfieldID j_transform_matrix_field_;
jfieldID j_texture_presentation_timestamp_us_field_;
- // MediaCodecVideoDecoder.DecodedByteBuffer fields.
+ jfieldID j_texture_decode_time_ms_field_;
+ jfieldID j_texture_frame_delay_ms_field_;
+ // MediaCodecVideoDecoder.DecodedOutputBuffer fields.
jfieldID j_info_index_field_;
jfieldID j_info_offset_field_;
jfieldID j_info_size_field_;
jfieldID j_info_presentation_timestamp_us_field_;
+ jfieldID j_byte_buffer_decode_time_ms_field_;
// Global references; must be deleted in Release().
std::vector<jobject> input_buffers_;
- jobject surface_texture_;
- jobject previous_surface_texture_;
// Render EGL context - owned by factory, should not be allocated/destroyed
// by VideoDecoder.
@@ -172,8 +173,6 @@
key_frame_required_(true),
inited_(false),
sw_fallback_required_(false),
- surface_texture_(NULL),
- previous_surface_texture_(NULL),
codec_thread_(new Thread()),
j_media_codec_video_decoder_class_(
jni,
@@ -192,19 +191,22 @@
j_init_decode_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "initDecode",
"(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
- "IILjavax/microedition/khronos/egl/EGLContext;)Z");
+ "IILorg/webrtc/SurfaceTextureHelper;)Z");
j_release_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
j_dequeue_input_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
j_queue_input_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
- j_dequeue_output_buffer_method_ = GetMethodID(
+ j_dequeue_byte_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
- "(I)Ljava/lang/Object;");
+ "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;");
+ j_dequeue_texture_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer",
+ "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;");
j_return_decoded_byte_buffer_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_,
- "returnDecodedByteBuffer", "(I)V");
+ "returnDecodedOutputBuffer", "(I)V");
j_input_buffers_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_,
@@ -222,28 +224,32 @@
jni, *j_media_codec_video_decoder_class_, "stride", "I");
j_slice_height_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
- j_surface_texture_field_ = GetFieldID(
- jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
- "Landroid/graphics/SurfaceTexture;");
- jclass j_decoder_decoded_texture_buffer_class = FindClass(jni,
+ jclass j_decoded_texture_buffer_class = FindClass(jni,
"org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
- j_textureID_field_ = GetFieldID(
- jni, j_decoder_decoded_texture_buffer_class, "textureID", "I");
- j_texture_presentation_timestamp_us_field_ =
- GetFieldID(jni, j_decoder_decoded_texture_buffer_class,
- "presentationTimestampUs", "J");
+ j_texture_id_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "textureID", "I");
+ j_transform_matrix_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "transformMatrix", "[F");
+ j_texture_presentation_timestamp_us_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "presentationTimestampUs", "J");
+ j_texture_decode_time_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J");
+ j_texture_frame_delay_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "frameDelayMs", "J");
- jclass j_decoder_decoded_byte_buffer_class = FindClass(jni,
- "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
+ jclass j_decoded_output_buffer_class = FindClass(jni,
+ "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
j_info_index_field_ = GetFieldID(
- jni, j_decoder_decoded_byte_buffer_class, "index", "I");
+ jni, j_decoded_output_buffer_class, "index", "I");
j_info_offset_field_ = GetFieldID(
- jni, j_decoder_decoded_byte_buffer_class, "offset", "I");
+ jni, j_decoded_output_buffer_class, "offset", "I");
j_info_size_field_ = GetFieldID(
- jni, j_decoder_decoded_byte_buffer_class, "size", "I");
+ jni, j_decoded_output_buffer_class, "size", "I");
j_info_presentation_timestamp_us_field_ = GetFieldID(
- jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J");
+ jni, j_decoded_output_buffer_class, "presentationTimestampUs", "J");
+ j_byte_buffer_decode_time_ms_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "decodeTimeMs", "J");
CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
use_surface_ = (render_egl_context_ != NULL);
@@ -255,14 +261,6 @@
MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
// Call Release() to ensure no more callbacks to us after we are deleted.
Release();
- // Delete global references.
- JNIEnv* jni = AttachCurrentThreadIfNeeded();
- if (previous_surface_texture_ != NULL) {
- jni->DeleteGlobalRef(previous_surface_texture_);
- }
- if (surface_texture_ != NULL) {
- jni->DeleteGlobalRef(surface_texture_);
- }
}
int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
@@ -313,6 +311,11 @@
frames_received_ = 0;
frames_decoded_ = 0;
+ if (use_surface_) {
+ surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
+ jni, render_egl_context_);
+ }
+
jobject j_video_codec_enum = JavaEnumFromIndex(
jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
bool success = jni->CallBooleanMethod(
@@ -321,7 +324,8 @@
j_video_codec_enum,
codec_.width,
codec_.height,
- use_surface_ ? render_egl_context_ : nullptr);
+ use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
+ : nullptr);
if (CheckException(jni) || !success) {
ALOGE << "Codec initialization error - fallback to SW codec.";
sw_fallback_required_ = true;
@@ -348,7 +352,6 @@
current_decoding_time_ms_ = 0;
timestamps_.clear();
ntp_times_ms_.clear();
- frame_rtc_times_ms_.clear();
jobjectArray input_buffers = (jobjectArray)GetObjectField(
jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
@@ -365,15 +368,6 @@
}
}
- if (use_surface_) {
- jobject surface_texture = GetObjectField(
- jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
- if (previous_surface_texture_ != NULL) {
- jni->DeleteGlobalRef(previous_surface_texture_);
- }
- previous_surface_texture_ = surface_texture_;
- surface_texture_ = jni->NewGlobalRef(surface_texture);
- }
codec_thread_->PostDelayed(kMediaCodecPollMs, this);
return WEBRTC_VIDEO_CODEC_OK;
@@ -399,6 +393,7 @@
}
input_buffers_.clear();
jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
+ surface_texture_helper_ = nullptr;
inited_ = false;
rtc::MessageQueueManager::Clear(this);
if (CheckException(jni)) {
@@ -508,7 +503,7 @@
if (frames_received_ > frames_decoded_ + max_pending_frames_) {
ALOGV("Received: %d. Decoded: %d. Wait for output...",
frames_received_, frames_decoded_);
- if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
+ if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) {
ALOGE << "DeliverPendingOutputs error. Frames received: " <<
frames_received_ << ". Frames decoded: " << frames_decoded_;
return ProcessHWErrorOnCodecThread();
@@ -553,7 +548,6 @@
current_bytes_ += inputImage._length;
timestamps_.push_back(inputImage._timeStamp);
ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
- frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
// Feed input to decoder.
bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
@@ -576,16 +570,18 @@
}
bool MediaCodecVideoDecoder::DeliverPendingOutputs(
- JNIEnv* jni, int dequeue_timeout_us) {
+ JNIEnv* jni, int dequeue_timeout_ms) {
if (frames_received_ <= frames_decoded_) {
// No need to query for output buffers - decoder is drained.
return true;
}
// Get decoder output.
- jobject j_decoder_output_buffer = jni->CallObjectMethod(
- *j_media_codec_video_decoder_,
- j_dequeue_output_buffer_method_,
- dequeue_timeout_us);
+ jobject j_decoder_output_buffer =
+ jni->CallObjectMethod(*j_media_codec_video_decoder_,
+ use_surface_ ? j_dequeue_texture_buffer_method_
+ : j_dequeue_byte_buffer_method_,
+ dequeue_timeout_ms);
+
if (CheckException(jni)) {
ALOGE << "dequeueOutputBuffer() error";
return false;
@@ -605,19 +601,30 @@
j_slice_height_field_);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
- long output_timestamps_ms = 0;
+ int64_t output_timestamps_ms = 0;
+ int decode_time_ms = 0;
+ int64_t frame_delayed_ms = 0;
if (use_surface_) {
// Extract data from Java DecodedTextureBuffer.
const int texture_id =
- GetIntField(jni, j_decoder_output_buffer, j_textureID_field_);
- const int64_t timestamp_us =
- GetLongField(jni, j_decoder_output_buffer,
- j_texture_presentation_timestamp_us_field_);
- output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
- // Create webrtc::VideoFrameBuffer with native texture handle.
- native_handle_.SetTextureObject(surface_texture_, texture_id);
- frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>(
- &native_handle_, width, height);
+ GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_);
+ if (texture_id != 0) { // |texture_id| == 0 represents a dropped frame.
+ const jfloatArray j_transform_matrix =
+ reinterpret_cast<jfloatArray>(GetObjectField(
+ jni, j_decoder_output_buffer, j_transform_matrix_field_));
+ const int64_t timestamp_us =
+ GetLongField(jni, j_decoder_output_buffer,
+ j_texture_presentation_timestamp_us_field_);
+ output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
+ decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_texture_decode_time_ms_field_);
+ frame_delayed_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_texture_frame_delay_ms_field_);
+
+ // Create webrtc::VideoFrameBuffer with native texture handle.
+ frame_buffer = surface_texture_helper_->CreateTextureFrame(
+ width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
+ }
} else {
// Extract data from Java ByteBuffer and create output yuv420 frame -
// for non surface decoding only.
@@ -630,6 +637,8 @@
const int64_t timestamp_us = GetLongField(
jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_);
output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
+ decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_byte_buffer_decode_time_ms_field_);
if (output_buffer_size < width * height * 3 / 2) {
ALOGE << "Insufficient output buffer size: " << output_buffer_size;
@@ -687,7 +696,7 @@
j_return_decoded_byte_buffer_method_,
output_buffer_index);
if (CheckException(jni)) {
- ALOGE << "returnDecodedByteBuffer error";
+ ALOGE << "returnDecodedOutputBuffer error";
return false;
}
}
@@ -702,26 +711,24 @@
decoded_frame.set_ntp_time_ms(ntp_times_ms_.front());
ntp_times_ms_.erase(ntp_times_ms_.begin());
}
- int64_t frame_decoding_time_ms = 0;
- if (frame_rtc_times_ms_.size() > 0) {
- frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
- frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
- }
+
if (frames_decoded_ < kMaxDecodedLogFrames) {
ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width <<
" x " << height << ". " << stride << " x " << slice_height <<
". Color: " << color_format << ". TS:" << (int)output_timestamps_ms <<
- ". DecTime: " << (int)frame_decoding_time_ms;
+ ". DecTime: " << (int)decode_time_ms <<
+ ". DelayTime: " << (int)frame_delayed_ms;
}
// Calculate and print decoding statistics - every 3 seconds.
frames_decoded_++;
current_frames_++;
- current_decoding_time_ms_ += frame_decoding_time_ms;
+ current_decoding_time_ms_ += decode_time_ms;
int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
current_frames_ > 0) {
- ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " <<
+ ALOGD << "Decoded frames: " << frames_decoded_ << ". Received frames: "
+ << frames_received_ << ". Bitrate: " <<
(current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " <<
((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
<< ". decTime: " << (current_decoding_time_ms_ / current_frames_) <<
@@ -732,12 +739,15 @@
current_decoding_time_ms_ = 0;
}
- // Callback - output decoded frame.
- const int32_t callback_status = callback_->Decoded(decoded_frame);
- if (callback_status > 0) {
- ALOGE << "callback error";
+ // |decoded_frame.IsZeroSize()| returns true when a frame has been dropped.
+ if (!decoded_frame.IsZeroSize()) {
+ // Callback - output decoded frame.
+ const int32_t callback_status =
+ callback_->Decoded(decoded_frame, decode_time_ms);
+ if (callback_status > 0) {
+ ALOGE << "callback error";
+ }
}
-
return true;
}
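
The dropped-frame convention above (|texture_id| == 0) must be honored
symmetrically on both sides of the JNI boundary: the timestamp queues are still
popped so they stay in sync with the codec, but nothing reaches the Decoded()
callback. A compact Java-side illustration of that contract; the types and
method bodies are stand-ins for the C++ logic in DeliverPendingOutputs():

  class TextureFrameConsumer {
    private static final int DROPPED_FRAME_TEXTURE_ID = 0;

    void onTextureBuffer(int textureId, float[] transformMatrix, long decodeTimeMs) {
      popQueuedTimestamps();  // Always consume RTP/NTP timestamps to stay in sync.
      if (textureId == DROPPED_FRAME_TEXTURE_ID) {
        return;  // Dropped frame: no frame buffer is created, no callback fires.
      }
      deliverDecodedFrame(textureId, transformMatrix, decodeTimeMs);
    }

    private void popQueuedTimestamps() { /* analog of timestamps_ / ntp_times_ms_ */ }
    private void deliverDecodedFrame(int textureId, float[] matrix, long decodeTimeMs) {
      /* wrap the texture in a frame buffer and invoke the decode callback */
    }
  }
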
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
index 02b9f22..054719a 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
@@ -180,16 +180,10 @@
buffer, rotation, timestamp_ns);
}
-void AndroidVideoCapturerJni::OnTextureFrame(
- int width,
- int height,
- int64_t timestamp_ns,
- const NativeTextureHandleImpl& handle) {
- // TODO(magjed): Fix this. See bug webrtc:4993.
- RTC_NOTREACHED()
- << "The rest of the stack for Android expects the native "
- "handle to be a NativeHandleImpl with a SurfaceTexture, not a "
- "NativeTextureHandleImpl";
+void AndroidVideoCapturerJni::OnTextureFrame(int width,
+ int height,
+ int64_t timestamp_ns,
+ const NativeHandleImpl& handle) {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
new rtc::RefCountedObject<AndroidTextureBuffer>(
width, height, handle,
@@ -234,8 +228,8 @@
jlong j_timestamp) {
reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
->OnTextureFrame(j_width, j_height, j_timestamp,
- NativeTextureHandleImpl(jni, j_oes_texture_id,
- j_transform_matrix));
+ NativeHandleImpl(jni, j_oes_texture_id,
+ j_transform_matrix));
}
JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted)
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
index d1eb3a0..96def5e 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
@@ -39,7 +39,7 @@
namespace webrtc_jni {
-class NativeTextureHandleImpl;
+class NativeHandleImpl;
// AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
// The purpose of the delegate is to hide the JNI specifics from the C++ only
@@ -61,7 +61,7 @@
void OnMemoryBufferFrame(void* video_frame, int length, int width,
int height, int rotation, int64_t timestamp_ns);
void OnTextureFrame(int width, int height, int64_t timestamp_ns,
- const NativeTextureHandleImpl& handle);
+ const NativeHandleImpl& handle);
void OnOutputFormatRequest(int width, int height, int fps);
protected:
diff --git a/talk/app/webrtc/java/jni/classreferenceholder.cc b/talk/app/webrtc/java/jni/classreferenceholder.cc
index 4c836f8..13883be 100644
--- a/talk/app/webrtc/java/jni/classreferenceholder.cc
+++ b/talk/app/webrtc/java/jni/classreferenceholder.cc
@@ -85,7 +85,7 @@
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
- LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
#endif
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.cc b/talk/app/webrtc/java/jni/native_handle_impl.cc
index ac3e045..ed9ad8e 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.cc
+++ b/talk/app/webrtc/java/jni/native_handle_impl.cc
@@ -31,9 +31,9 @@
namespace webrtc_jni {
-NativeTextureHandleImpl::NativeTextureHandleImpl(JNIEnv* jni,
- jint j_oes_texture_id,
- jfloatArray j_transform_matrix)
+NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
+ jint j_oes_texture_id,
+ jfloatArray j_transform_matrix)
: oes_texture_id(j_oes_texture_id) {
RTC_CHECK_EQ(16, jni->GetArrayLength(j_transform_matrix));
jfloat* transform_matrix_ptr =
@@ -44,38 +44,10 @@
jni->ReleaseFloatArrayElements(j_transform_matrix, transform_matrix_ptr, 0);
}
-NativeHandleImpl::NativeHandleImpl() : texture_object_(NULL), texture_id_(-1) {}
-
-void* NativeHandleImpl::GetHandle() {
- return texture_object_;
-}
-
-int NativeHandleImpl::GetTextureId() {
- return texture_id_;
-}
-
-void NativeHandleImpl::SetTextureObject(void* texture_object, int texture_id) {
- texture_object_ = reinterpret_cast<jobject>(texture_object);
- texture_id_ = texture_id;
-}
-
-JniNativeHandleBuffer::JniNativeHandleBuffer(void* native_handle,
- int width,
- int height)
- : NativeHandleBuffer(native_handle, width, height) {}
-
-rtc::scoped_refptr<webrtc::VideoFrameBuffer>
-JniNativeHandleBuffer::NativeToI420Buffer() {
- // TODO(pbos): Implement before using this in the encoder pipeline (or
- // remove the RTC_CHECK() in VideoCapture).
- RTC_NOTREACHED();
- return nullptr;
-}
-
AndroidTextureBuffer::AndroidTextureBuffer(
int width,
int height,
- const NativeTextureHandleImpl& native_handle,
+ const NativeHandleImpl& native_handle,
const rtc::Callback0<void>& no_longer_used)
: webrtc::NativeHandleBuffer(&native_handle_, width, height),
native_handle_(native_handle),
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.h b/talk/app/webrtc/java/jni/native_handle_impl.h
index dd04bc2..16d3d7c 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.h
+++ b/talk/app/webrtc/java/jni/native_handle_impl.h
@@ -36,51 +36,26 @@
namespace webrtc_jni {
// Wrapper for texture object.
-struct NativeTextureHandleImpl {
- NativeTextureHandleImpl(JNIEnv* jni,
- jint j_oes_texture_id,
- jfloatArray j_transform_matrix);
+struct NativeHandleImpl {
+ NativeHandleImpl(JNIEnv* jni,
+ jint j_oes_texture_id,
+ jfloatArray j_transform_matrix);
const int oes_texture_id;
float sampling_matrix[16];
};
-// Native handle for SurfaceTexture + texture id.
-class NativeHandleImpl {
- public:
- NativeHandleImpl();
-
- void* GetHandle();
- int GetTextureId();
- void SetTextureObject(void* texture_object, int texture_id);
-
- private:
- jobject texture_object_;
- int32_t texture_id_;
-};
-
-class JniNativeHandleBuffer : public webrtc::NativeHandleBuffer {
- public:
- JniNativeHandleBuffer(void* native_handle, int width, int height);
-
- // TODO(pbos): Override destructor to release native handle, at the moment the
- // native handle is not released based on refcount.
-
- private:
- rtc::scoped_refptr<webrtc::VideoFrameBuffer> NativeToI420Buffer() override;
-};
-
class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
public:
AndroidTextureBuffer(int width,
int height,
- const NativeTextureHandleImpl& native_handle,
+ const NativeHandleImpl& native_handle,
const rtc::Callback0<void>& no_longer_used);
~AndroidTextureBuffer();
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
private:
- NativeTextureHandleImpl native_handle_;
+ NativeHandleImpl native_handle_;
rtc::Callback0<void> no_longer_used_cb_;
};
diff --git a/talk/app/webrtc/java/jni/peerconnection_jni.cc b/talk/app/webrtc/java/jni/peerconnection_jni.cc
index e75cd55..984227d 100644
--- a/talk/app/webrtc/java/jni/peerconnection_jni.cc
+++ b/talk/app/webrtc/java/jni/peerconnection_jni.cc
@@ -773,7 +773,7 @@
jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;J)V")),
j_texture_frame_ctor_id_(GetMethodID(
jni, *j_frame_class_, "<init>",
- "(IIILjava/lang/Object;IJ)V")),
+ "(IIII[FJ)V")),
j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
CHECK_EXCEPTION(jni);
}
@@ -829,13 +829,13 @@
jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
NativeHandleImpl* handle =
reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
- jobject texture_object = reinterpret_cast<jobject>(handle->GetHandle());
- int texture_id = handle->GetTextureId();
+ jfloatArray sampling_matrix = jni()->NewFloatArray(16);
+ jni()->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
return jni()->NewObject(
*j_frame_class_, j_texture_frame_ctor_id_,
frame->GetWidth(), frame->GetHeight(),
static_cast<int>(frame->GetVideoRotation()),
- texture_object, texture_id, javaShallowCopy(frame));
+ handle->oes_texture_id, sampling_matrix, javaShallowCopy(frame));
}
JNIEnv* jni() {
diff --git a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
index 05f1b23..65c1737 100644
--- a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
+++ b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
@@ -70,7 +70,7 @@
rtc::scoped_refptr<webrtc::VideoFrameBuffer>
SurfaceTextureHelper::CreateTextureFrame(int width, int height,
- const NativeTextureHandleImpl& native_handle) {
+ const NativeHandleImpl& native_handle) {
return new rtc::RefCountedObject<AndroidTextureBuffer>(
width, height, native_handle,
rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this));
diff --git a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
index dc9d2b8..5bd94b5 100644
--- a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
+++ b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
@@ -66,7 +66,7 @@
rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
int width,
int height,
- const NativeTextureHandleImpl& native_handle);
+ const NativeHandleImpl& native_handle);
protected:
~SurfaceTextureHelper();
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
index 0443114..2d91991 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -27,25 +27,24 @@
package org.webrtc;
-import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.media.MediaFormat;
-import android.opengl.GLES11Ext;
-import android.opengl.GLES20;
import android.os.Build;
+import android.os.SystemClock;
import android.view.Surface;
import org.webrtc.Logging;
import java.nio.ByteBuffer;
import java.util.Arrays;
+import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
-
-import javax.microedition.khronos.egl.EGLContext;
+import java.util.Queue;
+import java.util.concurrent.TimeUnit;
// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
// This class is an implementation detail of the Java PeerConnection API.
@@ -103,14 +102,21 @@
private int height;
private int stride;
private int sliceHeight;
+ private boolean hasDecodedFirstFrame;
+ private final Queue<Long> decodeStartTimeMs = new LinkedList<Long>();
private boolean useSurface;
- private int textureID = 0;
- private SurfaceTexture surfaceTexture = null;
- private Surface surface = null;
- private EglBase eglBase;
- private MediaCodecVideoDecoder() {
- }
+ // The below variables are only used when decoding to a Surface.
+ private TextureListener textureListener;
+ // Max number of output buffers queued before starting to drop decoded frames.
+ private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
+ private int droppedFrames;
+ // |isWaitingForTexture| is true when waiting for the transition:
+ // MediaCodec.releaseOutputBuffer() -> onTextureFrameAvailable().
+ private boolean isWaitingForTexture;
+ private Surface surface = null;
+ private final Queue<DecodedOutputBuffer>
+ dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
// MediaCodec error handler - invoked when critical error happens which may prevent
// further use of media codec API. Now it means that one of media codec instances
@@ -222,12 +228,13 @@
}
}
- // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
- private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
+ // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
+ private boolean initDecode(
+ VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
if (mediaCodecThread != null) {
throw new RuntimeException("Forgot to release()?");
}
- useSurface = (sharedContext != null);
+ useSurface = (surfaceTextureHelper != null);
String mime = null;
String[] supportedCodecPrefixes = null;
if (type == VideoCodecType.VIDEO_CODEC_VP8) {
@@ -249,9 +256,6 @@
Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
". Color: 0x" + Integer.toHexString(properties.colorFormat) +
". Use Surface: " + useSurface);
- if (sharedContext != null) {
- Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
- }
runningInstance = this; // Decoder is now running and can be queried for stack traces.
mediaCodecThread = Thread.currentThread();
try {
@@ -261,16 +265,8 @@
sliceHeight = height;
if (useSurface) {
- // Create shared EGL context.
- eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
- eglBase.createDummyPbufferSurface();
- eglBase.makeCurrent();
-
- // Create output surface
- textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
- Logging.d(TAG, "Video decoder TextureID = " + textureID);
- surfaceTexture = new SurfaceTexture(textureID);
- surface = new Surface(surfaceTexture);
+ textureListener = new TextureListener(surfaceTextureHelper);
+ surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
}
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
@@ -289,6 +285,11 @@
colorFormat = properties.colorFormat;
outputBuffers = mediaCodec.getOutputBuffers();
inputBuffers = mediaCodec.getInputBuffers();
+ decodeStartTimeMs.clear();
+ hasDecodedFirstFrame = false;
+ dequeuedSurfaceOutputBuffers.clear();
+ droppedFrames = 0;
+ isWaitingForTexture = false;
Logging.d(TAG, "Input buffers: " + inputBuffers.length +
". Output buffers: " + outputBuffers.length);
return true;
@@ -299,7 +300,7 @@
}
private void release() {
- Logging.d(TAG, "Java releaseDecoder");
+ Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
checkOnMediaCodecThread();
// Run Mediacodec stop() and release() on separate thread since sometime
@@ -337,11 +338,7 @@
if (useSurface) {
surface.release();
surface = null;
- Logging.d(TAG, "Delete video decoder TextureID " + textureID);
- GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
- textureID = 0;
- eglBase.release();
- eglBase = null;
+ textureListener.release();
}
Logging.d(TAG, "Java releaseDecoder done");
}
@@ -364,6 +361,7 @@
try {
inputBuffers[inputBufferIndex].position(0);
inputBuffers[inputBufferIndex].limit(size);
+ decodeStartTimeMs.add(SystemClock.elapsedRealtime());
mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
return true;
}
@@ -373,57 +371,156 @@
}
}
- // Helper structs for dequeueOutputBuffer() below.
- private static class DecodedByteBuffer {
- public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) {
+ // Helper struct for dequeueOutputBuffer() below.
+ private static class DecodedOutputBuffer {
+ public DecodedOutputBuffer(int index, int offset, int size, long presentationTimestampUs,
+ long decodeTime, long endDecodeTime) {
this.index = index;
this.offset = offset;
this.size = size;
this.presentationTimestampUs = presentationTimestampUs;
+ this.decodeTimeMs = decodeTime;
+ this.endDecodeTimeMs = endDecodeTime;
}
private final int index;
private final int offset;
private final int size;
private final long presentationTimestampUs;
+ // Number of ms it took to decode this frame.
+ private final long decodeTimeMs;
+ // System time when this frame finished decoding.
+ private final long endDecodeTimeMs;
}
+ // Helper struct for dequeueTextureBuffer() below.
private static class DecodedTextureBuffer {
private final int textureID;
+ private final float[] transformMatrix;
private final long presentationTimestampUs;
+ private final long decodeTimeMs;
+ // Interval from when the frame finished decoding until this buffer has been created.
+ // Since there is only one texture, this interval depends on the time from when
+ // a frame is decoded and provided to C++ until that frame is returned to the MediaCodec
+ // so that the texture can be updated with the next decoded frame.
+ private final long frameDelayMs;
- public DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
+ // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
+ // that was dropped.
+ public DecodedTextureBuffer(int textureID, float[] transformMatrix,
+ long presentationTimestampUs, long decodeTimeMs, long frameDelay) {
this.textureID = textureID;
+ this.transformMatrix = transformMatrix;
this.presentationTimestampUs = presentationTimestampUs;
+ this.decodeTimeMs = decodeTimeMs;
+ this.frameDelayMs = frameDelay;
}
}
- // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or
- // DecodedTexturebuffer depending on |useSurface| configuration.
+ // Poll based texture listener.
+ private static class TextureListener
+ implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+ public static class TextureInfo {
+ private final int textureID;
+ private final float[] transformMatrix;
+
+ TextureInfo(int textureId, float[] transformMatrix) {
+ this.textureID = textureId;
+ this.transformMatrix = transformMatrix;
+ }
+ }
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ private TextureInfo textureInfo;
+ // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
+ private final Object newFrameLock = new Object();
+
+ public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ surfaceTextureHelper.setListener(this);
+ }
+
+ // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
+ @Override
+ public void onTextureFrameAvailable(
+ int oesTextureId, float[] transformMatrix, long timestampNs) {
+ synchronized (newFrameLock) {
+ if (textureInfo != null) {
+ Logging.e(TAG,
+ "Unexpected onTextureFrameAvailable() called while already holding a texture.");
+ throw new IllegalStateException("Already holding a texture.");
+ }
+ // |timestampNs| is always zero on some Android versions.
+ textureInfo = new TextureInfo(oesTextureId, transformMatrix);
+ newFrameLock.notifyAll();
+ }
+ }
+
+ // Dequeues and returns a TextureInfo if available, or null otherwise.
+ public TextureInfo dequeueTextureInfo(int timeoutMs) {
+ synchronized (newFrameLock) {
+ if (textureInfo == null && timeoutMs > 0) {
+ try {
+ newFrameLock.wait(timeoutMs);
+ } catch (InterruptedException e) {
+ // Restore the interrupted status by reinterrupting the thread.
+ Thread.currentThread().interrupt();
+ }
+ }
+ TextureInfo returnedInfo = textureInfo;
+ textureInfo = null;
+ return returnedInfo;
+ }
+ }
+
+ public void release() {
+ // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
+ // progress is done. Therefore, the call to disconnect() must be outside any synchronized
+ // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
+ surfaceTextureHelper.disconnect();
+ synchronized (newFrameLock) {
+ if (textureInfo != null) {
+ surfaceTextureHelper.returnTextureFrame();
+ textureInfo = null;
+ }
+ }
+ }
+ }
+
+ // Returns null if no decoded buffer is available, and otherwise a DecodedOutputBuffer.
// Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
// unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
// upon codec error.
- private Object dequeueOutputBuffer(int dequeueTimeoutUs)
- throws IllegalStateException, MediaCodec.CodecException {
+ private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
checkOnMediaCodecThread();
-
+ if (decodeStartTimeMs.isEmpty()) {
+ return null;
+ }
// Drain the decoder until receiving a decoded buffer or hitting
// MediaCodec.INFO_TRY_AGAIN_LATER.
final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
while (true) {
- final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
+ final int result = mediaCodec.dequeueOutputBuffer(
+ info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
switch (result) {
- case MediaCodec.INFO_TRY_AGAIN_LATER:
- return null;
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = mediaCodec.getOutputBuffers();
Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
+ if (hasDecodedFirstFrame) {
+ throw new RuntimeException("Unexpected output buffer change event.");
+ }
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
MediaFormat format = mediaCodec.getOutputFormat();
Logging.d(TAG, "Decoder format changed: " + format.toString());
+ int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
+ int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
+ if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
+ throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
+ height + ". New " + new_width + "*" + new_height);
+ }
width = format.getInteger(MediaFormat.KEY_WIDTH);
height = format.getInteger(MediaFormat.KEY_HEIGHT);
+
if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
@@ -441,30 +538,87 @@
stride = Math.max(width, stride);
sliceHeight = Math.max(height, sliceHeight);
break;
+ case MediaCodec.INFO_TRY_AGAIN_LATER:
+ return null;
default:
- // Output buffer decoded.
- if (useSurface) {
- mediaCodec.releaseOutputBuffer(result, true /* render */);
- // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
- // frame.
- return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
- } else {
- return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
- }
- }
+ hasDecodedFirstFrame = true;
+ return new DecodedOutputBuffer(result, info.offset, info.size, info.presentationTimeUs,
+ SystemClock.elapsedRealtime() - decodeStartTimeMs.remove(),
+ SystemClock.elapsedRealtime());
+ }
}
}
+ // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
+ // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
+ // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
+ // upon codec error.
+ private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
+ checkOnMediaCodecThread();
+ if (!useSurface) {
+ throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
+ }
+
+ DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
+ if (outputBuffer != null) {
+ if (dequeuedSurfaceOutputBuffers.size() >= Math.min(
+ MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)) {
+ ++droppedFrames;
+ Logging.w(TAG, "Too many output buffers. Dropping frame. Total number of dropped frames: "
+ + droppedFrames);
+ // Drop the newest frame. Don't drop the oldest one, since if |isWaitingForTexture| is
+ // set, releaseOutputBuffer() has already been called for it. Dropping the newest frame
+ // will lead to a shift of timestamps by one frame in
+ // MediaCodecVideoDecoder::DeliverPendingOutputs.
+ mediaCodec.releaseOutputBuffer(outputBuffer.index, false /* render */);
+ return new DecodedTextureBuffer(0, null, outputBuffer.presentationTimestampUs,
+ outputBuffer.decodeTimeMs,
+ SystemClock.elapsedRealtime() - outputBuffer.endDecodeTimeMs);
+ }
+ dequeuedSurfaceOutputBuffers.add(outputBuffer);
+ }
+
+ if (dequeuedSurfaceOutputBuffers.isEmpty()) {
+ return null;
+ }
+
+ if (!isWaitingForTexture) {
+ // Get the first frame in the queue and render to the decoder output surface.
+ mediaCodec.releaseOutputBuffer(dequeuedSurfaceOutputBuffers.peek().index, true /* render */);
+ isWaitingForTexture = true;
+ }
+
+ // We are waiting for a frame to be rendered to the decoder surface.
+ // Check if it is ready now by waiting at most |dequeueTimeoutMs|. There can only be one frame
+ // rendered at a time.
+ TextureListener.TextureInfo info = textureListener.dequeueTextureInfo(dequeueTimeoutMs);
+ if (info != null) {
+ isWaitingForTexture = false;
+ final DecodedOutputBuffer renderedBuffer =
+ dequeuedSurfaceOutputBuffers.remove();
+ if (!dequeuedSurfaceOutputBuffers.isEmpty()) {
+ // Get the next frame in the queue and render to the decoder output surface.
+ mediaCodec.releaseOutputBuffer(
+ dequeuedSurfaceOutputBuffers.peek().index, true /* render */);
+ isWaitingForTexture = true;
+ }
+
+ return new DecodedTextureBuffer(info.textureID, info.transformMatrix,
+ renderedBuffer.presentationTimestampUs, renderedBuffer.decodeTimeMs,
+ SystemClock.elapsedRealtime() - renderedBuffer.endDecodeTimeMs);
+ }
+ return null;
+ }
+
// Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
// non-surface decoding.
// Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
// for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
// MediaCodec.CodecException upon codec error.
- private void returnDecodedByteBuffer(int index)
+ private void returnDecodedOutputBuffer(int index)
throws IllegalStateException, MediaCodec.CodecException {
checkOnMediaCodecThread();
if (useSurface) {
- throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding.");
+ throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
}
mediaCodec.releaseOutputBuffer(index, false /* render */);
}
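
The surface path above pipelines all frames through a single SurfaceTexture: at
most one output buffer is rendered at a time, and the next
releaseOutputBuffer(..., true) is issued only after onTextureFrameAvailable()
has fired for the previous one. A reduced sketch of that state machine, with
stand-in types (the real logic is dequeueTextureBuffer() and TextureListener
above):

  import java.util.LinkedList;
  import java.util.Queue;

  class SingleTexturePipeline {
    private final Queue<Integer> pendingOutputBufferIndices = new LinkedList<Integer>();
    private boolean isWaitingForTexture = false;

    // Called when a decoded output buffer has been dequeued from MediaCodec.
    void onOutputBuffer(int bufferIndex) {
      pendingOutputBufferIndices.add(bufferIndex);
      maybeRenderNext();
    }

    // Called when the rendered frame has arrived on the SurfaceTexture.
    void onTextureFrameAvailable() {
      isWaitingForTexture = false;
      pendingOutputBufferIndices.remove();  // This buffer's image is now the texture.
      maybeRenderNext();
    }

    private void maybeRenderNext() {
      if (!isWaitingForTexture && !pendingOutputBufferIndices.isEmpty()) {
        // mediaCodec.releaseOutputBuffer(pendingOutputBufferIndices.peek(), true);
        isWaitingForTexture = true;
      }
    }
  }
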
diff --git a/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java b/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
index 3c255dd..2e307fc 100644
--- a/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
+++ b/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
@@ -46,7 +46,11 @@
public final int[] yuvStrides;
public ByteBuffer[] yuvPlanes;
public final boolean yuvFrame;
- public Object textureObject;
+ // Matrix that transforms standard coordinates to their proper sampling locations in
+ // the texture. This transform compensates for any properties of the video source that
+ // cause it to appear different from a normalized texture. This matrix does not take
+ // |rotationDegree| into account.
+ public final float[] samplingMatrix;
public int textureId;
// Frame pointer in C++.
private long nativeFramePointer;
@@ -70,19 +74,27 @@
if (rotationDegree % 90 != 0) {
throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
}
+ // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
+ // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
+ // bottom-left corner. This discrepancy is corrected by using a vertical flip as the
+ // sampling matrix.
+ samplingMatrix = new float[] {
+ 1, 0, 0, 0,
+ 0, -1, 0, 0,
+ 0, 0, 1, 0,
+ 0, 1, 0, 1};
}
/**
* Construct a texture frame of the given dimensions with data in SurfaceTexture
*/
- I420Frame(
- int width, int height, int rotationDegree,
- Object textureObject, int textureId, long nativeFramePointer) {
+ I420Frame(int width, int height, int rotationDegree, int textureId, float[] samplingMatrix,
+ long nativeFramePointer) {
this.width = width;
this.height = height;
this.yuvStrides = null;
this.yuvPlanes = null;
- this.textureObject = textureObject;
+ this.samplingMatrix = samplingMatrix;
this.textureId = textureId;
this.yuvFrame = false;
this.rotationDegree = rotationDegree;
@@ -125,7 +137,6 @@
*/
public static void renderFrameDone(I420Frame frame) {
frame.yuvPlanes = null;
- frame.textureObject = null;
frame.textureId = 0;
if (frame.nativeFramePointer != 0) {
releaseNativeFrame(frame.nativeFramePointer);
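
The samplingMatrix introduced above for YUV frames is a plain vertical flip in
OpenGL's column-major layout: it maps (u, v) to (u, 1 - v), so the ByteBuffer's
top-left origin lands where glTexImage2D() expects the bottom-left. A standalone
check of that claim, assuming an Android runtime for android.opengl.Matrix:

  import android.opengl.Matrix;

  final class FlipMatrixDemo {
    public static void main(String[] args) {
      final float[] flip = {
          1,  0, 0, 0,
          0, -1, 0, 0,
          0,  0, 1, 0,
          0,  1, 0, 1};  // Same column-major values as samplingMatrix above.
      final float[] topEdge = {0.25f, 0f, 0f, 1f};  // A point with v = 0.
      final float[] out = new float[4];
      Matrix.multiplyMV(out, 0, flip, 0, topEdge, 0);
      // out == {0.25, 1.0, 0.0, 1.0}: v = 0 maps to v = 1, i.e. a vertical flip.
    }
  }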