Revert of "Android MediaCodecVideoDecoder: Manage lifetime of texture frames" https://codereview.webrtc.org/1378033003/

The code that depends on the reverted CL is disabled but not removed. NativeHandleImpl is reverted to the previous implementation, and the new implementation is renamed to NativeTextureHandleImpl. Texture capture cannot be used anymore, because it will crash in peerconnection_jni.cc.

Reason for revert:
Increased HW decoder latency and crashes related to it. Also a suspected cause of video tearing.

Original issue's description:
> This CL should be the last one in a series to finally
> unblock camera texture capture.
>
> The SurfaceTexture.updateTexImage() calls are moved from
> the video renderers into MediaCodecVideoDecoder, and the
> destructor of the texture frames will signal
> MediaCodecVideoDecoder that the frame has returned. This
> CL also removes the SurfaceTexture from the native handle
> and only exposes the texture matrix instead, because only
> the video source should access the SurfaceTexture.
>
> BUG=webrtc:4993
> R=glaznev@webrtc.org, perkj@webrtc.org
>
> Committed: https://crrev.com/91b348c7029d843e06868ed12b728a809c53176c
> Cr-Commit-Position: refs/heads/master@{#10203}

TBR=glaznev
BUG=webrtc:4993

Review URL: https://codereview.webrtc.org/1394103005

Cr-Commit-Position: refs/heads/master@{#10288}
diff --git a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
index 93d0e86..265f34d 100644
--- a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
+++ b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
@@ -432,10 +432,25 @@
     }
 
     final long startTimeNs = System.nanoTime();
+    final float[] samplingMatrix;
+    if (frame.yuvFrame) {
+      // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
+      // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
+      // bottom-left corner. We correct this discrepancy by setting a vertical flip as sampling
+      // matrix.
+      samplingMatrix = RendererCommon.verticalFlipMatrix();
+    } else {
+      // TODO(magjed): Move updateTexImage() to the video source instead.
+      SurfaceTexture surfaceTexture = (SurfaceTexture) frame.textureObject;
+      surfaceTexture.updateTexImage();
+      samplingMatrix = new float[16];
+      surfaceTexture.getTransformMatrix(samplingMatrix);
+    }
+
     final float[] texMatrix;
     synchronized (layoutLock) {
       final float[] rotatedSamplingMatrix =
-          RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
+          RendererCommon.rotateTextureMatrix(samplingMatrix, frame.rotationDegree);
       final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
           mirror, frameAspectRatio(), (float) layoutWidth / layoutHeight);
       texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
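
For reference, the RendererCommon.verticalFlipMatrix() call restored above returns the same column-major 4x4 matrix that this CL deletes from the I420Frame constructor in VideoRenderer.java further down. A minimal sketch of that matrix:

    // Column-major vertical flip, in the layout used by android.opengl.Matrix
    // and GLES uniforms: maps texture coordinate (x, y) to (x, 1 - y).
    public static float[] verticalFlipMatrix() {
      return new float[] {
          1,  0, 0, 0,
          0, -1, 0, 0,
          0,  0, 1, 0,
          0,  1, 0, 1};
    }

Read column-major, the second row gives y' = -y + 1, which maps the top-left origin of the ByteBuffer convention onto the bottom-left origin of glTexImage2D().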
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
index 1d413b8..edb9fd6 100644
--- a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
@@ -36,6 +36,7 @@
 import android.annotation.SuppressLint;
 import android.graphics.Point;
 import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
 import android.opengl.EGL14;
 import android.opengl.EGLContext;
 import android.opengl.GLES20;
@@ -245,15 +246,29 @@
         }
 
         if (isNewFrame) {
-          rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
-              pendingFrame.samplingMatrix, pendingFrame.rotationDegree);
           if (pendingFrame.yuvFrame) {
             rendererType = RendererType.RENDERER_YUV;
             drawer.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
                 pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
+            // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
+            // top-left corner of the image, but in glTexImage2D() the first element corresponds to
+            // the bottom-left corner. We correct this discrepancy by setting a vertical flip as
+            // sampling matrix.
+            final float[] samplingMatrix = RendererCommon.verticalFlipMatrix();
+            rotatedSamplingMatrix =
+                RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
           } else {
             rendererType = RendererType.RENDERER_TEXTURE;
-            // External texture rendering. Make a deep copy of the external texture.
+            // External texture rendering. Update texture image to latest and make a deep copy of
+            // the external texture.
+            // TODO(magjed): Move updateTexImage() to the video source instead.
+            final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
+            surfaceTexture.updateTexImage();
+            final float[] samplingMatrix = new float[16];
+            surfaceTexture.getTransformMatrix(samplingMatrix);
+            rotatedSamplingMatrix =
+                RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
+
             // Reallocate offscreen texture if necessary.
             textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());
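
In both branches above, rotatedSamplingMatrix folds the frame's rotation into the sampling matrix before layout. A hedged sketch of that composition using android.opengl.Matrix, assuming RendererCommon.rotateTextureMatrix() rotates texture coordinates around the center of the image at (0.5, 0.5):

    import android.opengl.Matrix;

    // Sketch only; assumes rotation around the texture center (0.5, 0.5).
    public static float[] rotateTextureMatrix(float[] textureMatrix, float rotationDegree) {
      final float[] rotation = new float[16];
      Matrix.setIdentityM(rotation, 0);
      Matrix.translateM(rotation, 0, 0.5f, 0.5f, 0);         // Move origin to the center.
      Matrix.rotateM(rotation, 0, rotationDegree, 0, 0, 1);  // Rotate around the z axis.
      Matrix.translateM(rotation, 0, -0.5f, -0.5f, 0);       // Move origin back.
      final float[] result = new float[16];
      // result = textureMatrix * rotation: the rotation is applied to the
      // coordinates first, then the SurfaceTexture/flip sampling transform.
      Matrix.multiplyMM(result, 0, textureMatrix, 0, rotation, 0);
      return result;
    }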
 
diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
index ed5ba0b..dce5a22 100644
--- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
@@ -33,7 +33,6 @@
 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
 #include "talk/app/webrtc/java/jni/native_handle_impl.h"
-#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
 #include "webrtc/base/bind.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
@@ -112,7 +111,7 @@
   bool use_surface_;
   VideoCodec codec_;
   webrtc::I420BufferPool decoded_frame_pool_;
-  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
+  NativeHandleImpl native_handle_;
   DecodedImageCallback* callback_;
   int frames_received_;  // Number of frames received by decoder.
   int frames_decoded_;  // Number of frames decoded by decoder.
@@ -145,10 +144,10 @@
   jfieldID j_height_field_;
   jfieldID j_stride_field_;
   jfieldID j_slice_height_field_;
+  jfieldID j_surface_texture_field_;
   // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
   jfieldID j_textureID_field_;
-  jfieldID j_transform_matrix_field_;
-  jfieldID j_texture_timestamp_ns_field_;
+  jfieldID j_texture_presentation_timestamp_us_field_;
   // MediaCodecVideoDecoder.DecodedByteBuffer fields.
   jfieldID j_info_index_field_;
   jfieldID j_info_offset_field_;
@@ -157,6 +156,8 @@
 
   // Global references; must be deleted in Release().
   std::vector<jobject> input_buffers_;
+  jobject surface_texture_;
+  jobject previous_surface_texture_;
 
   // Render EGL context - owned by factory, should not be allocated/destroyed
   // by VideoDecoder.
@@ -170,6 +171,8 @@
     key_frame_required_(true),
     inited_(false),
     sw_fallback_required_(false),
+    surface_texture_(NULL),
+    previous_surface_texture_(NULL),
     codec_thread_(new Thread()),
     j_media_codec_video_decoder_class_(
         jni,
@@ -188,7 +191,7 @@
   j_init_decode_method_ = GetMethodID(
       jni, *j_media_codec_video_decoder_class_, "initDecode",
       "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
-      "IILorg/webrtc/SurfaceTextureHelper;)Z");
+      "IILandroid/opengl/EGLContext;)Z");
   j_release_method_ =
       GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
   j_dequeue_input_buffer_method_ = GetMethodID(
@@ -218,15 +221,17 @@
       jni, *j_media_codec_video_decoder_class_, "stride", "I");
   j_slice_height_field_ = GetFieldID(
       jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
+  j_surface_texture_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
+      "Landroid/graphics/SurfaceTexture;");
 
   jclass j_decoder_decoded_texture_buffer_class = FindClass(jni,
       "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
   j_textureID_field_ = GetFieldID(
       jni, j_decoder_decoded_texture_buffer_class, "textureID", "I");
-  j_transform_matrix_field_ = GetFieldID(
-      jni, j_decoder_decoded_texture_buffer_class, "transformMatrix", "[F");
-  j_texture_timestamp_ns_field_ = GetFieldID(
-      jni, j_decoder_decoded_texture_buffer_class, "timestampNs", "J");
+  j_texture_presentation_timestamp_us_field_ =
+      GetFieldID(jni, j_decoder_decoded_texture_buffer_class,
+                 "presentationTimestampUs", "J");
 
   jclass j_decoder_decoded_byte_buffer_class = FindClass(jni,
       "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
@@ -249,6 +254,14 @@
 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
   // Call Release() to ensure no more callbacks to us after we are deleted.
   Release();
+  // Delete global references.
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  if (previous_surface_texture_ != NULL) {
+    jni->DeleteGlobalRef(previous_surface_texture_);
+  }
+  if (surface_texture_ != NULL) {
+    jni->DeleteGlobalRef(surface_texture_);
+  }
 }
 
 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
@@ -298,11 +311,6 @@
   frames_received_ = 0;
   frames_decoded_ = 0;
 
-  if (use_surface_) {
-    surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
-        jni, render_egl_context_);
-  }
-
   jobject j_video_codec_enum = JavaEnumFromIndex(
       jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
   bool success = jni->CallBooleanMethod(
@@ -311,8 +319,7 @@
       j_video_codec_enum,
       codec_.width,
       codec_.height,
-      use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
-                   : nullptr);
+      use_surface_ ? render_egl_context_ : nullptr);
   if (CheckException(jni) || !success) {
     ALOGE << "Codec initialization error - fallback to SW codec.";
     sw_fallback_required_ = true;
@@ -354,6 +361,15 @@
     }
   }
 
+  if (use_surface_) {
+    jobject surface_texture = GetObjectField(
+        jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
+    if (previous_surface_texture_ != NULL) {
+      jni->DeleteGlobalRef(previous_surface_texture_);
+    }
+    previous_surface_texture_ = surface_texture_;
+    surface_texture_ = jni->NewGlobalRef(surface_texture);
+  }
   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
 
   return WEBRTC_VIDEO_CODEC_OK;
@@ -378,7 +394,6 @@
   }
   input_buffers_.clear();
   jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
-  surface_texture_helper_ = nullptr;
   inited_ = false;
   rtc::MessageQueueManager::Clear(this);
   if (CheckException(jni)) {
@@ -488,7 +503,7 @@
   if (frames_received_ > frames_decoded_ + max_pending_frames_) {
     ALOGV("Received: %d. Decoded: %d. Wait for output...",
         frames_received_, frames_decoded_);
-    if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) {
+    if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
       ALOGE << "DeliverPendingOutputs error";
       return ProcessHWErrorOnCodecThread();
     }
@@ -551,7 +566,7 @@
 }
 
 bool MediaCodecVideoDecoder::DeliverPendingOutputs(
-    JNIEnv* jni, int dequeue_timeout_ms) {
+    JNIEnv* jni, int dequeue_timeout_us) {
   if (frames_received_ <= frames_decoded_) {
     // No need to query for output buffers - decoder is drained.
     return true;
@@ -560,7 +575,7 @@
   jobject j_decoder_output_buffer = jni->CallObjectMethod(
       *j_media_codec_video_decoder_,
       j_dequeue_output_buffer_method_,
-      dequeue_timeout_ms);
+      dequeue_timeout_us);
   if (CheckException(jni)) {
     ALOGE << "dequeueOutputBuffer() error";
     return false;
@@ -585,15 +600,14 @@
     // Extract data from Java DecodedTextureBuffer.
     const int texture_id =
         GetIntField(jni, j_decoder_output_buffer, j_textureID_field_);
-    const jfloatArray j_transform_matrix =
-        reinterpret_cast<jfloatArray>(GetObjectField(
-            jni, j_decoder_output_buffer, j_transform_matrix_field_));
-    const int64_t timestamp_ns = GetLongField(jni, j_decoder_output_buffer,
-                                              j_texture_timestamp_ns_field_);
-    output_timestamps_ms = timestamp_ns / rtc::kNumNanosecsPerMillisec;
+    const int64_t timestamp_us =
+        GetLongField(jni, j_decoder_output_buffer,
+                     j_texture_presentation_timestamp_us_field_);
+    output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
     // Create webrtc::VideoFrameBuffer with native texture handle.
-    frame_buffer = surface_texture_helper_->CreateTextureFrame(
-        width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
+    native_handle_.SetTextureObject(surface_texture_, texture_id);
+    frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>(
+        &native_handle_, width, height);
   } else {
     // Extract data from Java ByteBuffer and create output yuv420 frame -
     // for non surface decoding only.
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
index 6d4891e..cd6cfc0 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
@@ -180,10 +180,16 @@
                       buffer, rotation, timestamp_ns);
 }
 
-void AndroidVideoCapturerJni::OnTextureFrame(int width,
-                                             int height,
-                                             int64_t timestamp_ns,
-                                             const NativeHandleImpl& handle) {
+void AndroidVideoCapturerJni::OnTextureFrame(
+    int width,
+    int height,
+    int64_t timestamp_ns,
+    const NativeTextureHandleImpl& handle) {
+  // TODO(magjed): Fix this. See bug webrtc:4993.
+  RTC_NOTREACHED()
+      << "The rest of the stack for Android expects the native "
+         "handle to be a NativeHandleImpl with a SurfaceTexture, not a "
+         "NativeTextureHandleImpl";
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
       new rtc::RefCountedObject<AndroidTextureBuffer>(
           width, height, handle,
@@ -228,8 +234,8 @@
         jlong j_timestamp) {
    reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
          ->OnTextureFrame(j_width, j_height, j_timestamp,
-                          NativeHandleImpl(jni, j_oes_texture_id,
-                                           j_transform_matrix));
+                          NativeTextureHandleImpl(jni, j_oes_texture_id,
+                                                  j_transform_matrix));
 }
 
 JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted)
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
index 0a88df6..360674b 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
@@ -39,7 +39,7 @@
 
 namespace webrtc_jni {
 
-class NativeHandleImpl;
+class NativeTextureHandleImpl;
 
 // AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
 // The purpose of the delegate is to hide the JNI specifics from the C++ only
@@ -61,7 +61,7 @@
   void OnMemoryBufferFrame(void* video_frame, int length, int width,
                            int height, int rotation, int64_t timestamp_ns);
   void OnTextureFrame(int width, int height, int64_t timestamp_ns,
-                      const NativeHandleImpl& handle);
+                      const NativeTextureHandleImpl& handle);
   void OnOutputFormatRequest(int width, int height, int fps);
 
  protected:
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.cc b/talk/app/webrtc/java/jni/native_handle_impl.cc
index ed9ad8e..ac3e045 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.cc
+++ b/talk/app/webrtc/java/jni/native_handle_impl.cc
@@ -31,9 +31,9 @@
 
 namespace webrtc_jni {
 
-NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
-                                   jint j_oes_texture_id,
-                                   jfloatArray j_transform_matrix)
+NativeTextureHandleImpl::NativeTextureHandleImpl(JNIEnv* jni,
+                                                 jint j_oes_texture_id,
+                                                 jfloatArray j_transform_matrix)
     : oes_texture_id(j_oes_texture_id) {
   RTC_CHECK_EQ(16, jni->GetArrayLength(j_transform_matrix));
   jfloat* transform_matrix_ptr =
@@ -44,10 +44,38 @@
   jni->ReleaseFloatArrayElements(j_transform_matrix, transform_matrix_ptr, 0);
 }
 
+NativeHandleImpl::NativeHandleImpl() : texture_object_(NULL), texture_id_(-1) {}
+
+void* NativeHandleImpl::GetHandle() {
+  return texture_object_;
+}
+
+int NativeHandleImpl::GetTextureId() {
+  return texture_id_;
+}
+
+void NativeHandleImpl::SetTextureObject(void* texture_object, int texture_id) {
+  texture_object_ = reinterpret_cast<jobject>(texture_object);
+  texture_id_ = texture_id;
+}
+
+JniNativeHandleBuffer::JniNativeHandleBuffer(void* native_handle,
+                                             int width,
+                                             int height)
+    : NativeHandleBuffer(native_handle, width, height) {}
+
+rtc::scoped_refptr<webrtc::VideoFrameBuffer>
+JniNativeHandleBuffer::NativeToI420Buffer() {
+  // TODO(pbos): Implement before using this in the encoder pipeline (or
+  // remove the RTC_CHECK() in VideoCapture).
+  RTC_NOTREACHED();
+  return nullptr;
+}
+
 AndroidTextureBuffer::AndroidTextureBuffer(
     int width,
     int height,
-    const NativeHandleImpl& native_handle,
+    const NativeTextureHandleImpl& native_handle,
     const rtc::Callback0<void>& no_longer_used)
     : webrtc::NativeHandleBuffer(&native_handle_, width, height),
       native_handle_(native_handle),
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.h b/talk/app/webrtc/java/jni/native_handle_impl.h
index 16d3d7c..dd04bc2 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.h
+++ b/talk/app/webrtc/java/jni/native_handle_impl.h
@@ -36,26 +36,51 @@
 namespace webrtc_jni {
 
 // Wrapper for texture object.
-struct NativeHandleImpl {
-  NativeHandleImpl(JNIEnv* jni,
-                   jint j_oes_texture_id,
-                   jfloatArray j_transform_matrix);
+struct NativeTextureHandleImpl {
+  NativeTextureHandleImpl(JNIEnv* jni,
+                          jint j_oes_texture_id,
+                          jfloatArray j_transform_matrix);
 
   const int oes_texture_id;
   float sampling_matrix[16];
 };
 
+// Native handle for SurfaceTexture + texture id.
+class NativeHandleImpl {
+ public:
+  NativeHandleImpl();
+
+  void* GetHandle();
+  int GetTextureId();
+  void SetTextureObject(void* texture_object, int texture_id);
+
+ private:
+  jobject texture_object_;
+  int32_t texture_id_;
+};
+
+class JniNativeHandleBuffer : public webrtc::NativeHandleBuffer {
+ public:
+  JniNativeHandleBuffer(void* native_handle, int width, int height);
+
+  // TODO(pbos): Override destructor to release native handle; at the moment
+  // the native handle is not released based on refcount.
+
+ private:
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> NativeToI420Buffer() override;
+};
+
 class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
  public:
   AndroidTextureBuffer(int width,
                        int height,
-                       const NativeHandleImpl& native_handle,
+                       const NativeTextureHandleImpl& native_handle,
                        const rtc::Callback0<void>& no_longer_used);
   ~AndroidTextureBuffer();
   rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
 
  private:
-  NativeHandleImpl native_handle_;
+  NativeTextureHandleImpl native_handle_;
   rtc::Callback0<void> no_longer_used_cb_;
 };
 
diff --git a/talk/app/webrtc/java/jni/peerconnection_jni.cc b/talk/app/webrtc/java/jni/peerconnection_jni.cc
index ddf1c8e..c3ccb8a 100644
--- a/talk/app/webrtc/java/jni/peerconnection_jni.cc
+++ b/talk/app/webrtc/java/jni/peerconnection_jni.cc
@@ -771,7 +771,7 @@
             jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;J)V")),
         j_texture_frame_ctor_id_(GetMethodID(
             jni, *j_frame_class_, "<init>",
-            "(IIII[FJ)V")),
+            "(IIILjava/lang/Object;IJ)V")),
         j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
     CHECK_EXCEPTION(jni);
   }
@@ -827,13 +827,13 @@
   jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
     NativeHandleImpl* handle =
         reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
-    jfloatArray sampling_matrix = jni()->NewFloatArray(16);
-    jni()->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
+    jobject texture_object = reinterpret_cast<jobject>(handle->GetHandle());
+    int texture_id = handle->GetTextureId();
     return jni()->NewObject(
         *j_frame_class_, j_texture_frame_ctor_id_,
         frame->GetWidth(), frame->GetHeight(),
         static_cast<int>(frame->GetVideoRotation()),
-        handle->oes_texture_id, sampling_matrix, javaShallowCopy(frame));
+        texture_object, texture_id, javaShallowCopy(frame));
   }
 
   JNIEnv* jni() {
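
The restored constructor signature "(IIILjava/lang/Object;IJ)V" decodes as three ints (width, height, rotation), an Object (the SurfaceTexture handle), an int (texture id) and a long (native frame pointer), returning void. It matches the texture I420Frame constructor restored in VideoRenderer.java below:

    // Java constructor matched by the JNI signature (IIILjava/lang/Object;IJ)V,
    // where I = int, Ljava/lang/Object; = Object, J = long, V = void return.
    I420Frame(int width, int height, int rotationDegree,
        Object textureObject, int textureId, long nativeFramePointer);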
diff --git a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
index ad1db1a..9e1092f 100644
--- a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
+++ b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
@@ -68,7 +68,7 @@
 
 rtc::scoped_refptr<webrtc::VideoFrameBuffer>
 SurfaceTextureHelper::CreateTextureFrame(int width, int height,
-    const NativeHandleImpl& native_handle) {
+    const NativeTextureHandleImpl& native_handle) {
   return new rtc::RefCountedObject<AndroidTextureBuffer>(
       width, height, native_handle,
       rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this));
diff --git a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
index 5bd94b5..dc9d2b8 100644
--- a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
+++ b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
@@ -66,7 +66,7 @@
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
       int width,
       int height,
-      const NativeHandleImpl& native_handle);
+      const NativeTextureHandleImpl& native_handle);
 
  protected:
   ~SurfaceTextureHelper();
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
index a1cc041..b0fc867 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -27,11 +27,15 @@
 
 package org.webrtc;
 
+import android.graphics.SurfaceTexture;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaCodecList;
 import android.media.MediaFormat;
+import android.opengl.EGLContext;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
 import android.os.Build;
 import android.view.Surface;
 
@@ -40,7 +44,6 @@
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.List;
-import java.util.concurrent.TimeUnit;
 
 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
 // This class is an implementation detail of the Java PeerConnection API.
@@ -91,11 +94,10 @@
   private int stride;
   private int sliceHeight;
   private boolean useSurface;
-  // |isWaitingForTexture| is true when waiting for the transition:
-  // MediaCodec.releaseOutputBuffer() -> onTextureFrameAvailable().
-  private boolean isWaitingForTexture = false;
-  private TextureListener textureListener;
+  private int textureID = 0;
+  private SurfaceTexture surfaceTexture = null;
   private Surface surface = null;
+  private EglBase eglBase;
 
   private MediaCodecVideoDecoder() {
     instance = this;
@@ -193,13 +195,12 @@
     }
   }
 
-  // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
-  private boolean initDecode(
-      VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
+  // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
+  private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
     if (mediaCodecThread != null) {
       throw new RuntimeException("Forgot to release()?");
     }
-    useSurface = (surfaceTextureHelper != null);
+    useSurface = (sharedContext != null);
     String mime = null;
     String[] supportedCodecPrefixes = null;
     if (type == VideoCodecType.VIDEO_CODEC_VP8) {
@@ -218,6 +219,9 @@
     Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
         ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
         ". Use Surface: " + useSurface);
+    if (sharedContext != null) {
+      Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
+    }
     mediaCodecThread = Thread.currentThread();
     try {
       this.width = width;
@@ -226,8 +230,16 @@
       sliceHeight = height;
 
       if (useSurface) {
-        textureListener = new TextureListener(surfaceTextureHelper);
-        surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+        // Create shared EGL context.
+        eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
+        eglBase.createDummyPbufferSurface();
+        eglBase.makeCurrent();
+
+        // Create output surface
+        textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+        Logging.d(TAG, "Video decoder TextureID = " + textureID);
+        surfaceTexture = new SurfaceTexture(textureID);
+        surface = new Surface(surfaceTexture);
       }
 
       MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
@@ -270,7 +282,11 @@
     if (useSurface) {
       surface.release();
       surface = null;
-      textureListener.release();
+      Logging.d(TAG, "Delete video decoder TextureID " + textureID);
+      GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
+      textureID = 0;
+      eglBase.release();
+      eglBase = null;
     }
     Logging.d(TAG, "Java releaseDecoder done");
   }
@@ -319,72 +335,11 @@
 
   private static class DecodedTextureBuffer {
     private final int textureID;
-    private final float[] transformMatrix;
-    private final long timestampNs;
+    private final long presentationTimestampUs;
 
-    public DecodedTextureBuffer(int textureID, float[] transformMatrix, long timestampNs) {
+    public DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
       this.textureID = textureID;
-      this.transformMatrix = transformMatrix;
-      this.timestampNs = timestampNs;
-    }
-  }
-
-  // Poll based texture listener.
-  private static class TextureListener
-      implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
-    private final SurfaceTextureHelper surfaceTextureHelper;
-    private DecodedTextureBuffer textureBuffer;
-    // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
-    private final Object newFrameLock = new Object();
-
-    public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
-      this.surfaceTextureHelper = surfaceTextureHelper;
-      surfaceTextureHelper.setListener(this);
-    }
-
-    // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
-    @Override
-    public void onTextureFrameAvailable(
-        int oesTextureId, float[] transformMatrix, long timestampNs) {
-      synchronized (newFrameLock) {
-        if (textureBuffer != null) {
-          Logging.e(TAG,
-              "Unexpected onTextureFrameAvailable() called while already holding a texture.");
-          throw new IllegalStateException("Already holding a texture.");
-        }
-        textureBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix, timestampNs);
-        newFrameLock.notifyAll();
-      }
-    }
-
-    // Dequeues and returns a texture buffer if available, or null otherwise.
-    public DecodedTextureBuffer dequeueTextureFrame(int timeoutMs) {
-      synchronized (newFrameLock) {
-        if (textureBuffer == null && timeoutMs > 0) {
-          try {
-            newFrameLock.wait(timeoutMs);
-          } catch(InterruptedException e) {
-            // Restore the interrupted status by reinterrupting the thread.
-            Thread.currentThread().interrupt();
-          }
-        }
-        final DecodedTextureBuffer textureBuffer = this.textureBuffer;
-        this.textureBuffer = null;
-        return textureBuffer;
-      }
-    }
-
-    public void release() {
-      // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
-      // progress is done. Therefore, the call to disconnect() must be outside any synchronized
-      // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
-      surfaceTextureHelper.disconnect();
-      synchronized (newFrameLock) {
-        if (textureBuffer != null) {
-          surfaceTextureHelper.returnTextureFrame();
-          textureBuffer = null;
-        }
-      }
+      this.presentationTimestampUs = presentationTimestampUs;
     }
   }
 
@@ -393,25 +348,14 @@
   // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
   // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
   // upon codec error.
-  private Object dequeueOutputBuffer(int dequeueTimeoutMs)
+  private Object dequeueOutputBuffer(int dequeueTimeoutUs)
       throws IllegalStateException, MediaCodec.CodecException {
     checkOnMediaCodecThread();
-    // Calling multiple MediaCodec.releaseOutputBuffer() with render=true in a row will result in
-    // dropped texture frames. Therefore, wait for any pending onTextureFrameAvailable() before
-    // proceeding.
-    if (isWaitingForTexture) {
-      final DecodedTextureBuffer textureBuffer =
-          textureListener.dequeueTextureFrame(dequeueTimeoutMs);
-      isWaitingForTexture = (textureBuffer == null);
-      return textureBuffer;
-    }
-
     // Drain the decoder until receiving a decoded buffer or hitting
     // MediaCodec.INFO_TRY_AGAIN_LATER.
     final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
     while (true) {
-      final int result = mediaCodec.dequeueOutputBuffer(
-          info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
+      final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
       switch (result) {
         case MediaCodec.INFO_TRY_AGAIN_LATER:
           return null;
@@ -445,10 +389,9 @@
           // Output buffer decoded.
           if (useSurface) {
             mediaCodec.releaseOutputBuffer(result, true /* render */);
-            final DecodedTextureBuffer textureBuffer =
-                textureListener.dequeueTextureFrame(dequeueTimeoutMs);
-            isWaitingForTexture = (textureBuffer == null);
-            return textureBuffer;
+            // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
+            // frame.
+            return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
           } else {
             return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
           }
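
Note the unit change in dequeueOutputBuffer(): the timeout is now passed in microseconds and handed straight to MediaCodec.dequeueOutputBuffer(), which expects microseconds. The millisecond-to-microsecond conversion moves to the C++ side (kMediaCodecTimeoutMs * 1000 in androidmediadecoder_jni.cc) instead of going through TimeUnit in Java, as the removed import shows. The two forms are equivalent:

    // Before this revert: Java converted milliseconds to microseconds itself.
    mediaCodec.dequeueOutputBuffer(info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
    // After this revert: the caller already supplies microseconds.
    mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);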
diff --git a/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java b/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
index 2e307fc..3c255dd 100644
--- a/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
+++ b/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
@@ -46,11 +46,7 @@
     public final int[] yuvStrides;
     public ByteBuffer[] yuvPlanes;
     public final boolean yuvFrame;
-    // Matrix that transforms standard coordinates to their proper sampling locations in
-    // the texture. This transform compensates for any properties of the video source that
-    // cause it to appear different from a normalized texture. This matrix does not take
-    // |rotationDegree| into account.
-    public final float[] samplingMatrix;
+    public Object textureObject;
     public int textureId;
     // Frame pointer in C++.
     private long nativeFramePointer;
@@ -74,27 +70,19 @@
       if (rotationDegree % 90 != 0) {
         throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
       }
-      // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
-      // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
-      // bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
-      // matrix.
-      samplingMatrix = new float[] {
-          1,  0, 0, 0,
-          0, -1, 0, 0,
-          0,  0, 1, 0,
-          0,  1, 0, 1};
     }
 
     /**
      * Construct a texture frame of the given dimensions with data in SurfaceTexture
      */
-    I420Frame(int width, int height, int rotationDegree, int textureId, float[] samplingMatrix,
-        long nativeFramePointer) {
+    I420Frame(
+        int width, int height, int rotationDegree,
+        Object textureObject, int textureId, long nativeFramePointer) {
       this.width = width;
       this.height = height;
       this.yuvStrides = null;
       this.yuvPlanes = null;
-      this.samplingMatrix = samplingMatrix;
+      this.textureObject = textureObject;
       this.textureId = textureId;
       this.yuvFrame = false;
       this.rotationDegree = rotationDegree;
@@ -137,6 +125,7 @@
     */
    public static void renderFrameDone(I420Frame frame) {
      frame.yuvPlanes = null;
+     frame.textureObject = null;
      frame.textureId = 0;
      if (frame.nativeFramePointer != 0) {
        releaseNativeFrame(frame.nativeFramePointer);
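
renderFrameDone() ends a frame's lifetime: a renderer is expected to call it exactly once per delivered frame after drawing, so the native frame can be released. A minimal usage sketch, assuming a VideoRenderer.Callbacks-style renderer; drawFrame() is a hypothetical helper standing in for the actual draw path:

    // Sketch of the expected per-frame pattern; drawFrame() is hypothetical.
    @Override
    public void renderFrame(VideoRenderer.I420Frame frame) {
      try {
        drawFrame(frame);
      } finally {
        // Clears yuvPlanes/textureObject and releases the native frame pointer.
        VideoRenderer.renderFrameDone(frame);
      }
    }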