Android: Output VideoFrames from SurfaceTextureHelper

Bug: webrtc:9412
Change-Id: Iffc8dae2fdfb8d7e5c730b433614b7aa30ceb55b
Reviewed-on: https://webrtc-review.googlesource.com/83943
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23867}
diff --git a/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java b/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java
index 169874d..f0ec00e 100644
--- a/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java
+++ b/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java
@@ -368,6 +368,7 @@
             SurfaceTextureHelper.create("Decoder SurfaceTextureHelper", eglContext);
         if (surfaceTextureHelper != null) {
           textureListener = new TextureListener(surfaceTextureHelper);
+          textureListener.setSize(width, height);
           surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
         }
       }
@@ -414,6 +415,9 @@
 
     this.width = width;
     this.height = height;
+    if (textureListener != null) {
+      textureListener.setSize(width, height);
+    }
     decodeStartTimeMs.clear();
     dequeuedSurfaceOutputBuffers.clear();
     hasDecodedFirstFrame = false;
@@ -634,12 +638,12 @@
   }
 
   // Poll based texture listener.
-  private class TextureListener implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+  private class TextureListener implements VideoSink {
     private final SurfaceTextureHelper surfaceTextureHelper;
     // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
     private final Object newFrameLock = new Object();
     // |bufferToRender| is non-null when waiting for transition between addBufferToRender() to
-    // onTextureFrameAvailable().
+    // onFrame().
     @Nullable private DecodedOutputBuffer bufferToRender;
     @Nullable private DecodedTextureBuffer renderedBuffer;
 
@@ -662,19 +666,21 @@
       }
     }
 
+    public void setSize(int width, int height) {
+      surfaceTextureHelper.setTextureSize(width, height);
+    }
+
     // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
     @Override
-    public void onTextureFrameAvailable(
-        int oesTextureId, float[] transformMatrix, long timestampNs) {
+    public void onFrame(VideoFrame frame) {
       synchronized (newFrameLock) {
         if (renderedBuffer != null) {
-          Logging.e(
-              TAG, "Unexpected onTextureFrameAvailable() called while already holding a texture.");
+          Logging.e(TAG, "Unexpected onFrame() called while already holding a texture.");
           throw new IllegalStateException("Already holding a texture.");
         }
         // |timestampNs| is always zero on some Android versions.
-        final VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
-            width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
+        final VideoFrame.Buffer buffer = frame.getBuffer();
+        buffer.retain();
         renderedBuffer = new DecodedTextureBuffer(buffer, bufferToRender.presentationTimeStampMs,
             bufferToRender.timeStampMs, bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs,
             SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs);
@@ -703,9 +709,9 @@
     }
 
     public void release() {
-      // SurfaceTextureHelper.stopListening() will block until any onTextureFrameAvailable() in
-      // progress is done. Therefore, the call must be outside any synchronized
-      // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
+      // SurfaceTextureHelper.stopListening() will block until any onFrame() in progress is done.
+      // Therefore, the call must be outside any synchronized statement that is also used in the
+      // onFrame() above to avoid deadlocks.
       surfaceTextureHelper.stopListening();
       synchronized (newFrameLock) {
         if (renderedBuffer != null) {
@@ -763,6 +769,9 @@
           }
           width = newWidth;
           height = newHeight;
+          if (textureListener != null) {
+            textureListener.setSize(width, height);
+          }
 
           if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
             colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
diff --git a/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java b/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
index d448e29..9a1bf61 100644
--- a/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
+++ b/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
@@ -33,8 +33,7 @@
  * frames. At any time, at most one frame is being processed.
  */
 @TargetApi(21)
-public class ScreenCapturerAndroid
-    implements VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener {
+public class ScreenCapturerAndroid implements VideoCapturer, VideoSink {
   private static final int DISPLAY_FLAGS =
       DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
   // DPI for VirtualDisplay, does not seem to matter for us.
@@ -186,7 +185,7 @@
   }
 
   private void createVirtualDisplay() {
-    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+    surfaceTextureHelper.setTextureSize(width, height);
     virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height,
         VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
         null /* callback */, null /* callback handler */);
@@ -194,13 +193,9 @@
 
   // This is called on the internal looper thread of {@Code SurfaceTextureHelper}.
   @Override
-  public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
+  public void onFrame(VideoFrame frame) {
     numCapturedFrames++;
-    final VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
-        width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
-    final VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, timestampNs);
     capturerObserver.onFrameCaptured(frame);
-    frame.release();
   }
 
   @Override
diff --git a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
index 2d6d13d..146eb22 100644
--- a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
+++ b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
@@ -25,14 +25,11 @@
 import org.webrtc.VideoFrame.TextureBuffer;
 
 /**
- * Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
- * of new frames in onTextureFrameAvailable(), and should call returnTextureFrame() when done with
- * the frame. Only one texture frame can be in flight at once, so returnTextureFrame() must be
- * called in order to receive a new frame. Call stopListening() to stop receiveing new frames. Call
- * dispose to release all resources once the texture frame is returned.
- * Note that there is a C++ counter part of this class that optionally can be used. It is used for
- * wrapping texture frames into webrtc::VideoFrames and also handles calling returnTextureFrame()
- * when the webrtc::VideoFrame is no longer used.
+ * Helper class for using a SurfaceTexture to create WebRTC VideoFrames. In order to create WebRTC
+ * VideoFrames, render onto the SurfaceTexture. The frames will be delivered to the listener. Only
+ * one texture frame can be in flight at once, so the frame must be released in order to receive a
+ * new frame. Call stopListening() to stop receiving new frames. Call dispose to release all
+ * resources once the texture frame is released.
  */
 public class SurfaceTextureHelper {
   private static final String TAG = "SurfaceTextureHelper";
@@ -40,10 +37,12 @@
    * Callback interface for being notified that a new texture frame is available. The calls will be
    * made on the SurfaceTextureHelper handler thread, with a bound EGLContext. The callee is not
    * allowed to make another EGLContext current on the calling thread.
+   *
+   * @deprecated Use a VideoSink as listener instead.
    */
+  @Deprecated
   public interface OnTextureFrameAvailableListener {
-    abstract void onTextureFrameAvailable(
-        int oesTextureId, float[] transformMatrix, long timestampNs);
+    void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs);
   }
 
   /**
@@ -82,14 +81,20 @@
   private final YuvConverter yuvConverter = new YuvConverter();
 
   // These variables are only accessed from the |handler| thread.
-  @Nullable private OnTextureFrameAvailableListener listener;
+  // The type of |listener| is either a VideoSink or the deprecated OnTextureFrameAvailableListener.
+  @Nullable private Object listener;
   // The possible states of this class.
   private boolean hasPendingTexture = false;
   private volatile boolean isTextureInUse = false;
   private boolean isQuitting = false;
+  private int frameRotation;
+  private int textureWidth;
+  private int textureHeight;
   // |pendingListener| is set in setListener() and the runnable is posted to the handler thread.
   // setListener() is not allowed to be called again before stopListening(), so this is thread safe.
-  @Nullable private OnTextureFrameAvailableListener pendingListener;
+  // The type of |pendingListener| is either a VideoSink or the deprecated
+  // OnTextureFrameAvailableListener.
+  @Nullable private Object pendingListener;
   final Runnable setListenerRunnable = new Runnable() {
     @Override
     public void run() {
@@ -148,8 +153,23 @@
   /**
    * Start to stream textures to the given |listener|. If you need to change listener, you need to
    * call stopListening() first.
+   *
+   * @deprecated Use a VideoSink as listener instead.
    */
+  @Deprecated
   public void startListening(final OnTextureFrameAvailableListener listener) {
+    startListeningInternal(listener);
+  }
+
+  /**
+   * Start to stream textures to the given |listener|. If you need to change listener, you need to
+   * call stopListening() first.
+   */
+  public void startListening(final VideoSink listener) {
+    startListeningInternal(listener);
+  }
+
+  private void startListeningInternal(Object listener) {
     if (this.listener != null || this.pendingListener != null) {
       throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
     }
@@ -164,16 +184,37 @@
   public void stopListening() {
     Logging.d(TAG, "stopListening()");
     handler.removeCallbacks(setListenerRunnable);
-    ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
-      @Override
-      public void run() {
-        listener = null;
-        pendingListener = null;
-      }
+    ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
+      listener = null;
+      pendingListener = null;
     });
   }
 
   /**
+   * Use this function to set the texture size. Note: do not call setDefaultBufferSize() yourself
+   * since this class needs to be aware of the texture size.
+   */
+  public void setTextureSize(int textureWidth, int textureHeight) {
+    if (textureWidth <= 0) {
+      throw new IllegalArgumentException("Texture width must be positive, but was " + textureWidth);
+    }
+    if (textureHeight <= 0) {
+      throw new IllegalArgumentException(
+          "Texture height must be positive, but was " + textureHeight);
+    }
+    surfaceTexture.setDefaultBufferSize(textureWidth, textureHeight);
+    handler.post(() -> {
+      this.textureWidth = textureWidth;
+      this.textureHeight = textureHeight;
+    });
+  }
+
+  /** Set the rotation of the delivered frames. */
+  public void setFrameRotation(int rotation) {
+    handler.post(() -> this.frameRotation = rotation);
+  }
+
+  /**
    * Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
    * producer such as a camera or decoder.
    */
@@ -193,17 +234,17 @@
    * Call this function to signal that you are done with the frame received in
    * onTextureFrameAvailable(). Only one texture frame can be in flight at once, so you must call
    * this function in order to receive a new frame.
+   *
+   * @deprecated Use a VideoSink as listener instead.
    */
+  @Deprecated
   public void returnTextureFrame() {
-    handler.post(new Runnable() {
-      @Override
-      public void run() {
-        isTextureInUse = false;
-        if (isQuitting) {
-          release();
-        } else {
-          tryDeliverTextureFrame();
-        }
+    handler.post(() -> {
+      isTextureInUse = false;
+      if (isQuitting) {
+        release();
+      } else {
+        tryDeliverTextureFrame();
       }
     });
   }
@@ -219,23 +260,22 @@
    */
   public void dispose() {
     Logging.d(TAG, "dispose()");
-    ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
-      @Override
-      public void run() {
-        isQuitting = true;
-        if (!isTextureInUse) {
-          release();
-        }
+    ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
+      isQuitting = true;
+      if (!isTextureInUse) {
+        release();
       }
     });
   }
 
   /**
    * Posts to the correct thread to convert |textureBuffer| to I420.
+   *
+   * @deprecated Use toI420() instead.
    */
+  @Deprecated
   public VideoFrame.I420Buffer textureToYuv(final TextureBuffer textureBuffer) {
-    return ThreadUtils.invokeAtFrontUninterruptibly(
-        handler, () -> yuvConverter.convert(textureBuffer));
+    return textureBuffer.toI420();
   }
 
   private void updateTexImage() {
@@ -262,7 +302,19 @@
     final float[] transformMatrix = new float[16];
     surfaceTexture.getTransformMatrix(transformMatrix);
     final long timestampNs = surfaceTexture.getTimestamp();
-    listener.onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
+    if (listener instanceof OnTextureFrameAvailableListener) {
+      ((OnTextureFrameAvailableListener) listener)
+          .onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
+    } else if (listener instanceof VideoSink) {
+      if (textureWidth == 0 || textureHeight == 0) {
+        throw new RuntimeException("Texture size has not been set.");
+      }
+      final VideoFrame.Buffer buffer = createTextureBuffer(textureWidth, textureHeight,
+          RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
+      final VideoFrame frame = new VideoFrame(buffer, frameRotation, timestampNs);
+      ((VideoSink) listener).onFrame(frame);
+      frame.release();
+    }
   }
 
   private void release() {
@@ -286,7 +338,10 @@
    *
    * The returned TextureBuffer holds a reference to the SurfaceTextureHelper that created it. The
    * buffer calls returnTextureFrame() when it is released.
+   *
+   * @deprecated Use a VideoSink as listener instead.
    */
+  @Deprecated
   public TextureBufferImpl createTextureBuffer(int width, int height, Matrix transformMatrix) {
     return new TextureBufferImpl(width, height, TextureBuffer.Type.OES, oesTextureId,
         transformMatrix, handler, yuvConverter, this ::returnTextureFrame);