Add option to inject YuvConverter into SurfaceTextureHelper.

Add option to inject VideoFrameDrawer into YuvConverter and EglRenderer.

Bug: none
Change-Id: I0aab0026c30b41d72f70fb00b251aed5e4a4a774
Reviewed-on: https://webrtc-review.googlesource.com/c/123443
Commit-Queue: Åsa Persson <asapersson@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26848}
diff --git a/sdk/android/api/org/webrtc/EglRenderer.java b/sdk/android/api/org/webrtc/EglRenderer.java
index d87c517..2ab2779 100644
--- a/sdk/android/api/org/webrtc/EglRenderer.java
+++ b/sdk/android/api/org/webrtc/EglRenderer.java
@@ -123,7 +123,7 @@
   // EGL and GL resources for drawing YUV/OES textures. After initilization, these are only accessed
   // from the render thread.
   @Nullable private EglBase eglBase;
-  private final VideoFrameDrawer frameDrawer = new VideoFrameDrawer();
+  private final VideoFrameDrawer frameDrawer;
   @Nullable private RendererCommon.GlDrawer drawer;
   private boolean usePresentationTimeStamp;
   private final Matrix drawMatrix = new Matrix();
@@ -181,7 +181,12 @@
    * logging. In order to render something, you must first call init() and createEglSurface.
    */
   public EglRenderer(String name) {
+    this(name, new VideoFrameDrawer());
+  }
+
+  public EglRenderer(String name, VideoFrameDrawer videoFrameDrawer) {
     this.name = name;
+    this.frameDrawer = videoFrameDrawer;
   }
 
   /**
diff --git a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
index f1abb28..f11b70b 100644
--- a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
+++ b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
@@ -42,8 +42,9 @@
    * PeerConnectionFactory.createVideoSource(). This makes the timestamps more accurate and
    * closer to actual creation time.
    */
-  public static SurfaceTextureHelper create(
-      final String threadName, final EglBase.Context sharedContext, boolean alignTimestamps) {
+  public static SurfaceTextureHelper create(final String threadName,
+      final EglBase.Context sharedContext, boolean alignTimestamps,
+      final YuvConverter yuvConverter) {
     final HandlerThread thread = new HandlerThread(threadName);
     thread.start();
     final Handler handler = new Handler(thread.getLooper());
@@ -57,7 +58,7 @@
       @Override
       public SurfaceTextureHelper call() {
         try {
-          return new SurfaceTextureHelper(sharedContext, handler, alignTimestamps);
+          return new SurfaceTextureHelper(sharedContext, handler, alignTimestamps, yuvConverter);
         } catch (RuntimeException e) {
           Logging.e(TAG, threadName + " create failure", e);
           return null;
@@ -67,20 +68,30 @@
   }
 
   /**
-   * Same as above with alignTimestamps set to false.
+   * Same as above with alignTimestamps set to false and yuvConverter set to new YuvConverter.
    *
-   * @see #create(String, EglBase.Context, boolean)
+   * @see #create(String, EglBase.Context, boolean, YuvConverter)
    */
   public static SurfaceTextureHelper create(
       final String threadName, final EglBase.Context sharedContext) {
-    return create(threadName, sharedContext, /* alignTimestamps= */ false);
+    return create(threadName, sharedContext, /* alignTimestamps= */ false, new YuvConverter());
+  }
+
+  /**
+   * Same as above with yuvConverter set to new YuvConverter.
+   *
+   * @see #create(String, EglBase.Context, boolean, YuvConverter)
+   */
+  public static SurfaceTextureHelper create(
+      final String threadName, final EglBase.Context sharedContext, boolean alignTimestamps) {
+    return create(threadName, sharedContext, alignTimestamps, new YuvConverter());
   }
 
   private final Handler handler;
   private final EglBase eglBase;
   private final SurfaceTexture surfaceTexture;
   private final int oesTextureId;
-  private final YuvConverter yuvConverter = new YuvConverter();
+  private final YuvConverter yuvConverter;
   @Nullable private final TimestampAligner timestampAligner;
 
   // These variables are only accessed from the |handler| thread.
@@ -110,13 +121,14 @@
     }
   };
 
-  private SurfaceTextureHelper(
-      EglBase.Context sharedContext, Handler handler, boolean alignTimestamps) {
+  private SurfaceTextureHelper(EglBase.Context sharedContext, Handler handler,
+      boolean alignTimestamps, YuvConverter yuvConverter) {
     if (handler.getLooper().getThread() != Thread.currentThread()) {
       throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
     }
     this.handler = handler;
     this.timestampAligner = alignTimestamps ? new TimestampAligner() : null;
+    this.yuvConverter = yuvConverter;
 
     eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
     try {
diff --git a/sdk/android/api/org/webrtc/TextureBufferImpl.java b/sdk/android/api/org/webrtc/TextureBufferImpl.java
index 988519a..a24f284 100644
--- a/sdk/android/api/org/webrtc/TextureBufferImpl.java
+++ b/sdk/android/api/org/webrtc/TextureBufferImpl.java
@@ -135,6 +135,14 @@
     return unscaledHeight;
   }
 
+  public Handler getToI420Handler() {
+    return toI420Handler;
+  }
+
+  public YuvConverter getYuvConverter() {
+    return yuvConverter;
+  }
+
   /**
    * Create a new TextureBufferImpl with an applied transform matrix and a new size. The
    * existing buffer is unchanged. The given transform matrix is applied first when texture
diff --git a/sdk/android/api/org/webrtc/VideoFrameDrawer.java b/sdk/android/api/org/webrtc/VideoFrameDrawer.java
index 0dd22ad..076aabd 100644
--- a/sdk/android/api/org/webrtc/VideoFrameDrawer.java
+++ b/sdk/android/api/org/webrtc/VideoFrameDrawer.java
@@ -28,7 +28,7 @@
    * used multiplied together with the transformation matrix of the frame. (M = renderMatrix *
    * transformationMatrix)
    */
-  static void drawTexture(RendererCommon.GlDrawer drawer, VideoFrame.TextureBuffer buffer,
+  public static void drawTexture(RendererCommon.GlDrawer drawer, VideoFrame.TextureBuffer buffer,
       Matrix renderMatrix, int frameWidth, int frameHeight, int viewportX, int viewportY,
       int viewportWidth, int viewportHeight) {
     Matrix finalMatrix = new Matrix(buffer.getTransformMatrix());
@@ -224,6 +224,12 @@
     }
   }
 
+  public VideoFrame.Buffer prepareBufferForViewportSize(
+      VideoFrame.Buffer buffer, int width, int height) {
+    buffer.retain();
+    return buffer;
+  }
+
   public void release() {
     yuvUploader.release();
     lastI420Frame = null;
diff --git a/sdk/android/api/org/webrtc/YuvConverter.java b/sdk/android/api/org/webrtc/YuvConverter.java
index 7d55f07..0e2d505 100644
--- a/sdk/android/api/org/webrtc/YuvConverter.java
+++ b/sdk/android/api/org/webrtc/YuvConverter.java
@@ -107,17 +107,27 @@
       new GlTextureFrameBuffer(GLES20.GL_RGBA);
   private final ShaderCallbacks shaderCallbacks = new ShaderCallbacks();
   private final GlGenericDrawer drawer = new GlGenericDrawer(FRAGMENT_SHADER, shaderCallbacks);
+  private final VideoFrameDrawer videoFrameDrawer;
 
   /**
    * This class should be constructed on a thread that has an active EGL context.
    */
   public YuvConverter() {
+    this(new VideoFrameDrawer());
+  }
+
+  public YuvConverter(VideoFrameDrawer videoFrameDrawer) {
+    this.videoFrameDrawer = videoFrameDrawer;
     threadChecker.detachThread();
   }
 
   /** Converts the texture buffer to I420. */
   public I420Buffer convert(TextureBuffer inputTextureBuffer) {
     threadChecker.checkIsOnValidThread();
+
+    TextureBuffer preparedBuffer = (TextureBuffer) videoFrameDrawer.prepareBufferForViewportSize(
+        inputTextureBuffer, inputTextureBuffer.getWidth(), inputTextureBuffer.getHeight());
+
     // We draw into a buffer laid out like
     //
     //    +---------+
@@ -146,8 +156,8 @@
     // Since the V data needs to start on a boundary of such a
     // larger pixel, it is not sufficient that |stride| is even, it
     // has to be a multiple of 8 pixels.
-    final int frameWidth = inputTextureBuffer.getWidth();
-    final int frameHeight = inputTextureBuffer.getHeight();
+    final int frameWidth = preparedBuffer.getWidth();
+    final int frameHeight = preparedBuffer.getHeight();
     final int stride = ((frameWidth + 7) / 8) * 8;
     final int uvHeight = (frameHeight + 1) / 2;
     // Total height of the combined memory layout.
@@ -171,19 +181,19 @@
 
     // Draw Y.
     shaderCallbacks.setPlaneY();
-    VideoFrameDrawer.drawTexture(drawer, inputTextureBuffer, renderMatrix, frameWidth, frameHeight,
+    VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight,
         /* viewportX= */ 0, /* viewportY= */ 0, viewportWidth,
         /* viewportHeight= */ frameHeight);
 
     // Draw U.
     shaderCallbacks.setPlaneU();
-    VideoFrameDrawer.drawTexture(drawer, inputTextureBuffer, renderMatrix, frameWidth, frameHeight,
+    VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight,
         /* viewportX= */ 0, /* viewportY= */ frameHeight, viewportWidth / 2,
         /* viewportHeight= */ uvHeight);
 
     // Draw V.
     shaderCallbacks.setPlaneV();
-    VideoFrameDrawer.drawTexture(drawer, inputTextureBuffer, renderMatrix, frameWidth, frameHeight,
+    VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight,
         /* viewportX= */ viewportWidth / 2, /* viewportY= */ frameHeight, viewportWidth / 2,
         /* viewportHeight= */ uvHeight);
 
@@ -215,6 +225,8 @@
     i420ByteBuffer.limit(vPos + uvSize);
     final ByteBuffer dataV = i420ByteBuffer.slice();
 
+    preparedBuffer.release();
+
     return JavaI420Buffer.wrap(frameWidth, frameHeight, dataY, stride, dataU, stride, dataV, stride,
         () -> { JniCommon.nativeFreeByteBuffer(i420ByteBuffer); });
   }
@@ -223,6 +235,7 @@
     threadChecker.checkIsOnValidThread();
     drawer.release();
     i420TextureFrameBuffer.release();
+    videoFrameDrawer.release();
     // Allow this class to be reused.
     threadChecker.detachThread();
   }