Android: Output VideoFrames from SurfaceTextureHelper
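
SurfaceTextureHelper can now deliver complete VideoFrames to a VideoSink
listener instead of raw OES texture ids through the now-deprecated
OnTextureFrameAvailableListener. A rough usage sketch of the new path
follows, assuming the org.webrtc classes are imported; the existing
create() factory, the eglBase variable and the deliverFrame() consumer
are illustrative wiring, not part of this CL:

    SurfaceTextureHelper helper =
        SurfaceTextureHelper.create("STHThread", eglBase.getEglBaseContext());
    // The texture size must be set through the helper; do not call
    // setDefaultBufferSize() on the SurfaceTexture directly.
    helper.setTextureSize(/* textureWidth= */ 640, /* textureHeight= */ 480);
    helper.setFrameRotation(/* rotation= */ 0);
    VideoSink sink = frame -> {
      // The helper releases the frame after onFrame() returns, so retain it
      // if it is kept beyond this callback.
      frame.retain();
      deliverFrame(frame); // illustrative consumer; must call frame.release()
    };
    helper.startListening(sink);
    // Render onto helper.getSurfaceTexture() (e.g. from a camera or decoder)
    // and the resulting frames arrive on the sink. When done:
    helper.stopListening();
    helper.dispose();
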
Bug: webrtc:9412
Change-Id: Iffc8dae2fdfb8d7e5c730b433614b7aa30ceb55b
Reviewed-on: https://webrtc-review.googlesource.com/83943
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23867}
diff --git a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
index 2d6d13d..146eb22 100644
--- a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
+++ b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
@@ -25,14 +25,11 @@
import org.webrtc.VideoFrame.TextureBuffer;
/**
- * Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
- * of new frames in onTextureFrameAvailable(), and should call returnTextureFrame() when done with
- * the frame. Only one texture frame can be in flight at once, so returnTextureFrame() must be
- * called in order to receive a new frame. Call stopListening() to stop receiveing new frames. Call
- * dispose to release all resources once the texture frame is returned.
- * Note that there is a C++ counter part of this class that optionally can be used. It is used for
- * wrapping texture frames into webrtc::VideoFrames and also handles calling returnTextureFrame()
- * when the webrtc::VideoFrame is no longer used.
+ * Helper class for using a SurfaceTexture to create WebRTC VideoFrames. To create a VideoFrame,
+ * render onto the SurfaceTexture; the frame will then be delivered to the listener. Only one
+ * texture frame can be in flight at once, so the frame must be released in order to receive a new
+ * frame. Call stopListening() to stop receiving new frames. Call dispose() to release all
+ * resources once the texture frame is released.
*/
public class SurfaceTextureHelper {
private static final String TAG = "SurfaceTextureHelper";
@@ -40,10 +37,12 @@
* Callback interface for being notified that a new texture frame is available. The calls will be
* made on the SurfaceTextureHelper handler thread, with a bound EGLContext. The callee is not
* allowed to make another EGLContext current on the calling thread.
+ *
+ * @deprecated Use a VideoSink as listener instead.
*/
+ @Deprecated
public interface OnTextureFrameAvailableListener {
- abstract void onTextureFrameAvailable(
- int oesTextureId, float[] transformMatrix, long timestampNs);
+ void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs);
}
/**
@@ -82,14 +81,20 @@
private final YuvConverter yuvConverter = new YuvConverter();
// These variables are only accessed from the |handler| thread.
- @Nullable private OnTextureFrameAvailableListener listener;
+ // The type of |listener| is either a VideoSink or the deprecated OnTextureFrameAvailableListener.
+ @Nullable private Object listener;
// The possible states of this class.
private boolean hasPendingTexture = false;
private volatile boolean isTextureInUse = false;
private boolean isQuitting = false;
+ private int frameRotation;
+ private int textureWidth;
+ private int textureHeight;
// |pendingListener| is set in setListener() and the runnable is posted to the handler thread.
// setListener() is not allowed to be called again before stopListening(), so this is thread safe.
- @Nullable private OnTextureFrameAvailableListener pendingListener;
+ // The type of |pendingListener| is either a VideoSink or the deprecated
+ // OnTextureFrameAvailableListener.
+ @Nullable private Object pendingListener;
final Runnable setListenerRunnable = new Runnable() {
@Override
public void run() {
@@ -148,8 +153,23 @@
/**
* Start to stream textures to the given |listener|. If you need to change listener, you need to
* call stopListening() first.
+ *
+ * @deprecated Use a VideoSink as listener instead.
*/
+ @Deprecated
public void startListening(final OnTextureFrameAvailableListener listener) {
+ startListeningInternal(listener);
+ }
+
+ /**
+ * Start to stream textures to the given |listener|. If you need to change the listener, call
+ * stopListening() first.
+ */
+ public void startListening(final VideoSink listener) {
+ startListeningInternal(listener);
+ }
+
+ private void startListeningInternal(Object listener) {
if (this.listener != null || this.pendingListener != null) {
throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
}
@@ -164,16 +184,37 @@
public void stopListening() {
Logging.d(TAG, "stopListening()");
handler.removeCallbacks(setListenerRunnable);
- ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
- @Override
- public void run() {
- listener = null;
- pendingListener = null;
- }
+ ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
+ listener = null;
+ pendingListener = null;
});
}
/**
+ * Use this function to set the texture size. Note: do not call setDefaultBufferSize() yourself,
+ * since this class needs to be aware of the texture size.
+ */
+ public void setTextureSize(int textureWidth, int textureHeight) {
+ if (textureWidth <= 0) {
+ throw new IllegalArgumentException("Texture width must be positive, but was " + textureWidth);
+ }
+ if (textureHeight <= 0) {
+ throw new IllegalArgumentException(
+ "Texture height must be positive, but was " + textureHeight);
+ }
+ surfaceTexture.setDefaultBufferSize(textureWidth, textureHeight);
+ handler.post(() -> {
+ this.textureWidth = textureWidth;
+ this.textureHeight = textureHeight;
+ });
+ }
+
+ /** Set the rotation of the delivered frames. */
+ public void setFrameRotation(int rotation) {
+ handler.post(() -> this.frameRotation = rotation);
+ }
+
+ /**
* Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
* producer such as a camera or decoder.
*/
@@ -193,17 +234,17 @@
* Call this function to signal that you are done with the frame received in
* onTextureFrameAvailable(). Only one texture frame can be in flight at once, so you must call
* this function in order to receive a new frame.
+ *
+ * @deprecated Use a VideoSink as listener instead.
*/
+ @Deprecated
public void returnTextureFrame() {
- handler.post(new Runnable() {
- @Override
- public void run() {
- isTextureInUse = false;
- if (isQuitting) {
- release();
- } else {
- tryDeliverTextureFrame();
- }
+ handler.post(() -> {
+ isTextureInUse = false;
+ if (isQuitting) {
+ release();
+ } else {
+ tryDeliverTextureFrame();
}
});
}
@@ -219,23 +260,22 @@
*/
public void dispose() {
Logging.d(TAG, "dispose()");
- ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
- @Override
- public void run() {
- isQuitting = true;
- if (!isTextureInUse) {
- release();
- }
+ ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
+ isQuitting = true;
+ if (!isTextureInUse) {
+ release();
}
});
}
/**
* Posts to the correct thread to convert |textureBuffer| to I420.
+ *
+ * @deprecated Use toI420() instead.
*/
+ @Deprecated
public VideoFrame.I420Buffer textureToYuv(final TextureBuffer textureBuffer) {
- return ThreadUtils.invokeAtFrontUninterruptibly(
- handler, () -> yuvConverter.convert(textureBuffer));
+ return textureBuffer.toI420();
}
private void updateTexImage() {
@@ -262,7 +302,19 @@
final float[] transformMatrix = new float[16];
surfaceTexture.getTransformMatrix(transformMatrix);
final long timestampNs = surfaceTexture.getTimestamp();
- listener.onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
+ if (listener instanceof OnTextureFrameAvailableListener) {
+ ((OnTextureFrameAvailableListener) listener)
+ .onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
+ } else if (listener instanceof VideoSink) {
+ if (textureWidth == 0 || textureHeight == 0) {
+ throw new RuntimeException("Texture size has not been set.");
+ }
+ final VideoFrame.Buffer buffer = createTextureBuffer(textureWidth, textureHeight,
+ RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
+ final VideoFrame frame = new VideoFrame(buffer, frameRotation, timestampNs);
+ ((VideoSink) listener).onFrame(frame);
+ frame.release();
+ }
}
private void release() {
@@ -286,7 +338,10 @@
*
* The returned TextureBuffer holds a reference to the SurfaceTextureHelper that created it. The
* buffer calls returnTextureFrame() when it is released.
+ *
+ * @deprecated Use a VideoSink as listener instead.
*/
+ @Deprecated
public TextureBufferImpl createTextureBuffer(int width, int height, Matrix transformMatrix) {
return new TextureBufferImpl(width, height, TextureBuffer.Type.OES, oesTextureId,
transformMatrix, handler, yuvConverter, this ::returnTextureFrame);
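
A corresponding sketch of the consumer side: with a VideoSink listener,
the deprecated textureToYuv()/returnTextureFrame() pair is no longer
needed, since VideoFrame.Buffer.toI420() and the frame's own reference
counting cover both. The processI420() call and the surfaceTextureHelper
variable below are illustrative only:

    VideoSink i420Sink = frame -> {
      // Convert the delivered texture buffer to a CPU-accessible I420 copy.
      VideoFrame.I420Buffer i420 = frame.getBuffer().toI420();
      try {
        processI420(i420); // illustrative consumer of the pixel data
      } finally {
        // Release the I420 copy; the helper itself releases the texture
        // frame after onFrame() returns, so no returnTextureFrame() call.
        i420.release();
      }
    };
    surfaceTextureHelper.startListening(i420Sink);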