/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.graphics.Matrix;
import android.graphics.Point;
import android.opengl.GLES20;
import java.nio.ByteBuffer;
import javax.annotation.Nullable;

/**
 * Helper class to draw VideoFrames. Calls either drawer.drawOes, drawer.drawRgb, or
 * drawer.drawYuv depending on the type of the buffer. The frame will be rendered with rotation
 * taken into account. You can supply an additional render matrix for custom transformations.
 */
public class VideoFrameDrawer {
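  // A minimal usage sketch (assumptions: a current EGL context on the calling thread and
  // GlRectDrawer as the GlDrawer implementation; the surrounding frame handling is illustrative):
  //
  //   VideoFrameDrawer frameDrawer = new VideoFrameDrawer();
  //   RendererCommon.GlDrawer drawer = new GlRectDrawer();
  //   // For every incoming frame, render it into the currently bound framebuffer:
  //   frameDrawer.drawFrame(frame, drawer);
  //   ...
  //   // When done, release the GL resources on the same thread/context:
  //   frameDrawer.release();
  //   drawer.release();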
  /**
   * Draws a VideoFrame.TextureBuffer. Calls either drawer.drawOes or drawer.drawRgb
   * depending on the type of the buffer. You can supply an additional render matrix. This is
   * multiplied together with the transformation matrix of the frame. (M = renderMatrix *
   * transformationMatrix)
   */
  static void drawTexture(RendererCommon.GlDrawer drawer, VideoFrame.TextureBuffer buffer,
      Matrix renderMatrix, int frameWidth, int frameHeight, int viewportX, int viewportY,
      int viewportWidth, int viewportHeight) {
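    // Combine the buffer's own transform matrix with the caller-supplied render matrix, and
    // convert the result to the array form that the GlDrawer interface expects.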
    Matrix finalMatrix = new Matrix(buffer.getTransformMatrix());
    finalMatrix.preConcat(renderMatrix);
    float[] finalGlMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(finalMatrix);
    switch (buffer.getType()) {
      case OES:
        drawer.drawOes(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
            viewportY, viewportWidth, viewportHeight);
        break;
      case RGB:
        drawer.drawRgb(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
            viewportY, viewportWidth, viewportHeight);
        break;
      default:
        throw new RuntimeException("Unknown texture type.");
    }
  }

  /**
   * Helper class for uploading YUV ByteBuffer frames to textures; handles stride > width. This
   * class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies.
   */
  private static class YuvUploader {
    // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
    // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
    // that handles stride and compare performance with intermediate copy.
    @Nullable private ByteBuffer copyBuffer;
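    // Lazily allocated textures for the Y-, U-, and V-planes. Deleted in release() and
    // reallocated on the next upload.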
    @Nullable private int[] yuvTextures;

    /**
     * Upload |planes| into OpenGL textures, taking stride into consideration.
     *
     * @return Array of three texture indices corresponding to Y-, U-, and V-plane respectively.
     */
    @Nullable
    public int[] uploadYuvData(int width, int height, int[] strides, ByteBuffer[] planes) {
      final int[] planeWidths = new int[] {width, width / 2, width / 2};
      final int[] planeHeights = new int[] {height, height / 2, height / 2};
      // Make a first pass to see if we need a temporary copy buffer.
      int copyCapacityNeeded = 0;
      for (int i = 0; i < 3; ++i) {
        if (strides[i] > planeWidths[i]) {
          copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
        }
      }
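      // Worked example (hypothetical strides): a 640x480 I420 frame with Y stride 704 and chroma
      // stride 352 has all three planes unpacked, so copyCapacityNeeded becomes
      // max(640 * 480, 320 * 240) = 307200 bytes.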
      // Allocate copy buffer if necessary.
      if (copyCapacityNeeded > 0
          && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
        copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
      }
      // Make sure YUV textures are allocated.
      if (yuvTextures == null) {
        yuvTextures = new int[3];
        for (int i = 0; i < 3; i++) {
          yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
        }
      }
      // Upload each plane.
      for (int i = 0; i < 3; ++i) {
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
        // GLES only accepts packed data, i.e. stride == planeWidth.
        final ByteBuffer packedByteBuffer;
        if (strides[i] == planeWidths[i]) {
          // Input is packed already.
          packedByteBuffer = planes[i];
        } else {
          YuvHelper.copyPlane(
              planes[i], strides[i], copyBuffer, planeWidths[i], planeWidths[i], planeHeights[i]);
          packedByteBuffer = copyBuffer;
        }
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
            planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
      }
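      // Sketch of an alternative, not used here: on an OpenGL ES 3.0 context the intermediate
      // copy could be skipped by declaring each plane's stride to GL. Since these are single-byte
      // luminance planes, the stride in bytes equals the row length in pixels:
      //   GLES30.glPixelStorei(GLES30.GL_UNPACK_ROW_LENGTH, strides[i]);
      //   GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
      //       planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, planes[i]);
      //   GLES30.glPixelStorei(GLES30.GL_UNPACK_ROW_LENGTH, 0);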
      return yuvTextures;
    }

    @Nullable
    public int[] uploadFromBuffer(VideoFrame.I420Buffer buffer) {
      int[] strides = {buffer.getStrideY(), buffer.getStrideU(), buffer.getStrideV()};
      ByteBuffer[] planes = {buffer.getDataY(), buffer.getDataU(), buffer.getDataV()};
      return uploadYuvData(buffer.getWidth(), buffer.getHeight(), strides, planes);
    }

    @Nullable
    public int[] getYuvTextures() {
      return yuvTextures;
    }

    /**
     * Releases cached resources. Uploader can still be used and the resources will be reallocated
     * on first use.
     */
    public void release() {
      copyBuffer = null;
      if (yuvTextures != null) {
        GLES20.glDeleteTextures(3, yuvTextures, 0);
        yuvTextures = null;
      }
    }
  }

  private static int distance(float x0, float y0, float x1, float y1) {
    return (int) Math.round(Math.hypot(x1 - x0, y1 - y0));
  }

  // These points are used to calculate the size of the part of the frame we are rendering.
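  // The three points span the unit square: the mapped distance from (x0, y0) to (x1, y1) gives
  // the transformed width, and the distance from (x0, y0) to (x2, y2) gives the transformed
  // height.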
  final static float[] srcPoints =
      new float[] {0f /* x0 */, 0f /* y0 */, 1f /* x1 */, 0f /* y1 */, 0f /* x2 */, 1f /* y2 */};
  private final float[] dstPoints = new float[6];
  private final Point renderSize = new Point();
  private int renderWidth;
  private int renderHeight;

  // Calculate the frame size after |renderMatrix| is applied. Stores the output in member variables
  // |renderWidth| and |renderHeight| to avoid allocations since this function is called for every
  // frame.
  private void calculateTransformedRenderSize(
      int frameWidth, int frameHeight, @Nullable Matrix renderMatrix) {
    if (renderMatrix == null) {
      renderWidth = frameWidth;
      renderHeight = frameHeight;
      return;
    }
    // Transform the texture coordinates (in the range [0, 1]) according to |renderMatrix|.
    renderMatrix.mapPoints(dstPoints, srcPoints);

    // Multiply with the width and height to get the positions in terms of pixels.
    for (int i = 0; i < 3; ++i) {
      dstPoints[i * 2 + 0] *= frameWidth;
      dstPoints[i * 2 + 1] *= frameHeight;
    }

    // Get the length of the sides of the transformed rectangle in terms of pixels.
    renderWidth = distance(dstPoints[0], dstPoints[1], dstPoints[2], dstPoints[3]);
    renderHeight = distance(dstPoints[0], dstPoints[1], dstPoints[4], dstPoints[5]);
  }

  private final YuvUploader yuvUploader = new YuvUploader();
  // This variable will only be used for checking reference equality and is used for caching I420
  // textures.
  @Nullable private VideoFrame lastI420Frame;
  private final Matrix renderMatrix = new Matrix();

  public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer) {
    drawFrame(frame, drawer, null /* additionalRenderMatrix */);
  }

  public void drawFrame(
      VideoFrame frame, RendererCommon.GlDrawer drawer, Matrix additionalRenderMatrix) {
    drawFrame(frame, drawer, additionalRenderMatrix, 0 /* viewportX */, 0 /* viewportY */,
        frame.getRotatedWidth(), frame.getRotatedHeight());
  }

  public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer,
      @Nullable Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth,
      int viewportHeight) {
    final int width = frame.getRotatedWidth();
    final int height = frame.getRotatedHeight();

    calculateTransformedRenderSize(width, height, additionalRenderMatrix);

    final boolean isTextureFrame = frame.getBuffer() instanceof VideoFrame.TextureBuffer;
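    // Build the render matrix: rotate the frame (and vertically flip non-texture frames) around
    // the texture center (0.5, 0.5), combined with the caller-supplied matrix, if any.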
    renderMatrix.reset();
    renderMatrix.preTranslate(0.5f, 0.5f);
    if (!isTextureFrame) {
      renderMatrix.preScale(1f, -1f); // I420-frames are upside down
    }
    renderMatrix.preRotate(frame.getRotation());
    renderMatrix.preTranslate(-0.5f, -0.5f);
    if (additionalRenderMatrix != null) {
      renderMatrix.preConcat(additionalRenderMatrix);
    }

    if (isTextureFrame) {
      lastI420Frame = null;
      drawTexture(drawer, (VideoFrame.TextureBuffer) frame.getBuffer(), renderMatrix, renderWidth,
          renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
    } else {
      // Only upload the I420 data to textures once per frame, if we are called multiple times
      // with the same frame.
      if (frame != lastI420Frame) {
        lastI420Frame = frame;
        final VideoFrame.I420Buffer i420Buffer = frame.getBuffer().toI420();
        yuvUploader.uploadFromBuffer(i420Buffer);
        i420Buffer.release();
      }

      drawer.drawYuv(yuvUploader.getYuvTextures(),
          RendererCommon.convertMatrixFromAndroidGraphicsMatrix(renderMatrix), renderWidth,
          renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
    }
  }

  public void release() {
    yuvUploader.release();
    lastI420Frame = null;
  }
}