/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.graphics.Matrix;
import android.graphics.Point;
import android.opengl.GLES20;
import java.nio.ByteBuffer;

/**
 * Helper class to draw VideoFrames. Calls either drawer.drawOes, drawer.drawRgb, or
 * drawer.drawYuv depending on the type of the buffer. The frame will be rendered with rotation
 * taken into account. You can supply an additional render matrix for custom transformations.
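 *
 * <p>A minimal usage sketch (GlRectDrawer is just one possible GlDrawer implementation; the
 * surrounding EGL/GL context setup is assumed to be in place and current on the calling thread):
 *
 * <pre>{@code
 * VideoFrameDrawer frameDrawer = new VideoFrameDrawer();
 * RendererCommon.GlDrawer drawer = new GlRectDrawer();
 * // For each incoming VideoFrame, on the thread where the GL context is current:
 * frameDrawer.drawFrame(frame, drawer);
 * // When rendering is done:
 * frameDrawer.release();
 * }</pre>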
 */
public class VideoFrameDrawer {
  /**
   * Draws a VideoFrame.TextureBuffer. Calls either drawer.drawOes or drawer.drawRgb
   * depending on the type of the buffer. You can supply an additional render matrix, which is
   * multiplied together with the transformation matrix of the frame
   * (M = renderMatrix * transformationMatrix).
   */
  static void drawTexture(RendererCommon.GlDrawer drawer, VideoFrame.TextureBuffer buffer,
      Matrix renderMatrix, int frameWidth, int frameHeight, int viewportX, int viewportY,
      int viewportWidth, int viewportHeight) {
    Matrix finalMatrix = new Matrix(buffer.getTransformMatrix());
    finalMatrix.preConcat(renderMatrix);
    float[] finalGlMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(finalMatrix);
    switch (buffer.getType()) {
      case OES:
        drawer.drawOes(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
            viewportY, viewportWidth, viewportHeight);
        break;
      case RGB:
        drawer.drawRgb(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
            viewportY, viewportWidth, viewportHeight);
        break;
      default:
        throw new RuntimeException("Unknown texture type.");
    }
  }

  /**
   * Helper class for uploading YUV ByteBuffer frames to textures that handles stride > width. This
   * class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies.
   */
  private static class YuvUploader {
    // Intermediate copy buffer for uploading YUV frames that are not packed, i.e. stride > width.
    // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
    // that handles stride and compare performance with intermediate copy.
    private ByteBuffer copyBuffer;
    private int[] yuvTextures;

    /**
     * Upload |planes| into OpenGL textures, taking stride into consideration.
     *
     * @return Array of three texture indices corresponding to Y-, U-, and V-plane respectively.
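     *
     * <p>For example (illustrative numbers): for a 640x480 I420 frame the plane widths are
     * {640, 320, 320}, so a Y-plane with stride 704 is first repacked row by row into the packed
     * copy buffer before being passed to glTexImage2D.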
     */
    public int[] uploadYuvData(int width, int height, int[] strides, ByteBuffer[] planes) {
      final int[] planeWidths = new int[] {width, width / 2, width / 2};
      final int[] planeHeights = new int[] {height, height / 2, height / 2};
      // Make a first pass to see if we need a temporary copy buffer.
      int copyCapacityNeeded = 0;
      for (int i = 0; i < 3; ++i) {
        if (strides[i] > planeWidths[i]) {
          copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
        }
      }
      // Allocate copy buffer if necessary.
      if (copyCapacityNeeded > 0
          && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
        copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
      }
      // Make sure YUV textures are allocated.
      if (yuvTextures == null) {
        yuvTextures = new int[3];
        for (int i = 0; i < 3; i++) {
          yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
        }
      }
      // Upload each plane.
      for (int i = 0; i < 3; ++i) {
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
        // GLES only accepts packed data, i.e. stride == planeWidth.
        final ByteBuffer packedByteBuffer;
        if (strides[i] == planeWidths[i]) {
          // Input is packed already.
          packedByteBuffer = planes[i];
        } else {
          VideoRenderer.nativeCopyPlane(
              planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]);
          packedByteBuffer = copyBuffer;
        }
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
            planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
      }
      return yuvTextures;
    }

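    /** Uploads the Y-, U- and V-planes of |buffer| and returns the resulting textures. */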
    public int[] uploadFromBuffer(VideoFrame.I420Buffer buffer) {
      int[] strides = {buffer.getStrideY(), buffer.getStrideU(), buffer.getStrideV()};
      ByteBuffer[] planes = {buffer.getDataY(), buffer.getDataU(), buffer.getDataV()};
      return uploadYuvData(buffer.getWidth(), buffer.getHeight(), strides, planes);
    }

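    /** Returns the textures from the most recent upload, or null if nothing has been uploaded. */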
    public int[] getYuvTextures() {
      return yuvTextures;
    }

    /**
     * Releases cached resources. The uploader can still be used afterwards; the resources will be
     * reallocated on the next use.
     */
    public void release() {
      copyBuffer = null;
      if (yuvTextures != null) {
        GLES20.glDeleteTextures(3, yuvTextures, 0);
        yuvTextures = null;
      }
    }
  }

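  // Returns the Euclidean distance between (x0, y0) and (x1, y1), rounded to the nearest integer.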
  private static int distance(float x0, float y0, float x1, float y1) {
    return (int) Math.round(Math.hypot(x1 - x0, y1 - y0));
  }

  // These points are used to calculate the size of the part of the frame we are rendering.
  private static final float[] srcPoints =
      new float[] {0f /* x0 */, 0f /* y0 */, 1f /* x1 */, 0f /* y1 */, 0f /* x2 */, 1f /* y2 */};
  private final float[] dstPoints = new float[6];
  private final Point renderSize = new Point();
  private int renderWidth;
  private int renderHeight;

  // Calculates the frame size after |renderMatrix| is applied. Stores the output in the member
  // variables |renderWidth| and |renderHeight| to avoid allocations, since this function is called
  // for every frame.
  private void calculateTransformedRenderSize(
      int frameWidth, int frameHeight, Matrix renderMatrix) {
    if (renderMatrix == null) {
      renderWidth = frameWidth;
      renderHeight = frameHeight;
      return;
    }
    // Transform the texture coordinates (in the range [0, 1]) according to |renderMatrix|.
    renderMatrix.mapPoints(dstPoints, srcPoints);

    // Multiply with the width and height to get the positions in terms of pixels.
    for (int i = 0; i < 3; ++i) {
      dstPoints[i * 2 + 0] *= frameWidth;
      dstPoints[i * 2 + 1] *= frameHeight;
    }

    // Get the length of the sides of the transformed rectangle in terms of pixels.
    renderWidth = distance(dstPoints[0], dstPoints[1], dstPoints[2], dstPoints[3]);
    renderHeight = distance(dstPoints[0], dstPoints[1], dstPoints[4], dstPoints[5]);
  }

  private final YuvUploader yuvUploader = new YuvUploader();
  // This variable is only used for checking reference equality, so that uploaded I420 textures can
  // be reused if drawFrame() is called again with the same frame.
  private VideoFrame lastI420Frame;
  private final Matrix renderMatrix = new Matrix();

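  /**
   * Draws |frame|, with its rotation taken into account, into a viewport covering the full rotated
   * frame size.
   */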
  public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer) {
    drawFrame(frame, drawer, null /* additionalRenderMatrix */);
  }

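  /**
   * Draws |frame| like the two-argument overload, but also applies |additionalRenderMatrix| (which
   * may be null) for custom transformations.
   */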
  public void drawFrame(
      VideoFrame frame, RendererCommon.GlDrawer drawer, Matrix additionalRenderMatrix) {
    drawFrame(frame, drawer, additionalRenderMatrix, 0 /* viewportX */, 0 /* viewportY */,
        frame.getRotatedWidth(), frame.getRotatedHeight());
  }

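  /**
   * Draws |frame| into the given viewport, taking the frame rotation and the optional
   * |additionalRenderMatrix| into account. Calls either drawer.drawOes, drawer.drawRgb, or
   * drawer.drawYuv depending on the buffer type.
   */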
  public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer,
      Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth,
      int viewportHeight) {
    final int width = frame.getRotatedWidth();
    final int height = frame.getRotatedHeight();

    calculateTransformedRenderSize(width, height, additionalRenderMatrix);

    final boolean isTextureFrame = frame.getBuffer() instanceof VideoFrame.TextureBuffer;
    renderMatrix.reset();
    renderMatrix.preTranslate(0.5f, 0.5f);
    if (!isTextureFrame) {
      renderMatrix.preScale(1f, -1f); // I420-frames are upside down
    }
    renderMatrix.preRotate(frame.getRotation());
    renderMatrix.preTranslate(-0.5f, -0.5f);
    if (additionalRenderMatrix != null) {
      renderMatrix.preConcat(additionalRenderMatrix);
    }

    if (isTextureFrame) {
      lastI420Frame = null;
      drawTexture(drawer, (VideoFrame.TextureBuffer) frame.getBuffer(), renderMatrix, renderWidth,
          renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
    } else {
      // Only upload the I420 data to textures once per frame, in case drawFrame() is called
      // multiple times with the same frame.
      if (frame != lastI420Frame) {
        lastI420Frame = frame;
        final VideoFrame.I420Buffer i420Buffer = frame.getBuffer().toI420();
        yuvUploader.uploadFromBuffer(i420Buffer);
        i420Buffer.release();
      }

      drawer.drawYuv(yuvUploader.getYuvTextures(),
          RendererCommon.convertMatrixFromAndroidGraphicsMatrix(renderMatrix), renderWidth,
          renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
    }
  }

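  /**
   * Releases cached GL textures and buffers. The drawer can still be used afterwards; resources
   * will be reallocated when needed.
   */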
  public void release() {
    yuvUploader.release();
    lastI420Frame = null;
  }
}