summaryrefslogtreecommitdiffstats
path: root/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameDrawer.java
blob: af32587886f18a87260c0d69007d64de98f6a134 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
/*
 *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.graphics.Matrix;
import android.graphics.Point;
import android.opengl.GLES20;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;

/**
 * Helper class to draw VideoFrames. Calls either drawer.drawOes, drawer.drawRgb, or
 * drawer.drawYuv depending on the type of the buffer. The frame will be rendered with rotation
 * taken into account. You can supply an additional render matrix for custom transformations.
 */
public class VideoFrameDrawer {
  public static final String TAG = "VideoFrameDrawer";
  /**
   * Draws a VideoFrame.TextureBuffer. Calls either drawer.drawOes or drawer.drawRgb
   * depending on the type of the buffer. You can supply an additional render matrix. This is
   * used multiplied together with the transformation matrix of the frame. (M = renderMatrix *
   * transformationMatrix)
   */
  public static void drawTexture(RendererCommon.GlDrawer drawer, VideoFrame.TextureBuffer buffer,
      Matrix renderMatrix, int frameWidth, int frameHeight, int viewportX, int viewportY,
      int viewportWidth, int viewportHeight) {
    // Combine the frame's own transform with the caller-supplied one; preConcat yields
    // M = transformationMatrix * renderMatrix applied to texture coordinates.
    Matrix finalMatrix = new Matrix(buffer.getTransformMatrix());
    finalMatrix.preConcat(renderMatrix);
    float[] finalGlMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(finalMatrix);
    switch (buffer.getType()) {
      case OES:
        drawer.drawOes(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
            viewportY, viewportWidth, viewportHeight);
        break;
      case RGB:
        drawer.drawRgb(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
            viewportY, viewportWidth, viewportHeight);
        break;
      default:
        throw new RuntimeException("Unknown texture type.");
    }
  }

  /**
   * Helper class for uploading YUV bytebuffer frames to textures that handles stride > width. This
   * class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies.
   */
  private static class YuvUploader {
    // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
    // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
    // that handles stride and compare performance with intermediate copy.
    @Nullable private ByteBuffer copyBuffer;
    // Lazily allocated GL texture names for the Y-, U-, and V-planes; null until first upload.
    @Nullable private int[] yuvTextures;

    /**
     * Upload `planes` into OpenGL textures, taking stride into consideration.
     *
     * @return Array of three texture indices corresponding to Y-, U-, and V-plane respectively.
     */
    @Nullable
    public int[] uploadYuvData(int width, int height, int[] strides, ByteBuffer[] planes) {
      // I420: chroma planes are subsampled by 2 in both dimensions.
      final int[] planeWidths = new int[] {width, width / 2, width / 2};
      final int[] planeHeights = new int[] {height, height / 2, height / 2};
      // Make a first pass to see if we need a temporary copy buffer.
      int copyCapacityNeeded = 0;
      for (int i = 0; i < 3; ++i) {
        if (strides[i] > planeWidths[i]) {
          copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
        }
      }
      // Allocate copy buffer if necessary.
      if (copyCapacityNeeded > 0
          && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
        copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
      }
      // Make sure YUV textures are allocated.
      if (yuvTextures == null) {
        yuvTextures = new int[3];
        for (int i = 0; i < 3; i++) {
          yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
        }
      }
      // Upload each plane.
      for (int i = 0; i < 3; ++i) {
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
        // GLES only accepts packed data, i.e. stride == planeWidth.
        final ByteBuffer packedByteBuffer;
        if (strides[i] == planeWidths[i]) {
          // Input is packed already.
          packedByteBuffer = planes[i];
        } else {
          // Repack row by row into the intermediate buffer, dropping the stride padding.
          YuvHelper.copyPlane(
              planes[i], strides[i], copyBuffer, planeWidths[i], planeWidths[i], planeHeights[i]);
          packedByteBuffer = copyBuffer;
        }
        // Single-channel upload; each plane becomes a GL_LUMINANCE texture sampled by the shader.
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
            planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
      }
      return yuvTextures;
    }

    /** Uploads all three planes of `buffer` and returns the three plane texture ids. */
    @Nullable
    public int[] uploadFromBuffer(VideoFrame.I420Buffer buffer) {
      int[] strides = {buffer.getStrideY(), buffer.getStrideU(), buffer.getStrideV()};
      ByteBuffer[] planes = {buffer.getDataY(), buffer.getDataU(), buffer.getDataV()};
      return uploadYuvData(buffer.getWidth(), buffer.getHeight(), strides, planes);
    }

    /** Returns the cached plane textures, or null if nothing has been uploaded yet. */
    @Nullable
    public int[] getYuvTextures() {
      return yuvTextures;
    }

    /**
     * Releases cached resources. Uploader can still be used and the resources will be reallocated
     * on first use.
     */
    public void release() {
      copyBuffer = null;
      if (yuvTextures != null) {
        GLES20.glDeleteTextures(3, yuvTextures, 0);
        yuvTextures = null;
      }
    }
  }

  /** Euclidean distance between (x0, y0) and (x1, y1), rounded to the nearest integer. */
  private static int distance(float x0, float y0, float x1, float y1) {
    return (int) Math.round(Math.hypot(x1 - x0, y1 - y0));
  }

  // These points are used to calculate the size of the part of the frame we are rendering.
  // Origin plus the two unit-axis endpoints of the [0, 1] x [0, 1] texture rectangle.
  static final float[] srcPoints =
      new float[] {0f /* x0 */, 0f /* y0 */, 1f /* x1 */, 0f /* y1 */, 0f /* x2 */, 1f /* y2 */};
  private final float[] dstPoints = new float[6];
  private final Point renderSize = new Point();
  private int renderWidth;
  private int renderHeight;

  // Calculate the frame size after `renderMatrix` is applied. Stores the output in member variables
  // `renderWidth` and `renderHeight` to avoid allocations since this function is called for every
  // frame.
  private void calculateTransformedRenderSize(
      int frameWidth, int frameHeight, @Nullable Matrix renderMatrix) {
    if (renderMatrix == null) {
      renderWidth = frameWidth;
      renderHeight = frameHeight;
      return;
    }
    // Transform the texture coordinates (in the range [0, 1]) according to `renderMatrix`.
    renderMatrix.mapPoints(dstPoints, srcPoints);

    // Multiply with the width and height to get the positions in terms of pixels.
    for (int i = 0; i < 3; ++i) {
      dstPoints[i * 2 + 0] *= frameWidth;
      dstPoints[i * 2 + 1] *= frameHeight;
    }

    // Get the length of the sides of the transformed rectangle in terms of pixels.
    renderWidth = distance(dstPoints[0], dstPoints[1], dstPoints[2], dstPoints[3]);
    renderHeight = distance(dstPoints[0], dstPoints[1], dstPoints[4], dstPoints[5]);
  }

  private final YuvUploader yuvUploader = new YuvUploader();
  // This variable will only be used for checking reference equality and is used for caching I420
  // textures.
  @Nullable private VideoFrame lastI420Frame;
  private final Matrix renderMatrix = new Matrix();

  /** Draws `frame` over the full rotated frame size with no extra transformation. */
  public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer) {
    drawFrame(frame, drawer, null /* additionalRenderMatrix */);
  }

  /** Draws `frame` over the full rotated frame size with an optional extra transformation. */
  public void drawFrame(
      VideoFrame frame, RendererCommon.GlDrawer drawer, @Nullable Matrix additionalRenderMatrix) {
    drawFrame(frame, drawer, additionalRenderMatrix, 0 /* viewportX */, 0 /* viewportY */,
        frame.getRotatedWidth(), frame.getRotatedHeight());
  }

  /**
   * Draws `frame` into the given viewport. Texture buffers are drawn directly; any other buffer
   * type is converted to I420, uploaded to textures (cached per frame), and drawn as YUV.
   */
  public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer,
      @Nullable Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth,
      int viewportHeight) {
    final int width = frame.getRotatedWidth();
    final int height = frame.getRotatedHeight();
    calculateTransformedRenderSize(width, height, additionalRenderMatrix);
    if (renderWidth <= 0 || renderHeight <= 0) {
      Logging.w(TAG, "Illegal frame size: " + renderWidth + "x" + renderHeight);
      return;
    }

    final boolean isTextureFrame = frame.getBuffer() instanceof VideoFrame.TextureBuffer;
    // Build the render matrix around the texture center: translate to the center, apply
    // flip/rotation there, translate back, then apply the caller's extra transform.
    renderMatrix.reset();
    renderMatrix.preTranslate(0.5f, 0.5f);
    if (!isTextureFrame) {
      renderMatrix.preScale(1f, -1f); // I420-frames are upside down
    }
    renderMatrix.preRotate(frame.getRotation());
    renderMatrix.preTranslate(-0.5f, -0.5f);
    if (additionalRenderMatrix != null) {
      renderMatrix.preConcat(additionalRenderMatrix);
    }

    if (isTextureFrame) {
      lastI420Frame = null;
      drawTexture(drawer, (VideoFrame.TextureBuffer) frame.getBuffer(), renderMatrix, renderWidth,
          renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
    } else {
      // Only upload the I420 data to textures once per frame, if we are called multiple times
      // with the same frame.
      if (frame != lastI420Frame) {
        final VideoFrame.I420Buffer i420Buffer = frame.getBuffer().toI420();
        if (i420Buffer == null) {
          // Conversion can fail; skip this frame rather than crash or draw stale/null textures.
          // `lastI420Frame` is left unset so the conversion is retried on the next call.
          Logging.w(TAG, "Failed to convert frame to I420. Dropping frame.");
          return;
        }
        yuvUploader.uploadFromBuffer(i420Buffer);
        i420Buffer.release();
        // Cache only after a successful upload.
        lastI420Frame = frame;
      }

      drawer.drawYuv(yuvUploader.getYuvTextures(),
          RendererCommon.convertMatrixFromAndroidGraphicsMatrix(renderMatrix), renderWidth,
          renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
    }
  }

  /**
   * Returns a retained reference to `buffer`. This implementation performs no scaling for the
   * viewport size; the caller owns the returned reference and must release it.
   */
  public VideoFrame.Buffer prepareBufferForViewportSize(
      VideoFrame.Buffer buffer, int width, int height) {
    buffer.retain();
    return buffer;
  }

  /** Releases all GL resources. The drawer may still be used; resources are reallocated lazily. */
  public void release() {
    yuvUploader.release();
    lastI420Frame = null;
  }
}