Android EglRenderer: Add Bitmap frame listener functionality.

BUG=webrtc:6470

Review-Url: https://codereview.webrtc.org/2456873002
Cr-Commit-Position: refs/heads/master@{#14921}
This commit is contained in:
sakal 2016-11-03 09:15:34 -07:00 committed by Commit bot
parent 8848828708
commit fb0c573263
4 changed files with 390 additions and 7 deletions

View File

@ -473,6 +473,7 @@ if (rtc_include_tests) {
"androidtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java",
"androidtests/src/org/webrtc/Camera2CapturerTest.java",
"androidtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java",
"androidtests/src/org/webrtc/EglRendererTest.java",
"androidtests/src/org/webrtc/GlRectDrawerTest.java",
"androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java",
"androidtests/src/org/webrtc/NetworkMonitorTest.java",

View File

@ -10,12 +10,16 @@
package org.webrtc;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.view.Surface;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
@ -29,6 +33,18 @@ public class EglRenderer implements VideoRenderer.Callbacks {
private static final long LOG_INTERVAL_SEC = 4;
private static final int MAX_SURFACE_CLEAR_COUNT = 3;
/**
 * Callback for receiving rendered frames as Bitmaps. The Bitmap passed to onFrame() may be null
 * when the listener was registered with a scale of 0 (no Bitmap requested) — see
 * addFrameListener().
 */
public interface FrameListener { void onFrame(Bitmap frame); }
/**
 * Immutable pairing of a FrameListener with the Bitmap scale it was registered with. A scale of
 * 0 means no Bitmap is generated and the listener is invoked with null instead.
 */
private static class ScaleAndFrameListener {
  // Scale factor applied to the frame's rotated width/height when rendering the Bitmap.
  public final float scale;
  public final FrameListener listener;
  public ScaleAndFrameListener(float scale, FrameListener listener) {
    this.scale = scale;
    this.listener = listener;
  }
}
private class EglSurfaceCreation implements Runnable {
private Object surface;
@ -60,6 +76,9 @@ public class EglRenderer implements VideoRenderer.Callbacks {
private final Object handlerLock = new Object();
private Handler renderThreadHandler;
private final Object frameListenerLock = new Object();
private final ArrayList<ScaleAndFrameListener> frameListeners = new ArrayList<>();
// Variables for fps reduction.
private final Object fpsReductionLock = new Object();
// Time for when next frame should be rendered.
@ -104,6 +123,9 @@ public class EglRenderer implements VideoRenderer.Callbacks {
// Time in ns spent by the render thread in the swapBuffers() function.
private long renderSwapBufferTimeNs;
// Used for bitmap capturing.
private GlTextureFrameBuffer bitmapTextureFramebuffer;
// Runnable for posting frames to render thread.
private final Runnable renderFrameRunnable = new Runnable() {
@Override
@ -220,6 +242,10 @@ public class EglRenderer implements VideoRenderer.Callbacks {
GLES20.glDeleteTextures(3, yuvTextures, 0);
yuvTextures = null;
}
if (bitmapTextureFramebuffer != null) {
bitmapTextureFramebuffer.release();
bitmapTextureFramebuffer = null;
}
if (eglBase != null) {
logD("eglBase detach and release.");
eglBase.detachCurrent();
@ -333,6 +359,36 @@ public class EglRenderer implements VideoRenderer.Callbacks {
setFpsReduction(0 /* fps */);
}
/**
 * Register a callback to be invoked when a new video frame has been received. The callback is
 * one-shot: it is dequeued after being invoked once, so it must be re-added to observe further
 * frames.
 *
 * @param listener The callback to be invoked.
 * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
 *              required.
 */
public void addFrameListener(FrameListener listener, float scale) {
  // Build the queue entry outside the lock; only the list mutation needs to be guarded.
  final ScaleAndFrameListener entry = new ScaleAndFrameListener(scale, listener);
  synchronized (frameListenerLock) {
    frameListeners.add(entry);
  }
}
/**
 * Remove any pending callback that was added with addFrameListener. All queued entries whose
 * listener is the same object (identity comparison) are removed. If the callback is not in
 * the queue, nothing happens.
 *
 * @param listener The callback to remove.
 */
public void removeFrameListener(FrameListener listener) {
  synchronized (frameListenerLock) {
    final Iterator<ScaleAndFrameListener> iter = frameListeners.iterator();
    while (iter.hasNext()) {
      // Reference equality on purpose: remove exactly the registered listener instance.
      if (iter.next().listener == listener) {
        iter.remove();
      }
    }
  }
}
// VideoRenderer.Callbacks interface.
@Override
public void renderFrame(VideoRenderer.I420Frame frame) {
@ -472,8 +528,9 @@ public class EglRenderer implements VideoRenderer.Callbacks {
}
final long startTimeNs = System.nanoTime();
float[] texMatrix =
final float[] texMatrix =
RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
final float[] drawMatrix;
// After a surface size change, the EGLSurface might still have a buffer of the old size in the
// pipeline. Querying the EGLSurface will show if the underlying buffer dimensions haven't yet
@ -487,7 +544,8 @@ public class EglRenderer implements VideoRenderer.Callbacks {
VideoRenderer.renderFrameDone(frame);
return;
}
logD("Surface size mismatch - clearing surface.");
logD("Surface size mismatch - clearing surface. Size: " + eglBase.surfaceWidth() + "x"
+ eglBase.surfaceHeight() + " Expected: " + surfaceWidth + "x" + surfaceHeight);
clearSurfaceOnRenderThread();
}
final float[] layoutMatrix;
@ -498,7 +556,7 @@ public class EglRenderer implements VideoRenderer.Callbacks {
layoutMatrix =
mirror ? RendererCommon.horizontalFlipMatrix() : RendererCommon.identityMatrix();
}
texMatrix = RendererCommon.multiplyMatrices(texMatrix, layoutMatrix);
drawMatrix = RendererCommon.multiplyMatrices(texMatrix, layoutMatrix);
}
GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
@ -513,16 +571,15 @@ public class EglRenderer implements VideoRenderer.Callbacks {
}
yuvUploader.uploadYuvData(
yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
drawer.drawYuv(yuvTextures, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(), 0, 0,
drawer.drawYuv(yuvTextures, drawMatrix, frame.rotatedWidth(), frame.rotatedHeight(), 0, 0,
surfaceWidth, surfaceHeight);
} else {
drawer.drawOes(frame.textureId, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(), 0, 0,
drawer.drawOes(frame.textureId, drawMatrix, frame.rotatedWidth(), frame.rotatedHeight(), 0, 0,
surfaceWidth, surfaceHeight);
}
final long swapBuffersStartTimeNs = System.nanoTime();
eglBase.swapBuffers();
VideoRenderer.renderFrameDone(frame);
final long currentTimeNs = System.nanoTime();
synchronized (statisticsLock) {
@ -530,6 +587,65 @@ public class EglRenderer implements VideoRenderer.Callbacks {
renderTimeNs += (currentTimeNs - startTimeNs);
renderSwapBufferTimeNs += (currentTimeNs - swapBuffersStartTimeNs);
}
notifyCallbacks(frame, texMatrix);
VideoRenderer.renderFrameDone(frame);
}
/**
 * Renders the current frame into an offscreen framebuffer, reads it back as a Bitmap and
 * delivers it to all queued FrameListeners. Listeners are one-shot: the queue is drained here.
 * Must run on the render thread with the EGL context current (uses |drawer|, |yuvTextures| and
 * GL state set up by the render pass).
 *
 * @param frame The frame that was just rendered.
 * @param texMatrix Texture matrix for the frame, before mirroring/layout adjustments.
 */
private void notifyCallbacks(VideoRenderer.I420Frame frame, float[] texMatrix) {
  // Make temporary copy of callback list to avoid ConcurrentModificationException, in case
  // callbacks call addFrameListener or removeFrameListener.
  final ArrayList<ScaleAndFrameListener> tmpList;
  synchronized (frameListenerLock) {
    if (frameListeners.isEmpty())
      return;
    tmpList = new ArrayList<>(frameListeners);
    // Listeners are invoked at most once per registration.
    frameListeners.clear();
  }
  // Apply mirroring plus a vertical flip: glReadPixels returns rows bottom-up while Bitmap
  // expects them top-down.
  final float[] bitmapMatrix = RendererCommon.multiplyMatrices(
      RendererCommon.multiplyMatrices(texMatrix,
          mirror ? RendererCommon.horizontalFlipMatrix() : RendererCommon.identityMatrix()),
      RendererCommon.verticalFlipMatrix());
  for (ScaleAndFrameListener scaleAndListener : tmpList) {
    final int scaledWidth = (int) (scaleAndListener.scale * frame.rotatedWidth());
    final int scaledHeight = (int) (scaleAndListener.scale * frame.rotatedHeight());
    // Scale 0 (or a degenerate frame) means no Bitmap was requested.
    if (scaledWidth == 0 || scaledHeight == 0) {
      scaleAndListener.listener.onFrame(null);
      continue;
    }
    // Lazily create and resize the shared offscreen framebuffer; released in the renderer's
    // GL-resource cleanup.
    if (bitmapTextureFramebuffer == null) {
      bitmapTextureFramebuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
    }
    bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
        GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);
    // Draw the frame into the offscreen buffer, then read the pixels back.
    if (frame.yuvFrame) {
      drawer.drawYuv(yuvTextures, bitmapMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
          0 /* viewportX */, 0 /* viewportY */, scaledWidth, scaledHeight);
    } else {
      drawer.drawOes(frame.textureId, bitmapMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
          0 /* viewportX */, 0 /* viewportY */, scaledWidth, scaledHeight);
    }
    // glReadPixels requires a direct buffer.
    final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
    GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
    GLES20.glReadPixels(
        0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);
    // Restore the default (onscreen) framebuffer before invoking the callback.
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");
    final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(bitmapBuffer);
    scaleAndListener.listener.onFrame(bitmap);
  }
}
private String averageTimeAsString(long sumTimeNs, int count) {

View File

@ -12,7 +12,6 @@ package org.webrtc;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import java.nio.FloatBuffer;
import java.util.IdentityHashMap;
import java.util.Map;

View File

@ -0,0 +1,267 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.test.InstrumentationTestCase;
import android.test.suitebuilder.annotation.SmallTest;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
// EmptyActivity is needed for the surface.
/**
 * Tests for EglRenderer's FrameListener functionality: renders small YUV test frames and checks
 * the Bitmaps delivered to registered listeners for presence, size and pixel content.
 */
public class EglRendererTest extends InstrumentationTestCase {
  final static String TAG = "EglRendererTest";
  // How long to wait for a Bitmap callback before concluding none will arrive.
  final static int RENDER_WAIT_MS = 1000;
  final static int TEST_FRAME_WIDTH = 4;
  final static int TEST_FRAME_HEIGHT = 4;
  // Some arbitrary frames. Each entry is {Y-plane, U-plane, V-plane} for a 4x4 I420 frame
  // (U/V planes are 2x2).
  final static ByteBuffer[][] TEST_FRAMES = {
      {
          ByteBuffer.wrap(new byte[] {
              11, -12, 13, -14, -15, 16, -17, 18, 19, -110, 111, -112, -113, 114, -115, 116}),
          ByteBuffer.wrap(new byte[] {117, 118, 119, 120}),
          ByteBuffer.wrap(new byte[] {121, 122, 123, 124}),
      },
      {
          ByteBuffer.wrap(new byte[] {-11, -12, -13, -14, -15, -16, -17, -18, -19, -110, -111,
              -112, -113, -114, -115, -116}),
          ByteBuffer.wrap(new byte[] {-121, -122, -123, -124}),
          ByteBuffer.wrap(new byte[] {-117, -118, -119, -120}),
      },
      {
          ByteBuffer.wrap(new byte[] {-11, -12, -13, -14, -15, -16, -17, -18, -19, -110, -111,
              -112, -113, -114, -115, -116}),
          ByteBuffer.wrap(new byte[] {117, 118, 119, 120}),
          ByteBuffer.wrap(new byte[] {121, 122, 123, 124}),
      },
  };

  /** Listener that stores the most recent Bitmap and fails if an unexpected one arrives. */
  private static class TestFrameListener implements EglRenderer.FrameListener {
    // Both fields guarded by |this|.
    private boolean bitmapReceived;
    private Bitmap storedBitmap;

    @Override
    public synchronized void onFrame(Bitmap bitmap) {
      if (bitmapReceived) {
        fail("Unexpected bitmap was received.");
      }
      bitmapReceived = true;
      storedBitmap = bitmap;
      notify();
    }

    /** Returns true iff a Bitmap callback arrived within |timeoutMs|. */
    public synchronized boolean waitForBitmap(int timeoutMs) throws InterruptedException {
      if (!bitmapReceived) {
        wait(timeoutMs);
      }
      return bitmapReceived;
    }

    /** Clears the received flag and returns the last stored Bitmap (possibly null). */
    public synchronized Bitmap resetAndGetBitmap() {
      bitmapReceived = false;
      return storedBitmap;
    }
  }

  final TestFrameListener testFrameListener = new TestFrameListener();

  EglRenderer eglRenderer;
  int oesTextureId;
  SurfaceTexture surfaceTexture;

  @Override
  protected void setUp() throws Exception {
    PeerConnectionFactory.initializeAndroidGlobals(getInstrumentation().getTargetContext(),
        true /* initializeAudio */, true /* initializeVideo */, true /* videoHwAcceleration */);
    eglRenderer = new EglRenderer("TestRenderer: ");
    eglRenderer.init(null /* sharedContext */, EglBase.CONFIG_RGBA, new GlRectDrawer());
    oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
    surfaceTexture = new SurfaceTexture(oesTextureId);
    surfaceTexture.setDefaultBufferSize(1 /* width */, 1 /* height */);
    eglRenderer.createEglSurface(surfaceTexture);
    eglRenderer.surfaceSizeChanged(1 /* width */, 1 /* height */);
  }

  @Override
  public void tearDown() {
    surfaceTexture.release();
    GLES20.glDeleteTextures(1 /* n */, new int[] {oesTextureId}, 0 /* offset */);
    eglRenderer.release();
  }

  /** Checks the bitmap is not null and the correct size. */
  private static void checkBitmap(Bitmap bitmap, float scale) {
    assertNotNull(bitmap);
    assertEquals((int) (TEST_FRAME_WIDTH * scale), bitmap.getWidth());
    assertEquals((int) (TEST_FRAME_HEIGHT * scale), bitmap.getHeight());
  }

  /**
   * Does linear sampling on U/V plane of test data.
   *
   * @param plane Plane data to be sampled from.
   * @param planeWidth Width of the plane data. This is also assumed to be the stride.
   * @param planeHeight Height of the plane data.
   * @param x X-coordinate in range [0, 1].
   * @param y Y-coordinate in range [0, 1].
   */
  private static float linearSample(
      ByteBuffer plane, int planeWidth, int planeHeight, float x, float y) {
    final int stride = planeWidth;

    final float coordX = x * planeWidth;
    final float coordY = y * planeHeight;

    int lowIndexX = (int) Math.floor(coordX - 0.5f);
    int lowIndexY = (int) Math.floor(coordY - 0.5f);
    int highIndexX = lowIndexX + 1;
    int highIndexY = lowIndexY + 1;

    final float highWeightX = coordX - lowIndexX - 0.5f;
    final float highWeightY = coordY - lowIndexY - 0.5f;
    final float lowWeightX = 1f - highWeightX;
    final float lowWeightY = 1f - highWeightY;

    // Clamp on the edges.
    lowIndexX = Math.max(0, lowIndexX);
    lowIndexY = Math.max(0, lowIndexY);
    highIndexX = Math.min(planeWidth - 1, highIndexX);
    highIndexY = Math.min(planeHeight - 1, highIndexY);

    float lowYValue = (plane.get(lowIndexY * stride + lowIndexX) & 0xFF) * lowWeightX
        + (plane.get(lowIndexY * stride + highIndexX) & 0xFF) * highWeightX;
    float highYValue = (plane.get(highIndexY * stride + lowIndexX) & 0xFF) * lowWeightX
        + (plane.get(highIndexY * stride + highIndexX) & 0xFF) * highWeightX;

    return (lowWeightY * lowYValue + highWeightY * highYValue) / 255f;
  }

  /** Clamps |c| to [0, 1] and converts it to a byte in [0, 255]. */
  private static byte saturatedFloatToByte(float c) {
    return (byte) Math.round(255f * Math.max(0f, Math.min(1f, c)));
  }

  /**
   * Converts test data YUV frame to expected RGBA frame. Tries to match the behavior of OpenGL
   * YUV drawer shader. Does linear sampling on the U- and V-planes.
   *
   * @param yuvFrame Array of size 3 containing Y-, U-, V-planes for image of size
   *                 (TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT). U- and V-planes should be half the
   *                 size of the Y-plane.
   */
  private static byte[] convertYUVFrameToRGBA(ByteBuffer[] yuvFrame) {
    final byte[] argbFrame = new byte[TEST_FRAME_WIDTH * TEST_FRAME_HEIGHT * 4];
    final int argbStride = TEST_FRAME_WIDTH * 4;
    final int yStride = TEST_FRAME_WIDTH;

    for (int y = 0; y < TEST_FRAME_HEIGHT; y++) {
      for (int x = 0; x < TEST_FRAME_WIDTH; x++) {
        final float yC = (yuvFrame[0].get(y * yStride + x) & 0xFF) / 255f;
        final float uC = linearSample(yuvFrame[1], TEST_FRAME_WIDTH / 2, TEST_FRAME_HEIGHT / 2,
            (x + 0.5f) / TEST_FRAME_WIDTH, (y + 0.5f) / TEST_FRAME_HEIGHT)
            - 0.5f;
        final float vC = linearSample(yuvFrame[2], TEST_FRAME_WIDTH / 2, TEST_FRAME_HEIGHT / 2,
            (x + 0.5f) / TEST_FRAME_WIDTH, (y + 0.5f) / TEST_FRAME_HEIGHT)
            - 0.5f;
        // YUV -> RGB conversion matching the drawer shader's coefficients.
        final float rC = yC + 1.403f * vC;
        final float gC = yC - 0.344f * uC - 0.714f * vC;
        final float bC = yC + 1.77f * uC;

        argbFrame[y * argbStride + x * 4 + 0] = saturatedFloatToByte(rC);
        argbFrame[y * argbStride + x * 4 + 1] = saturatedFloatToByte(gC);
        argbFrame[y * argbStride + x * 4 + 2] = saturatedFloatToByte(bC);
        argbFrame[y * argbStride + x * 4 + 3] = (byte) 255;
      }
    }

    return argbFrame;
  }

  /** Checks that the bitmap content matches the test frame with the given index. */
  private static void checkBitmapContent(Bitmap bitmap, int frame) {
    checkBitmap(bitmap, 1f);

    byte[] expectedRGBA = convertYUVFrameToRGBA(TEST_FRAMES[frame]);
    ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(bitmap.getByteCount());
    bitmap.copyPixelsToBuffer(bitmapBuffer);

    for (int i = 0; i < expectedRGBA.length; i++) {
      int expected = expectedRGBA[i] & 0xFF;
      int value = bitmapBuffer.get(i) & 0xFF;
      // Due to unknown conversion differences check value matches +-1.
      if (Math.abs(value - expected) > 1) {
        Logging.d(TAG, "Expected bitmap content: " + Arrays.toString(expectedRGBA));
        // Direct ByteBuffers have no accessible backing array, so bitmapBuffer.array() would
        // throw UnsupportedOperationException here; copy the contents out for logging instead.
        final byte[] bitmapContent = new byte[expectedRGBA.length];
        for (int j = 0; j < bitmapContent.length; j++) {
          bitmapContent[j] = bitmapBuffer.get(j);
        }
        Logging.d(TAG, "Bitmap content: " + Arrays.toString(bitmapContent));
        fail("Frame doesn't match original frame on byte " + i + ". Expected: " + expected
            + " Result: " + value);
      }
    }
  }

  /** Tells eglRenderer to render test frame with given index. */
  private void feedFrame(int i) {
    eglRenderer.renderFrame(new VideoRenderer.I420Frame(TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT, 0,
        new int[] {TEST_FRAME_WIDTH, TEST_FRAME_WIDTH / 2, TEST_FRAME_WIDTH / 2}, TEST_FRAMES[i],
        0));
  }

  @SmallTest
  public void testAddFrameListener() throws Exception {
    eglRenderer.addFrameListener(testFrameListener, 0f /* scaleFactor */);
    feedFrame(0);
    assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
    assertNull(testFrameListener.resetAndGetBitmap());
    eglRenderer.addFrameListener(testFrameListener, 0f /* scaleFactor */);
    feedFrame(1);
    assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
    assertNull(testFrameListener.resetAndGetBitmap());
    feedFrame(2);
    // Check we get no more bitmaps than two.
    assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
  }

  @SmallTest
  public void testAddFrameListenerBitmap() throws Exception {
    eglRenderer.addFrameListener(testFrameListener, 1f /* scaleFactor */);
    feedFrame(0);
    assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
    checkBitmapContent(testFrameListener.resetAndGetBitmap(), 0);
    eglRenderer.addFrameListener(testFrameListener, 1f /* scaleFactor */);
    feedFrame(1);
    assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
    checkBitmapContent(testFrameListener.resetAndGetBitmap(), 1);
  }

  @SmallTest
  public void testAddFrameListenerBitmapScale() throws Exception {
    for (int i = 0; i < 3; ++i) {
      float scale = i * 0.5f + 0.5f;
      eglRenderer.addFrameListener(testFrameListener, scale);
      feedFrame(i);
      assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
      checkBitmap(testFrameListener.resetAndGetBitmap(), scale);
    }
  }
}