diff --git a/webrtc/api/android/java/src/org/webrtc/SurfaceTextureHelper.java b/webrtc/api/android/java/src/org/webrtc/SurfaceTextureHelper.java
index e7c767b057..48a57bca5e 100644
--- a/webrtc/api/android/java/src/org/webrtc/SurfaceTextureHelper.java
+++ b/webrtc/api/android/java/src/org/webrtc/SurfaceTextureHelper.java
@@ -134,18 +134,6 @@ class SurfaceTextureHelper {
     });
   }
 
-  private YuvConverter getYuvConverter() {
-    // yuvConverter is assigned once
-    if (yuvConverter != null)
-      return yuvConverter;
-
-    synchronized (this) {
-      if (yuvConverter == null)
-        yuvConverter = new YuvConverter(eglBase.getEglBaseContext());
-      return yuvConverter;
-    }
-  }
-
   /**
    * Start to stream textures to the given |listener|. If you need to change listener, you need to
    * call stopListening() first.
@@ -231,12 +219,21 @@ class SurfaceTextureHelper {
     });
   }
 
-  public void textureToYUV(
-      ByteBuffer buf, int width, int height, int stride, int textureId, float[] transformMatrix) {
-    if (textureId != oesTextureId)
+  public void textureToYUV(final ByteBuffer buf, final int width, final int height,
+      final int stride, final int textureId, final float[] transformMatrix) {
+    if (textureId != oesTextureId) {
       throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");
+    }
 
-    getYuvConverter().convert(buf, width, height, stride, textureId, transformMatrix);
+    ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
+      @Override
+      public void run() {
+        if (yuvConverter == null) {
+          yuvConverter = new YuvConverter();
+        }
+        yuvConverter.convert(buf, width, height, stride, textureId, transformMatrix);
+      }
+    });
   }
 
   private void updateTexImage() {
@@ -275,9 +272,8 @@ class SurfaceTextureHelper {
     if (isTextureInUse || !isQuitting) {
       throw new IllegalStateException("Unexpected release.");
     }
-    synchronized (this) {
-      if (yuvConverter != null)
-        yuvConverter.release();
+    if (yuvConverter != null) {
+      yuvConverter.release();
     }
     GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
     surfaceTexture.release();
diff --git a/webrtc/api/android/java/src/org/webrtc/VideoFileRenderer.java b/webrtc/api/android/java/src/org/webrtc/VideoFileRenderer.java
index b76d1900b5..613ed1aaaf 100644
--- a/webrtc/api/android/java/src/org/webrtc/VideoFileRenderer.java
+++ b/webrtc/api/android/java/src/org/webrtc/VideoFileRenderer.java
@@ -15,7 +15,6 @@ import android.os.HandlerThread;
 import java.nio.ByteBuffer;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.util.concurrent.CountDownLatch;
 
 /**
  * Can be used to save the video frames to file.
@@ -23,7 +22,6 @@ import java.util.concurrent.CountDownLatch;
 public class VideoFileRenderer implements VideoRenderer.Callbacks {
   private static final String TAG = "VideoFileRenderer";
 
-  private final YuvConverter yuvConverter;
   private final HandlerThread renderThread;
   private final Object handlerLock = new Object();
   private final Handler renderThreadHandler;
@@ -32,13 +30,14 @@ public class VideoFileRenderer implements VideoRenderer.Callbacks {
   private final int outputFileHeight;
   private final int outputFrameSize;
   private final ByteBuffer outputFrameBuffer;
+  private EglBase eglBase;
+  private YuvConverter yuvConverter;
 
   public VideoFileRenderer(String outputFile, int outputFileWidth, int outputFileHeight,
-      EglBase.Context sharedContext) throws IOException {
+      final EglBase.Context sharedContext) throws IOException {
     if ((outputFileWidth % 2) == 1 || (outputFileHeight % 2) == 1) {
       throw new IllegalArgumentException("Does not support uneven width or height");
     }
-    yuvConverter = new YuvConverter(sharedContext);
 
     this.outputFileWidth = outputFileWidth;
     this.outputFileHeight = outputFileHeight;
@@ -54,6 +53,16 @@ public class VideoFileRenderer implements VideoRenderer.Callbacks {
     renderThread = new HandlerThread(TAG);
     renderThread.start();
     renderThreadHandler = new Handler(renderThread.getLooper());
+
+    ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, new Runnable() {
+      @Override
+      public void run() {
+        eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
+        eglBase.createDummyPbufferSurface();
+        eglBase.makeCurrent();
+        yuvConverter = new YuvConverter();
+      }
+    });
   }
 
   @Override
@@ -113,8 +122,7 @@ public class VideoFileRenderer implements VideoRenderer.Callbacks {
   }
 
   public void release() {
-    final CountDownLatch cleanupBarrier = new CountDownLatch(1);
-    renderThreadHandler.post(new Runnable() {
+    ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, new Runnable() {
       @Override
       public void run() {
         try {
@@ -122,11 +130,11 @@ public class VideoFileRenderer implements VideoRenderer.Callbacks {
         } catch (IOException e) {
           Logging.d(TAG, "Error closing output video file");
         }
-        cleanupBarrier.countDown();
+        yuvConverter.release();
+        eglBase.release();
+        renderThread.quit();
       }
     });
-    ThreadUtils.awaitUninterruptibly(cleanupBarrier);
-    renderThread.quit();
   }
 
   public static native void nativeI420Scale(ByteBuffer srcY, int strideY, ByteBuffer srcU,
diff --git a/webrtc/api/android/java/src/org/webrtc/YuvConverter.java b/webrtc/api/android/java/src/org/webrtc/YuvConverter.java
index 1203d86515..050d69de8e 100644
--- a/webrtc/api/android/java/src/org/webrtc/YuvConverter.java
+++ b/webrtc/api/android/java/src/org/webrtc/YuvConverter.java
@@ -16,13 +16,10 @@ import java.nio.ByteBuffer;
 import java.nio.FloatBuffer;
 
 /**
- * Class for converting OES textures to a YUV ByteBuffer.
+ * Class for converting OES textures to a YUV ByteBuffer. It should be constructed on a thread with
+ * an active EGL context, and only be used from that thread.
  */
 class YuvConverter {
-  private final EglBase eglBase;
-  private final GlShader shader;
-  private boolean released = false;
-
   // Vertex coordinates in Normalized Device Coordinates, i.e.
   // (-1, -1) is bottom-left and (1, 1) is top-right.
   private static final FloatBuffer DEVICE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
@@ -83,14 +80,40 @@ class YuvConverter {
       + "}\n";
   // clang-format on
 
-  private int texMatrixLoc;
-  private int xUnitLoc;
-  private int coeffsLoc;
+  private final int frameBufferId;
+  private final int frameTextureId;
+  private final GlShader shader;
+  private final int texMatrixLoc;
+  private final int xUnitLoc;
+  private final int coeffsLoc;
+  private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+  private int frameBufferWidth;
+  private int frameBufferHeight;
+  private boolean released = false;
 
-  public YuvConverter(EglBase.Context sharedContext) {
-    eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER);
-    eglBase.createDummyPbufferSurface();
-    eglBase.makeCurrent();
+  /**
+   * This class should be constructed on a thread that has an active EGL context.
+   */
+  public YuvConverter() {
+    threadChecker.checkIsOnValidThread();
+    frameTextureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+    this.frameBufferWidth = 0;
+    this.frameBufferHeight = 0;
+
+    // Create framebuffer object and bind it.
+    final int frameBuffers[] = new int[1];
+    GLES20.glGenFramebuffers(1, frameBuffers, 0);
+    frameBufferId = frameBuffers[0];
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
+    GlUtil.checkNoGLES2Error("Generate framebuffer");
+
+    // Attach the texture to the framebuffer as color attachment.
+    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
+        GLES20.GL_TEXTURE_2D, frameTextureId, 0);
+    GlUtil.checkNoGLES2Error("Attach texture to framebuffer");
+
+    // Restore normal framebuffer.
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
 
     shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
     shader.useProgram();
@@ -104,11 +127,11 @@ class YuvConverter {
     // If the width is not a multiple of 4 pixels, the texture
     // will be scaled up slightly and clipped at the right border.
     shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
-    eglBase.detachCurrent();
   }
 
-  synchronized public void convert(
-      ByteBuffer buf, int width, int height, int stride, int textureId, float[] transformMatrix) {
+  public void convert(ByteBuffer buf, int width, int height, int stride, int srcTextureId,
+      float[] transformMatrix) {
+    threadChecker.checkIsOnValidThread();
     if (released) {
       throw new IllegalStateException("YuvConverter.convert called on released object");
     }
@@ -163,20 +186,28 @@ class YuvConverter {
     transformMatrix =
         RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.verticalFlipMatrix());
 
-    // Create new pBuffferSurface with the correct size if needed.
-    if (eglBase.hasSurface()) {
-      if (eglBase.surfaceWidth() != stride / 4 || eglBase.surfaceHeight() != total_height) {
-        eglBase.releaseSurface();
-        eglBase.createPbufferSurface(stride / 4, total_height);
+    // Bind our framebuffer.
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
+    GlUtil.checkNoGLES2Error("glBindFramebuffer");
+
+    if (frameBufferWidth != stride / 4 || frameBufferHeight != total_height) {
+      frameBufferWidth = stride / 4;
+      frameBufferHeight = total_height;
+      // (Re)-Allocate texture.
+      GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameTextureId);
+      GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, frameBufferWidth,
+          frameBufferHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
+
+      // Check that the framebuffer is in a good state.
+      final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
+      if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
+        throw new IllegalStateException("Framebuffer not complete, status: " + status);
       }
-    } else {
-      eglBase.createPbufferSurface(stride / 4, total_height);
     }
 
-    eglBase.makeCurrent();
-
     GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
-    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
+    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, srcTextureId);
     GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0);
 
     // Draw Y
@@ -203,20 +234,26 @@ class YuvConverter {
     GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
 
     GLES20.glReadPixels(
-        0, 0, stride / 4, total_height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
+        0, 0, frameBufferWidth, frameBufferHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
 
     GlUtil.checkNoGLES2Error("YuvConverter.convert");
 
+    // Restore normal framebuffer.
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+
     // Unbind texture. Reportedly needed on some devices to get
     // the texture updated from the camera.
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
-    eglBase.detachCurrent();
   }
 
-  synchronized public void release() {
+  public void release() {
+    threadChecker.checkIsOnValidThread();
     released = true;
-    eglBase.makeCurrent();
     shader.release();
-    eglBase.release();
+    GLES20.glDeleteTextures(1, new int[] {frameTextureId}, 0);
+    GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
+    frameBufferWidth = 0;
+    frameBufferHeight = 0;
   }
 }
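Usage sketch (not part of the patch): after this change YuvConverter no longer creates its own EGL context, so a caller is expected to provide a thread that has a current EGL context and to keep construction, convert() and release() on that thread, the way VideoFileRenderer now does. The YuvTextureReader class and its method names below are hypothetical illustrations; EglBase, ThreadUtils and the YuvConverter API are the ones used in the diff above.

package org.webrtc;

import android.os.Handler;
import android.os.HandlerThread;

import java.nio.ByteBuffer;

/** Hypothetical caller that owns its own render thread and EGL context. */
class YuvTextureReader {
  private final HandlerThread thread = new HandlerThread("YuvTextureReader");
  private final Handler handler;
  private EglBase eglBase;
  private YuvConverter yuvConverter;

  YuvTextureReader(final EglBase.Context sharedContext) {
    thread.start();
    handler = new Handler(thread.getLooper());
    // Create the EGL context and the converter on the thread that will use them.
    ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
      @Override
      public void run() {
        eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
        eglBase.createDummyPbufferSurface();
        eglBase.makeCurrent();
        // YuvConverter picks up the EGL context that is current on this thread.
        yuvConverter = new YuvConverter();
      }
    });
  }

  void textureToYuv(final ByteBuffer buf, final int width, final int height, final int stride,
      final int oesTextureId, final float[] transformMatrix) {
    // convert() must run on the same thread the converter was constructed on.
    ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
      @Override
      public void run() {
        yuvConverter.convert(buf, width, height, stride, oesTextureId, transformMatrix);
      }
    });
  }

  void release() {
    // Release GL resources on the render thread, then let the thread quit.
    ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
      @Override
      public void run() {
        yuvConverter.release();
        eglBase.release();
        thread.quit();
      }
    });
  }
}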