diff --git a/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java b/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java
index 169874d06d..f0ec00e1a7 100644
--- a/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java
+++ b/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java
@@ -368,6 +368,7 @@ public class MediaCodecVideoDecoder {
         SurfaceTextureHelper.create("Decoder SurfaceTextureHelper", eglContext);
     if (surfaceTextureHelper != null) {
       textureListener = new TextureListener(surfaceTextureHelper);
+      textureListener.setSize(width, height);
       surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
     }
   }
@@ -414,6 +415,9 @@ public class MediaCodecVideoDecoder {
     this.width = width;
     this.height = height;

+    if (textureListener != null) {
+      textureListener.setSize(width, height);
+    }
     decodeStartTimeMs.clear();
     dequeuedSurfaceOutputBuffers.clear();
     hasDecodedFirstFrame = false;
@@ -634,12 +638,12 @@ public class MediaCodecVideoDecoder {
   }

   // Poll based texture listener.
-  private class TextureListener implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+  private class TextureListener implements VideoSink {
     private final SurfaceTextureHelper surfaceTextureHelper;
     // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
     private final Object newFrameLock = new Object();
     // |bufferToRender| is non-null when waiting for transition between addBufferToRender() to
-    // onTextureFrameAvailable().
+    // onFrame().
     @Nullable private DecodedOutputBuffer bufferToRender;
     @Nullable private DecodedTextureBuffer renderedBuffer;

@@ -662,19 +666,21 @@ public class MediaCodecVideoDecoder {
       }
     }

+    public void setSize(int width, int height) {
+      surfaceTextureHelper.setTextureSize(width, height);
+    }
+
     // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
     @Override
-    public void onTextureFrameAvailable(
-        int oesTextureId, float[] transformMatrix, long timestampNs) {
+    public void onFrame(VideoFrame frame) {
       synchronized (newFrameLock) {
         if (renderedBuffer != null) {
-          Logging.e(
-              TAG, "Unexpected onTextureFrameAvailable() called while already holding a texture.");
+          Logging.e(TAG, "Unexpected onFrame() called while already holding a texture.");
          throw new IllegalStateException("Already holding a texture.");
        }
        // |timestampNs| is always zero on some Android versions.
-        final VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
-            width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
+        final VideoFrame.Buffer buffer = frame.getBuffer();
+        buffer.retain();
        renderedBuffer = new DecodedTextureBuffer(buffer, bufferToRender.presentationTimeStampMs,
            bufferToRender.timeStampMs, bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs,
            SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs);
@@ -703,9 +709,9 @@ public class MediaCodecVideoDecoder {
     }

     public void release() {
-      // SurfaceTextureHelper.stopListening() will block until any onTextureFrameAvailable() in
-      // progress is done. Therefore, the call must be outside any synchronized
-      // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
+      // SurfaceTextureHelper.stopListening() will block until any onFrame() in progress is done.
+      // Therefore, the call must be outside any synchronized statement that is also used in the
+      // onFrame() above to avoid deadlocks.
      surfaceTextureHelper.stopListening();
      synchronized (newFrameLock) {
        if (renderedBuffer != null) {
@@ -763,6 +769,9 @@ public class MediaCodecVideoDecoder {
     }
     width = newWidth;
     height = newHeight;
+    if (textureListener != null) {
+      textureListener.setSize(width, height);
+    }

     if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
       colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
diff --git a/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java b/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
index d448e292d0..9a1bf61c6b 100644
--- a/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
+++ b/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
@@ -33,8 +33,7 @@ import javax.annotation.Nullable;
 * frames. At any time, at most one frame is being processed.
 */
 @TargetApi(21)
-public class ScreenCapturerAndroid
-    implements VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener {
+public class ScreenCapturerAndroid implements VideoCapturer, VideoSink {
   private static final int DISPLAY_FLAGS =
       DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
   // DPI for VirtualDisplay, does not seem to matter for us.
@@ -186,7 +185,7 @@ public class ScreenCapturerAndroid
   }

   private void createVirtualDisplay() {
-    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+    surfaceTextureHelper.setTextureSize(width, height);
     virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height,
         VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
         null /* callback */, null /* callback handler */);
@@ -194,13 +193,9 @@ public class ScreenCapturerAndroid

   // This is called on the internal looper thread of {@Code SurfaceTextureHelper}.
   @Override
-  public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
+  public void onFrame(VideoFrame frame) {
     numCapturedFrames++;
-    final VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
-        width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
-    final VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, timestampNs);
     capturerObserver.onFrameCaptured(frame);
-    frame.release();
   }

   @Override
diff --git a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
index 2d6d13dc11..146eb22437 100644
--- a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
+++ b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
@@ -25,14 +25,11 @@ import org.webrtc.EglBase;
 import org.webrtc.VideoFrame.TextureBuffer;

 /**
- * Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
- * of new frames in onTextureFrameAvailable(), and should call returnTextureFrame() when done with
- * the frame. Only one texture frame can be in flight at once, so returnTextureFrame() must be
- * called in order to receive a new frame. Call stopListening() to stop receiveing new frames. Call
- * dispose to release all resources once the texture frame is returned.
- * Note that there is a C++ counter part of this class that optionally can be used. It is used for
- * wrapping texture frames into webrtc::VideoFrames and also handles calling returnTextureFrame()
- * when the webrtc::VideoFrame is no longer used.
+ * Helper class for using a SurfaceTexture to create WebRTC VideoFrames. In order to create WebRTC
+ * VideoFrames, render onto the SurfaceTexture. The frames will be delivered to the listener. Only
+ * one texture frame can be in flight at once, so the frame must be released in order to receive a
+ * new frame. Call stopListening() to stop receiving new frames. Call dispose() to release all
+ * resources once the texture frame is released.
  */
 public class SurfaceTextureHelper {
   private static final String TAG = "SurfaceTextureHelper";
@@ -40,10 +37,12 @@ public class SurfaceTextureHelper {
    * Callback interface for being notified that a new texture frame is available. The calls will be
    * made on the SurfaceTextureHelper handler thread, with a bound EGLContext. The callee is not
    * allowed to make another EGLContext current on the calling thread.
+   *
+   * @deprecated Use a VideoSink as listener instead.
    */
+  @Deprecated
   public interface OnTextureFrameAvailableListener {
-    abstract void onTextureFrameAvailable(
-        int oesTextureId, float[] transformMatrix, long timestampNs);
+    void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs);
   }

   /**
@@ -82,14 +81,20 @@ public class SurfaceTextureHelper {
   private final YuvConverter yuvConverter = new YuvConverter();

   // These variables are only accessed from the |handler| thread.
-  @Nullable private OnTextureFrameAvailableListener listener;
+  // The type of |listener| is either a VideoSink or the deprecated OnTextureFrameAvailableListener.
+  @Nullable private Object listener;
   // The possible states of this class.
   private boolean hasPendingTexture = false;
   private volatile boolean isTextureInUse = false;
   private boolean isQuitting = false;
+  private int frameRotation;
+  private int textureWidth;
+  private int textureHeight;
   // |pendingListener| is set in setListener() and the runnable is posted to the handler thread.
   // setListener() is not allowed to be called again before stopListening(), so this is thread safe.
-  @Nullable private OnTextureFrameAvailableListener pendingListener;
+  // The type of |pendingListener| is either a VideoSink or the deprecated
+  // OnTextureFrameAvailableListener.
+  @Nullable private Object pendingListener;
   final Runnable setListenerRunnable = new Runnable() {
     @Override
     public void run() {
@@ -148,8 +153,23 @@ public class SurfaceTextureHelper {
   /**
    * Start to stream textures to the given |listener|. If you need to change listener, you need to
    * call stopListening() first.
+   *
+   * @deprecated Use a VideoSink as listener instead.
    */
+  @Deprecated
   public void startListening(final OnTextureFrameAvailableListener listener) {
+    startListeningInternal(listener);
+  }
+
+  /**
+   * Start to stream textures to the given |listener|. If you need to change listener, you need to
+   * call stopListening() first.
+   */
+  public void startListening(final VideoSink listener) {
+    startListeningInternal(listener);
+  }
+
+  private void startListeningInternal(Object listener) {
     if (this.listener != null || this.pendingListener != null) {
       throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
     }
@@ -164,15 +184,36 @@ public class SurfaceTextureHelper {
   public void stopListening() {
     Logging.d(TAG, "stopListening()");
     handler.removeCallbacks(setListenerRunnable);
-    ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
-      @Override
-      public void run() {
-        listener = null;
-        pendingListener = null;
-      }
+    ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
+      listener = null;
+      pendingListener = null;
     });
   }

+  /**
+   * Use this function to set the texture size. Note, do not call setDefaultBufferSize() yourself
+   * since this class needs to be aware of the texture size.
+   */
+  public void setTextureSize(int textureWidth, int textureHeight) {
+    if (textureWidth <= 0) {
+      throw new IllegalArgumentException("Texture width must be positive, but was " + textureWidth);
+    }
+    if (textureHeight <= 0) {
+      throw new IllegalArgumentException(
+          "Texture height must be positive, but was " + textureHeight);
+    }
+    surfaceTexture.setDefaultBufferSize(textureWidth, textureHeight);
+    handler.post(() -> {
+      this.textureWidth = textureWidth;
+      this.textureHeight = textureHeight;
+    });
+  }
+
+  /** Set the rotation of the delivered frames. */
+  public void setFrameRotation(int rotation) {
+    handler.post(() -> this.frameRotation = rotation);
+  }
+
   /**
    * Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
    * producer such as a camera or decoder.
@@ -193,17 +234,17 @@ public class SurfaceTextureHelper {
    * Call this function to signal that you are done with the frame received in
    * onTextureFrameAvailable(). Only one texture frame can be in flight at once, so you must call
    * this function in order to receive a new frame.
+   *
+   * @deprecated Use a VideoSink as listener instead.
    */
+  @Deprecated
   public void returnTextureFrame() {
-    handler.post(new Runnable() {
-      @Override
-      public void run() {
-        isTextureInUse = false;
-        if (isQuitting) {
-          release();
-        } else {
-          tryDeliverTextureFrame();
-        }
+    handler.post(() -> {
+      isTextureInUse = false;
+      if (isQuitting) {
+        release();
+      } else {
+        tryDeliverTextureFrame();
       }
     });
   }
@@ -219,23 +260,22 @@ public class SurfaceTextureHelper {
    */
   public void dispose() {
     Logging.d(TAG, "dispose()");
-    ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
-      @Override
-      public void run() {
-        isQuitting = true;
-        if (!isTextureInUse) {
-          release();
-        }
+    ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
+      isQuitting = true;
+      if (!isTextureInUse) {
+        release();
       }
     });
   }

   /**
    * Posts to the correct thread to convert |textureBuffer| to I420.
+   *
+   * @deprecated Use toI420() instead.
    */
+  @Deprecated
   public VideoFrame.I420Buffer textureToYuv(final TextureBuffer textureBuffer) {
-    return ThreadUtils.invokeAtFrontUninterruptibly(
-        handler, () -> yuvConverter.convert(textureBuffer));
+    return textureBuffer.toI420();
   }

   private void updateTexImage() {
@@ -262,7 +302,19 @@ public class SurfaceTextureHelper {
     final float[] transformMatrix = new float[16];
     surfaceTexture.getTransformMatrix(transformMatrix);
     final long timestampNs = surfaceTexture.getTimestamp();
-    listener.onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
+    if (listener instanceof OnTextureFrameAvailableListener) {
+      ((OnTextureFrameAvailableListener) listener)
+          .onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
+    } else if (listener instanceof VideoSink) {
+      if (textureWidth == 0 || textureHeight == 0) {
+        throw new RuntimeException("Texture size has not been set.");
+      }
+      final VideoFrame.Buffer buffer = createTextureBuffer(textureWidth, textureHeight,
+          RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
+      final VideoFrame frame = new VideoFrame(buffer, frameRotation, timestampNs);
+      ((VideoSink) listener).onFrame(frame);
+      frame.release();
+    }
   }

   private void release() {
@@ -286,7 +338,10 @@ public class SurfaceTextureHelper {
    *
    * The returned TextureBuffer holds a reference to the SurfaceTextureHelper that created it. The
   * buffer calls returnTextureFrame() when it is released.
+   *
+   * @deprecated Use a VideoSink as listener instead.
    */
+  @Deprecated
   public TextureBufferImpl createTextureBuffer(int width, int height, Matrix transformMatrix) {
     return new TextureBufferImpl(width, height, TextureBuffer.Type.OES, oesTextureId,
         transformMatrix, handler, yuvConverter, this ::returnTextureFrame);
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/GlRectDrawerTest.java b/sdk/android/instrumentationtests/src/org/webrtc/GlRectDrawerTest.java
index f99b3b02ac..02be56861a 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/GlRectDrawerTest.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/GlRectDrawerTest.java
@@ -237,11 +237,11 @@ public class GlRectDrawerTest {
     private final GlRectDrawer drawer;
     private final int rgbTexture;

-    public StubOesTextureProducer(
-        EglBase.Context sharedContext, SurfaceTexture surfaceTexture, int width, int height) {
+    public StubOesTextureProducer(EglBase.Context sharedContext,
+        SurfaceTextureHelper surfaceTextureHelper, int width, int height) {
       eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PLAIN);
-      surfaceTexture.setDefaultBufferSize(width, height);
-      eglBase.createSurface(surfaceTexture);
+      surfaceTextureHelper.setTextureSize(width, height);
+      eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());

       assertEquals(eglBase.surfaceWidth(), width);
       assertEquals(eglBase.surfaceHeight(), height);
@@ -281,7 +281,7 @@ public class GlRectDrawerTest {
     final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
         "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
     final StubOesTextureProducer oesProducer = new StubOesTextureProducer(
-        eglBase.getEglBaseContext(), surfaceTextureHelper.getSurfaceTexture(), WIDTH, HEIGHT);
+        eglBase.getEglBaseContext(), surfaceTextureHelper, WIDTH, HEIGHT);
     final SurfaceTextureHelperTest.MockTextureListener listener =
         new SurfaceTextureHelperTest.MockTextureListener();
     surfaceTextureHelper.startListening(listener);
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java b/sdk/android/instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java
index 7eeb54d0b0..40bdb21db2 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java
@@ -129,7 +129,7 @@ public class SurfaceTextureHelperTest {
         "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
     final MockTextureListener listener = new MockTextureListener();
     surfaceTextureHelper.startListening(listener);
-    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+    surfaceTextureHelper.setTextureSize(width, height);

     // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
     // |surfaceTextureHelper| as the target EGLSurface.
@@ -197,7 +197,7 @@ public class SurfaceTextureHelperTest {
         "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
     final MockTextureListener listener = new MockTextureListener();
     surfaceTextureHelper.startListening(listener);
-    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+    surfaceTextureHelper.setTextureSize(width, height);

     // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
     // |surfaceTextureHelper| as the target EGLSurface.
@@ -453,7 +453,7 @@ public class SurfaceTextureHelperTest {
         "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
     final MockTextureListener listener = new MockTextureListener();
     surfaceTextureHelper.startListening(listener);
-    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+    surfaceTextureHelper.setTextureSize(width, height);

     // Create resources for stubbing an OES texture producer. |eglBase| has the SurfaceTexture in
     // |surfaceTextureHelper| as the target EGLSurface.
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/VideoFrameBufferTest.java b/sdk/android/instrumentationtests/src/org/webrtc/VideoFrameBufferTest.java
index 955e8cdf5d..c36570eae3 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/VideoFrameBufferTest.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/VideoFrameBufferTest.java
@@ -181,8 +181,7 @@ public class VideoFrameBufferTest {
     // Create resources for generating OES textures.
     final SurfaceTextureHelper surfaceTextureHelper =
         SurfaceTextureHelper.create("SurfaceTextureHelper test", eglContext);
-    final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture();
-    surfaceTexture.setDefaultBufferSize(width, height);
+    surfaceTextureHelper.setTextureSize(width, height);

     final HandlerThread renderThread = new HandlerThread("OES texture thread");
     renderThread.start();
@@ -191,7 +190,7 @@ public class VideoFrameBufferTest {
     ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, () -> {
       // Create EGL base with the SurfaceTexture as display output.
       final EglBase eglBase = EglBase.create(eglContext, EglBase.CONFIG_PLAIN);
-      eglBase.createSurface(surfaceTexture);
+      eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
       eglBase.makeCurrent();
       assertEquals(width, eglBase.surfaceWidth());
       assertEquals(height, eglBase.surfaceHeight());
diff --git a/sdk/android/src/java/org/webrtc/Camera1Session.java b/sdk/android/src/java/org/webrtc/Camera1Session.java
index 03a5363ca7..e4c68c48ca 100644
--- a/sdk/android/src/java/org/webrtc/Camera1Session.java
+++ b/sdk/android/src/java/org/webrtc/Camera1Session.java
@@ -174,6 +174,8 @@ class Camera1Session implements CameraSession {
     this.captureFormat = captureFormat;
     this.constructionTimeNs = constructionTimeNs;

+    surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
+
     startCapturing();
   }

@@ -247,40 +249,31 @@ class Camera1Session implements CameraSession {
   }

   private void listenForTextureFrames() {
-    surfaceTextureHelper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
-      @Override
-      public void onTextureFrameAvailable(
-          int oesTextureId, float[] transformMatrix, long timestampNs) {
-        checkIsOnCameraThread();
+    surfaceTextureHelper.startListening((VideoFrame frame) -> {
+      checkIsOnCameraThread();

-        final TextureBufferImpl buffer =
-            surfaceTextureHelper.createTextureBuffer(captureFormat.width, captureFormat.height,
-                RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
-
-        if (state != SessionState.RUNNING) {
-          Logging.d(TAG, "Texture frame captured but camera is no longer running.");
-          buffer.release();
-          return;
-        }
-
-        if (!firstFrameReported) {
-          final int startTimeMs =
-              (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
-          camera1StartTimeMsHistogram.addSample(startTimeMs);
-          firstFrameReported = true;
-        }
-
-        // Undo the mirror that the OS "helps" us with.
-        // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
-        final VideoFrame frame = new VideoFrame(
-            CameraSession.createTextureBufferWithModifiedTransformMatrix(buffer,
-                /* mirror= */ info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT,
-                /* rotation= */ 0),
-            /* rotation= */ getFrameOrientation(), timestampNs);
-        buffer.release();
-        events.onFrameCaptured(Camera1Session.this, frame);
-        frame.release();
-      }
+      if (state != SessionState.RUNNING) {
+        Logging.d(TAG, "Texture frame captured but camera is no longer running.");
+        return;
+      }
+
+      if (!firstFrameReported) {
+        final int startTimeMs =
+            (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
+        camera1StartTimeMsHistogram.addSample(startTimeMs);
+        firstFrameReported = true;
+      }
+
+      // Undo the mirror that the OS "helps" us with.
+      // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+      final VideoFrame modifiedFrame = new VideoFrame(
+          CameraSession.createTextureBufferWithModifiedTransformMatrix(
+              (TextureBufferImpl) frame.getBuffer(),
+              /* mirror= */ info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT,
+              /* rotation= */ 0),
+          /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
+      events.onFrameCaptured(Camera1Session.this, modifiedFrame);
+      modifiedFrame.release();
     });
   }

diff --git a/sdk/android/src/java/org/webrtc/Camera2Session.java b/sdk/android/src/java/org/webrtc/Camera2Session.java
index 59494b1edd..fd34ce7ef6 100644
--- a/sdk/android/src/java/org/webrtc/Camera2Session.java
+++ b/sdk/android/src/java/org/webrtc/Camera2Session.java
@@ -121,9 +121,8 @@ class Camera2Session implements CameraSession {
       Logging.d(TAG, "Camera opened.");
       cameraDevice = camera;

-      final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture();
-      surfaceTexture.setDefaultBufferSize(captureFormat.width, captureFormat.height);
-      surface = new Surface(surfaceTexture);
+      surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
+      surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
       try {
         camera.createCaptureSession(
             Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
@@ -183,43 +182,33 @@ class Camera2Session implements CameraSession {
         return;
       }

-      surfaceTextureHelper.startListening(
-          new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
-            @Override
-            public void onTextureFrameAvailable(
-                int oesTextureId, float[] transformMatrix, long timestampNs) {
-              checkIsOnCameraThread();
+      surfaceTextureHelper.startListening((VideoFrame frame) -> {
+        checkIsOnCameraThread();

-              final TextureBufferImpl buffer = surfaceTextureHelper.createTextureBuffer(
-                  captureFormat.width, captureFormat.height,
-                  RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
+        if (state != SessionState.RUNNING) {
+          Logging.d(TAG, "Texture frame captured but camera is no longer running.");
+          return;
+        }

-              if (state != SessionState.RUNNING) {
-                Logging.d(TAG, "Texture frame captured but camera is no longer running.");
-                buffer.release();
-                return;
-              }
+        if (!firstFrameReported) {
+          firstFrameReported = true;
+          final int startTimeMs =
+              (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
+          camera2StartTimeMsHistogram.addSample(startTimeMs);
+        }

-              if (!firstFrameReported) {
-                firstFrameReported = true;
-                final int startTimeMs =
-                    (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
-                camera2StartTimeMsHistogram.addSample(startTimeMs);
-              }
-
-              // Undo the mirror that the OS "helps" us with.
-              // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
-              // Also, undo camera orientation, we report it as rotation instead.
-              final VideoFrame frame = new VideoFrame(
-                  CameraSession.createTextureBufferWithModifiedTransformMatrix(buffer,
-                      /* mirror= */ isCameraFrontFacing,
-                      /* rotation= */ -cameraOrientation),
-                  /* rotation= */ getFrameOrientation(), timestampNs);
-              buffer.release();
-              events.onFrameCaptured(Camera2Session.this, frame);
-              frame.release();
-            }
-          });
+        // Undo the mirror that the OS "helps" us with.
+        // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+        // Also, undo camera orientation, we report it as rotation instead.
+        final VideoFrame modifiedFrame =
+            new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
+                               (TextureBufferImpl) frame.getBuffer(),
+                               /* mirror= */ isCameraFrontFacing,
+                               /* rotation= */ -cameraOrientation),
+                /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
+        events.onFrameCaptured(Camera2Session.this, modifiedFrame);
+        modifiedFrame.release();
+      });
       Logging.d(TAG, "Camera device successfully started.");
       callback.onDone(Camera2Session.this);
     }
diff --git a/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java b/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java
index 4c235a9147..d50ef3d271 100644
--- a/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java
+++ b/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java
@@ -27,8 +27,7 @@ import org.webrtc.ThreadUtils.ThreadChecker;
 /** Android hardware video decoder. */
 @TargetApi(16)
 @SuppressWarnings("deprecation") // Cannot support API 16 without using deprecated methods.
-class HardwareVideoDecoder
-    implements VideoDecoder, SurfaceTextureHelper.OnTextureFrameAvailableListener {
+class HardwareVideoDecoder implements VideoDecoder, VideoSink {
   private static final String TAG = "HardwareVideoDecoder";

   // TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API.
@@ -106,17 +105,10 @@ class HardwareVideoDecoder
   @Nullable private Surface surface = null;

   private static class DecodedTextureMetadata {
-    final int width;
-    final int height;
-    final int rotation;
     final long presentationTimestampUs;
     final Integer decodeTimeMs;

-    DecodedTextureMetadata(
-        int width, int height, int rotation, long presentationTimestampUs, Integer decodeTimeMs) {
-      this.width = width;
-      this.height = height;
-      this.rotation = rotation;
+    DecodedTextureMetadata(long presentationTimestampUs, Integer decodeTimeMs) {
       this.presentationTimestampUs = presentationTimestampUs;
       this.decodeTimeMs = decodeTimeMs;
     }
@@ -223,7 +215,8 @@ class HardwareVideoDecoder
     }

     // Load dimensions from shared memory under the dimension lock.
-    int width, height;
+    final int width;
+    final int height;
     synchronized (dimensionLock) {
       width = this.width;
       height = this.height;
@@ -418,7 +411,8 @@ class HardwareVideoDecoder
   private void deliverTextureFrame(final int index, final MediaCodec.BufferInfo info,
       final int rotation, final Integer decodeTimeMs) {
     // Load dimensions from shared memory under the dimension lock.
-    final int width, height;
+    final int width;
+    final int height;
     synchronized (dimensionLock) {
       width = this.width;
       height = this.height;
@@ -428,32 +422,31 @@ class HardwareVideoDecoder
       if (renderedTextureMetadata != null) {
         return; // We are still waiting for texture for the previous frame, drop this one.
       }
-      renderedTextureMetadata = new DecodedTextureMetadata(
-          width, height, rotation, info.presentationTimeUs, decodeTimeMs);
-      codec.releaseOutputBuffer(index, true);
+      surfaceTextureHelper.setTextureSize(width, height);
+      surfaceTextureHelper.setFrameRotation(rotation);
+      renderedTextureMetadata = new DecodedTextureMetadata(info.presentationTimeUs, decodeTimeMs);
+      codec.releaseOutputBuffer(index, /* render= */ true);
     }
   }

   @Override
-  public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
-    final VideoFrame frame;
+  public void onFrame(VideoFrame frame) {
+    final VideoFrame newFrame;
     final int decodeTimeMs;
+    final long timestampNs;
     synchronized (renderedTextureMetadataLock) {
       if (renderedTextureMetadata == null) {
         throw new IllegalStateException(
             "Rendered texture metadata was null in onTextureFrameAvailable.");
       }
-      VideoFrame.TextureBuffer oesBuffer = surfaceTextureHelper.createTextureBuffer(
-          renderedTextureMetadata.width, renderedTextureMetadata.height,
-          RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
-      frame = new VideoFrame(oesBuffer, renderedTextureMetadata.rotation,
-          renderedTextureMetadata.presentationTimestampUs * 1000);
+      timestampNs = renderedTextureMetadata.presentationTimestampUs * 1000;
       decodeTimeMs = renderedTextureMetadata.decodeTimeMs;
       renderedTextureMetadata = null;
     }
-
-    callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
-    frame.release();
+    // Change timestamp of frame.
+    final VideoFrame frameWithModifiedTimeStamp =
+        new VideoFrame(frame.getBuffer(), frame.getRotation(), timestampNs);
+    callback.onDecodedFrame(frameWithModifiedTimeStamp, decodeTimeMs, null /* qp */);
   }

   private void deliverByteFrame(
@@ -493,7 +486,7 @@ class HardwareVideoDecoder
       // All other supported color formats are NV12.
       frameBuffer = copyNV12ToI420Buffer(buffer, stride, sliceHeight, width, height);
     }
-    codec.releaseOutputBuffer(result, false);
+    codec.releaseOutputBuffer(result, /* render= */ false);

     long presentationTimeNs = info.presentationTimeUs * 1000;
     VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs);
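
Usage note (reviewer sketch, not part of the patch): the snippet below pulls together the
VideoSink-based API introduced above, following the way ScreenCapturerAndroid and the camera
sessions use it after this change. The EglBase.Context |eglContext| and the
VideoCapturer.CapturerObserver |observer| are assumed to exist; both names are illustrative.

    // Create the helper; frames are delivered on its internal handler thread.
    final SurfaceTextureHelper helper =
        SurfaceTextureHelper.create("UsageExample" /* threadName */, eglContext);

    // With a VideoSink listener the texture size must be set up front; frame
    // delivery throws "Texture size has not been set." otherwise.
    helper.setTextureSize(640, 480);
    helper.setFrameRotation(0);

    // VideoSink has a single onFrame() method, so a lambda works. The helper
    // releases the frame after onFrame() returns; retain the buffer (as
    // MediaCodecVideoDecoder does above) if it must outlive the callback.
    helper.startListening((VideoFrame frame) -> observer.onFrameCaptured(frame));

    // Render into new Surface(helper.getSurfaceTexture()) from a camera,
    // decoder or virtual display; each rendered texture becomes one VideoFrame.

    // Shutdown: stopListening() blocks until any in-flight onFrame() has
    // returned; dispose() then releases the remaining resources.
    helper.stopListening();
    helper.dispose();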