From 1a759c6354fa7d10177c53a2f513503687f88175 Mon Sep 17 00:00:00 2001
From: Magnus Jedvert
Date: Tue, 24 Apr 2018 15:11:02 +0200
Subject: [PATCH] Android: Only use Java VideoFrames internally
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This CL removes internal support for any Android frames other than
wrapped Java VideoFrames. This allows for a big internal cleanup: the
internal class AndroidTextureBuffer and all logic related to it can be
removed. Also, the C++ AndroidVideoTrackSource no longer needs to hold
on to a C++ SurfaceTextureHelper, so all JNI code related to
SurfaceTextureHelper can be removed as well.

Once these methods are removed, VideoSource can implement the
CapturerObserver interface directly, and there is no longer any need
for AndroidVideoTrackSourceObserver. Clients can then initialize
VideoCapturers themselves outside the PeerConnectionFactory, and a new
method is added to the PeerConnectionFactory that lets clients create
standalone VideoSources which can be connected to a VideoCapturer
outside the factory.

Bug: webrtc:9181
Change-Id: Ie292ea9214f382d44dce9120725c62602a646ed8
Reviewed-on: https://webrtc-review.googlesource.com/71666
Commit-Queue: Magnus Jedvert
Reviewed-by: Sami Kalliomäki
Cr-Commit-Position: refs/heads/master@{#23004}
---
 .../java/src/org/webrtc/UnityUtility.java     |   7 +-
 .../unityplugin/simple_peer_connection.cc     |   3 +-
 sdk/android/BUILD.gn                          |   7 +-
 .../org/webrtc/MediaCodecVideoEncoder.java    |  19 --
 .../api/org/webrtc/PeerConnectionFactory.java |  19 +-
 .../api/org/webrtc/ScreenCapturerAndroid.java |   2 +-
 .../api/org/webrtc/SurfaceTextureHelper.java  |  17 --
 sdk/android/api/org/webrtc/VideoCapturer.java |   4 +-
 sdk/android/api/org/webrtc/VideoRenderer.java |   7 -
 sdk/android/api/org/webrtc/VideoSource.java   |  79 +++++++++
 .../webrtc/MediaCodecVideoEncoderTest.java    |  46 -----
 .../AndroidVideoTrackSourceObserver.java      |  63 -------
 .../src/java/org/webrtc/CameraCapturer.java   |   2 +-
 sdk/android/src/jni/androidmediadecoder.cc    |   1 -
 sdk/android/src/jni/androidmediaencoder.cc    |  61 +------
 .../src/jni/androidvideotracksource.cc        | 163 +-----------------
 sdk/android/src/jni/androidvideotracksource.h |   7 -
 sdk/android/src/jni/nv21buffer.cc             |   5 -
 sdk/android/src/jni/pc/null_video.cc          |   9 -
 .../src/jni/pc/peerconnectionfactory.cc       |   7 +-
 sdk/android/src/jni/pc/video.cc               |   6 +-
 sdk/android/src/jni/pc/video.h                |   3 -
 sdk/android/src/jni/surfacetexturehelper.cc   |  78 ---------
 sdk/android/src/jni/surfacetexturehelper.h    |  82 ---------
 sdk/android/src/jni/video_renderer.cc         |  26 +--
 sdk/android/src/jni/videoframe.cc             | 100 +----------
 sdk/android/src/jni/videoframe.h              |  44 +----
 27 files changed, 124 insertions(+), 743 deletions(-)
 delete mode 100644 sdk/android/src/java/org/webrtc/AndroidVideoTrackSourceObserver.java
 delete mode 100644 sdk/android/src/jni/surfacetexturehelper.cc
 delete mode 100644 sdk/android/src/jni/surfacetexturehelper.h

diff --git a/examples/unityplugin/java/src/org/webrtc/UnityUtility.java b/examples/unityplugin/java/src/org/webrtc/UnityUtility.java
index 93c06936a7..5118bac507 100644
--- a/examples/unityplugin/java/src/org/webrtc/UnityUtility.java
+++ b/examples/unityplugin/java/src/org/webrtc/UnityUtility.java
@@ -48,11 +48,10 @@ public class UnityUtility {
     VideoCapturer capturer =
         createCameraCapturer(new Camera2Enumerator(ContextUtils.getApplicationContext()));
 
-    VideoCapturer.CapturerObserver capturerObserver =
-        new AndroidVideoTrackSourceObserver(nativeTrackSource);
+    VideoSource videoSource = new VideoSource(nativeTrackSource);
 
-    capturer.initialize(
-        surfaceTextureHelper, ContextUtils.getApplicationContext(), capturerObserver);
+    capturer.initialize(surfaceTextureHelper, ContextUtils.getApplicationContext(),
+        videoSource.getCapturerObserver());
 
     capturer.startCapture(720, 480, 30);
     return capturer;
diff --git a/examples/unityplugin/simple_peer_connection.cc b/examples/unityplugin/simple_peer_connection.cc
index 2b8ef93a5d..02911f6cd9 100644
--- a/examples/unityplugin/simple_peer_connection.cc
+++ b/examples/unityplugin/simple_peer_connection.cc
@@ -447,8 +447,7 @@ void SimplePeerConnection::AddStreams(bool audio_only) {
     rtc::scoped_refptr<webrtc::jni::AndroidVideoTrackSource> source(
         new rtc::RefCountedObject<webrtc::jni::AndroidVideoTrackSource>(
-            g_signaling_thread.get(), env,
-            webrtc::JavaParamRef<jobject>(texture_helper), false));
+            g_signaling_thread.get(), env, false));
     rtc::scoped_refptr<webrtc::VideoTrackSourceProxy> proxy_source =
         webrtc::VideoTrackSourceProxy::Create(g_signaling_thread.get(),
                                               g_worker_thread.get(), source);
diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn
index 2610b5a5f5..1d9bb8666d 100644
--- a/sdk/android/BUILD.gn
+++ b/sdk/android/BUILD.gn
@@ -269,7 +269,6 @@ generate_jni("generated_video_jni") {
     "api/org/webrtc/EncodedImage.java",
     "api/org/webrtc/MediaCodecVideoDecoder.java",
     "api/org/webrtc/MediaCodecVideoEncoder.java",
-    "api/org/webrtc/SurfaceTextureHelper.java",
     "api/org/webrtc/VideoCodecInfo.java",
     "api/org/webrtc/VideoCodecStatus.java",
     "api/org/webrtc/VideoDecoder.java",
@@ -285,7 +284,6 @@ generate_jni("generated_video_jni") {
     "api/org/webrtc/VideoSource.java",
     "api/org/webrtc/VideoTrack.java",
     "api/org/webrtc/YuvHelper.java",
-    "src/java/org/webrtc/AndroidVideoTrackSourceObserver.java",
     "src/java/org/webrtc/EglBase14.java",
    "src/java/org/webrtc/NV12Buffer.java",
    "src/java/org/webrtc/NV21Buffer.java",
@@ -315,8 +313,6 @@ rtc_static_library("video_jni") {
     "src/jni/nv12buffer.cc",
     "src/jni/nv21buffer.cc",
     "src/jni/pc/video.cc",
-    "src/jni/surfacetexturehelper.cc",
-    "src/jni/surfacetexturehelper.h",
     "src/jni/video_renderer.cc",
     "src/jni/videocodecinfo.cc",
     "src/jni/videocodecinfo.h",
@@ -838,7 +834,7 @@ rtc_android_library("video_java") {
     "api/org/webrtc/VideoDecoderFallback.java",
     "api/org/webrtc/VideoEncoderFallback.java",
     "api/org/webrtc/VideoFrameDrawer.java",
-    "src/java/org/webrtc/AndroidVideoTrackSourceObserver.java",
+    "src/java/org/webrtc/NV21Buffer.java",
     "src/java/org/webrtc/VideoDecoderWrapper.java",
     "src/java/org/webrtc/VideoEncoderWrapper.java",
     "src/java/org/webrtc/WrappedNativeI420Buffer.java",
@@ -866,7 +862,6 @@ rtc_android_library("camera_java") {
     "src/java/org/webrtc/Camera2Session.java",
     "src/java/org/webrtc/CameraCapturer.java",
     "src/java/org/webrtc/CameraSession.java",
-    "src/java/org/webrtc/NV21Buffer.java",
   ]
 
   deps = [
diff --git a/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java b/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java
index 0b0ff51983..e4eb90c5cf 100644
--- a/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java
+++ b/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java
@@ -612,25 +612,6 @@ public class MediaCodecVideoEncoder {
     }
   }
 
-  @CalledByNativeUnchecked
-  boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix,
-      long presentationTimestampUs) {
-    checkOnMediaCodecThread();
-    try {
-      checkKeyFrameRequired(isKeyframe, presentationTimestampUs);
-      eglBase.makeCurrent();
-      // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
-      // but it's a workaround for bug webrtc:5147.
-      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
-      drawer.drawOes(oesTextureId, transformationMatrix, width, height, 0, 0, width, height);
-      eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
-      return true;
-    } catch (RuntimeException e) {
-      Logging.e(TAG, "encodeTexture failed", e);
-      return false;
-    }
-  }
-
   /**
    * Encodes a new style VideoFrame. |bufferIndex| is -1 if we are not encoding in surface mode.
    */
diff --git a/sdk/android/api/org/webrtc/PeerConnectionFactory.java b/sdk/android/api/org/webrtc/PeerConnectionFactory.java
index e3baeb9a6c..b880f9b6ed 100644
--- a/sdk/android/api/org/webrtc/PeerConnectionFactory.java
+++ b/sdk/android/api/org/webrtc/PeerConnectionFactory.java
@@ -350,18 +350,20 @@ public class PeerConnectionFactory {
     return new MediaStream(nativeCreateLocalMediaStream(nativeFactory, label));
   }
 
+  public VideoSource createVideoSource(boolean isScreencast) {
+    return new VideoSource(nativeCreateVideoSource(nativeFactory, isScreencast));
+  }
+
   public VideoSource createVideoSource(VideoCapturer capturer) {
     final EglBase.Context eglContext =
         localEglbase == null ? null : localEglbase.getEglBaseContext();
     final SurfaceTextureHelper surfaceTextureHelper =
         SurfaceTextureHelper.create(VIDEO_CAPTURER_THREAD_NAME, eglContext);
-    long nativeAndroidVideoTrackSource =
-        nativeCreateVideoSource(nativeFactory, surfaceTextureHelper, capturer.isScreencast());
-    VideoCapturer.CapturerObserver capturerObserver =
-        new AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
-    capturer.initialize(
-        surfaceTextureHelper, ContextUtils.getApplicationContext(), capturerObserver);
-    return new VideoSource(nativeAndroidVideoTrackSource);
+    final VideoSource videoSource = new VideoSource(
+        nativeCreateVideoSource(nativeFactory, capturer.isScreencast()), surfaceTextureHelper);
+    capturer.initialize(surfaceTextureHelper, ContextUtils.getApplicationContext(),
+        videoSource.getCapturerObserver());
+    return videoSource;
   }
 
   public VideoTrack createVideoTrack(String id, VideoSource source) {
@@ -498,8 +500,7 @@ public class PeerConnectionFactory {
   private static native long nativeCreatePeerConnection(long factory,
       PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver);
   private static native long nativeCreateLocalMediaStream(long factory, String label);
-  private static native long nativeCreateVideoSource(
-      long factory, SurfaceTextureHelper surfaceTextureHelper, boolean is_screencast);
+  private static native long nativeCreateVideoSource(long factory, boolean is_screencast);
   private static native long nativeCreateVideoTrack(
       long factory, String id, long nativeVideoSource);
   private static native long nativeCreateAudioSource(long factory, MediaConstraints constraints);
diff --git a/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java b/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
index 1b5dde74e9..d448e292d0 100644
--- a/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
+++ b/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
@@ -27,7 +27,7 @@ import javax.annotation.Nullable;
  * {@code SurfaceTexture} using a {@code SurfaceTextureHelper}.
  * The {@code SurfaceTextureHelper} is created by the native code and passed to this capturer in
  * {@code VideoCapturer.initialize()}. On receiving a new frame, this capturer passes it
- * as a texture to the native code via {@code CapturerObserver.onTextureFrameCaptured()}. This takes
+ * as a texture to the native code via {@code CapturerObserver.onFrameCaptured()}. This takes
  * place on the HandlerThread of the given {@code SurfaceTextureHelper}. When done with each frame,
 * the native code returns the buffer to the {@code SurfaceTextureHelper} to be used for new
 * frames. At any time, at most one frame is being processed.
diff --git a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
index 458176f631..0b5bcf805f 100644
--- a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
+++ b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
@@ -51,7 +51,6 @@ public class SurfaceTextureHelper {
    * thread and handler is created for handling the SurfaceTexture. May return null if EGL fails to
    * initialize a pixel buffer surface and make it current.
    */
-  @CalledByNative
   public static SurfaceTextureHelper create(
       final String threadName, final EglBase.Context sharedContext) {
     final HandlerThread thread = new HandlerThread(threadName);
@@ -195,7 +194,6 @@ public class SurfaceTextureHelper {
    * onTextureFrameAvailable(). Only one texture frame can be in flight at once, so you must call
    * this function in order to receive a new frame.
    */
-  @CalledByNative
   public void returnTextureFrame() {
     handler.post(new Runnable() {
       @Override
@@ -219,7 +217,6 @@ public class SurfaceTextureHelper {
    * stopped when the texture frame has been returned by a call to returnTextureFrame(). You are
    * guaranteed to not receive any more onTextureFrameAvailable() after this function returns.
    */
-  @CalledByNative
   public void dispose() {
     Logging.d(TAG, "dispose()");
     ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
@@ -233,20 +230,6 @@ public class SurfaceTextureHelper {
     });
   }
 
-  /** Deprecated, use textureToYuv. */
-  @Deprecated
-  @SuppressWarnings("deprecation") // yuvConverter.convert is deprecated
-  @CalledByNative
-  void textureToYUV(final ByteBuffer buf, final int width, final int height, final int stride,
-      final int textureId, final float[] transformMatrix) {
-    if (textureId != oesTextureId) {
-      throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");
-    }
-
-    ThreadUtils.invokeAtFrontUninterruptibly(handler,
-        () -> yuvConverter.convert(buf, width, height, stride, textureId, transformMatrix));
-  }
-
   /**
    * Posts to the correct thread to convert |textureBuffer| to I420.
    */
diff --git a/sdk/android/api/org/webrtc/VideoCapturer.java b/sdk/android/api/org/webrtc/VideoCapturer.java
index e0c16b749b..632dad5663 100644
--- a/sdk/android/api/org/webrtc/VideoCapturer.java
+++ b/sdk/android/api/org/webrtc/VideoCapturer.java
@@ -46,8 +46,8 @@ public interface VideoCapturer {
    * capture observer. It will be called only once and before any startCapture() request. The
    * camera thread is guaranteed to be valid until dispose() is called. If the VideoCapturer wants
    * to deliver texture frames, it should do this by rendering on the SurfaceTexture in
-   * |surfaceTextureHelper|, register itself as a listener, and forward the texture frames to
-   * CapturerObserver.onTextureFrameCaptured().
+   * |surfaceTextureHelper|, register itself as a listener, and forward the frames to
+   * CapturerObserver.onFrameCaptured().
   */
  void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
      CapturerObserver capturerObserver);
diff --git a/sdk/android/api/org/webrtc/VideoRenderer.java b/sdk/android/api/org/webrtc/VideoRenderer.java
index 3e75db6ef4..7b289b0f7d 100644
--- a/sdk/android/api/org/webrtc/VideoRenderer.java
+++ b/sdk/android/api/org/webrtc/VideoRenderer.java
@@ -186,13 +186,6 @@ public class VideoRenderer {
       return new I420Frame(width, height, rotationDegree, new int[] {y_stride, u_stride, v_stride},
           new ByteBuffer[] {y_buffer, u_buffer, v_buffer}, nativeFramePointer);
     }
-
-    @CalledByNative("I420Frame")
-    static I420Frame createTextureFrame(int width, int height, int rotationDegree, int textureId,
-        float[] samplingMatrix, long nativeFramePointer) {
-      return new I420Frame(
-          width, height, rotationDegree, textureId, samplingMatrix, nativeFramePointer);
-    }
   }
 
   /** The real meat of VideoSinkInterface. */
diff --git a/sdk/android/api/org/webrtc/VideoSource.java b/sdk/android/api/org/webrtc/VideoSource.java
index 425f86152d..9420da7463 100644
--- a/sdk/android/api/org/webrtc/VideoSource.java
+++ b/sdk/android/api/org/webrtc/VideoSource.java
@@ -10,13 +10,84 @@
 
 package org.webrtc;
 
+import javax.annotation.Nullable;
+
 /**
  * Java wrapper of native AndroidVideoTrackSource.
  */
 @JNINamespace("webrtc::jni")
 public class VideoSource extends MediaSource {
+  private static class NativeCapturerObserver implements VideoCapturer.CapturerObserver {
+    private final long nativeSource;
+    // TODO(bugs.webrtc.org/9181): Remove.
+    @Nullable private final SurfaceTextureHelper surfaceTextureHelper;
+
+    public NativeCapturerObserver(long nativeSource) {
+      this.nativeSource = nativeSource;
+      this.surfaceTextureHelper = null;
+    }
+
+    // TODO(bugs.webrtc.org/9181): Remove.
+    public NativeCapturerObserver(long nativeSource, SurfaceTextureHelper surfaceTextureHelper) {
+      this.nativeSource = nativeSource;
+      this.surfaceTextureHelper = surfaceTextureHelper;
+    }
+
+    @Override
+    public void onCapturerStarted(boolean success) {
+      nativeCapturerStarted(nativeSource, success);
+    }
+
+    @Override
+    public void onCapturerStopped() {
+      nativeCapturerStopped(nativeSource);
+    }
+
+    // TODO(bugs.webrtc.org/9181): Remove.
+    @Override
+    @SuppressWarnings("deprecation")
+    public void onByteBufferFrameCaptured(
+        byte[] data, int width, int height, int rotation, long timestampNs) {
+      // This NV21Buffer is not possible to retain. This is safe only because the native code will
+      // always call cropAndScale() and directly make a deep copy of the buffer.
+      final VideoFrame.Buffer nv21Buffer =
+          new NV21Buffer(data, width, height, null /* releaseCallback */);
+      final VideoFrame frame = new VideoFrame(nv21Buffer, rotation, timestampNs);
+      onFrameCaptured(frame);
+      frame.release();
+    }
+
+    // TODO(bugs.webrtc.org/9181): Remove.
+    @Override
+    @SuppressWarnings("deprecation")
+    public void onTextureFrameCaptured(int width, int height, int oesTextureId,
+        float[] transformMatrix, int rotation, long timestampNs) {
+      final VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
+          width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
+      final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
+      onFrameCaptured(frame);
+      frame.release();
+    }
+
+    @Override
+    public void onFrameCaptured(VideoFrame frame) {
+      nativeOnFrameCaptured(nativeSource, frame.getBuffer().getWidth(),
+          frame.getBuffer().getHeight(), frame.getRotation(), frame.getTimestampNs(),
+          frame.getBuffer());
+    }
+  }
+
+  private final VideoCapturer.CapturerObserver capturerObserver;
+
   public VideoSource(long nativeSource) {
     super(nativeSource);
+    this.capturerObserver = new NativeCapturerObserver(nativeSource);
+  }
+
+  // TODO(bugs.webrtc.org/9181): Remove.
+  VideoSource(long nativeSource, SurfaceTextureHelper surfaceTextureHelper) {
+    super(nativeSource);
+    this.capturerObserver = new NativeCapturerObserver(nativeSource, surfaceTextureHelper);
   }
 
   /**
@@ -29,5 +100,13 @@ public class VideoSource extends MediaSource {
     nativeAdaptOutputFormat(nativeSource, width, height, fps);
   }
 
+  public VideoCapturer.CapturerObserver getCapturerObserver() {
+    return capturerObserver;
+  }
+
   private static native void nativeAdaptOutputFormat(long source, int width, int height, int fps);
+  private static native void nativeCapturerStarted(long source, boolean success);
+  private static native void nativeCapturerStopped(long source);
+  private static native void nativeOnFrameCaptured(
+      long source, int width, int height, int rotation, long timestampNs, VideoFrame.Buffer frame);
 }
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/MediaCodecVideoEncoderTest.java b/sdk/android/instrumentationtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
index c48bc98686..f224169863 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
@@ -121,50 +121,4 @@ public class MediaCodecVideoEncoderTest {
 
     encoder.release();
   }
-
-  @Test
-  @SmallTest
-  public void testEncoderUsingTextures() throws InterruptedException {
-    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
-      Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
-      return;
-    }
-
-    final int width = 640;
-    final int height = 480;
-    final long presentationTs = 2;
-
-    final EglBase14 eglOesBase = new EglBase14(null, EglBase.CONFIG_PIXEL_BUFFER);
-    eglOesBase.createDummyPbufferSurface();
-    eglOesBase.makeCurrent();
-    int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
-
-    // TODO(perkj): This test is week since we don't fill the texture with valid data with correct
-    // width and height and verify the encoded data. Fill the OES texture and figure out a way to
-    // verify that the output make sense.
-
-    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
-
-    assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile,
-        width, height, 300, 30, eglOesBase.getEglBaseContext()));
-    assertTrue(
-        encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(), presentationTs));
-    GlUtil.checkNoGLES2Error("encodeTexture");
-
-    // It should be Ok to delete the texture after calling encodeTexture.
-    GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
-
-    OutputBufferInfo info = encoder.dequeueOutputBuffer();
-    while (info == null) {
-      info = encoder.dequeueOutputBuffer();
-      Thread.sleep(20);
-    }
-    assertTrue(info.index != -1);
-    assertTrue(info.buffer.capacity() > 0);
-    assertEquals(presentationTs, info.presentationTimestampUs);
-    encoder.releaseOutputBuffer(info.index);
-
-    encoder.release();
-    eglOesBase.release();
-  }
 }
diff --git a/sdk/android/src/java/org/webrtc/AndroidVideoTrackSourceObserver.java b/sdk/android/src/java/org/webrtc/AndroidVideoTrackSourceObserver.java
deleted file mode 100644
index b5e4af37b8..0000000000
--- a/sdk/android/src/java/org/webrtc/AndroidVideoTrackSourceObserver.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-/** An implementation of CapturerObserver that forwards all calls from Java to the C layer. */
-@JNINamespace("webrtc::jni")
-class AndroidVideoTrackSourceObserver implements VideoCapturer.CapturerObserver {
-  // Pointer to VideoTrackSourceProxy proxying AndroidVideoTrackSource.
-  private final long nativeSource;
-
-  public AndroidVideoTrackSourceObserver(long nativeSource) {
-    this.nativeSource = nativeSource;
-  }
-
-  @Override
-  public void onCapturerStarted(boolean success) {
-    nativeCapturerStarted(nativeSource, success);
-  }
-
-  @Override
-  public void onCapturerStopped() {
-    nativeCapturerStopped(nativeSource);
-  }
-
-  @Override
-  @SuppressWarnings("deprecation")
-  public void onByteBufferFrameCaptured(
-      byte[] data, int width, int height, int rotation, long timeStamp) {
-    nativeOnByteBufferFrameCaptured(
-        nativeSource, data, data.length, width, height, rotation, timeStamp);
-  }
-
-  @Override
-  @SuppressWarnings("deprecation")
-  public void onTextureFrameCaptured(int width, int height, int oesTextureId,
-      float[] transformMatrix, int rotation, long timestamp) {
-    nativeOnTextureFrameCaptured(
-        nativeSource, width, height, oesTextureId, transformMatrix, rotation, timestamp);
-  }
-
-  @Override
-  public void onFrameCaptured(VideoFrame frame) {
-    nativeOnFrameCaptured(nativeSource, frame.getBuffer().getWidth(), frame.getBuffer().getHeight(),
-        frame.getRotation(), frame.getTimestampNs(), frame.getBuffer());
-  }
-
-  private static native void nativeCapturerStarted(long source, boolean success);
-  private static native void nativeCapturerStopped(long source);
-  private static native void nativeOnByteBufferFrameCaptured(
-      long source, byte[] data, int length, int width, int height, int rotation, long timeStamp);
-  private static native void nativeOnTextureFrameCaptured(long source, int width, int height,
-      int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
-  private static native void nativeOnFrameCaptured(
-      long source, int width, int height, int rotation, long timestampNs, VideoFrame.Buffer frame);
-}
diff --git a/sdk/android/src/java/org/webrtc/CameraCapturer.java b/sdk/android/src/java/org/webrtc/CameraCapturer.java
index cc8cc01437..7055f5367c 100644
--- a/sdk/android/src/java/org/webrtc/CameraCapturer.java
+++ b/sdk/android/src/java/org/webrtc/CameraCapturer.java
@@ -184,7 +184,7 @@ abstract class CameraCapturer implements CameraVideoCapturer {
       checkIsOnCameraThread();
       synchronized (stateLock) {
         if (session != currentSession) {
-          Logging.w(TAG, "onTextureFrameCaptured from another session.");
+          Logging.w(TAG, "onFrameCaptured from another session.");
           return;
         }
         if (!firstFrameObserved) {
diff --git a/sdk/android/src/jni/androidmediadecoder.cc b/sdk/android/src/jni/androidmediadecoder.cc
index 88cfb96d13..68ebaa2568 100644
--- a/sdk/android/src/jni/androidmediadecoder.cc
+++ b/sdk/android/src/jni/androidmediadecoder.cc
@@ -30,7 +30,6 @@
 #include "sdk/android/generated_video_jni/jni/MediaCodecVideoDecoder_jni.h"
 #include "sdk/android/native_api/jni/java_types.h"
 #include "sdk/android/src/jni/androidmediacodeccommon.h"
-#include "sdk/android/src/jni/surfacetexturehelper.h"
 #include "sdk/android/src/jni/videoframe.h"
 #include "third_party/libyuv/include/libyuv/convert.h"
 #include "third_party/libyuv/include/libyuv/convert_from.h"
diff --git a/sdk/android/src/jni/androidmediaencoder.cc b/sdk/android/src/jni/androidmediaencoder.cc
index 5e59f06f07..91cdeb7f35 100644
--- a/sdk/android/src/jni/androidmediaencoder.cc
+++ b/sdk/android/src/jni/androidmediaencoder.cc
@@ -172,7 +172,6 @@ class MediaCodecVideoEncoder : public VideoEncoder {
                         bool key_frame,
                         const VideoFrame& frame,
                         int input_buffer_index);
-  bool EncodeTexture(JNIEnv* jni, bool key_frame, const VideoFrame& frame);
   // Encodes a new style org.webrtc.VideoFrame. Might be a I420 or a texture
   // frame.
   bool EncodeJavaFrame(JNIEnv* jni,
@@ -721,25 +720,10 @@ int32_t MediaCodecVideoEncoder::Encode(
       encode_status =
           EncodeByteBuffer(jni, key_frame, input_frame, j_input_buffer_index);
     } else {
-      AndroidVideoFrameBuffer* android_buffer =
-          static_cast<AndroidVideoFrameBuffer*>(
-              input_frame.video_frame_buffer().get());
-      switch (android_buffer->android_type()) {
-        case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer:
-          encode_status = EncodeTexture(jni, key_frame, input_frame);
-          break;
-        case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer: {
-          ScopedJavaLocalRef<jobject> j_frame =
-              NativeToJavaVideoFrame(jni, frame);
-          encode_status =
-              EncodeJavaFrame(jni, key_frame, j_frame, j_input_buffer_index);
-          ReleaseJavaVideoFrame(jni, j_frame);
-          break;
-        }
-        default:
-          RTC_NOTREACHED();
-          return WEBRTC_VIDEO_CODEC_ERROR;
-      }
+      ScopedJavaLocalRef<jobject> j_frame = NativeToJavaVideoFrame(jni, frame);
+      encode_status =
+          EncodeJavaFrame(jni, key_frame, j_frame, j_input_buffer_index);
+      ReleaseJavaVideoFrame(jni, j_frame);
     }
 
     if (!encode_status) {
@@ -808,20 +792,9 @@ bool MediaCodecVideoEncoder::IsTextureFrame(JNIEnv* jni,
   if (frame.video_frame_buffer()->type() !=
      VideoFrameBuffer::Type::kNative) {
    return false;
  }
-
-  AndroidVideoFrameBuffer* android_buffer =
-      static_cast<AndroidVideoFrameBuffer*>(frame.video_frame_buffer().get());
-  switch (android_buffer->android_type()) {
-    case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer:
-      return true;
-    case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer:
-      return Java_MediaCodecVideoEncoder_isTextureBuffer(
-          jni, static_cast<AndroidVideoBuffer*>(android_buffer)
-                   ->video_frame_buffer());
-    default:
-      RTC_NOTREACHED();
-      return false;
-  }
+  return Java_MediaCodecVideoEncoder_isTextureBuffer(
+      jni, static_cast<AndroidVideoBuffer*>(frame.video_frame_buffer().get())
+               ->video_frame_buffer());
 }
 
 bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni,
@@ -874,26 +847,6 @@ bool MediaCodecVideoEncoder::FillInputBuffer(JNIEnv* jni,
   return true;
 }
 
-bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni,
-                                           bool key_frame,
-                                           const VideoFrame& frame) {
-  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
-  RTC_CHECK(use_surface_);
-  NativeHandleImpl handle =
-      static_cast<AndroidTextureBuffer*>(frame.video_frame_buffer().get())
-          ->native_handle_impl();
-
-  bool encode_status = Java_MediaCodecVideoEncoder_encodeTexture(
-      jni, j_media_codec_video_encoder_, key_frame, handle.oes_texture_id,
-      handle.sampling_matrix.ToJava(jni), current_timestamp_us_);
-  if (CheckException(jni)) {
-    ALOGE << "Exception in encode texture.";
-    ProcessHWError(true /* reset_if_fallback_unavailable */);
-    return false;
-  }
-  return encode_status;
-}
-
 bool MediaCodecVideoEncoder::EncodeJavaFrame(JNIEnv* jni,
                                              bool key_frame,
                                              const JavaRef<jobject>& frame,
diff --git a/sdk/android/src/jni/androidvideotracksource.cc b/sdk/android/src/jni/androidvideotracksource.cc
index 4733cdf8d7..522ba4d843 100644
--- a/sdk/android/src/jni/androidvideotracksource.cc
+++ b/sdk/android/src/jni/androidvideotracksource.cc
@@ -14,7 +14,6 @@
 
 #include "api/videosourceproxy.h"
 #include "rtc_base/logging.h"
-#include "sdk/android/generated_video_jni/jni/AndroidVideoTrackSourceObserver_jni.h"
 #include "sdk/android/generated_video_jni/jni/VideoSource_jni.h"
 
 namespace webrtc {
@@ -40,13 +39,9 @@ AndroidVideoTrackSource* AndroidVideoTrackSourceFromJavaProxy(jlong j_proxy) {
 AndroidVideoTrackSource::AndroidVideoTrackSource(
     rtc::Thread* signaling_thread,
     JNIEnv* jni,
-    const JavaRef<jobject>& j_surface_texture_helper,
     bool is_screencast)
     : AdaptedVideoTrackSource(kRequiredResolutionAlignment),
       signaling_thread_(signaling_thread),
-      surface_texture_helper_(new rtc::RefCountedObject<SurfaceTextureHelper>(
-          jni,
-          j_surface_texture_helper)),
       is_screencast_(is_screencast) {
   RTC_LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
   camera_thread_checker_.DetachFromThread();
@@ -83,106 +78,6 @@ bool AndroidVideoTrackSource::remote() const {
   return false;
 }
 
-void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
-                                                        int length,
-                                                        int width,
-                                                        int height,
-                                                        VideoRotation rotation,
-                                                        int64_t timestamp_ns) {
-  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
-
-  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
-  int64_t translated_camera_time_us =
-      timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
-
-  int adapted_width;
-  int adapted_height;
-  int crop_width;
-  int crop_height;
-  int crop_x;
-  int crop_y;
-
-  if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
-                  &adapted_height, &crop_width, &crop_height, &crop_x,
-                  &crop_y)) {
-    return;
-  }
-
-  const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data);
-  const uint8_t* uv_plane = y_plane + width * height;
-  const int uv_width = (width + 1) / 2;
-
-  RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2));
-
-  // Can only crop at even pixels.
-  crop_x &= ~1;
-  crop_y &= ~1;
-  // Crop just by modifying pointers.
-  y_plane += width * crop_y + crop_x;
-  uv_plane += uv_width * crop_y + crop_x;
-
-  rtc::scoped_refptr<I420Buffer> buffer =
-      buffer_pool_.CreateBuffer(adapted_width, adapted_height);
-
-  nv12toi420_scaler_.NV12ToI420Scale(
-      y_plane, width, uv_plane, uv_width * 2, crop_width, crop_height,
-      buffer->MutableDataY(), buffer->StrideY(),
-      // Swap U and V, since we have NV21, not NV12.
-      buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
-      buffer->StrideU(), buffer->width(), buffer->height());
-
-  OnFrame(VideoFrame(buffer, rotation, translated_camera_time_us));
-}
-
-void AndroidVideoTrackSource::OnTextureFrameCaptured(
-    int width,
-    int height,
-    VideoRotation rotation,
-    int64_t timestamp_ns,
-    const NativeHandleImpl& handle) {
-  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
-
-  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
-  int64_t translated_camera_time_us =
-      timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
-
-  int adapted_width;
-  int adapted_height;
-  int crop_width;
-  int crop_height;
-  int crop_x;
-  int crop_y;
-
-  if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
-                  &adapted_height, &crop_width, &crop_height, &crop_x,
-                  &crop_y)) {
-    surface_texture_helper_->ReturnTextureFrame();
-    return;
-  }
-
-  Matrix matrix = handle.sampling_matrix;
-
-  matrix.Crop(crop_width / static_cast<float>(width),
-              crop_height / static_cast<float>(height),
-              crop_x / static_cast<float>(width),
-              crop_y / static_cast<float>(height));
-
-  // Note that apply_rotation() may change under our feet, so we should only
-  // check once.
-  if (apply_rotation()) {
-    if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
-      std::swap(adapted_width, adapted_height);
-    }
-    matrix.Rotate(rotation);
-    rotation = kVideoRotation_0;
-  }
-
-  OnFrame(VideoFrame(surface_texture_helper_->CreateTextureFrame(
-                         adapted_width, adapted_height,
-                         NativeHandleImpl(handle.oes_texture_id, matrix)),
-                     rotation, translated_camera_time_us));
-}
-
 void AndroidVideoTrackSource::OnFrameCaptured(
     JNIEnv* jni,
     int width,
@@ -230,47 +125,7 @@ void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
   video_adapter()->OnOutputFormatRequest(format);
 }
 
-rtc::scoped_refptr<SurfaceTextureHelper>
-AndroidVideoTrackSource::surface_texture_helper() {
-  return surface_texture_helper_;
-}
-
-static void JNI_AndroidVideoTrackSourceObserver_OnByteBufferFrameCaptured(
-    JNIEnv* jni,
-    const JavaParamRef<jclass>&,
-    jlong j_source,
-    const JavaParamRef<jbyteArray>& j_frame,
-    jint length,
-    jint width,
-    jint height,
-    jint rotation,
-    jlong timestamp) {
-  AndroidVideoTrackSource* source =
-      AndroidVideoTrackSourceFromJavaProxy(j_source);
-  jbyte* bytes = jni->GetByteArrayElements(j_frame.obj(), nullptr);
-  source->OnByteBufferFrameCaptured(bytes, length, width, height,
-                                    jintToVideoRotation(rotation), timestamp);
-  jni->ReleaseByteArrayElements(j_frame.obj(), bytes, JNI_ABORT);
-}
-
-static void JNI_AndroidVideoTrackSourceObserver_OnTextureFrameCaptured(
-    JNIEnv* jni,
-    const JavaParamRef<jclass>&,
-    jlong j_source,
-    jint j_width,
-    jint j_height,
-    jint j_oes_texture_id,
-    const JavaParamRef<jfloatArray>& j_transform_matrix,
-    jint j_rotation,
-    jlong j_timestamp) {
-  AndroidVideoTrackSource* source =
-      AndroidVideoTrackSourceFromJavaProxy(j_source);
-  source->OnTextureFrameCaptured(
-      j_width, j_height, jintToVideoRotation(j_rotation), j_timestamp,
-      NativeHandleImpl(jni, j_oes_texture_id, j_transform_matrix));
-}
-
-static void JNI_AndroidVideoTrackSourceObserver_OnFrameCaptured(
+static void JNI_VideoSource_OnFrameCaptured(
     JNIEnv* jni,
     const JavaParamRef<jclass>&,
     jlong j_source,
@@ -286,11 +141,10 @@ static void JNI_AndroidVideoTrackSourceObserver_OnFrameCaptured(
       j_video_frame_buffer);
 }
 
-static void JNI_AndroidVideoTrackSourceObserver_CapturerStarted(
-    JNIEnv* jni,
-    const JavaParamRef<jclass>&,
-    jlong j_source,
-    jboolean j_success) {
+static void JNI_VideoSource_CapturerStarted(JNIEnv* jni,
+                                            const JavaParamRef<jclass>&,
+                                            jlong j_source,
+                                            jboolean j_success) {
   RTC_LOG(LS_INFO) << "AndroidVideoTrackSourceObserve_nativeCapturerStarted";
   AndroidVideoTrackSource* source =
       AndroidVideoTrackSourceFromJavaProxy(j_source);
@@ -298,10 +152,9 @@ static void JNI_AndroidVideoTrackSourceObserver_CapturerStarted(
                               : AndroidVideoTrackSource::SourceState::kEnded);
 }
 
-static void JNI_AndroidVideoTrackSourceObserver_CapturerStopped(
-    JNIEnv* jni,
-    const JavaParamRef<jclass>&,
-    jlong j_source) {
+static void JNI_VideoSource_CapturerStopped(JNIEnv* jni,
+                                            const JavaParamRef<jclass>&,
+                                            jlong j_source) {
   RTC_LOG(LS_INFO) << "AndroidVideoTrackSourceObserve_nativeCapturerStopped";
   AndroidVideoTrackSource* source =
       AndroidVideoTrackSourceFromJavaProxy(j_source);
diff --git a/sdk/android/src/jni/androidvideotracksource.h b/sdk/android/src/jni/androidvideotracksource.h
index fba5e84def..3dbcb2ac3b 100644
--- a/sdk/android/src/jni/androidvideotracksource.h
+++ b/sdk/android/src/jni/androidvideotracksource.h
@@ -20,7 +20,6 @@
 #include "rtc_base/checks.h"
 #include "rtc_base/thread_checker.h"
 #include "rtc_base/timestampaligner.h"
-#include "sdk/android/src/jni/surfacetexturehelper.h"
 #include "sdk/android/src/jni/videoframe.h"
 
 namespace webrtc {
@@ -30,7 +29,6 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
  public:
   AndroidVideoTrackSource(rtc::Thread* signaling_thread,
                           JNIEnv* jni,
-                          const JavaRef<jobject>& j_surface_texture_helper,
                           bool is_screencast = false);
   ~AndroidVideoTrackSource() override;
 
@@ -70,17 +68,12 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
 
   void OnOutputFormatRequest(int width, int height, int fps);
 
-  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper();
-
  private:
   rtc::Thread* signaling_thread_;
   rtc::AsyncInvoker invoker_;
   rtc::ThreadChecker camera_thread_checker_;
   SourceState state_;
   rtc::TimestampAligner timestamp_aligner_;
-  NV12ToI420Scaler nv12toi420_scaler_;
-  I420BufferPool buffer_pool_;
-  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
   const bool is_screencast_;
 };
diff --git a/sdk/android/src/jni/nv21buffer.cc b/sdk/android/src/jni/nv21buffer.cc
index 7f11488def..d619aac710 100644
--- a/sdk/android/src/jni/nv21buffer.cc
+++ b/sdk/android/src/jni/nv21buffer.cc
@@ -42,11 +42,6 @@ static void JNI_NV21Buffer_CropAndScale(JNIEnv* jni,
   const int src_stride_uv = src_width;
   const int crop_chroma_x = crop_x / 2;
   const int crop_chroma_y = crop_y / 2;
-  const int crop_chroma_width = (crop_width + 1) / 2;
-  const int crop_chroma_height = (crop_height + 1) / 2;
-  const int tmp_stride_u = crop_chroma_width;
-  const int tmp_stride_v = crop_chroma_width;
-  const int tmp_size = crop_chroma_height * (tmp_stride_u + tmp_stride_v);
 
   jboolean was_copy;
   jbyte* src_bytes = jni->GetByteArrayElements(j_src.obj(), &was_copy);
diff --git a/sdk/android/src/jni/pc/null_video.cc b/sdk/android/src/jni/pc/null_video.cc
index b3cf5079eb..72cc5066f7 100644
--- a/sdk/android/src/jni/pc/null_video.cc
+++ b/sdk/android/src/jni/pc/null_video.cc
@@ -35,7 +35,6 @@ void SetEglContext(JNIEnv* env,
 void* CreateVideoSource(JNIEnv* env,
                         rtc::Thread* signaling_thread,
                         rtc::Thread* worker_thread,
-                        const JavaParamRef<jobject>& j_surface_texture_helper,
                         jboolean is_screencast) {
   return nullptr;
 }
@@ -47,14 +46,6 @@ void SetEglContext(JNIEnv* env,
     cricket::WebRtcVideoDecoderFactory* decoder_factory,
     jobject egl_context) {}
 
-void* CreateVideoSource(JNIEnv* env,
-                        rtc::Thread* signaling_thread,
-                        rtc::Thread* worker_thread,
-                        jobject j_surface_texture_helper,
-                        jboolean is_screencast) {
-  return nullptr;
-}
-
 cricket::WebRtcVideoEncoderFactory* CreateLegacyVideoEncoderFactory() {
   return nullptr;
 }
diff --git a/sdk/android/src/jni/pc/peerconnectionfactory.cc b/sdk/android/src/jni/pc/peerconnectionfactory.cc
index 24b3a717d1..481d904aa0 100644
--- a/sdk/android/src/jni/pc/peerconnectionfactory.cc
+++ b/sdk/android/src/jni/pc/peerconnectionfactory.cc
@@ -480,13 +480,12 @@ static jlong JNI_PeerConnectionFactory_CreateVideoSource(
     JNIEnv* jni,
     const JavaParamRef<jclass>&,
     jlong native_factory,
-    const JavaParamRef<jobject>& j_surface_texture_helper,
     jboolean is_screencast) {
   OwnedFactoryAndThreads* factory =
       reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
-  return jlongFromPointer(CreateVideoSource(
-      jni, factory->signaling_thread(), factory->worker_thread(),
-      j_surface_texture_helper, is_screencast));
+  return jlongFromPointer(CreateVideoSource(jni, factory->signaling_thread(),
+                                            factory->worker_thread(),
+                                            is_screencast));
 }
 
 static jlong JNI_PeerConnectionFactory_CreateVideoTrack(
diff --git a/sdk/android/src/jni/pc/video.cc b/sdk/android/src/jni/pc/video.cc
index 06dbc5c483..9bf528ce4b 100644
--- a/sdk/android/src/jni/pc/video.cc
+++ b/sdk/android/src/jni/pc/video.cc
@@ -26,7 +26,6 @@
 #include "sdk/android/src/jni/androidmediadecoder_jni.h"
 #include "sdk/android/src/jni/androidmediaencoder_jni.h"
 #include "sdk/android/src/jni/androidvideotracksource.h"
-#include "sdk/android/src/jni/surfacetexturehelper.h"
 #include "sdk/android/src/jni/videodecoderfactorywrapper.h"
 #include "sdk/android/src/jni/videoencoderfactorywrapper.h"
 
@@ -82,11 +81,10 @@ void SetEglContext(JNIEnv* env,
 void* CreateVideoSource(JNIEnv* env,
                         rtc::Thread* signaling_thread,
                         rtc::Thread* worker_thread,
-                        const JavaParamRef<jobject>& j_surface_texture_helper,
                         jboolean is_screencast) {
   rtc::scoped_refptr<AndroidVideoTrackSource> source(
-      new rtc::RefCountedObject<AndroidVideoTrackSource>(
-          signaling_thread, env, j_surface_texture_helper, is_screencast));
+      new rtc::RefCountedObject<AndroidVideoTrackSource>(signaling_thread, env,
+                                                         is_screencast));
   return VideoTrackSourceProxy::Create(signaling_thread, worker_thread, source)
       .release();
 }
diff --git a/sdk/android/src/jni/pc/video.h b/sdk/android/src/jni/pc/video.h
index 4f9b48c874..a341488877 100644
--- a/sdk/android/src/jni/pc/video.h
+++ b/sdk/android/src/jni/pc/video.h
@@ -30,8 +30,6 @@ class VideoDecoderFactory;
 namespace webrtc {
 namespace jni {
 
-class SurfaceTextureHelper;
-
 VideoEncoderFactory* CreateVideoEncoderFactory(
     JNIEnv* jni,
     const JavaRef<jobject>& j_encoder_factory);
@@ -50,7 +48,6 @@ void SetEglContext(JNIEnv* env,
 void* CreateVideoSource(JNIEnv* env,
                         rtc::Thread* signaling_thread,
                         rtc::Thread* worker_thread,
-                        const JavaParamRef<jobject>& j_surface_texture_helper,
                         jboolean is_screencast);
 
 cricket::WebRtcVideoEncoderFactory* CreateLegacyVideoEncoderFactory();
diff --git a/sdk/android/src/jni/surfacetexturehelper.cc b/sdk/android/src/jni/surfacetexturehelper.cc
deleted file mode 100644
index be8611848d..0000000000
--- a/sdk/android/src/jni/surfacetexturehelper.cc
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "sdk/android/src/jni/surfacetexturehelper.h"
-
-#include "rtc_base/bind.h"
-#include "rtc_base/logging.h"
-#include "sdk/android/generated_video_jni/jni/SurfaceTextureHelper_jni.h"
-#include "sdk/android/native_api/jni/java_types.h"
-#include "sdk/android/src/jni/videoframe.h"
-
-namespace webrtc {
-namespace jni {
-
-void SurfaceTextureHelperTextureToYUV(
-    JNIEnv* env,
-    const JavaRef<jobject>& j_surface_texture_helper,
-    const JavaRef<jobject>& buffer,
-    int width,
-    int height,
-    int stride,
-    const NativeHandleImpl& native_handle) {
-  Java_SurfaceTextureHelper_textureToYUV(
-      env, j_surface_texture_helper, buffer, width, height, stride,
-      native_handle.oes_texture_id, native_handle.sampling_matrix.ToJava(env));
-}
-
-rtc::scoped_refptr<SurfaceTextureHelper> SurfaceTextureHelper::create(
-    JNIEnv* jni,
-    const char* thread_name,
-    const JavaRef<jobject>& j_egl_context) {
-  ScopedJavaLocalRef<jobject> j_surface_texture_helper =
-      Java_SurfaceTextureHelper_create(
-          jni, NativeToJavaString(jni, thread_name), j_egl_context);
-  if (IsNull(jni, j_surface_texture_helper))
-    return nullptr;
-  return new rtc::RefCountedObject<SurfaceTextureHelper>(
-      jni, j_surface_texture_helper);
-}
-
-SurfaceTextureHelper::SurfaceTextureHelper(
-    JNIEnv* jni,
-    const JavaRef<jobject>& j_surface_texture_helper)
-    : j_surface_texture_helper_(jni, j_surface_texture_helper) {}
-
-SurfaceTextureHelper::~SurfaceTextureHelper() {
-  RTC_LOG(LS_INFO) << "SurfaceTextureHelper dtor";
-  JNIEnv* jni = AttachCurrentThreadIfNeeded();
-  Java_SurfaceTextureHelper_dispose(jni, j_surface_texture_helper_);
-}
-
-const ScopedJavaGlobalRef<jobject>&
-SurfaceTextureHelper::GetJavaSurfaceTextureHelper() const {
-  return j_surface_texture_helper_;
-}
-
-void SurfaceTextureHelper::ReturnTextureFrame() const {
-  JNIEnv* jni = AttachCurrentThreadIfNeeded();
-  Java_SurfaceTextureHelper_returnTextureFrame(jni, j_surface_texture_helper_);
-}
-
-rtc::scoped_refptr<AndroidTextureBuffer>
-SurfaceTextureHelper::CreateTextureFrame(
-    int width,
-    int height,
-    const NativeHandleImpl& native_handle) {
-  return new rtc::RefCountedObject<AndroidTextureBuffer>(width, height,
-                                                         native_handle, this);
-}
-
-}  // namespace jni
-}  // namespace webrtc
diff --git a/sdk/android/src/jni/surfacetexturehelper.h b/sdk/android/src/jni/surfacetexturehelper.h
deleted file mode 100644
index 06b396954c..0000000000
--- a/sdk/android/src/jni/surfacetexturehelper.h
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef SDK_ANDROID_SRC_JNI_SURFACETEXTUREHELPER_H_
-#define SDK_ANDROID_SRC_JNI_SURFACETEXTUREHELPER_H_
-
-#include <jni.h>
-
-#include "api/video/video_frame_buffer.h"
-#include "rtc_base/refcount.h"
-#include "rtc_base/scoped_ref_ptr.h"
-#include "sdk/android/src/jni/jni_helpers.h"
-
-namespace webrtc {
-namespace jni {
-
-struct NativeHandleImpl;
-
-// Helper class to create and synchronize access to an Android SurfaceTexture.
-// It is used for creating VideoFrameBuffers from a SurfaceTexture when
-// the SurfaceTexture has been updated.
-// When the VideoFrameBuffer is released, this class returns the buffer to the
-// java SurfaceTextureHelper so it can be updated safely. The VideoFrameBuffer
-// can be released on an arbitrary thread.
-// SurfaceTextureHelper is reference counted to make sure that it is not
-// destroyed while a VideoFrameBuffer is in use.
-// This class is the C++ counterpart of the java class SurfaceTextureHelper.
-// It owns the corresponding java object, and calls the java dispose
-// method when destroyed.
-// Usage:
-// 1. Create an instance of this class.
-// 2. Get the Java SurfaceTextureHelper with GetJavaSurfaceTextureHelper().
-// 3. Register a listener to the Java SurfaceListener and start producing
-//    new buffers.
-// 4. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
-class SurfaceTextureHelper : public rtc::RefCountInterface {
- public:
-  // Might return null if creating the Java SurfaceTextureHelper fails.
-  static rtc::scoped_refptr<SurfaceTextureHelper> create(
-      JNIEnv* jni,
-      const char* thread_name,
-      const JavaRef<jobject>& j_egl_context);
-
-  const ScopedJavaGlobalRef<jobject>& GetJavaSurfaceTextureHelper() const;
-
-  rtc::scoped_refptr<AndroidTextureBuffer> CreateTextureFrame(
-      int width,
-      int height,
-      const NativeHandleImpl& native_handle);
-
-  // May be called on arbitrary thread.
-  void ReturnTextureFrame() const;
-
- protected:
-  ~SurfaceTextureHelper() override;
-  SurfaceTextureHelper(JNIEnv* jni,
-                       const JavaRef<jobject>& j_surface_texture_helper);
-
- private:
-  const ScopedJavaGlobalRef<jobject> j_surface_texture_helper_;
-};
-
-void SurfaceTextureHelperTextureToYUV(
-    JNIEnv* env,
-    const JavaRef<jobject>& j_surface_texture_helper,
-    const JavaRef<jobject>& buffer,
-    int width,
-    int height,
-    int stride,
-    const NativeHandleImpl& native_handle);
-
-}  // namespace jni
-}  // namespace webrtc
-
-#endif  // SDK_ANDROID_SRC_JNI_SURFACETEXTUREHELPER_H_
diff --git a/sdk/android/src/jni/video_renderer.cc b/sdk/android/src/jni/video_renderer.cc
index 15a08f8a24..0271bd1da5 100644
--- a/sdk/android/src/jni/video_renderer.cc
+++ b/sdk/android/src/jni/video_renderer.cc
@@ -34,19 +34,7 @@ class JavaVideoRendererWrapper : public rtc::VideoSinkInterface<VideoFrame> {
     ScopedJavaLocalRef<jobject> j_frame;
     if (video_frame.video_frame_buffer()->type() ==
         VideoFrameBuffer::Type::kNative) {
-      AndroidVideoFrameBuffer* android_buffer =
-          static_cast<AndroidVideoFrameBuffer*>(
-              video_frame.video_frame_buffer().get());
-      switch (android_buffer->android_type()) {
-        case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer:
-          j_frame = ToJavaTextureFrame(env, video_frame);
-          break;
-        case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer:
-          j_frame = FromWrappedJavaBuffer(env, video_frame);
-          break;
-        default:
-          RTC_NOTREACHED();
-      }
+      j_frame = FromWrappedJavaBuffer(env, video_frame);
     } else {
       j_frame = ToJavaI420Frame(env, video_frame);
     }
@@ -94,18 +82,6 @@ class JavaVideoRendererWrapper : public rtc::VideoSinkInterface<VideoFrame> {
                      i420_buffer->StrideV(), v_buffer, javaShallowCopy(frame));
   }
 
-  // Return a VideoRenderer.I420Frame referring texture object in |frame|.
-  ScopedJavaLocalRef<jobject> ToJavaTextureFrame(JNIEnv* env,
-                                                 const VideoFrame& frame) {
-    NativeHandleImpl handle =
-        static_cast<AndroidTextureBuffer*>(frame.video_frame_buffer().get())
-            ->native_handle_impl();
-    return Java_I420Frame_createTextureFrame(
-        env, frame.width(), frame.height(), static_cast<int>(frame.rotation()),
-        handle.oes_texture_id, handle.sampling_matrix.ToJava(env),
-        javaShallowCopy(frame));
-  }
-
   ScopedJavaGlobalRef<jobject> j_callbacks_;
 };
diff --git a/sdk/android/src/jni/videoframe.cc b/sdk/android/src/jni/videoframe.cc
index e1fb4315a4..ba9fca99dc 100644
--- a/sdk/android/src/jni/videoframe.cc
+++ b/sdk/android/src/jni/videoframe.cc
@@ -22,7 +22,6 @@
 #include "rtc_base/timeutils.h"
 #include "sdk/android/generated_video_jni/jni/VideoFrame_jni.h"
 #include "sdk/android/src/jni/jni_helpers.h"
-#include "sdk/android/src/jni/surfacetexturehelper.h"
 #include "sdk/android/src/jni/wrapped_native_i420_buffer.h"
 #include "third_party/libyuv/include/libyuv/scale.h"
 
@@ -218,77 +217,6 @@ NativeHandleImpl::NativeHandleImpl(
     : oes_texture_id(j_oes_texture_id),
       sampling_matrix(jni, j_transform_matrix) {}
 
-AndroidTextureBuffer::AndroidTextureBuffer(
-    int width,
-    int height,
-    const NativeHandleImpl& native_handle,
-    const rtc::scoped_refptr<SurfaceTextureHelper>& surface_texture_helper)
-    : width_(width),
-      height_(height),
-      native_handle_(native_handle),
-      surface_texture_helper_(surface_texture_helper) {}
-
-AndroidTextureBuffer::~AndroidTextureBuffer() {
-  surface_texture_helper_->ReturnTextureFrame();
-}
-
-VideoFrameBuffer::Type AndroidTextureBuffer::type() const {
-  return Type::kNative;
-}
-
-NativeHandleImpl AndroidTextureBuffer::native_handle_impl() const {
-  return native_handle_;
-}
-
-int AndroidTextureBuffer::width() const {
-  return width_;
-}
-
-int AndroidTextureBuffer::height() const {
-  return height_;
-}
-
-rtc::scoped_refptr<I420BufferInterface> AndroidTextureBuffer::ToI420() {
-  int uv_width = (width() + 7) / 8;
-  int stride = 8 * uv_width;
-  int uv_height = (height() + 1) / 2;
-  size_t size = stride * (height() + uv_height);
-  // The data is owned by the frame, and the normal case is that the
-  // data is deleted by the frame's destructor callback.
-  //
-  // TODO(nisse): Use an I420BufferPool. We then need to extend that
-  // class, and I420Buffer, to support our memory layout.
-  // TODO(nisse): Depending on
-  // system_wrappers/include/aligned_malloc.h violate current DEPS
-  // rules. We get away for now only because it is indirectly included
-  // by i420_buffer.h
-  std::unique_ptr<uint8_t, AlignedFreeDeleter> yuv_data(
-      static_cast<uint8_t*>(AlignedMalloc(size, kBufferAlignment)));
-  // See YuvConverter.java for the required layout.
-  uint8_t* y_data = yuv_data.get();
-  uint8_t* u_data = y_data + height() * stride;
-  uint8_t* v_data = u_data + stride / 2;
-
-  rtc::scoped_refptr<I420BufferInterface> copy = webrtc::WrapI420Buffer(
-      width(), height(), y_data, stride, u_data, stride, v_data, stride,
-      rtc::Bind(&AlignedFree, yuv_data.release()));
-
-  JNIEnv* jni = AttachCurrentThreadIfNeeded();
-
-  // TODO(sakal): This call to a deperecated method will be removed when
-  // AndroidTextureBuffer is removed.
-  ScopedJavaLocalRef<jobject> byte_buffer =
-      NewDirectByteBuffer(jni, y_data, size);
-  SurfaceTextureHelperTextureToYUV(
-      jni, surface_texture_helper_->GetJavaSurfaceTextureHelper(), byte_buffer,
-      width(), height(), stride, native_handle_);
-
-  return copy;
-}
-
-AndroidVideoFrameBuffer::AndroidType AndroidTextureBuffer::android_type() {
-  return AndroidType::kTextureBuffer;
-}
-
 rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Adopt(
     JNIEnv* jni,
     const JavaRef<jobject>& j_video_frame_buffer) {
@@ -355,10 +283,6 @@ rtc::scoped_refptr<I420BufferInterface> AndroidVideoBuffer::ToI420() {
   return AndroidVideoI420Buffer::Adopt(jni, width_, height_, j_i420_buffer);
 }
 
-AndroidVideoFrameBuffer::AndroidType AndroidVideoBuffer::android_type() {
-  return AndroidType::kJavaBuffer;
-}
-
 VideoFrame JavaToNativeFrame(JNIEnv* jni,
                              const JavaRef<jobject>& j_video_frame,
                              uint32_t timestamp_rtp) {
@@ -373,31 +297,15 @@ VideoFrame JavaToNativeFrame(JNIEnv* jni,
                     static_cast<VideoRotation>(rotation));
 }
 
-static bool IsJavaVideoBuffer(rtc::scoped_refptr<VideoFrameBuffer> buffer) {
-  if (buffer->type() != VideoFrameBuffer::Type::kNative) {
-    return false;
-  }
-  AndroidVideoFrameBuffer* android_buffer =
-      static_cast<AndroidVideoFrameBuffer*>(buffer.get());
-  return android_buffer->android_type() ==
-         AndroidVideoFrameBuffer::AndroidType::kJavaBuffer;
-}
-
 ScopedJavaLocalRef<jobject> NativeToJavaVideoFrame(JNIEnv* jni,
                                                    const VideoFrame& frame) {
   rtc::scoped_refptr<VideoFrameBuffer> buffer = frame.video_frame_buffer();
 
-  if (IsJavaVideoBuffer(buffer)) {
-    RTC_DCHECK(buffer->type() == VideoFrameBuffer::Type::kNative);
-    AndroidVideoFrameBuffer* android_buffer =
-        static_cast<AndroidVideoFrameBuffer*>(buffer.get());
-    RTC_DCHECK(android_buffer->android_type() ==
-               AndroidVideoFrameBuffer::AndroidType::kJavaBuffer);
-    AndroidVideoBuffer* android_video_buffer =
-        static_cast<AndroidVideoBuffer*>(android_buffer);
-
+  if (buffer->type() == VideoFrameBuffer::Type::kNative) {
+    AndroidVideoBuffer* android_buffer =
+        static_cast<AndroidVideoBuffer*>(buffer.get());
     ScopedJavaLocalRef<jobject> j_video_frame_buffer(
-        jni, android_video_buffer->video_frame_buffer());
+        jni, android_buffer->video_frame_buffer());
     Java_Buffer_retain(jni, j_video_frame_buffer);
     return Java_VideoFrame_Constructor(
         jni, j_video_frame_buffer, static_cast<jint>(frame.rotation()),
diff --git a/sdk/android/src/jni/videoframe.h b/sdk/android/src/jni/videoframe.h
index 6f5720d78d..8b99701268 100644
--- a/sdk/android/src/jni/videoframe.h
+++ b/sdk/android/src/jni/videoframe.h
@@ -18,7 +18,6 @@
 #include "api/video/video_rotation.h"
 #include "rtc_base/callback.h"
 #include "sdk/android/src/jni/jni_helpers.h"
-#include "sdk/android/src/jni/surfacetexturehelper.h"
 
 namespace webrtc {
 namespace jni {
@@ -26,8 +25,6 @@ namespace jni {
 // TODO(sakal): Remove once clients have migrated.
 using ::webrtc::JavaParamRef;
 
-class SurfaceTextureHelper;
-
 // Open gl texture matrix, in column-major order. Operations are
 // in-place.
 class Matrix {
@@ -66,44 +63,7 @@ struct NativeHandleImpl {
   Matrix sampling_matrix;
 };
 
-// Base class to differentiate between the old texture frames and the new
-// Java-based frames.
-// TODO(sakal): Remove this and AndroidTextureBuffer once they are no longer
-// needed.
-class AndroidVideoFrameBuffer : public VideoFrameBuffer {
- public:
-  enum class AndroidType { kTextureBuffer, kJavaBuffer };
-
-  virtual AndroidType android_type() = 0;
-};
-
-class AndroidTextureBuffer : public AndroidVideoFrameBuffer {
- public:
-  AndroidTextureBuffer(
-      int width,
-      int height,
-      const NativeHandleImpl& native_handle,
-      const rtc::scoped_refptr<SurfaceTextureHelper>& surface_texture_helper);
-  ~AndroidTextureBuffer() override;
-
-  NativeHandleImpl native_handle_impl() const;
-
- private:
-  Type type() const override;
-  int width() const override;
-  int height() const override;
-
-  rtc::scoped_refptr<I420BufferInterface> ToI420() override;
-
-  AndroidType android_type() override;
-
-  const int width_;
-  const int height_;
-  NativeHandleImpl native_handle_;
-  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
-};
-
-class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
+class AndroidVideoBuffer : public VideoFrameBuffer {
  public:
   // Creates a native VideoFrameBuffer from a Java VideoFrame.Buffer.
   static rtc::scoped_refptr<AndroidVideoBuffer> Create(
@@ -143,8 +103,6 @@ class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
 
   rtc::scoped_refptr<I420BufferInterface> ToI420() override;
 
-  AndroidType android_type() override;
-
   const int width_;
   const int height_;
   // Holds a VideoFrame.Buffer.
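
Usage sketch: with createVideoSource(boolean) and
VideoSource.getCapturerObserver() above, a client can wire a capturer to a
standalone VideoSource outside the factory. A minimal Java sketch, assuming
a PeerConnectionFactory "factory", a VideoCapturer "capturer", an EglBase
"eglBase", and an Android Context "applicationContext" already exist:

  // Create the helper thread and SurfaceTexture the capturer renders to.
  SurfaceTextureHelper helper =
      SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
  // Create a standalone source; no SurfaceTextureHelper is passed to native.
  VideoSource videoSource = factory.createVideoSource(capturer.isScreencast());
  // The source's own observer replaces AndroidVideoTrackSourceObserver.
  capturer.initialize(helper, applicationContext, videoSource.getCapturerObserver());
  capturer.startCapture(1280, 720, 30);
  VideoTrack videoTrack = factory.createVideoTrack("video0", videoSource);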
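Since the native side now only handles wrapped Java VideoFrames, a custom
capturer delivers frames through CapturerObserver.onFrameCaptured(). A sketch
of delivering one NV21 frame, assuming "observer" is the observer obtained
above and that nv21Data, width, height, rotationDegrees, and timestampNs come
from a camera callback:

  // Wrap the NV21 bytes; the null releaseCallback mirrors the deprecated
  // byte-buffer path above, which relies on the native code making a deep
  // copy in cropAndScale() before this frame is released.
  VideoFrame.Buffer buffer =
      new NV21Buffer(nv21Data, width, height, /* releaseCallback= */ null);
  VideoFrame frame = new VideoFrame(buffer, rotationDegrees, timestampNs);
  observer.onFrameCaptured(frame);
  frame.release();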