Android: Only use Java VideoFrames internally

This CL removes internal support for any Android frame representation
other than wrapped Java VideoFrames. This allows a big internal
cleanup: the internal class AndroidTextureBuffer and all logic related
to it can be removed. The C++ AndroidVideoTrackSource also no longer
needs to hold on to a C++ SurfaceTextureHelper, so all JNI code related
to SurfaceTextureHelper can be removed. With these methods removed, it
becomes possible to let VideoSource implement the CapturerObserver
interface directly, and AndroidVideoTrackSourceObserver is no longer
needed. Clients can then initialize VideoCapturers themselves, outside
the PeerConnectionFactory, and a new PeerConnectionFactory method lets
clients create standalone VideoSources that can be connected to a
VideoCapturer outside the factory.

Bug: webrtc:9181
Change-Id: Ie292ea9214f382d44dce9120725c62602a646ed8
Reviewed-on: https://webrtc-review.googlesource.com/71666
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23004}
Magnus Jedvert 2018-04-24 15:11:02 +02:00 committed by Commit Bot
parent 498592d391
commit 1a759c6354
27 changed files with 124 additions and 743 deletions
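For orientation, the resulting client-side setup looks roughly like the sketch below. It is assembled from the UnityUtility and PeerConnectionFactory diffs in this CL; the PeerConnectionFactory |factory|, EglBase |eglBase|, Context |appContext| and the createCameraCapturer() helper are assumed client-side code, not part of this change.

// Assumes the usual org.webrtc imports and client-owned |factory|, |eglBase|, |appContext|.
SurfaceTextureHelper surfaceTextureHelper =
    SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());

// New factory method: the capturer is no longer passed to the factory.
VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ false);

// The client connects capturer and source through the source's CapturerObserver.
VideoCapturer capturer = createCameraCapturer(new Camera2Enumerator(appContext));
capturer.initialize(surfaceTextureHelper, appContext, videoSource.getCapturerObserver());
capturer.startCapture(/* width= */ 640, /* height= */ 480, /* framerate= */ 30);

VideoTrack videoTrack = factory.createVideoTrack("video0", videoSource);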

View File

@ -48,11 +48,10 @@ public class UnityUtility {
VideoCapturer capturer =
createCameraCapturer(new Camera2Enumerator(ContextUtils.getApplicationContext()));
VideoCapturer.CapturerObserver capturerObserver =
new AndroidVideoTrackSourceObserver(nativeTrackSource);
VideoSource videoSource = new VideoSource(nativeTrackSource);
capturer.initialize(
surfaceTextureHelper, ContextUtils.getApplicationContext(), capturerObserver);
capturer.initialize(surfaceTextureHelper, ContextUtils.getApplicationContext(),
videoSource.getCapturerObserver());
capturer.startCapture(720, 480, 30);
return capturer;

View File

@ -447,8 +447,7 @@ void SimplePeerConnection::AddStreams(bool audio_only) {
rtc::scoped_refptr<webrtc::jni::AndroidVideoTrackSource> source(
new rtc::RefCountedObject<webrtc::jni::AndroidVideoTrackSource>(
g_signaling_thread.get(), env,
webrtc::JavaParamRef<jobject>(texture_helper), false));
g_signaling_thread.get(), env, false));
rtc::scoped_refptr<webrtc::VideoTrackSourceProxy> proxy_source =
webrtc::VideoTrackSourceProxy::Create(g_signaling_thread.get(),
g_worker_thread.get(), source);

View File

@ -269,7 +269,6 @@ generate_jni("generated_video_jni") {
"api/org/webrtc/EncodedImage.java",
"api/org/webrtc/MediaCodecVideoDecoder.java",
"api/org/webrtc/MediaCodecVideoEncoder.java",
"api/org/webrtc/SurfaceTextureHelper.java",
"api/org/webrtc/VideoCodecInfo.java",
"api/org/webrtc/VideoCodecStatus.java",
"api/org/webrtc/VideoDecoder.java",
@ -285,7 +284,6 @@ generate_jni("generated_video_jni") {
"api/org/webrtc/VideoSource.java",
"api/org/webrtc/VideoTrack.java",
"api/org/webrtc/YuvHelper.java",
"src/java/org/webrtc/AndroidVideoTrackSourceObserver.java",
"src/java/org/webrtc/EglBase14.java",
"src/java/org/webrtc/NV12Buffer.java",
"src/java/org/webrtc/NV21Buffer.java",
@ -315,8 +313,6 @@ rtc_static_library("video_jni") {
"src/jni/nv12buffer.cc",
"src/jni/nv21buffer.cc",
"src/jni/pc/video.cc",
"src/jni/surfacetexturehelper.cc",
"src/jni/surfacetexturehelper.h",
"src/jni/video_renderer.cc",
"src/jni/videocodecinfo.cc",
"src/jni/videocodecinfo.h",
@ -838,7 +834,7 @@ rtc_android_library("video_java") {
"api/org/webrtc/VideoDecoderFallback.java",
"api/org/webrtc/VideoEncoderFallback.java",
"api/org/webrtc/VideoFrameDrawer.java",
"src/java/org/webrtc/AndroidVideoTrackSourceObserver.java",
"src/java/org/webrtc/NV21Buffer.java",
"src/java/org/webrtc/VideoDecoderWrapper.java",
"src/java/org/webrtc/VideoEncoderWrapper.java",
"src/java/org/webrtc/WrappedNativeI420Buffer.java",
@ -866,7 +862,6 @@ rtc_android_library("camera_java") {
"src/java/org/webrtc/Camera2Session.java",
"src/java/org/webrtc/CameraCapturer.java",
"src/java/org/webrtc/CameraSession.java",
"src/java/org/webrtc/NV21Buffer.java",
]
deps = [

View File

@ -612,25 +612,6 @@ public class MediaCodecVideoEncoder {
}
}
@CalledByNativeUnchecked
boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix,
long presentationTimestampUs) {
checkOnMediaCodecThread();
try {
checkKeyFrameRequired(isKeyframe, presentationTimestampUs);
eglBase.makeCurrent();
// TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
// but it's a workaround for bug webrtc:5147.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
drawer.drawOes(oesTextureId, transformationMatrix, width, height, 0, 0, width, height);
eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
return true;
} catch (RuntimeException e) {
Logging.e(TAG, "encodeTexture failed", e);
return false;
}
}
/**
* Encodes a new style VideoFrame. |bufferIndex| is -1 if we are not encoding in surface mode.
*/

View File

@ -350,18 +350,20 @@ public class PeerConnectionFactory {
return new MediaStream(nativeCreateLocalMediaStream(nativeFactory, label));
}
public VideoSource createVideoSource(boolean isScreencast) {
return new VideoSource(nativeCreateVideoSource(nativeFactory, isScreencast));
}
public VideoSource createVideoSource(VideoCapturer capturer) {
final EglBase.Context eglContext =
localEglbase == null ? null : localEglbase.getEglBaseContext();
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create(VIDEO_CAPTURER_THREAD_NAME, eglContext);
long nativeAndroidVideoTrackSource =
nativeCreateVideoSource(nativeFactory, surfaceTextureHelper, capturer.isScreencast());
VideoCapturer.CapturerObserver capturerObserver =
new AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
capturer.initialize(
surfaceTextureHelper, ContextUtils.getApplicationContext(), capturerObserver);
return new VideoSource(nativeAndroidVideoTrackSource);
final VideoSource videoSource = new VideoSource(
nativeCreateVideoSource(nativeFactory, capturer.isScreencast()), surfaceTextureHelper);
capturer.initialize(surfaceTextureHelper, ContextUtils.getApplicationContext(),
videoSource.getCapturerObserver());
return videoSource;
}
public VideoTrack createVideoTrack(String id, VideoSource source) {
@ -498,8 +500,7 @@ public class PeerConnectionFactory {
private static native long nativeCreatePeerConnection(long factory,
PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver);
private static native long nativeCreateLocalMediaStream(long factory, String label);
private static native long nativeCreateVideoSource(
long factory, SurfaceTextureHelper surfaceTextureHelper, boolean is_screencast);
private static native long nativeCreateVideoSource(long factory, boolean is_screencast);
private static native long nativeCreateVideoTrack(
long factory, String id, long nativeVideoSource);
private static native long nativeCreateAudioSource(long factory, MediaConstraints constraints);
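Because the +/- prefixes were lost in the rendering of the PeerConnectionFactory.java hunks above, here is an approximate reconstruction of the two createVideoSource overloads as they read after this change (assembled from the added lines; not verbatim):

public VideoSource createVideoSource(boolean isScreencast) {
  return new VideoSource(nativeCreateVideoSource(nativeFactory, isScreencast));
}

// Capturer-based overload: the factory still owns the SurfaceTextureHelper and
// wires the capturer to the source via getCapturerObserver().
public VideoSource createVideoSource(VideoCapturer capturer) {
  final EglBase.Context eglContext =
      localEglbase == null ? null : localEglbase.getEglBaseContext();
  final SurfaceTextureHelper surfaceTextureHelper =
      SurfaceTextureHelper.create(VIDEO_CAPTURER_THREAD_NAME, eglContext);
  final VideoSource videoSource = new VideoSource(
      nativeCreateVideoSource(nativeFactory, capturer.isScreencast()), surfaceTextureHelper);
  capturer.initialize(surfaceTextureHelper, ContextUtils.getApplicationContext(),
      videoSource.getCapturerObserver());
  return videoSource;
}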

View File

@ -27,7 +27,7 @@ import javax.annotation.Nullable;
* {@code SurfaceTexture} using a {@code SurfaceTextureHelper}.
* The {@code SurfaceTextureHelper} is created by the native code and passed to this capturer in
* {@code VideoCapturer.initialize()}. On receiving a new frame, this capturer passes it
* as a texture to the native code via {@code CapturerObserver.onTextureFrameCaptured()}. This takes
* as a texture to the native code via {@code CapturerObserver.onFrameCaptured()}. This takes
* place on the HandlerThread of the given {@code SurfaceTextureHelper}. When done with each frame,
* the native code returns the buffer to the {@code SurfaceTextureHelper} to be used for new
* frames. At any time, at most one frame is being processed.
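A minimal sketch of that flow from inside a capturer, assuming the SurfaceTextureHelper listener API of this era (startListening() and its onTextureFrameAvailable() signature are assumptions, not part of this diff; |surfaceTextureHelper|, |capturerObserver|, |width| and |height| are capturer state set up in initialize()/startCapture()):

// Runs on the helper's HandlerThread.
surfaceTextureHelper.startListening((oesTextureId, transformMatrix, timestampNs) -> {
  VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
      width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
  VideoFrame frame = new VideoFrame(buffer, /* rotation= */ 0, timestampNs);
  capturerObserver.onFrameCaptured(frame);
  frame.release();  // The buffer goes back to the helper once native code releases it.
});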

View File

@ -51,7 +51,6 @@ public class SurfaceTextureHelper {
* thread and handler is created for handling the SurfaceTexture. May return null if EGL fails to
* initialize a pixel buffer surface and make it current.
*/
@CalledByNative
public static SurfaceTextureHelper create(
final String threadName, final EglBase.Context sharedContext) {
final HandlerThread thread = new HandlerThread(threadName);
@ -195,7 +194,6 @@ public class SurfaceTextureHelper {
* onTextureFrameAvailable(). Only one texture frame can be in flight at once, so you must call
* this function in order to receive a new frame.
*/
@CalledByNative
public void returnTextureFrame() {
handler.post(new Runnable() {
@Override
@ -219,7 +217,6 @@ public class SurfaceTextureHelper {
* stopped when the texture frame has been returned by a call to returnTextureFrame(). You are
* guaranteed to not receive any more onTextureFrameAvailable() after this function returns.
*/
@CalledByNative
public void dispose() {
Logging.d(TAG, "dispose()");
ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
@ -233,20 +230,6 @@ public class SurfaceTextureHelper {
});
}
/** Deprecated, use textureToYuv. */
@Deprecated
@SuppressWarnings("deprecation") // yuvConverter.convert is deprecated
@CalledByNative
void textureToYUV(final ByteBuffer buf, final int width, final int height, final int stride,
final int textureId, final float[] transformMatrix) {
if (textureId != oesTextureId) {
throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");
}
ThreadUtils.invokeAtFrontUninterruptibly(handler,
() -> yuvConverter.convert(buf, width, height, stride, textureId, transformMatrix));
}
/**
* Posts to the correct thread to convert |textureBuffer| to I420.
*/
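For reference, the typical client-side lifecycle of the helper (a minimal sketch; the thread name is arbitrary, and as the create() Javadoc above notes, creation may fail and return null):

SurfaceTextureHelper helper =
    SurfaceTextureHelper.create("STHelperThread", /* sharedContext= */ null);
if (helper == null) {
  // EGL failed to initialize a pixel buffer surface; report an error or fall back.
  return;
}
// ... pass |helper| to VideoCapturer.initialize() and capture frames ...
helper.dispose();  // The handler thread stops once any in-flight frame has been returned.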

View File

@ -46,8 +46,8 @@ public interface VideoCapturer {
* capture observer. It will be called only once and before any startCapture() request. The
* camera thread is guaranteed to be valid until dispose() is called. If the VideoCapturer wants
* to deliver texture frames, it should do this by rendering on the SurfaceTexture in
* |surfaceTextureHelper|, register itself as a listener, and forward the texture frames to
* CapturerObserver.onTextureFrameCaptured().
* |surfaceTextureHelper|, register itself as a listener, and forward the frames to
* CapturerObserver.onFrameCaptured().
*/
void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
CapturerObserver capturerObserver);
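A skeletal capturer honoring that contract might look like the sketch below. ForwardingCapturer is hypothetical and assumes the VideoCapturer interface of this revision; it is not code from this CL.

import android.content.Context;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoCapturer;

class ForwardingCapturer implements VideoCapturer {
  private SurfaceTextureHelper surfaceTextureHelper;
  private Context applicationContext;
  private VideoCapturer.CapturerObserver capturerObserver;

  @Override
  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
      VideoCapturer.CapturerObserver capturerObserver) {
    // Called exactly once, before any startCapture() request.
    this.surfaceTextureHelper = surfaceTextureHelper;
    this.applicationContext = applicationContext;
    this.capturerObserver = capturerObserver;
  }

  @Override
  public void startCapture(int width, int height, int framerate) {
    // Start rendering onto surfaceTextureHelper's SurfaceTexture here (or produce
    // frames some other way), then report success.
    capturerObserver.onCapturerStarted(true);
  }

  @Override
  public void stopCapture() throws InterruptedException {
    capturerObserver.onCapturerStopped();
  }

  @Override
  public void changeCaptureFormat(int width, int height, int framerate) {}

  @Override
  public void dispose() {}

  @Override
  public boolean isScreencast() {
    return false;
  }
}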

View File

@ -186,13 +186,6 @@ public class VideoRenderer {
return new I420Frame(width, height, rotationDegree, new int[] {y_stride, u_stride, v_stride},
new ByteBuffer[] {y_buffer, u_buffer, v_buffer}, nativeFramePointer);
}
@CalledByNative("I420Frame")
static I420Frame createTextureFrame(int width, int height, int rotationDegree, int textureId,
float[] samplingMatrix, long nativeFramePointer) {
return new I420Frame(
width, height, rotationDegree, textureId, samplingMatrix, nativeFramePointer);
}
}
/** The real meat of VideoSinkInterface. */

View File

@ -10,13 +10,84 @@
package org.webrtc;
import javax.annotation.Nullable;
/**
* Java wrapper of native AndroidVideoTrackSource.
*/
@JNINamespace("webrtc::jni")
public class VideoSource extends MediaSource {
private static class NativeCapturerObserver implements VideoCapturer.CapturerObserver {
private final long nativeSource;
// TODO(bugs.webrtc.org/9181): Remove.
@Nullable private final SurfaceTextureHelper surfaceTextureHelper;
public NativeCapturerObserver(long nativeSource) {
this.nativeSource = nativeSource;
this.surfaceTextureHelper = null;
}
// TODO(bugs.webrtc.org/9181): Remove.
public NativeCapturerObserver(long nativeSource, SurfaceTextureHelper surfaceTextureHelper) {
this.nativeSource = nativeSource;
this.surfaceTextureHelper = surfaceTextureHelper;
}
@Override
public void onCapturerStarted(boolean success) {
nativeCapturerStarted(nativeSource, success);
}
@Override
public void onCapturerStopped() {
nativeCapturerStopped(nativeSource);
}
// TODO(bugs.webrtc.org/9181): Remove.
@Override
@SuppressWarnings("deprecation")
public void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timestampNs) {
// This NV21Buffer is not possible to retain. This is safe only because the native code will
// always call cropAndScale() and directly make a deep copy of the buffer.
final VideoFrame.Buffer nv21Buffer =
new NV21Buffer(data, width, height, null /* releaseCallback */);
final VideoFrame frame = new VideoFrame(nv21Buffer, rotation, timestampNs);
onFrameCaptured(frame);
frame.release();
}
// TODO(bugs.webrtc.org/9181): Remove.
@Override
@SuppressWarnings("deprecation")
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestampNs) {
final VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
onFrameCaptured(frame);
frame.release();
}
@Override
public void onFrameCaptured(VideoFrame frame) {
nativeOnFrameCaptured(nativeSource, frame.getBuffer().getWidth(),
frame.getBuffer().getHeight(), frame.getRotation(), frame.getTimestampNs(),
frame.getBuffer());
}
}
private final VideoCapturer.CapturerObserver capturerObserver;
public VideoSource(long nativeSource) {
super(nativeSource);
this.capturerObserver = new NativeCapturerObserver(nativeSource);
}
// TODO(bugs.webrtc.org/9181): Remove.
VideoSource(long nativeSource, SurfaceTextureHelper surfaceTextureHelper) {
super(nativeSource);
this.capturerObserver = new NativeCapturerObserver(nativeSource, surfaceTextureHelper);
}
/**
@ -29,5 +100,13 @@ public class VideoSource extends MediaSource {
nativeAdaptOutputFormat(nativeSource, width, height, fps);
}
public VideoCapturer.CapturerObserver getCapturerObserver() {
return capturerObserver;
}
private static native void nativeAdaptOutputFormat(long source, int width, int height, int fps);
private static native void nativeCapturerStarted(long source, boolean success);
private static native void nativeCapturerStopped(long source);
private static native void nativeOnFrameCaptured(
long source, int width, int height, int rotation, long timestampNs, VideoFrame.Buffer frame);
}
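With this class, every frame reaches the native AndroidVideoTrackSource through the same onFrameCaptured() path. A small usage sketch of a standalone source driven directly through its observer; |factory|, |nv21Data| and |timestampNs| are assumed to come from client code (e.g. a camera callback):

VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ false);
VideoCapturer.CapturerObserver observer = videoSource.getCapturerObserver();

observer.onCapturerStarted(true);
// Deprecated byte-buffer path: the observer wraps |nv21Data| in an NV21Buffer-backed
// VideoFrame and forwards it via onFrameCaptured(), as shown in the class above.
observer.onByteBufferFrameCaptured(nv21Data, 640, 480, /* rotation= */ 0, timestampNs);
observer.onCapturerStopped();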

View File

@ -121,50 +121,4 @@ public class MediaCodecVideoEncoderTest {
encoder.release();
}
@Test
@SmallTest
public void testEncoderUsingTextures() throws InterruptedException {
if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
return;
}
final int width = 640;
final int height = 480;
final long presentationTs = 2;
final EglBase14 eglOesBase = new EglBase14(null, EglBase.CONFIG_PIXEL_BUFFER);
eglOesBase.createDummyPbufferSurface();
eglOesBase.makeCurrent();
int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
// TODO(perkj): This test is week since we don't fill the texture with valid data with correct
// width and height and verify the encoded data. Fill the OES texture and figure out a way to
// verify that the output make sense.
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile,
width, height, 300, 30, eglOesBase.getEglBaseContext()));
assertTrue(
encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(), presentationTs));
GlUtil.checkNoGLES2Error("encodeTexture");
// It should be Ok to delete the texture after calling encodeTexture.
GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
OutputBufferInfo info = encoder.dequeueOutputBuffer();
while (info == null) {
info = encoder.dequeueOutputBuffer();
Thread.sleep(20);
}
assertTrue(info.index != -1);
assertTrue(info.buffer.capacity() > 0);
assertEquals(presentationTs, info.presentationTimestampUs);
encoder.releaseOutputBuffer(info.index);
encoder.release();
eglOesBase.release();
}
}

View File

@ -1,63 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** An implementation of CapturerObserver that forwards all calls from Java to the C layer. */
@JNINamespace("webrtc::jni")
class AndroidVideoTrackSourceObserver implements VideoCapturer.CapturerObserver {
// Pointer to VideoTrackSourceProxy proxying AndroidVideoTrackSource.
private final long nativeSource;
public AndroidVideoTrackSourceObserver(long nativeSource) {
this.nativeSource = nativeSource;
}
@Override
public void onCapturerStarted(boolean success) {
nativeCapturerStarted(nativeSource, success);
}
@Override
public void onCapturerStopped() {
nativeCapturerStopped(nativeSource);
}
@Override
@SuppressWarnings("deprecation")
public void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timeStamp) {
nativeOnByteBufferFrameCaptured(
nativeSource, data, data.length, width, height, rotation, timeStamp);
}
@Override
@SuppressWarnings("deprecation")
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestamp) {
nativeOnTextureFrameCaptured(
nativeSource, width, height, oesTextureId, transformMatrix, rotation, timestamp);
}
@Override
public void onFrameCaptured(VideoFrame frame) {
nativeOnFrameCaptured(nativeSource, frame.getBuffer().getWidth(), frame.getBuffer().getHeight(),
frame.getRotation(), frame.getTimestampNs(), frame.getBuffer());
}
private static native void nativeCapturerStarted(long source, boolean success);
private static native void nativeCapturerStopped(long source);
private static native void nativeOnByteBufferFrameCaptured(
long source, byte[] data, int length, int width, int height, int rotation, long timeStamp);
private static native void nativeOnTextureFrameCaptured(long source, int width, int height,
int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
private static native void nativeOnFrameCaptured(
long source, int width, int height, int rotation, long timestampNs, VideoFrame.Buffer frame);
}

View File

@ -184,7 +184,7 @@ abstract class CameraCapturer implements CameraVideoCapturer {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onTextureFrameCaptured from another session.");
Logging.w(TAG, "onFrameCaptured from another session.");
return;
}
if (!firstFrameObserved) {

View File

@ -30,7 +30,6 @@
#include "sdk/android/generated_video_jni/jni/MediaCodecVideoDecoder_jni.h"
#include "sdk/android/native_api/jni/java_types.h"
#include "sdk/android/src/jni/androidmediacodeccommon.h"
#include "sdk/android/src/jni/surfacetexturehelper.h"
#include "sdk/android/src/jni/videoframe.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"

View File

@ -172,7 +172,6 @@ class MediaCodecVideoEncoder : public VideoEncoder {
bool key_frame,
const VideoFrame& frame,
int input_buffer_index);
bool EncodeTexture(JNIEnv* jni, bool key_frame, const VideoFrame& frame);
// Encodes a new style org.webrtc.VideoFrame. Might be a I420 or a texture
// frame.
bool EncodeJavaFrame(JNIEnv* jni,
@ -721,25 +720,10 @@ int32_t MediaCodecVideoEncoder::Encode(
encode_status =
EncodeByteBuffer(jni, key_frame, input_frame, j_input_buffer_index);
} else {
AndroidVideoFrameBuffer* android_buffer =
static_cast<AndroidVideoFrameBuffer*>(
input_frame.video_frame_buffer().get());
switch (android_buffer->android_type()) {
case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer:
encode_status = EncodeTexture(jni, key_frame, input_frame);
break;
case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer: {
ScopedJavaLocalRef<jobject> j_frame =
NativeToJavaVideoFrame(jni, frame);
encode_status =
EncodeJavaFrame(jni, key_frame, j_frame, j_input_buffer_index);
ReleaseJavaVideoFrame(jni, j_frame);
break;
}
default:
RTC_NOTREACHED();
return WEBRTC_VIDEO_CODEC_ERROR;
}
ScopedJavaLocalRef<jobject> j_frame = NativeToJavaVideoFrame(jni, frame);
encode_status =
EncodeJavaFrame(jni, key_frame, j_frame, j_input_buffer_index);
ReleaseJavaVideoFrame(jni, j_frame);
}
if (!encode_status) {
@ -808,20 +792,9 @@ bool MediaCodecVideoEncoder::IsTextureFrame(JNIEnv* jni,
if (frame.video_frame_buffer()->type() != VideoFrameBuffer::Type::kNative) {
return false;
}
AndroidVideoFrameBuffer* android_buffer =
static_cast<AndroidVideoFrameBuffer*>(frame.video_frame_buffer().get());
switch (android_buffer->android_type()) {
case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer:
return true;
case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer:
return Java_MediaCodecVideoEncoder_isTextureBuffer(
jni, static_cast<AndroidVideoBuffer*>(android_buffer)
->video_frame_buffer());
default:
RTC_NOTREACHED();
return false;
}
return Java_MediaCodecVideoEncoder_isTextureBuffer(
jni, static_cast<AndroidVideoBuffer*>(frame.video_frame_buffer().get())
->video_frame_buffer());
}
bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni,
@ -874,26 +847,6 @@ bool MediaCodecVideoEncoder::FillInputBuffer(JNIEnv* jni,
return true;
}
bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni,
bool key_frame,
const VideoFrame& frame) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
RTC_CHECK(use_surface_);
NativeHandleImpl handle =
static_cast<AndroidTextureBuffer*>(frame.video_frame_buffer().get())
->native_handle_impl();
bool encode_status = Java_MediaCodecVideoEncoder_encodeTexture(
jni, j_media_codec_video_encoder_, key_frame, handle.oes_texture_id,
handle.sampling_matrix.ToJava(jni), current_timestamp_us_);
if (CheckException(jni)) {
ALOGE << "Exception in encode texture.";
ProcessHWError(true /* reset_if_fallback_unavailable */);
return false;
}
return encode_status;
}
bool MediaCodecVideoEncoder::EncodeJavaFrame(JNIEnv* jni,
bool key_frame,
const JavaRef<jobject>& frame,

View File

@ -14,7 +14,6 @@
#include "api/videosourceproxy.h"
#include "rtc_base/logging.h"
#include "sdk/android/generated_video_jni/jni/AndroidVideoTrackSourceObserver_jni.h"
#include "sdk/android/generated_video_jni/jni/VideoSource_jni.h"
namespace webrtc {
@ -40,13 +39,9 @@ AndroidVideoTrackSource* AndroidVideoTrackSourceFromJavaProxy(jlong j_proxy) {
AndroidVideoTrackSource::AndroidVideoTrackSource(
rtc::Thread* signaling_thread,
JNIEnv* jni,
const JavaRef<jobject>& j_surface_texture_helper,
bool is_screencast)
: AdaptedVideoTrackSource(kRequiredResolutionAlignment),
signaling_thread_(signaling_thread),
surface_texture_helper_(new rtc::RefCountedObject<SurfaceTextureHelper>(
jni,
j_surface_texture_helper)),
is_screencast_(is_screencast) {
RTC_LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
camera_thread_checker_.DetachFromThread();
@ -83,106 +78,6 @@ bool AndroidVideoTrackSource::remote() const {
return false;
}
void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
int length,
int width,
int height,
VideoRotation rotation,
int64_t timestamp_ns) {
RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
int64_t translated_camera_time_us =
timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
int adapted_width;
int adapted_height;
int crop_width;
int crop_height;
int crop_x;
int crop_y;
if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
&adapted_height, &crop_width, &crop_height, &crop_x,
&crop_y)) {
return;
}
const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data);
const uint8_t* uv_plane = y_plane + width * height;
const int uv_width = (width + 1) / 2;
RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2));
// Can only crop at even pixels.
crop_x &= ~1;
crop_y &= ~1;
// Crop just by modifying pointers.
y_plane += width * crop_y + crop_x;
uv_plane += uv_width * crop_y + crop_x;
rtc::scoped_refptr<I420Buffer> buffer =
buffer_pool_.CreateBuffer(adapted_width, adapted_height);
nv12toi420_scaler_.NV12ToI420Scale(
y_plane, width, uv_plane, uv_width * 2, crop_width, crop_height,
buffer->MutableDataY(), buffer->StrideY(),
// Swap U and V, since we have NV21, not NV12.
buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
buffer->StrideU(), buffer->width(), buffer->height());
OnFrame(VideoFrame(buffer, rotation, translated_camera_time_us));
}
void AndroidVideoTrackSource::OnTextureFrameCaptured(
int width,
int height,
VideoRotation rotation,
int64_t timestamp_ns,
const NativeHandleImpl& handle) {
RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
int64_t translated_camera_time_us =
timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
int adapted_width;
int adapted_height;
int crop_width;
int crop_height;
int crop_x;
int crop_y;
if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
&adapted_height, &crop_width, &crop_height, &crop_x,
&crop_y)) {
surface_texture_helper_->ReturnTextureFrame();
return;
}
Matrix matrix = handle.sampling_matrix;
matrix.Crop(crop_width / static_cast<float>(width),
crop_height / static_cast<float>(height),
crop_x / static_cast<float>(width),
crop_y / static_cast<float>(height));
// Note that apply_rotation() may change under our feet, so we should only
// check once.
if (apply_rotation()) {
if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
std::swap(adapted_width, adapted_height);
}
matrix.Rotate(rotation);
rotation = kVideoRotation_0;
}
OnFrame(VideoFrame(surface_texture_helper_->CreateTextureFrame(
adapted_width, adapted_height,
NativeHandleImpl(handle.oes_texture_id, matrix)),
rotation, translated_camera_time_us));
}
void AndroidVideoTrackSource::OnFrameCaptured(
JNIEnv* jni,
int width,
@ -230,47 +125,7 @@ void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
video_adapter()->OnOutputFormatRequest(format);
}
rtc::scoped_refptr<SurfaceTextureHelper>
AndroidVideoTrackSource::surface_texture_helper() {
return surface_texture_helper_;
}
static void JNI_AndroidVideoTrackSourceObserver_OnByteBufferFrameCaptured(
JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source,
const JavaParamRef<jbyteArray>& j_frame,
jint length,
jint width,
jint height,
jint rotation,
jlong timestamp) {
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
jbyte* bytes = jni->GetByteArrayElements(j_frame.obj(), nullptr);
source->OnByteBufferFrameCaptured(bytes, length, width, height,
jintToVideoRotation(rotation), timestamp);
jni->ReleaseByteArrayElements(j_frame.obj(), bytes, JNI_ABORT);
}
static void JNI_AndroidVideoTrackSourceObserver_OnTextureFrameCaptured(
JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source,
jint j_width,
jint j_height,
jint j_oes_texture_id,
const JavaParamRef<jfloatArray>& j_transform_matrix,
jint j_rotation,
jlong j_timestamp) {
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->OnTextureFrameCaptured(
j_width, j_height, jintToVideoRotation(j_rotation), j_timestamp,
NativeHandleImpl(jni, j_oes_texture_id, j_transform_matrix));
}
static void JNI_AndroidVideoTrackSourceObserver_OnFrameCaptured(
static void JNI_VideoSource_OnFrameCaptured(
JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source,
@ -286,11 +141,10 @@ static void JNI_AndroidVideoTrackSourceObserver_OnFrameCaptured(
j_video_frame_buffer);
}
static void JNI_AndroidVideoTrackSourceObserver_CapturerStarted(
JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source,
jboolean j_success) {
static void JNI_VideoSource_CapturerStarted(JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source,
jboolean j_success) {
RTC_LOG(LS_INFO) << "AndroidVideoTrackSourceObserve_nativeCapturerStarted";
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
@ -298,10 +152,9 @@ static void JNI_AndroidVideoTrackSourceObserver_CapturerStarted(
: AndroidVideoTrackSource::SourceState::kEnded);
}
static void JNI_AndroidVideoTrackSourceObserver_CapturerStopped(
JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source) {
static void JNI_VideoSource_CapturerStopped(JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source) {
RTC_LOG(LS_INFO) << "AndroidVideoTrackSourceObserve_nativeCapturerStopped";
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);

View File

@ -20,7 +20,6 @@
#include "rtc_base/checks.h"
#include "rtc_base/thread_checker.h"
#include "rtc_base/timestampaligner.h"
#include "sdk/android/src/jni/surfacetexturehelper.h"
#include "sdk/android/src/jni/videoframe.h"
namespace webrtc {
@ -30,7 +29,6 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
public:
AndroidVideoTrackSource(rtc::Thread* signaling_thread,
JNIEnv* jni,
const JavaRef<jobject>& j_surface_texture_helper,
bool is_screencast = false);
~AndroidVideoTrackSource() override;
@ -70,17 +68,12 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
void OnOutputFormatRequest(int width, int height, int fps);
rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper();
private:
rtc::Thread* signaling_thread_;
rtc::AsyncInvoker invoker_;
rtc::ThreadChecker camera_thread_checker_;
SourceState state_;
rtc::TimestampAligner timestamp_aligner_;
NV12ToI420Scaler nv12toi420_scaler_;
I420BufferPool buffer_pool_;
rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
const bool is_screencast_;
};

View File

@ -42,11 +42,6 @@ static void JNI_NV21Buffer_CropAndScale(JNIEnv* jni,
const int src_stride_uv = src_width;
const int crop_chroma_x = crop_x / 2;
const int crop_chroma_y = crop_y / 2;
const int crop_chroma_width = (crop_width + 1) / 2;
const int crop_chroma_height = (crop_height + 1) / 2;
const int tmp_stride_u = crop_chroma_width;
const int tmp_stride_v = crop_chroma_width;
const int tmp_size = crop_chroma_height * (tmp_stride_u + tmp_stride_v);
jboolean was_copy;
jbyte* src_bytes = jni->GetByteArrayElements(j_src.obj(), &was_copy);

View File

@ -35,7 +35,6 @@ void SetEglContext(JNIEnv* env,
void* CreateVideoSource(JNIEnv* env,
rtc::Thread* signaling_thread,
rtc::Thread* worker_thread,
const JavaParamRef<jobject>& j_surface_texture_helper,
jboolean is_screencast) {
return nullptr;
}
@ -47,14 +46,6 @@ void SetEglContext(JNIEnv* env,
cricket::WebRtcVideoDecoderFactory* decoder_factory,
jobject egl_context) {}
void* CreateVideoSource(JNIEnv* env,
rtc::Thread* signaling_thread,
rtc::Thread* worker_thread,
jobject j_surface_texture_helper,
jboolean is_screencast) {
return nullptr;
}
cricket::WebRtcVideoEncoderFactory* CreateLegacyVideoEncoderFactory() {
return nullptr;
}

View File

@ -480,13 +480,12 @@ static jlong JNI_PeerConnectionFactory_CreateVideoSource(
JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong native_factory,
const JavaParamRef<jobject>& j_surface_texture_helper,
jboolean is_screencast) {
OwnedFactoryAndThreads* factory =
reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
return jlongFromPointer(CreateVideoSource(
jni, factory->signaling_thread(), factory->worker_thread(),
j_surface_texture_helper, is_screencast));
return jlongFromPointer(CreateVideoSource(jni, factory->signaling_thread(),
factory->worker_thread(),
is_screencast));
}
static jlong JNI_PeerConnectionFactory_CreateVideoTrack(

View File

@ -26,7 +26,6 @@
#include "sdk/android/src/jni/androidmediadecoder_jni.h"
#include "sdk/android/src/jni/androidmediaencoder_jni.h"
#include "sdk/android/src/jni/androidvideotracksource.h"
#include "sdk/android/src/jni/surfacetexturehelper.h"
#include "sdk/android/src/jni/videodecoderfactorywrapper.h"
#include "sdk/android/src/jni/videoencoderfactorywrapper.h"
@ -82,11 +81,10 @@ void SetEglContext(JNIEnv* env,
void* CreateVideoSource(JNIEnv* env,
rtc::Thread* signaling_thread,
rtc::Thread* worker_thread,
const JavaParamRef<jobject>& j_surface_texture_helper,
jboolean is_screencast) {
rtc::scoped_refptr<AndroidVideoTrackSource> source(
new rtc::RefCountedObject<AndroidVideoTrackSource>(
signaling_thread, env, j_surface_texture_helper, is_screencast));
new rtc::RefCountedObject<AndroidVideoTrackSource>(signaling_thread, env,
is_screencast));
return VideoTrackSourceProxy::Create(signaling_thread, worker_thread, source)
.release();
}

View File

@ -30,8 +30,6 @@ class VideoDecoderFactory;
namespace webrtc {
namespace jni {
class SurfaceTextureHelper;
VideoEncoderFactory* CreateVideoEncoderFactory(
JNIEnv* jni,
const JavaRef<jobject>& j_encoder_factory);
@ -50,7 +48,6 @@ void SetEglContext(JNIEnv* env,
void* CreateVideoSource(JNIEnv* env,
rtc::Thread* signaling_thread,
rtc::Thread* worker_thread,
const JavaParamRef<jobject>& j_surface_texture_helper,
jboolean is_screencast);
cricket::WebRtcVideoEncoderFactory* CreateLegacyVideoEncoderFactory();

View File

@ -1,78 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "sdk/android/src/jni/surfacetexturehelper.h"
#include "rtc_base/bind.h"
#include "rtc_base/logging.h"
#include "sdk/android/generated_video_jni/jni/SurfaceTextureHelper_jni.h"
#include "sdk/android/native_api/jni/java_types.h"
#include "sdk/android/src/jni/videoframe.h"
namespace webrtc {
namespace jni {
void SurfaceTextureHelperTextureToYUV(
JNIEnv* env,
const JavaRef<jobject>& j_surface_texture_helper,
const JavaRef<jobject>& buffer,
int width,
int height,
int stride,
const NativeHandleImpl& native_handle) {
Java_SurfaceTextureHelper_textureToYUV(
env, j_surface_texture_helper, buffer, width, height, stride,
native_handle.oes_texture_id, native_handle.sampling_matrix.ToJava(env));
}
rtc::scoped_refptr<SurfaceTextureHelper> SurfaceTextureHelper::create(
JNIEnv* jni,
const char* thread_name,
const JavaRef<jobject>& j_egl_context) {
ScopedJavaLocalRef<jobject> j_surface_texture_helper =
Java_SurfaceTextureHelper_create(
jni, NativeToJavaString(jni, thread_name), j_egl_context);
if (IsNull(jni, j_surface_texture_helper))
return nullptr;
return new rtc::RefCountedObject<SurfaceTextureHelper>(
jni, j_surface_texture_helper);
}
SurfaceTextureHelper::SurfaceTextureHelper(
JNIEnv* jni,
const JavaRef<jobject>& j_surface_texture_helper)
: j_surface_texture_helper_(jni, j_surface_texture_helper) {}
SurfaceTextureHelper::~SurfaceTextureHelper() {
RTC_LOG(LS_INFO) << "SurfaceTextureHelper dtor";
JNIEnv* jni = AttachCurrentThreadIfNeeded();
Java_SurfaceTextureHelper_dispose(jni, j_surface_texture_helper_);
}
const ScopedJavaGlobalRef<jobject>&
SurfaceTextureHelper::GetJavaSurfaceTextureHelper() const {
return j_surface_texture_helper_;
}
void SurfaceTextureHelper::ReturnTextureFrame() const {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
Java_SurfaceTextureHelper_returnTextureFrame(jni, j_surface_texture_helper_);
}
rtc::scoped_refptr<VideoFrameBuffer> SurfaceTextureHelper::CreateTextureFrame(
int width,
int height,
const NativeHandleImpl& native_handle) {
return new rtc::RefCountedObject<AndroidTextureBuffer>(width, height,
native_handle, this);
}
} // namespace jni
} // namespace webrtc

View File

@ -1,82 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SDK_ANDROID_SRC_JNI_SURFACETEXTUREHELPER_H_
#define SDK_ANDROID_SRC_JNI_SURFACETEXTUREHELPER_H_
#include <jni.h>
#include "api/video/video_frame_buffer.h"
#include "rtc_base/refcount.h"
#include "rtc_base/scoped_ref_ptr.h"
#include "sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
struct NativeHandleImpl;
// Helper class to create and synchronize access to an Android SurfaceTexture.
// It is used for creating VideoFrameBuffers from a SurfaceTexture when
// the SurfaceTexture has been updated.
// When the VideoFrameBuffer is released, this class returns the buffer to the
// java SurfaceTextureHelper so it can be updated safely. The VideoFrameBuffer
// can be released on an arbitrary thread.
// SurfaceTextureHelper is reference counted to make sure that it is not
// destroyed while a VideoFrameBuffer is in use.
// This class is the C++ counterpart of the java class SurfaceTextureHelper.
// It owns the corresponding java object, and calls the java dispose
// method when destroyed.
// Usage:
// 1. Create an instance of this class.
// 2. Get the Java SurfaceTextureHelper with GetJavaSurfaceTextureHelper().
// 3. Register a listener to the Java SurfaceListener and start producing
// new buffers.
// 4. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
class SurfaceTextureHelper : public rtc::RefCountInterface {
public:
// Might return null if creating the Java SurfaceTextureHelper fails.
static rtc::scoped_refptr<SurfaceTextureHelper> create(
JNIEnv* jni,
const char* thread_name,
const JavaRef<jobject>& j_egl_context);
const ScopedJavaGlobalRef<jobject>& GetJavaSurfaceTextureHelper() const;
rtc::scoped_refptr<VideoFrameBuffer> CreateTextureFrame(
int width,
int height,
const NativeHandleImpl& native_handle);
// May be called on arbitrary thread.
void ReturnTextureFrame() const;
protected:
~SurfaceTextureHelper() override;
SurfaceTextureHelper(JNIEnv* jni,
const JavaRef<jobject>& j_surface_texture_helper);
private:
const ScopedJavaGlobalRef<jobject> j_surface_texture_helper_;
};
void SurfaceTextureHelperTextureToYUV(
JNIEnv* env,
const JavaRef<jobject>& j_surface_texture_helper,
const JavaRef<jobject>& buffer,
int width,
int height,
int stride,
const NativeHandleImpl& native_handle);
} // namespace jni
} // namespace webrtc
#endif // SDK_ANDROID_SRC_JNI_SURFACETEXTUREHELPER_H_

View File

@ -34,19 +34,7 @@ class JavaVideoRendererWrapper : public rtc::VideoSinkInterface<VideoFrame> {
ScopedJavaLocalRef<jobject> j_frame;
if (video_frame.video_frame_buffer()->type() ==
VideoFrameBuffer::Type::kNative) {
AndroidVideoFrameBuffer* android_buffer =
static_cast<AndroidVideoFrameBuffer*>(
video_frame.video_frame_buffer().get());
switch (android_buffer->android_type()) {
case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer:
j_frame = ToJavaTextureFrame(env, video_frame);
break;
case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer:
j_frame = FromWrappedJavaBuffer(env, video_frame);
break;
default:
RTC_NOTREACHED();
}
j_frame = FromWrappedJavaBuffer(env, video_frame);
} else {
j_frame = ToJavaI420Frame(env, video_frame);
}
@ -94,18 +82,6 @@ class JavaVideoRendererWrapper : public rtc::VideoSinkInterface<VideoFrame> {
i420_buffer->StrideV(), v_buffer, javaShallowCopy(frame));
}
// Return a VideoRenderer.I420Frame referring texture object in |frame|.
ScopedJavaLocalRef<jobject> ToJavaTextureFrame(JNIEnv* env,
const VideoFrame& frame) {
NativeHandleImpl handle =
static_cast<AndroidTextureBuffer*>(frame.video_frame_buffer().get())
->native_handle_impl();
return Java_I420Frame_createTextureFrame(
env, frame.width(), frame.height(), static_cast<int>(frame.rotation()),
handle.oes_texture_id, handle.sampling_matrix.ToJava(env),
javaShallowCopy(frame));
}
ScopedJavaGlobalRef<jobject> j_callbacks_;
};

View File

@ -22,7 +22,6 @@
#include "rtc_base/timeutils.h"
#include "sdk/android/generated_video_jni/jni/VideoFrame_jni.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/surfacetexturehelper.h"
#include "sdk/android/src/jni/wrapped_native_i420_buffer.h"
#include "third_party/libyuv/include/libyuv/scale.h"
@ -218,77 +217,6 @@ NativeHandleImpl::NativeHandleImpl(
: oes_texture_id(j_oes_texture_id),
sampling_matrix(jni, j_transform_matrix) {}
AndroidTextureBuffer::AndroidTextureBuffer(
int width,
int height,
const NativeHandleImpl& native_handle,
const rtc::scoped_refptr<SurfaceTextureHelper>& surface_texture_helper)
: width_(width),
height_(height),
native_handle_(native_handle),
surface_texture_helper_(surface_texture_helper) {}
AndroidTextureBuffer::~AndroidTextureBuffer() {
surface_texture_helper_->ReturnTextureFrame();
}
VideoFrameBuffer::Type AndroidTextureBuffer::type() const {
return Type::kNative;
}
NativeHandleImpl AndroidTextureBuffer::native_handle_impl() const {
return native_handle_;
}
int AndroidTextureBuffer::width() const {
return width_;
}
int AndroidTextureBuffer::height() const {
return height_;
}
rtc::scoped_refptr<I420BufferInterface> AndroidTextureBuffer::ToI420() {
int uv_width = (width() + 7) / 8;
int stride = 8 * uv_width;
int uv_height = (height() + 1) / 2;
size_t size = stride * (height() + uv_height);
// The data is owned by the frame, and the normal case is that the
// data is deleted by the frame's destructor callback.
//
// TODO(nisse): Use an I420BufferPool. We then need to extend that
// class, and I420Buffer, to support our memory layout.
// TODO(nisse): Depending on
// system_wrappers/include/aligned_malloc.h violate current DEPS
// rules. We get away for now only because it is indirectly included
// by i420_buffer.h
std::unique_ptr<uint8_t, AlignedFreeDeleter> yuv_data(
static_cast<uint8_t*>(AlignedMalloc(size, kBufferAlignment)));
// See YuvConverter.java for the required layout.
uint8_t* y_data = yuv_data.get();
uint8_t* u_data = y_data + height() * stride;
uint8_t* v_data = u_data + stride / 2;
rtc::scoped_refptr<I420BufferInterface> copy = webrtc::WrapI420Buffer(
width(), height(), y_data, stride, u_data, stride, v_data, stride,
rtc::Bind(&AlignedFree, yuv_data.release()));
JNIEnv* jni = AttachCurrentThreadIfNeeded();
// TODO(sakal): This call to a deperecated method will be removed when
// AndroidTextureBuffer is removed.
ScopedJavaLocalRef<jobject> byte_buffer =
NewDirectByteBuffer(jni, y_data, size);
SurfaceTextureHelperTextureToYUV(
jni, surface_texture_helper_->GetJavaSurfaceTextureHelper(), byte_buffer,
width(), height(), stride, native_handle_);
return copy;
}
AndroidVideoFrameBuffer::AndroidType AndroidTextureBuffer::android_type() {
return AndroidType::kTextureBuffer;
}
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Adopt(
JNIEnv* jni,
const JavaRef<jobject>& j_video_frame_buffer) {
@ -355,10 +283,6 @@ rtc::scoped_refptr<I420BufferInterface> AndroidVideoBuffer::ToI420() {
return AndroidVideoI420Buffer::Adopt(jni, width_, height_, j_i420_buffer);
}
AndroidVideoFrameBuffer::AndroidType AndroidVideoBuffer::android_type() {
return AndroidType::kJavaBuffer;
}
VideoFrame JavaToNativeFrame(JNIEnv* jni,
const JavaRef<jobject>& j_video_frame,
uint32_t timestamp_rtp) {
@ -373,31 +297,15 @@ VideoFrame JavaToNativeFrame(JNIEnv* jni,
static_cast<VideoRotation>(rotation));
}
static bool IsJavaVideoBuffer(rtc::scoped_refptr<VideoFrameBuffer> buffer) {
if (buffer->type() != VideoFrameBuffer::Type::kNative) {
return false;
}
AndroidVideoFrameBuffer* android_buffer =
static_cast<AndroidVideoFrameBuffer*>(buffer.get());
return android_buffer->android_type() ==
AndroidVideoFrameBuffer::AndroidType::kJavaBuffer;
}
ScopedJavaLocalRef<jobject> NativeToJavaVideoFrame(JNIEnv* jni,
const VideoFrame& frame) {
rtc::scoped_refptr<VideoFrameBuffer> buffer = frame.video_frame_buffer();
if (IsJavaVideoBuffer(buffer)) {
RTC_DCHECK(buffer->type() == VideoFrameBuffer::Type::kNative);
AndroidVideoFrameBuffer* android_buffer =
static_cast<AndroidVideoFrameBuffer*>(buffer.get());
RTC_DCHECK(android_buffer->android_type() ==
AndroidVideoFrameBuffer::AndroidType::kJavaBuffer);
AndroidVideoBuffer* android_video_buffer =
static_cast<AndroidVideoBuffer*>(android_buffer);
if (buffer->type() == VideoFrameBuffer::Type::kNative) {
AndroidVideoBuffer* android_buffer =
static_cast<AndroidVideoBuffer*>(buffer.get());
ScopedJavaLocalRef<jobject> j_video_frame_buffer(
jni, android_video_buffer->video_frame_buffer());
jni, android_buffer->video_frame_buffer());
Java_Buffer_retain(jni, j_video_frame_buffer);
return Java_VideoFrame_Constructor(
jni, j_video_frame_buffer, static_cast<jint>(frame.rotation()),

View File

@ -18,7 +18,6 @@
#include "api/video/video_rotation.h"
#include "rtc_base/callback.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/surfacetexturehelper.h"
namespace webrtc {
namespace jni {
@ -26,8 +25,6 @@ namespace jni {
// TODO(sakal): Remove once clients have migrated.
using ::webrtc::JavaParamRef;
class SurfaceTextureHelper;
// Open gl texture matrix, in column-major order. Operations are
// in-place.
class Matrix {
@ -66,44 +63,7 @@ struct NativeHandleImpl {
Matrix sampling_matrix;
};
// Base class to differentiate between the old texture frames and the new
// Java-based frames.
// TODO(sakal): Remove this and AndroidTextureBuffer once they are no longer
// needed.
class AndroidVideoFrameBuffer : public VideoFrameBuffer {
public:
enum class AndroidType { kTextureBuffer, kJavaBuffer };
virtual AndroidType android_type() = 0;
};
class AndroidTextureBuffer : public AndroidVideoFrameBuffer {
public:
AndroidTextureBuffer(
int width,
int height,
const NativeHandleImpl& native_handle,
const rtc::scoped_refptr<SurfaceTextureHelper>& surface_texture_helper);
~AndroidTextureBuffer() override;
NativeHandleImpl native_handle_impl() const;
private:
Type type() const override;
int width() const override;
int height() const override;
rtc::scoped_refptr<I420BufferInterface> ToI420() override;
AndroidType android_type() override;
const int width_;
const int height_;
NativeHandleImpl native_handle_;
rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
};
class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
class AndroidVideoBuffer : public VideoFrameBuffer {
public:
// Creates a native VideoFrameBuffer from a Java VideoFrame.Buffer.
static rtc::scoped_refptr<AndroidVideoBuffer> Create(
@ -143,8 +103,6 @@ class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
rtc::scoped_refptr<I420BufferInterface> ToI420() override;
AndroidType android_type() override;
const int width_;
const int height_;
// Holds a VideoFrame.Buffer.