Android: Remove deprecated VideoRenderer and I420Frame

Bug: webrtc:9181
Change-Id: I9a38a35ae33ed385a9a5add0a5f51ec035019d91
Reviewed-on: https://webrtc-review.googlesource.com/71661
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23849}
This commit is contained in:
Magnus Jedvert 2018-06-09 19:41:58 +02:00 committed by Commit Bot
parent 4077814031
commit 431f14ef69
5 changed files with 3 additions and 379 deletions

View File

@ -271,7 +271,6 @@ generate_jni("generated_video_jni") {
"api/org/webrtc/VideoEncoderFactory.java", "api/org/webrtc/VideoEncoderFactory.java",
"api/org/webrtc/VideoEncoderFallback.java", "api/org/webrtc/VideoEncoderFallback.java",
"api/org/webrtc/VideoFrame.java", "api/org/webrtc/VideoFrame.java",
"api/org/webrtc/VideoRenderer.java",
"api/org/webrtc/VideoSink.java", "api/org/webrtc/VideoSink.java",
"api/org/webrtc/VideoSource.java", "api/org/webrtc/VideoSource.java",
"api/org/webrtc/VideoTrack.java", "api/org/webrtc/VideoTrack.java",
@ -306,7 +305,6 @@ rtc_static_library("video_jni") {
"src/jni/nv12buffer.cc", "src/jni/nv12buffer.cc",
"src/jni/nv21buffer.cc", "src/jni/nv21buffer.cc",
"src/jni/pc/video.cc", "src/jni/pc/video.cc",
"src/jni/video_renderer.cc",
"src/jni/videocodecinfo.cc", "src/jni/videocodecinfo.cc",
"src/jni/videocodecinfo.h", "src/jni/videocodecinfo.h",
"src/jni/videocodecstatus.cc", "src/jni/videocodecstatus.cc",
@ -753,7 +751,6 @@ rtc_android_library("video_api_java") {
"api/org/webrtc/VideoEncoder.java", "api/org/webrtc/VideoEncoder.java",
"api/org/webrtc/VideoEncoderFactory.java", "api/org/webrtc/VideoEncoderFactory.java",
"api/org/webrtc/VideoFrame.java", "api/org/webrtc/VideoFrame.java",
"api/org/webrtc/VideoRenderer.java", # TODO(sakal): Remove
"api/org/webrtc/VideoSink.java", "api/org/webrtc/VideoSink.java",
] ]

View File

@ -27,11 +27,10 @@ import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable; import javax.annotation.Nullable;
/** /**
* Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on an EGL Surface. * Implements VideoSink by displaying the video stream on an EGL Surface. This class is intended to
* This class is intended to be used as a helper class for rendering on SurfaceViews and * be used as a helper class for rendering on SurfaceViews and TextureViews.
* TextureViews.
*/ */
public class EglRenderer implements VideoRenderer.Callbacks, VideoSink { public class EglRenderer implements VideoSink {
private static final String TAG = "EglRenderer"; private static final String TAG = "EglRenderer";
private static final long LOG_INTERVAL_SEC = 4; private static final long LOG_INTERVAL_SEC = 4;
@ -421,14 +420,6 @@ public class EglRenderer implements VideoRenderer.Callbacks, VideoSink {
ThreadUtils.awaitUninterruptibly(latch); ThreadUtils.awaitUninterruptibly(latch);
} }
// VideoRenderer.Callbacks interface.
@Override
public void renderFrame(VideoRenderer.I420Frame frame) {
VideoFrame videoFrame = frame.toVideoFrame();
onFrame(videoFrame);
videoFrame.release();
}
// VideoSink interface. // VideoSink interface.
@Override @Override
public void onFrame(VideoFrame frame) { public void onFrame(VideoFrame frame) {

View File

@ -1,234 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import javax.annotation.Nullable;
import java.nio.ByteBuffer;
import org.webrtc.VideoFrame;
/**
 * Java version of webrtc::VideoSinkInterface. Lets clients define their own
 * rendering behavior by passing in a {@link Callbacks} implementation, which is
 * wrapped in a native sink owned by this object.
 *
 * @deprecated Use VideoSink instead.
 */
@Deprecated
public class VideoRenderer {
  /**
   * Java version of webrtc::VideoFrame. Frames are only constructed from native code and test
   * code.
   *
   * @deprecated Use VideoFrame instead.
   */
  @Deprecated
  public static class I420Frame {
    public final int width;
    public final int height;
    // Row strides of the Y, U and V planes; null for texture frames.
    @Nullable public final int[] yuvStrides;
    // Plane data for Y, U and V; set to null by renderFrameDone().
    @Nullable public ByteBuffer[] yuvPlanes;
    // True for planar YUV frames, false for texture frames.
    public final boolean yuvFrame;
    // Matrix that transforms standard coordinates to their proper sampling locations in
    // the texture. This transform compensates for any properties of the video source that
    // cause it to appear different from a normalized texture. This matrix does not take
    // |rotationDegree| into account.
    @Nullable public final float[] samplingMatrix;
    // OpenGL texture id for texture frames; 0 for YUV frames and after renderFrameDone().
    public int textureId;
    // Frame pointer in C++. Released (and zeroed) by renderFrameDone().
    private long nativeFramePointer;

    // rotationDegree is the degree that the frame must be rotated clockwise
    // to be rendered correctly. Always a multiple of 90.
    public int rotationDegree;

    // If this I420Frame was constructed from VideoFrame.Buffer, this points to
    // the backing buffer.
    @Nullable private final VideoFrame.Buffer backingBuffer;

    /**
     * Construct a frame of the given dimensions with the specified planar data.
     *
     * @throws IllegalArgumentException if rotationDegree is not a multiple of 90.
     */
    public I420Frame(int width, int height, int rotationDegree, int[] yuvStrides,
        ByteBuffer[] yuvPlanes, long nativeFramePointer) {
      this.width = width;
      this.height = height;
      this.yuvStrides = yuvStrides;
      this.yuvPlanes = yuvPlanes;
      this.yuvFrame = true;
      this.rotationDegree = rotationDegree;
      this.nativeFramePointer = nativeFramePointer;
      backingBuffer = null;
      if (rotationDegree % 90 != 0) {
        throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
      }
      // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
      // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
      // bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
      // matrix.
      samplingMatrix = RendererCommon.verticalFlipMatrix();
    }

    /**
     * Construct a texture frame of the given dimensions with data in SurfaceTexture.
     *
     * @throws IllegalArgumentException if rotationDegree is not a multiple of 90.
     */
    public I420Frame(int width, int height, int rotationDegree, int textureId,
        float[] samplingMatrix, long nativeFramePointer) {
      this.width = width;
      this.height = height;
      this.yuvStrides = null;
      this.yuvPlanes = null;
      this.samplingMatrix = samplingMatrix;
      this.textureId = textureId;
      this.yuvFrame = false;
      this.rotationDegree = rotationDegree;
      this.nativeFramePointer = nativeFramePointer;
      backingBuffer = null;
      if (rotationDegree % 90 != 0) {
        throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
      }
    }

    /**
     * Construct a frame from VideoFrame.Buffer. OES texture buffers and I420 buffers are
     * mirrored directly; any other buffer type produces a frame with no pixel data
     * (yuvFrame == false, textureId == 0, null planes/matrix).
     *
     * @throws IllegalArgumentException if rotationDegree is not a multiple of 90.
     */
    @CalledByNative("I420Frame")
    public I420Frame(int rotationDegree, VideoFrame.Buffer buffer, long nativeFramePointer) {
      this.width = buffer.getWidth();
      this.height = buffer.getHeight();
      this.rotationDegree = rotationDegree;
      if (rotationDegree % 90 != 0) {
        throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
      }
      if (buffer instanceof VideoFrame.TextureBuffer
          && ((VideoFrame.TextureBuffer) buffer).getType() == VideoFrame.TextureBuffer.Type.OES) {
        VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) buffer;
        this.yuvFrame = false;
        this.textureId = textureBuffer.getTextureId();
        this.samplingMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(
            textureBuffer.getTransformMatrix());
        this.yuvStrides = null;
        this.yuvPlanes = null;
      } else if (buffer instanceof VideoFrame.I420Buffer) {
        VideoFrame.I420Buffer i420Buffer = (VideoFrame.I420Buffer) buffer;
        this.yuvFrame = true;
        this.yuvStrides =
            new int[] {i420Buffer.getStrideY(), i420Buffer.getStrideU(), i420Buffer.getStrideV()};
        this.yuvPlanes =
            new ByteBuffer[] {i420Buffer.getDataY(), i420Buffer.getDataU(), i420Buffer.getDataV()};
        // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
        // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
        // bottom-left corner. This discrepancy is corrected by multiplying the sampling matrix with
        // a vertical flip matrix.
        this.samplingMatrix = RendererCommon.verticalFlipMatrix();
        this.textureId = 0;
      } else {
        this.yuvFrame = false;
        this.textureId = 0;
        this.samplingMatrix = null;
        this.yuvStrides = null;
        this.yuvPlanes = null;
      }
      this.nativeFramePointer = nativeFramePointer;
      backingBuffer = buffer;
    }

    /** Width after applying |rotationDegree|; width and height swap for 90/270. */
    public int rotatedWidth() {
      return (rotationDegree % 180 == 0) ? width : height;
    }

    /** Height after applying |rotationDegree|; width and height swap for 90/270. */
    public int rotatedHeight() {
      return (rotationDegree % 180 == 0) ? height : width;
    }

    @Override
    public String toString() {
      final String type = yuvFrame
          ? "Y: " + yuvStrides[0] + ", U: " + yuvStrides[1] + ", V: " + yuvStrides[2]
          : "Texture: " + textureId;
      return width + "x" + height + ", " + type;
    }

    /**
     * Convert the frame to VideoFrame. It is no longer safe to use the I420Frame after calling
     * this; ownership of the pixel data transfers to the returned VideoFrame, and the native
     * frame copy is released either immediately or via the new buffer's release callback.
     */
    VideoFrame toVideoFrame() {
      final VideoFrame.Buffer buffer;
      if (backingBuffer != null) {
        // We were constructed from a VideoFrame.Buffer, just return it.
        // Make sure webrtc::VideoFrame object is released.
        backingBuffer.retain();
        VideoRenderer.renderFrameDone(this);
        buffer = backingBuffer;
      } else if (yuvFrame) {
        buffer = JavaI420Buffer.wrap(width, height, yuvPlanes[0], yuvStrides[0], yuvPlanes[1],
            yuvStrides[1], yuvPlanes[2], yuvStrides[2],
            () -> { VideoRenderer.renderFrameDone(this); });
      } else {
        // Note: No Handler or YuvConverter means calling toI420 will crash.
        buffer = new TextureBufferImpl(width, height, VideoFrame.TextureBuffer.Type.OES, textureId,
            RendererCommon.convertMatrixToAndroidGraphicsMatrix(samplingMatrix), null /* handler */,
            null /* yuvConverter */, () -> VideoRenderer.renderFrameDone(this));
      }
      return new VideoFrame(buffer, rotationDegree, 0 /* timestampNs */);
    }

    /** Called from native code to build a YUV frame from per-plane buffers and strides. */
    @CalledByNative("I420Frame")
    static I420Frame createI420Frame(int width, int height, int rotationDegree, int y_stride,
        ByteBuffer y_buffer, int u_stride, ByteBuffer u_buffer, int v_stride, ByteBuffer v_buffer,
        long nativeFramePointer) {
      return new I420Frame(width, height, rotationDegree, new int[] {y_stride, u_stride, v_stride},
          new ByteBuffer[] {y_buffer, u_buffer, v_buffer}, nativeFramePointer);
    }
  }

  /** The real meat of VideoSinkInterface. */
  public static interface Callbacks {
    // |frame| might have pending rotation and implementation of Callbacks
    // should handle that by applying rotation during rendering. The callee
    // is responsible for signaling when it is done with |frame| by calling
    // renderFrameDone(frame).
    @CalledByNative("Callbacks") void renderFrame(I420Frame frame);
  }

  /**
   * This must be called after every renderFrame() to release the frame. Clears the
   * plane/texture references and frees the native frame copy, if any.
   */
  public static void renderFrameDone(I420Frame frame) {
    frame.yuvPlanes = null;
    frame.textureId = 0;
    if (frame.nativeFramePointer != 0) {
      nativeReleaseFrame(frame.nativeFramePointer);
      frame.nativeFramePointer = 0;
    }
  }

  // Pointer to the native sink wrapping |callbacks|; 0 after dispose().
  long nativeVideoRenderer;

  public VideoRenderer(Callbacks callbacks) {
    nativeVideoRenderer = nativeCreateVideoRenderer(callbacks);
  }

  public void dispose() {
    if (nativeVideoRenderer == 0) {
      // Already disposed.
      return;
    }
    nativeFreeWrappedVideoRenderer(nativeVideoRenderer);
    nativeVideoRenderer = 0;
  }

  private static native long nativeCreateVideoRenderer(Callbacks callbacks);
  private static native void nativeFreeWrappedVideoRenderer(long videoRenderer);
  private static native void nativeReleaseFrame(long framePointer);
}

View File

@ -16,7 +16,6 @@ import java.util.List;
/** Java version of VideoTrackInterface. */ /** Java version of VideoTrackInterface. */
public class VideoTrack extends MediaStreamTrack { public class VideoTrack extends MediaStreamTrack {
private final List<VideoRenderer> renderers = new ArrayList<>();
private final IdentityHashMap<VideoSink, Long> sinks = new IdentityHashMap<VideoSink, Long>(); private final IdentityHashMap<VideoSink, Long> sinks = new IdentityHashMap<VideoSink, Long>();
public VideoTrack(long nativeTrack) { public VideoTrack(long nativeTrack) {
@ -57,26 +56,8 @@ public class VideoTrack extends MediaStreamTrack {
} }
} }
public void addRenderer(VideoRenderer renderer) {
renderers.add(renderer);
nativeAddSink(nativeTrack, renderer.nativeVideoRenderer);
}
public void removeRenderer(VideoRenderer renderer) {
if (!renderers.remove(renderer)) {
return;
}
nativeRemoveSink(nativeTrack, renderer.nativeVideoRenderer);
renderer.dispose();
}
@Override @Override
public void dispose() { public void dispose() {
for (VideoRenderer renderer : renderers) {
nativeRemoveSink(nativeTrack, renderer.nativeVideoRenderer);
renderer.dispose();
}
renderers.clear();
for (long nativeSink : sinks.values()) { for (long nativeSink : sinks.values()) {
nativeRemoveSink(nativeTrack, nativeSink); nativeRemoveSink(nativeTrack, nativeSink);
nativeFreeSink(nativeSink); nativeFreeSink(nativeSink);

View File

@ -1,111 +0,0 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"
#include "sdk/android/generated_video_jni/jni/VideoRenderer_jni.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/videoframe.h"
namespace webrtc {
namespace jni {
// Wrapper dispatching rtc::VideoSinkInterface to a Java VideoRenderer
// instance.
// Wrapper dispatching rtc::VideoSinkInterface to a Java VideoRenderer
// instance. Owns a global reference to the Java Callbacks object for the
// lifetime of the wrapper.
class JavaVideoRendererWrapper : public rtc::VideoSinkInterface<VideoFrame> {
 public:
  JavaVideoRendererWrapper(JNIEnv* jni, const JavaRef<jobject>& j_callbacks)
      : j_callbacks_(jni, j_callbacks) {}
  ~JavaVideoRendererWrapper() override {}

  // VideoSinkInterface implementation. Converts |video_frame| to a Java
  // VideoRenderer.I420Frame and hands it to the Java Callbacks object.
  // May be invoked from a non-JVM thread, hence AttachCurrentThreadIfNeeded.
  void OnFrame(const VideoFrame& video_frame) override {
    JNIEnv* env = AttachCurrentThreadIfNeeded();
    ScopedJavaLocalRef<jobject> j_frame;
    if (video_frame.video_frame_buffer()->type() ==
        VideoFrameBuffer::Type::kNative) {
      // kNative buffers already wrap a Java VideoFrame.Buffer; reuse it.
      j_frame = FromWrappedJavaBuffer(env, video_frame);
    } else {
      j_frame = ToJavaI420Frame(env, video_frame);
    }
    // |j_callbacks_| is responsible for releasing |j_frame| with
    // VideoRenderer.renderFrameDone().
    Java_Callbacks_renderFrame(env, j_callbacks_, j_frame);
  }

 private:
  // Make a shallow copy of |frame| to be used with Java. The callee has
  // ownership of the frame, and the frame should be released with
  // VideoRenderer.releaseNativeFrame().
  static jlong javaShallowCopy(const VideoFrame& frame) {
    return jlongFromPointer(new VideoFrame(frame));
  }

  // Return a VideoRenderer.I420Frame referring to the data in |frame|.
  ScopedJavaLocalRef<jobject> FromWrappedJavaBuffer(JNIEnv* env,
                                                    const VideoFrame& frame) {
    return Java_I420Frame_Constructor(
        env, frame.rotation(),
        static_cast<AndroidVideoBuffer*>(frame.video_frame_buffer().get())
            ->video_frame_buffer(),
        javaShallowCopy(frame));
  }

  // Return a VideoRenderer.I420Frame referring to the data in |frame|.
  // Converts the buffer to I420 and exposes the Y/U/V planes to Java as
  // direct ByteBuffers backed by the shallow native copy.
  ScopedJavaLocalRef<jobject> ToJavaI420Frame(JNIEnv* env,
                                              const VideoFrame& frame) {
    rtc::scoped_refptr<I420BufferInterface> i420_buffer =
        frame.video_frame_buffer()->ToI420();
    ScopedJavaLocalRef<jobject> y_buffer =
        NewDirectByteBuffer(env, const_cast<uint8_t*>(i420_buffer->DataY()),
                            i420_buffer->StrideY() * i420_buffer->height());
    size_t chroma_height = i420_buffer->ChromaHeight();
    ScopedJavaLocalRef<jobject> u_buffer =
        NewDirectByteBuffer(env, const_cast<uint8_t*>(i420_buffer->DataU()),
                            i420_buffer->StrideU() * chroma_height);
    ScopedJavaLocalRef<jobject> v_buffer =
        NewDirectByteBuffer(env, const_cast<uint8_t*>(i420_buffer->DataV()),
                            i420_buffer->StrideV() * chroma_height);
    return Java_I420Frame_createI420Frame(
        env, frame.width(), frame.height(), static_cast<int>(frame.rotation()),
        i420_buffer->StrideY(), y_buffer, i420_buffer->StrideU(), u_buffer,
        i420_buffer->StrideV(), v_buffer, javaShallowCopy(frame));
  }

  ScopedJavaGlobalRef<jobject> j_callbacks_;
};
// Destroys the JavaVideoRendererWrapper previously created by
// JNI_VideoRenderer_CreateVideoRenderer(). |j_p| carries the raw pointer.
static void JNI_VideoRenderer_FreeWrappedVideoRenderer(
    JNIEnv*,
    const JavaParamRef<jclass>&,
    jlong j_p) {
  JavaVideoRendererWrapper* wrapper =
      reinterpret_cast<JavaVideoRendererWrapper*>(j_p);
  delete wrapper;
}
// Frees the shallow native VideoFrame copy whose pointer was handed to Java
// (see javaShallowCopy). Called from VideoRenderer.renderFrameDone().
static void JNI_VideoRenderer_ReleaseFrame(JNIEnv* jni,
                                           const JavaParamRef<jclass>&,
                                           jlong j_frame_ptr) {
  const VideoFrame* frame = reinterpret_cast<const VideoFrame*>(j_frame_ptr);
  delete frame;
}
// Allocates a JavaVideoRendererWrapper around |j_callbacks| and returns the
// raw pointer as a jlong; ownership passes to the Java side, which must free
// it via JNI_VideoRenderer_FreeWrappedVideoRenderer().
static jlong JNI_VideoRenderer_CreateVideoRenderer(
    JNIEnv* jni,
    const JavaParamRef<jclass>&,
    const JavaParamRef<jobject>& j_callbacks) {
  return jlongFromPointer(new JavaVideoRendererWrapper(jni, j_callbacks));
}
} // namespace jni
} // namespace webrtc