Update Android video buffers to new VideoFrameBuffer interface

This is a follow-up cleanup for CL
https://codereview.webrtc.org/2847383002/.

Bug: webrtc:7632
Change-Id: I1e17358c70a12c75e8732fee5bbab6a552c4e6c3
Reviewed-on: https://chromium-review.googlesource.com/524063
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Niels Moller <nisse@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#18532}
This commit is contained in:
Magnus Jedvert 2017-06-07 13:06:06 +02:00 committed by Commit Bot
parent f184138a5f
commit c276ecf5c4
5 changed files with 82 additions and 106 deletions

View File

@@ -1,10 +1,11 @@
include_rules = [ include_rules = [
"+third_party/libyuv", "+third_party/libyuv",
"+webrtc/common_video/h264/h264_bitstream_parser.h", "+webrtc/common_video/h264/h264_bitstream_parser.h",
"+webrtc/common_video/include",
"+webrtc/common_video/libyuv/include/webrtc_libyuv.h", "+webrtc/common_video/libyuv/include/webrtc_libyuv.h",
"+webrtc/modules/utility/include/jvm_android.h", "+webrtc/modules/utility/include/jvm_android.h",
"+webrtc/modules/video_coding/utility/vp8_header_parser.h", "+webrtc/modules/video_coding/utility/vp8_header_parser.h",
"+webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h", "+webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h",
"+webrtc/pc", "+webrtc/pc",
"+webrtc/system_wrappers/include/field_trial_default.h", "+webrtc/system_wrappers/include",
] ]

View File

@@ -734,7 +734,8 @@ int32_t MediaCodecVideoEncoder::Encode(
const bool key_frame = const bool key_frame =
frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
bool encode_status = true; bool encode_status = true;
if (!input_frame.video_frame_buffer()->native_handle()) { if (input_frame.video_frame_buffer()->type() !=
webrtc::VideoFrameBuffer::Type::kNative) {
int j_input_buffer_index = jni->CallIntMethod( int j_input_buffer_index = jni->CallIntMethod(
*j_media_codec_video_encoder_, j_dequeue_input_buffer_method_); *j_media_codec_video_encoder_, j_dequeue_input_buffer_method_);
if (CheckException(jni)) { if (CheckException(jni)) {
@@ -794,9 +795,7 @@ bool MediaCodecVideoEncoder::MaybeReconfigureEncoder(
const webrtc::VideoFrame& frame) { const webrtc::VideoFrame& frame) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
const bool is_texture_frame = const bool reconfigure_due_to_format = frame.is_texture() != use_surface_;
frame.video_frame_buffer()->native_handle() != nullptr;
const bool reconfigure_due_to_format = is_texture_frame != use_surface_;
const bool reconfigure_due_to_size = const bool reconfigure_due_to_size =
frame.width() != width_ || frame.height() != height_; frame.width() != width_ || frame.height() != height_;
@@ -821,7 +820,7 @@ bool MediaCodecVideoEncoder::MaybeReconfigureEncoder(
Release(); Release();
return InitEncodeInternal(width_, height_, 0, 0, is_texture_frame) == return InitEncodeInternal(width_, height_, 0, 0, frame.is_texture()) ==
WEBRTC_VIDEO_CODEC_OK; WEBRTC_VIDEO_CODEC_OK;
} }
@@ -841,13 +840,11 @@ bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni,
return false; return false;
} }
RTC_CHECK(yuv_buffer) << "Indirect buffer??"; RTC_CHECK(yuv_buffer) << "Indirect buffer??";
rtc::scoped_refptr<webrtc::I420BufferInterface> i420_buffer =
frame.video_frame_buffer()->ToI420();
RTC_CHECK(!libyuv::ConvertFromI420( RTC_CHECK(!libyuv::ConvertFromI420(
frame.video_frame_buffer()->DataY(), i420_buffer->DataY(), i420_buffer->StrideY(), i420_buffer->DataU(),
frame.video_frame_buffer()->StrideY(), i420_buffer->StrideU(), i420_buffer->DataV(), i420_buffer->StrideV(),
frame.video_frame_buffer()->DataU(),
frame.video_frame_buffer()->StrideU(),
frame.video_frame_buffer()->DataV(),
frame.video_frame_buffer()->StrideV(),
yuv_buffer, width_, width_, height_, encoder_fourcc_)) yuv_buffer, width_, width_, height_, encoder_fourcc_))
<< "ConvertFromI420 failed"; << "ConvertFromI420 failed";
@@ -870,15 +867,14 @@ bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni,
const webrtc::VideoFrame& frame) { const webrtc::VideoFrame& frame) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
RTC_CHECK(use_surface_); RTC_CHECK(use_surface_);
NativeHandleImpl* handle = static_cast<NativeHandleImpl*>( NativeHandleImpl handle =
frame.video_frame_buffer()->native_handle()); static_cast<AndroidTextureBuffer*>(frame.video_frame_buffer().get())
jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni); ->native_handle_impl();
bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
j_encode_texture_method_, jfloatArray sampling_matrix = handle.sampling_matrix.ToJava(jni);
key_frame, bool encode_status = jni->CallBooleanMethod(
handle->oes_texture_id, *j_media_codec_video_encoder_, j_encode_texture_method_, key_frame,
sampling_matrix, handle.oes_texture_id, sampling_matrix, current_timestamp_us_);
current_timestamp_us_);
if (CheckException(jni)) { if (CheckException(jni)) {
ALOGE << "Exception in encode texture."; ALOGE << "Exception in encode texture.";
ProcessHWError(true /* reset_if_fallback_unavailable */); ProcessHWError(true /* reset_if_fallback_unavailable */);

View File

@@ -12,15 +12,14 @@
#include <memory> #include <memory>
#include "webrtc/api/video/i420_buffer.h"
#include "webrtc/base/bind.h" #include "webrtc/base/bind.h"
#include "webrtc/base/checks.h" #include "webrtc/base/checks.h"
#include "webrtc/base/keep_ref_until_done.h" #include "webrtc/base/keep_ref_until_done.h"
#include "webrtc/base/logging.h" #include "webrtc/base/logging.h"
#include "webrtc/base/scoped_ref_ptr.h" #include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_video/include/video_frame_buffer.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h" #include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/system_wrappers/include/aligned_malloc.h"
using webrtc::NativeHandleBuffer;
namespace webrtc_jni { namespace webrtc_jni {
@@ -123,7 +122,8 @@ AndroidTextureBuffer::AndroidTextureBuffer(
const NativeHandleImpl& native_handle, const NativeHandleImpl& native_handle,
jobject surface_texture_helper, jobject surface_texture_helper,
const rtc::Callback0<void>& no_longer_used) const rtc::Callback0<void>& no_longer_used)
: webrtc::NativeHandleBuffer(&native_handle_, width, height), : width_(width),
height_(height),
native_handle_(native_handle), native_handle_(native_handle),
surface_texture_helper_(surface_texture_helper), surface_texture_helper_(surface_texture_helper),
no_longer_used_cb_(no_longer_used) {} no_longer_used_cb_(no_longer_used) {}
@@ -132,8 +132,23 @@ AndroidTextureBuffer::~AndroidTextureBuffer() {
no_longer_used_cb_(); no_longer_used_cb_();
} }
rtc::scoped_refptr<webrtc::VideoFrameBuffer> webrtc::VideoFrameBuffer::Type AndroidTextureBuffer::type() const {
AndroidTextureBuffer::NativeToI420Buffer() { return Type::kNative;
}
NativeHandleImpl AndroidTextureBuffer::native_handle_impl() const {
return native_handle_;
}
int AndroidTextureBuffer::width() const {
return width_;
}
int AndroidTextureBuffer::height() const {
return height_;
}
rtc::scoped_refptr<webrtc::I420BufferInterface> AndroidTextureBuffer::ToI420() {
int uv_width = (width()+7) / 8; int uv_width = (width()+7) / 8;
int stride = 8 * uv_width; int stride = 8 * uv_width;
int uv_height = (height()+1)/2; int uv_height = (height()+1)/2;
@@ -154,13 +169,10 @@ AndroidTextureBuffer::NativeToI420Buffer() {
uint8_t* u_data = y_data + height() * stride; uint8_t* u_data = y_data + height() * stride;
uint8_t* v_data = u_data + stride/2; uint8_t* v_data = u_data + stride/2;
rtc::scoped_refptr<webrtc::VideoFrameBuffer> copy = rtc::scoped_refptr<webrtc::I420BufferInterface> copy =
new rtc::RefCountedObject<webrtc::WrappedI420Buffer>( new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
width(), height(), width(), height(), y_data, stride, u_data, stride, v_data, stride,
y_data, stride, rtc::Bind(&webrtc::AlignedFree, yuv_data.release()));
u_data, stride,
v_data, stride,
rtc::Bind(&webrtc::AlignedFree, yuv_data.release()));
JNIEnv* jni = AttachCurrentThreadIfNeeded(); JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni); ScopedLocalRefFrame local_ref_frame(jni);
@@ -183,38 +195,4 @@ AndroidTextureBuffer::NativeToI420Buffer() {
return copy; return copy;
} }
rtc::scoped_refptr<AndroidTextureBuffer>
AndroidTextureBuffer::CropScaleAndRotate(int cropped_width,
int cropped_height,
int crop_x,
int crop_y,
int dst_width,
int dst_height,
webrtc::VideoRotation rotation) {
if (cropped_width == dst_width && cropped_height == dst_height &&
width() == dst_width && height() == dst_height &&
rotation == webrtc::kVideoRotation_0) {
return this;
}
int rotated_width = (rotation % 180 == 0) ? dst_width : dst_height;
int rotated_height = (rotation % 180 == 0) ? dst_height : dst_width;
// Here we use Bind magic to add a reference count to |this| until the newly
// created AndroidTextureBuffer is destructed
rtc::scoped_refptr<AndroidTextureBuffer> buffer(
new rtc::RefCountedObject<AndroidTextureBuffer>(
rotated_width, rotated_height, native_handle_,
surface_texture_helper_, rtc::KeepRefUntilDone(this)));
if (cropped_width != width() || cropped_height != height()) {
buffer->native_handle_.sampling_matrix.Crop(
cropped_width / static_cast<float>(width()),
cropped_height / static_cast<float>(height()),
crop_x / static_cast<float>(width()),
crop_y / static_cast<float>(height()));
}
buffer->native_handle_.sampling_matrix.Rotate(rotation);
return buffer;
}
} // namespace webrtc_jni } // namespace webrtc_jni

View File

@@ -13,8 +13,9 @@
#include <jni.h> #include <jni.h>
#include "webrtc/api/video/video_frame_buffer.h"
#include "webrtc/api/video/video_rotation.h" #include "webrtc/api/video/video_rotation.h"
#include "webrtc/common_video/include/video_frame_buffer.h" #include "webrtc/base/callback.h"
namespace webrtc_jni { namespace webrtc_jni {
@@ -51,7 +52,7 @@ struct NativeHandleImpl {
Matrix sampling_matrix; Matrix sampling_matrix;
}; };
class AndroidTextureBuffer : public webrtc::NativeHandleBuffer { class AndroidTextureBuffer : public webrtc::VideoFrameBuffer {
public: public:
AndroidTextureBuffer(int width, AndroidTextureBuffer(int width,
int height, int height,
@@ -59,19 +60,18 @@ class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
jobject surface_texture_helper, jobject surface_texture_helper,
const rtc::Callback0<void>& no_longer_used); const rtc::Callback0<void>& no_longer_used);
~AndroidTextureBuffer(); ~AndroidTextureBuffer();
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
// First crop, then scale to dst resolution, and then rotate. NativeHandleImpl native_handle_impl() const;
rtc::scoped_refptr<AndroidTextureBuffer> CropScaleAndRotate(
int cropped_width,
int cropped_height,
int crop_x,
int crop_y,
int dst_width,
int dst_height,
webrtc::VideoRotation rotation);
private: private:
Type type() const override;
int width() const override;
int height() const override;
rtc::scoped_refptr<webrtc::I420BufferInterface> ToI420() override;
const int width_;
const int height_;
NativeHandleImpl native_handle_; NativeHandleImpl native_handle_;
// Raw object pointer, relying on the caller, i.e., // Raw object pointer, relying on the caller, i.e.,
// AndroidVideoCapturerJni or the C++ SurfaceTextureHelper, to keep // AndroidVideoCapturerJni or the C++ SurfaceTextureHelper, to keep

View File

@@ -871,10 +871,10 @@ class JavaVideoRendererWrapper
void OnFrame(const webrtc::VideoFrame& video_frame) override { void OnFrame(const webrtc::VideoFrame& video_frame) override {
ScopedLocalRefFrame local_ref_frame(jni()); ScopedLocalRefFrame local_ref_frame(jni());
jobject j_frame = jobject j_frame = (video_frame.video_frame_buffer()->type() ==
(video_frame.video_frame_buffer()->native_handle() != nullptr) webrtc::VideoFrameBuffer::Type::kNative)
? CricketToJavaTextureFrame(&video_frame) ? ToJavaTextureFrame(&video_frame)
: CricketToJavaI420Frame(&video_frame); : ToJavaI420Frame(&video_frame);
// |j_callbacks_| is responsible for releasing |j_frame| with // |j_callbacks_| is responsible for releasing |j_frame| with
// VideoRenderer.renderFrameDone(). // VideoRenderer.renderFrameDone().
jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame); jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
@@ -890,25 +890,26 @@ class JavaVideoRendererWrapper
} }
// Return a VideoRenderer.I420Frame referring to the data in |frame|. // Return a VideoRenderer.I420Frame referring to the data in |frame|.
jobject CricketToJavaI420Frame(const webrtc::VideoFrame* frame) { jobject ToJavaI420Frame(const webrtc::VideoFrame* frame) {
jintArray strides = jni()->NewIntArray(3); jintArray strides = jni()->NewIntArray(3);
jint* strides_array = jni()->GetIntArrayElements(strides, NULL); jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
strides_array[0] = frame->video_frame_buffer()->StrideY(); rtc::scoped_refptr<webrtc::I420BufferInterface> i420_buffer =
strides_array[1] = frame->video_frame_buffer()->StrideU(); frame->video_frame_buffer()->ToI420();
strides_array[2] = frame->video_frame_buffer()->StrideV(); strides_array[0] = i420_buffer->StrideY();
strides_array[1] = i420_buffer->StrideU();
strides_array[2] = i420_buffer->StrideV();
jni()->ReleaseIntArrayElements(strides, strides_array, 0); jni()->ReleaseIntArrayElements(strides, strides_array, 0);
jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL); jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL);
jobject y_buffer = jni()->NewDirectByteBuffer( jobject y_buffer = jni()->NewDirectByteBuffer(
const_cast<uint8_t*>(frame->video_frame_buffer()->DataY()), const_cast<uint8_t*>(i420_buffer->DataY()),
frame->video_frame_buffer()->StrideY() * i420_buffer->StrideY() * i420_buffer->height());
frame->video_frame_buffer()->height()); size_t chroma_height = i420_buffer->ChromaHeight();
size_t chroma_height = (frame->height() + 1) / 2; jobject u_buffer =
jobject u_buffer = jni()->NewDirectByteBuffer( jni()->NewDirectByteBuffer(const_cast<uint8_t*>(i420_buffer->DataU()),
const_cast<uint8_t*>(frame->video_frame_buffer()->DataU()), i420_buffer->StrideU() * chroma_height);
frame->video_frame_buffer()->StrideU() * chroma_height); jobject v_buffer =
jobject v_buffer = jni()->NewDirectByteBuffer( jni()->NewDirectByteBuffer(const_cast<uint8_t*>(i420_buffer->DataV()),
const_cast<uint8_t*>(frame->video_frame_buffer()->DataV()), i420_buffer->StrideV() * chroma_height);
frame->video_frame_buffer()->StrideV() * chroma_height);
jni()->SetObjectArrayElement(planes, 0, y_buffer); jni()->SetObjectArrayElement(planes, 0, y_buffer);
jni()->SetObjectArrayElement(planes, 1, u_buffer); jni()->SetObjectArrayElement(planes, 1, u_buffer);
@@ -921,16 +922,16 @@ class JavaVideoRendererWrapper
} }
// Return a VideoRenderer.I420Frame referring texture object in |frame|. // Return a VideoRenderer.I420Frame referring texture object in |frame|.
jobject CricketToJavaTextureFrame(const webrtc::VideoFrame* frame) { jobject ToJavaTextureFrame(const webrtc::VideoFrame* frame) {
NativeHandleImpl* handle = reinterpret_cast<NativeHandleImpl*>( NativeHandleImpl handle =
frame->video_frame_buffer()->native_handle()); static_cast<AndroidTextureBuffer*>(frame->video_frame_buffer().get())
jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni()); ->native_handle_impl();
jfloatArray sampling_matrix = handle.sampling_matrix.ToJava(jni());
return jni()->NewObject( return jni()->NewObject(
*j_frame_class_, j_texture_frame_ctor_id_, *j_frame_class_, j_texture_frame_ctor_id_, frame->width(),
frame->width(), frame->height(), frame->height(), static_cast<int>(frame->rotation()),
static_cast<int>(frame->rotation()), handle.oes_texture_id, sampling_matrix, javaShallowCopy(frame));
handle->oes_texture_id, sampling_matrix, javaShallowCopy(frame));
} }
JNIEnv* jni() { JNIEnv* jni() {