From c276ecf5c4540b86abc18f79889a8596befa4960 Mon Sep 17 00:00:00 2001
From: Magnus Jedvert
Date: Wed, 7 Jun 2017 13:06:06 +0200
Subject: [PATCH] Update Android video buffers to new VideoFrameBuffer interface

This is a follow-up cleanup for CL
https://codereview.webrtc.org/2847383002/.

Bug: webrtc:7632
Change-Id: I1e17358c70a12c75e8732fee5bbab6a552c4e6c3
Reviewed-on: https://chromium-review.googlesource.com/524063
Commit-Queue: Magnus Jedvert
Reviewed-by: Niels Moller
Cr-Commit-Position: refs/heads/master@{#18532}
---
 webrtc/sdk/android/src/jni/DEPS               |  3 +-
 .../src/jni/androidmediaencoder_jni.cc        | 36 +++++-----
 .../sdk/android/src/jni/native_handle_impl.cc | 72 +++++++------------
 .../sdk/android/src/jni/native_handle_impl.h  | 24 +++----
 .../sdk/android/src/jni/peerconnection_jni.cc | 53 +++++++-------
 5 files changed, 82 insertions(+), 106 deletions(-)

diff --git a/webrtc/sdk/android/src/jni/DEPS b/webrtc/sdk/android/src/jni/DEPS
index 0052c975b1..108f71f4c2 100644
--- a/webrtc/sdk/android/src/jni/DEPS
+++ b/webrtc/sdk/android/src/jni/DEPS
@@ -1,10 +1,11 @@
 include_rules = [
   "+third_party/libyuv",
   "+webrtc/common_video/h264/h264_bitstream_parser.h",
+  "+webrtc/common_video/include",
   "+webrtc/common_video/libyuv/include/webrtc_libyuv.h",
   "+webrtc/modules/utility/include/jvm_android.h",
   "+webrtc/modules/video_coding/utility/vp8_header_parser.h",
   "+webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h",
   "+webrtc/pc",
-  "+webrtc/system_wrappers/include/field_trial_default.h",
+  "+webrtc/system_wrappers/include",
 ]
diff --git a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
index 40d21d243a..39c1e96cbe 100644
--- a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
+++ b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
@@ -734,7 +734,8 @@ int32_t MediaCodecVideoEncoder::Encode(
   const bool key_frame =
       frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
   bool encode_status = true;
-  if (!input_frame.video_frame_buffer()->native_handle()) {
+  if (input_frame.video_frame_buffer()->type() !=
+      webrtc::VideoFrameBuffer::Type::kNative) {
     int j_input_buffer_index = jni->CallIntMethod(
         *j_media_codec_video_encoder_, j_dequeue_input_buffer_method_);
     if (CheckException(jni)) {
@@ -794,9 +795,7 @@ bool MediaCodecVideoEncoder::MaybeReconfigureEncoder(
     const webrtc::VideoFrame& frame) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
 
-  const bool is_texture_frame =
-      frame.video_frame_buffer()->native_handle() != nullptr;
-  const bool reconfigure_due_to_format = is_texture_frame != use_surface_;
+  const bool reconfigure_due_to_format = frame.is_texture() != use_surface_;
   const bool reconfigure_due_to_size =
       frame.width() != width_ || frame.height() != height_;
 
@@ -821,7 +820,7 @@ bool MediaCodecVideoEncoder::MaybeReconfigureEncoder(
 
   Release();
 
-  return InitEncodeInternal(width_, height_, 0, 0, is_texture_frame) ==
+  return InitEncodeInternal(width_, height_, 0, 0, frame.is_texture()) ==
          WEBRTC_VIDEO_CODEC_OK;
 }
 
@@ -841,13 +840,11 @@ bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni,
     return false;
   }
   RTC_CHECK(yuv_buffer) << "Indirect buffer??";
+  rtc::scoped_refptr<webrtc::I420BufferInterface> i420_buffer =
+      frame.video_frame_buffer()->ToI420();
   RTC_CHECK(!libyuv::ConvertFromI420(
-      frame.video_frame_buffer()->DataY(),
-      frame.video_frame_buffer()->StrideY(),
-      frame.video_frame_buffer()->DataU(),
-      frame.video_frame_buffer()->StrideU(),
-      frame.video_frame_buffer()->DataV(),
-      frame.video_frame_buffer()->StrideV(),
+      i420_buffer->DataY(), i420_buffer->StrideY(), i420_buffer->DataU(),
+      i420_buffer->StrideU(), i420_buffer->DataV(), i420_buffer->StrideV(),
       yuv_buffer, width_, width_, height_, encoder_fourcc_))
       << "ConvertFromI420 failed";
 
@@ -870,15 +867,14 @@ bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni,
                                            const webrtc::VideoFrame& frame) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
   RTC_CHECK(use_surface_);
-  NativeHandleImpl* handle = static_cast<NativeHandleImpl*>(
-      frame.video_frame_buffer()->native_handle());
-  jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni);
-  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
-                                              j_encode_texture_method_,
-                                              key_frame,
-                                              handle->oes_texture_id,
-                                              sampling_matrix,
-                                              current_timestamp_us_);
+  NativeHandleImpl handle =
+      static_cast<AndroidTextureBuffer*>(frame.video_frame_buffer().get())
+          ->native_handle_impl();
+
+  jfloatArray sampling_matrix = handle.sampling_matrix.ToJava(jni);
+  bool encode_status = jni->CallBooleanMethod(
+      *j_media_codec_video_encoder_, j_encode_texture_method_, key_frame,
+      handle.oes_texture_id, sampling_matrix, current_timestamp_us_);
   if (CheckException(jni)) {
     ALOGE << "Exception in encode texture.";
     ProcessHWError(true /* reset_if_fallback_unavailable */);
diff --git a/webrtc/sdk/android/src/jni/native_handle_impl.cc b/webrtc/sdk/android/src/jni/native_handle_impl.cc
index 90ade58234..0f93cbdc26 100644
--- a/webrtc/sdk/android/src/jni/native_handle_impl.cc
+++ b/webrtc/sdk/android/src/jni/native_handle_impl.cc
@@ -12,15 +12,14 @@
 
 #include <memory>
 
-#include "webrtc/api/video/i420_buffer.h"
 #include "webrtc/base/bind.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/keep_ref_until_done.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
 #include "webrtc/sdk/android/src/jni/jni_helpers.h"
-
-using webrtc::NativeHandleBuffer;
+#include "webrtc/system_wrappers/include/aligned_malloc.h"
 
 namespace webrtc_jni {
 
@@ -123,7 +122,8 @@ AndroidTextureBuffer::AndroidTextureBuffer(
     const NativeHandleImpl& native_handle,
     jobject surface_texture_helper,
    const rtc::Callback0<void>& no_longer_used)
-    : webrtc::NativeHandleBuffer(&native_handle_, width, height),
+    : width_(width),
+      height_(height),
       native_handle_(native_handle),
       surface_texture_helper_(surface_texture_helper),
       no_longer_used_cb_(no_longer_used) {}
@@ -132,8 +132,23 @@ AndroidTextureBuffer::~AndroidTextureBuffer() {
   no_longer_used_cb_();
 }
 
-rtc::scoped_refptr<webrtc::VideoFrameBuffer>
-AndroidTextureBuffer::NativeToI420Buffer() {
+webrtc::VideoFrameBuffer::Type AndroidTextureBuffer::type() const {
+  return Type::kNative;
+}
+
+NativeHandleImpl AndroidTextureBuffer::native_handle_impl() const {
+  return native_handle_;
+}
+
+int AndroidTextureBuffer::width() const {
+  return width_;
+}
+
+int AndroidTextureBuffer::height() const {
+  return height_;
+}
+
+rtc::scoped_refptr<webrtc::I420BufferInterface> AndroidTextureBuffer::ToI420() {
   int uv_width = (width()+7) / 8;
   int stride = 8 * uv_width;
   int uv_height = (height()+1)/2;
@@ -154,13 +169,10 @@
   uint8_t* u_data = y_data + height() * stride;
   uint8_t* v_data = u_data + stride/2;
 
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> copy =
-      new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
-          width(), height(),
-          y_data, stride,
-          u_data, stride,
-          v_data, stride,
-          rtc::Bind(&webrtc::AlignedFree, yuv_data.release()));
+  rtc::scoped_refptr<webrtc::I420BufferInterface> copy =
+      new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
+          width(), height(), y_data, stride, u_data, stride, v_data, stride,
+          rtc::Bind(&webrtc::AlignedFree, yuv_data.release()));
 
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
@@ -183,38 +195,4 @@ AndroidTextureBuffer::NativeToI420Buffer() {
   return copy;
 }
 
-rtc::scoped_refptr<AndroidTextureBuffer>
-AndroidTextureBuffer::CropScaleAndRotate(int cropped_width,
-                                         int cropped_height,
-                                         int crop_x,
-                                         int crop_y,
-                                         int dst_width,
-                                         int dst_height,
-                                         webrtc::VideoRotation rotation) {
-  if (cropped_width == dst_width && cropped_height == dst_height &&
-      width() == dst_width && height() == dst_height &&
-      rotation == webrtc::kVideoRotation_0) {
-    return this;
-  }
-  int rotated_width = (rotation % 180 == 0) ? dst_width : dst_height;
-  int rotated_height = (rotation % 180 == 0) ? dst_height : dst_width;
-
-  // Here we use Bind magic to add a reference count to |this| until the newly
-  // created AndroidTextureBuffer is destructed
-  rtc::scoped_refptr<AndroidTextureBuffer> buffer(
-      new rtc::RefCountedObject<AndroidTextureBuffer>(
-          rotated_width, rotated_height, native_handle_,
-          surface_texture_helper_, rtc::KeepRefUntilDone(this)));
-
-  if (cropped_width != width() || cropped_height != height()) {
-    buffer->native_handle_.sampling_matrix.Crop(
-        cropped_width / static_cast<float>(width()),
-        cropped_height / static_cast<float>(height()),
-        crop_x / static_cast<float>(width()),
-        crop_y / static_cast<float>(height()));
-  }
-  buffer->native_handle_.sampling_matrix.Rotate(rotation);
-  return buffer;
-}
-
 }  // namespace webrtc_jni
diff --git a/webrtc/sdk/android/src/jni/native_handle_impl.h b/webrtc/sdk/android/src/jni/native_handle_impl.h
index 6829b7ba56..b12c3c4651 100644
--- a/webrtc/sdk/android/src/jni/native_handle_impl.h
+++ b/webrtc/sdk/android/src/jni/native_handle_impl.h
@@ -13,8 +13,9 @@
 
 #include <jni.h>
 
+#include "webrtc/api/video/video_frame_buffer.h"
 #include "webrtc/api/video/video_rotation.h"
-#include "webrtc/common_video/include/video_frame_buffer.h"
+#include "webrtc/base/callback.h"
 
 namespace webrtc_jni {
@@ -51,7 +52,7 @@ struct NativeHandleImpl {
   Matrix sampling_matrix;
 };
 
-class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
+class AndroidTextureBuffer : public webrtc::VideoFrameBuffer {
  public:
   AndroidTextureBuffer(int width,
                        int height,
@@ -59,19 +60,18 @@ class AndroidTextureBuffer : public webrtc::VideoFrameBuffer {
                        jobject surface_texture_helper,
                        const rtc::Callback0<void>& no_longer_used);
   ~AndroidTextureBuffer();
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> NativeToI420Buffer() override;
 
-  // First crop, then scale to dst resolution, and then rotate.
-  rtc::scoped_refptr<AndroidTextureBuffer> CropScaleAndRotate(
-      int cropped_width,
-      int cropped_height,
-      int crop_x,
-      int crop_y,
-      int dst_width,
-      int dst_height,
-      webrtc::VideoRotation rotation);
+  NativeHandleImpl native_handle_impl() const;
 
  private:
+  Type type() const override;
+  int width() const override;
+  int height() const override;
+
+  rtc::scoped_refptr<webrtc::I420BufferInterface> ToI420() override;
+
+  const int width_;
+  const int height_;
   NativeHandleImpl native_handle_;
   // Raw object pointer, relying on the caller, i.e.,
   // AndroidVideoCapturerJni or the C++ SurfaceTextureHelper, to keep
diff --git a/webrtc/sdk/android/src/jni/peerconnection_jni.cc b/webrtc/sdk/android/src/jni/peerconnection_jni.cc
index adcdb55e49..5f33ba35ca 100644
--- a/webrtc/sdk/android/src/jni/peerconnection_jni.cc
+++ b/webrtc/sdk/android/src/jni/peerconnection_jni.cc
@@ -871,10 +871,10 @@ class JavaVideoRendererWrapper
 
   void OnFrame(const webrtc::VideoFrame& video_frame) override {
     ScopedLocalRefFrame local_ref_frame(jni());
-    jobject j_frame =
-        (video_frame.video_frame_buffer()->native_handle() != nullptr)
-            ? CricketToJavaTextureFrame(&video_frame)
-            : CricketToJavaI420Frame(&video_frame);
+    jobject j_frame = (video_frame.video_frame_buffer()->type() ==
+                       webrtc::VideoFrameBuffer::Type::kNative)
+                          ? ToJavaTextureFrame(&video_frame)
+                          : ToJavaI420Frame(&video_frame);
     // |j_callbacks_| is responsible for releasing |j_frame| with
     // VideoRenderer.renderFrameDone().
     jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
@@ -890,25 +890,26 @@
   }
 
   // Return a VideoRenderer.I420Frame referring to the data in |frame|.
-  jobject CricketToJavaI420Frame(const webrtc::VideoFrame* frame) {
+  jobject ToJavaI420Frame(const webrtc::VideoFrame* frame) {
     jintArray strides = jni()->NewIntArray(3);
     jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
-    strides_array[0] = frame->video_frame_buffer()->StrideY();
-    strides_array[1] = frame->video_frame_buffer()->StrideU();
-    strides_array[2] = frame->video_frame_buffer()->StrideV();
+    rtc::scoped_refptr<webrtc::I420BufferInterface> i420_buffer =
+        frame->video_frame_buffer()->ToI420();
+    strides_array[0] = i420_buffer->StrideY();
+    strides_array[1] = i420_buffer->StrideU();
+    strides_array[2] = i420_buffer->StrideV();
     jni()->ReleaseIntArrayElements(strides, strides_array, 0);
     jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL);
     jobject y_buffer = jni()->NewDirectByteBuffer(
-        const_cast<uint8_t*>(frame->video_frame_buffer()->DataY()),
-        frame->video_frame_buffer()->StrideY() *
-            frame->video_frame_buffer()->height());
-    size_t chroma_height = (frame->height() + 1) / 2;
-    jobject u_buffer = jni()->NewDirectByteBuffer(
-        const_cast<uint8_t*>(frame->video_frame_buffer()->DataU()),
-        frame->video_frame_buffer()->StrideU() * chroma_height);
-    jobject v_buffer = jni()->NewDirectByteBuffer(
-        const_cast<uint8_t*>(frame->video_frame_buffer()->DataV()),
-        frame->video_frame_buffer()->StrideV() * chroma_height);
+        const_cast<uint8_t*>(i420_buffer->DataY()),
+        i420_buffer->StrideY() * i420_buffer->height());
+    size_t chroma_height = i420_buffer->ChromaHeight();
+    jobject u_buffer =
+        jni()->NewDirectByteBuffer(const_cast<uint8_t*>(i420_buffer->DataU()),
+                                   i420_buffer->StrideU() * chroma_height);
+    jobject v_buffer =
+        jni()->NewDirectByteBuffer(const_cast<uint8_t*>(i420_buffer->DataV()),
+                                   i420_buffer->StrideV() * chroma_height);
 
     jni()->SetObjectArrayElement(planes, 0, y_buffer);
     jni()->SetObjectArrayElement(planes, 1, u_buffer);
@@ -921,16 +922,16 @@
   }
 
   // Return a VideoRenderer.I420Frame referring texture object in |frame|.
-  jobject CricketToJavaTextureFrame(const webrtc::VideoFrame* frame) {
-    NativeHandleImpl* handle = reinterpret_cast<NativeHandleImpl*>(
-        frame->video_frame_buffer()->native_handle());
-    jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni());
+  jobject ToJavaTextureFrame(const webrtc::VideoFrame* frame) {
+    NativeHandleImpl handle =
+        static_cast<AndroidTextureBuffer*>(frame->video_frame_buffer().get())
+            ->native_handle_impl();
+    jfloatArray sampling_matrix = handle.sampling_matrix.ToJava(jni());
     return jni()->NewObject(
-        *j_frame_class_, j_texture_frame_ctor_id_,
-        frame->width(), frame->height(),
-        static_cast<int>(frame->rotation()),
-        handle->oes_texture_id, sampling_matrix, javaShallowCopy(frame));
+        *j_frame_class_, j_texture_frame_ctor_id_, frame->width(),
+        frame->height(), static_cast<int>(frame->rotation()),
+        handle.oes_texture_id, sampling_matrix, javaShallowCopy(frame));
   }
 
   JNIEnv* jni() {
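
Note: the calling pattern this patch migrates to can be sketched as the
fragment below. It is illustrative only and assumes the WebRTC headers at
this revision; the function name ReadFrame is hypothetical and not part of
the patch. Consumers branch on Type::kNative instead of checking
native_handle(), and obtain CPU-readable pixels through ToI420() instead of
NativeToI420Buffer().

    #include <stdint.h>

    #include "webrtc/api/video/video_frame_buffer.h"
    #include "webrtc/base/scoped_ref_ptr.h"

    // Hypothetical consumer of the new VideoFrameBuffer interface.
    void ReadFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer) {
      if (buffer->type() == webrtc::VideoFrameBuffer::Type::kNative) {
        // Texture-backed ("native") buffer; a real caller would keep it on
        // the GPU path, as the texture encode path in this patch does.
        return;
      }
      // Any non-native buffer can provide an I420 view for CPU access.
      rtc::scoped_refptr<webrtc::I420BufferInterface> i420 = buffer->ToI420();
      const uint8_t* y_plane = i420->DataY();  // Row stride: i420->StrideY().
      (void)y_plane;  // Read pixel data here.
    }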