diff --git a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
index 85afdf7ba7..b4e687f79e 100644
--- a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
+++ b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
@@ -389,25 +389,10 @@ public class SurfaceViewRenderer extends SurfaceView
     }
     final long startTimeNs = System.nanoTime();
-    final float[] samplingMatrix;
-    if (frame.yuvFrame) {
-      // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
-      // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
-      // bottom-left corner. We correct this discrepancy by setting a vertical flip as sampling
-      // matrix.
-      samplingMatrix = RendererCommon.verticalFlipMatrix();
-    } else {
-      // TODO(magjed): Move updateTexImage() to the video source instead.
-      SurfaceTexture surfaceTexture = (SurfaceTexture) frame.textureObject;
-      surfaceTexture.updateTexImage();
-      samplingMatrix = new float[16];
-      surfaceTexture.getTransformMatrix(samplingMatrix);
-    }
-
     final float[] texMatrix;
     synchronized (layoutLock) {
       final float[] rotatedSamplingMatrix =
-          RendererCommon.rotateTextureMatrix(samplingMatrix, frame.rotationDegree);
+          RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
       final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
           mirror, frameAspectRatio(), (float) layoutWidth / layoutHeight);
       texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
index a97b160529..afaebf6f2e 100644
--- a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
@@ -244,29 +244,15 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       }
 
       if (isNewFrame) {
+        rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
+            pendingFrame.samplingMatrix, pendingFrame.rotationDegree);
         if (pendingFrame.yuvFrame) {
           rendererType = RendererType.RENDERER_YUV;
           drawer.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
               pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
-          // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
-          // top-left corner of the image, but in glTexImage2D() the first element corresponds to
-          // the bottom-left corner. We correct this discrepancy by setting a vertical flip as
-          // sampling matrix.
-          final float[] samplingMatrix = RendererCommon.verticalFlipMatrix();
-          rotatedSamplingMatrix =
-              RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
         } else {
           rendererType = RendererType.RENDERER_TEXTURE;
-          // External texture rendering. Update texture image to latest and make a deep copy of
-          // the external texture.
-          // TODO(magjed): Move updateTexImage() to the video source instead.
-          final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
-          surfaceTexture.updateTexImage();
-          final float[] samplingMatrix = new float[16];
-          surfaceTexture.getTransformMatrix(samplingMatrix);
-          rotatedSamplingMatrix =
-              RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
-
+          // External texture rendering. Make a deep copy of the external texture.
           // Reallocate offscreen texture if necessary.
           textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());
diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
index 1f6313119e..6012e186f9 100644
--- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
@@ -32,6 +32,7 @@
 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
 #include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
 #include "webrtc/base/bind.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
@@ -110,7 +111,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   bool use_surface_;
   VideoCodec codec_;
   webrtc::I420BufferPool decoded_frame_pool_;
-  NativeHandleImpl native_handle_;
+  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
   DecodedImageCallback* callback_;
   int frames_received_;  // Number of frames received by decoder.
   int frames_decoded_;  // Number of frames decoded by decoder.
@@ -143,10 +144,10 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   jfieldID j_height_field_;
   jfieldID j_stride_field_;
   jfieldID j_slice_height_field_;
-  jfieldID j_surface_texture_field_;
   // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
   jfieldID j_textureID_field_;
-  jfieldID j_texture_presentation_timestamp_us_field_;
+  jfieldID j_transform_matrix_field_;
+  jfieldID j_texture_timestamp_ns_field_;
   // MediaCodecVideoDecoder.DecodedByteBuffer fields.
   jfieldID j_info_index_field_;
   jfieldID j_info_offset_field_;
@@ -155,8 +156,6 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
 
   // Global references; must be deleted in Release().
   std::vector<jobject> input_buffers_;
-  jobject surface_texture_;
-  jobject previous_surface_texture_;
 
   // Render EGL context - owned by factory, should not be allocated/destroyed
   // by VideoDecoder.
@@ -170,8 +169,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
       key_frame_required_(true),
       inited_(false),
       sw_fallback_required_(false),
-      surface_texture_(NULL),
-      previous_surface_texture_(NULL),
       codec_thread_(new Thread()),
       j_media_codec_video_decoder_class_(
           jni,
@@ -190,7 +187,7 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
   j_init_decode_method_ = GetMethodID(
       jni, *j_media_codec_video_decoder_class_, "initDecode",
       "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
-      "IILandroid/opengl/EGLContext;)Z");
+      "IILorg/webrtc/SurfaceTextureHelper;)Z");
   j_release_method_ =
       GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
   j_dequeue_input_buffer_method_ = GetMethodID(
@@ -220,17 +217,15 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
       jni, *j_media_codec_video_decoder_class_, "stride", "I");
   j_slice_height_field_ = GetFieldID(
       jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
-  j_surface_texture_field_ = GetFieldID(
-      jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
-      "Landroid/graphics/SurfaceTexture;");
 
   jclass j_decoder_decoded_texture_buffer_class =
       FindClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
   j_textureID_field_ = GetFieldID(
       jni, j_decoder_decoded_texture_buffer_class, "textureID", "I");
-  j_texture_presentation_timestamp_us_field_ =
-      GetFieldID(jni, j_decoder_decoded_texture_buffer_class,
-                 "presentationTimestampUs", "J");
+  j_transform_matrix_field_ = GetFieldID(
+      jni, j_decoder_decoded_texture_buffer_class, "transformMatrix", "[F");
+  j_texture_timestamp_ns_field_ = GetFieldID(
+      jni, j_decoder_decoded_texture_buffer_class, "timestampNs", "J");
 
   jclass j_decoder_decoded_byte_buffer_class =
       FindClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
@@ -253,14 +248,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
   // Call Release() to ensure no more callbacks to us after we are deleted.
   Release();
-  // Delete global references.
-  JNIEnv* jni = AttachCurrentThreadIfNeeded();
-  if (previous_surface_texture_ != NULL) {
-    jni->DeleteGlobalRef(previous_surface_texture_);
-  }
-  if (surface_texture_ != NULL) {
-    jni->DeleteGlobalRef(surface_texture_);
-  }
 }
 
 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
@@ -310,6 +297,11 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
   frames_received_ = 0;
   frames_decoded_ = 0;
 
+  if (use_surface_) {
+    surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
+        jni, render_egl_context_);
+  }
+
   jobject j_video_codec_enum = JavaEnumFromIndex(
       jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
   bool success = jni->CallBooleanMethod(
@@ -318,7 +310,8 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
       j_video_codec_enum,
       codec_.width,
       codec_.height,
-      use_surface_ ? render_egl_context_ : nullptr);
+      use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
+                   : nullptr);
   if (CheckException(jni) || !success) {
     ALOGE("Codec initialization error - fallback to SW codec.");
     sw_fallback_required_ = true;
@@ -358,15 +351,6 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
     }
   }
 
-  if (use_surface_) {
-    jobject surface_texture = GetObjectField(
-        jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
-    if (previous_surface_texture_ != NULL) {
-      jni->DeleteGlobalRef(previous_surface_texture_);
-    }
-    previous_surface_texture_ = surface_texture_;
-    surface_texture_ = jni->NewGlobalRef(surface_texture);
-  }
   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
 
   return WEBRTC_VIDEO_CODEC_OK;
@@ -391,6 +375,7 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
   }
   input_buffers_.clear();
   jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
+  surface_texture_helper_ = nullptr;
   inited_ = false;
   rtc::MessageQueueManager::Clear(this);
   if (CheckException(jni)) {
@@ -499,7 +484,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
   if (frames_received_ > frames_decoded_ + max_pending_frames_) {
     ALOGV("Received: %d. Decoded: %d. Wait for output...",
           frames_received_, frames_decoded_);
-    if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
+    if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) {
      ALOGE("DeliverPendingOutputs error");
      return ProcessHWErrorOnCodecThread();
    }
@@ -562,7 +547,7 @@
 }
 
 bool MediaCodecVideoDecoder::DeliverPendingOutputs(
-    JNIEnv* jni, int dequeue_timeout_us) {
+    JNIEnv* jni, int dequeue_timeout_ms) {
   if (frames_received_ <= frames_decoded_) {
     // No need to query for output buffers - decoder is drained.
     return true;
@@ -571,7 +556,7 @@
   jobject j_decoder_output_buffer = jni->CallObjectMethod(
       *j_media_codec_video_decoder_,
       j_dequeue_output_buffer_method_,
-      dequeue_timeout_us);
+      dequeue_timeout_ms);
   if (CheckException(jni)) {
     ALOGE("dequeueOutputBuffer() error");
     return false;
@@ -596,14 +581,15 @@
     // Extract data from Java DecodedTextureBuffer.
     const int texture_id =
         GetIntField(jni, j_decoder_output_buffer, j_textureID_field_);
-    const int64_t timestamp_us =
-        GetLongField(jni, j_decoder_output_buffer,
-                     j_texture_presentation_timestamp_us_field_);
-    output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
+    const jfloatArray j_transform_matrix =
+        reinterpret_cast<jfloatArray>(GetObjectField(
+            jni, j_decoder_output_buffer, j_transform_matrix_field_));
+    const int64_t timestamp_ns = GetLongField(jni, j_decoder_output_buffer,
+                                              j_texture_timestamp_ns_field_);
+    output_timestamps_ms = timestamp_ns / rtc::kNumNanosecsPerMillisec;
     // Create webrtc::VideoFrameBuffer with native texture handle.
-    native_handle_.SetTextureObject(surface_texture_, texture_id);
-    frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>(
-        &native_handle_, width, height);
+    frame_buffer = surface_texture_helper_->CreateTextureFrame(
+        width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
   } else {
     // Extract data from Java ByteBuffer and create output yuv420 frame -
     // for non surface decoding only.
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.cc b/talk/app/webrtc/java/jni/native_handle_impl.cc
index 37f5489ac5..98af4d8b7d 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.cc
+++ b/talk/app/webrtc/java/jni/native_handle_impl.cc
@@ -31,32 +31,17 @@
 
 namespace webrtc_jni {
 
-NativeHandleImpl::NativeHandleImpl() : texture_object_(NULL), texture_id_(-1) {}
-
-void* NativeHandleImpl::GetHandle() {
-  return texture_object_;
-}
-
-int NativeHandleImpl::GetTextureId() {
-  return texture_id_;
-}
-
-void NativeHandleImpl::SetTextureObject(void* texture_object, int texture_id) {
-  texture_object_ = reinterpret_cast<jobject>(texture_object);
-  texture_id_ = texture_id;
-}
-
-JniNativeHandleBuffer::JniNativeHandleBuffer(void* native_handle,
-                                             int width,
-                                             int height)
-    : NativeHandleBuffer(native_handle, width, height) {}
-
-rtc::scoped_refptr<webrtc::VideoFrameBuffer>
-JniNativeHandleBuffer::NativeToI420Buffer() {
-  // TODO(pbos): Implement before using this in the encoder pipeline (or
-  // remove the RTC_CHECK() in VideoCapture).
-  RTC_NOTREACHED();
-  return nullptr;
+NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
+                                   jint j_oes_texture_id,
+                                   jfloatArray j_transform_matrix)
+    : oes_texture_id(j_oes_texture_id) {
+  RTC_CHECK_EQ(16, jni->GetArrayLength(j_transform_matrix));
+  jfloat* transform_matrix_ptr =
+      jni->GetFloatArrayElements(j_transform_matrix, nullptr);
+  for (int i = 0; i < 16; ++i) {
+    sampling_matrix[i] = transform_matrix_ptr[i];
+  }
+  jni->ReleaseFloatArrayElements(j_transform_matrix, transform_matrix_ptr, 0);
 }
 
 }  // namespace webrtc_jni
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.h b/talk/app/webrtc/java/jni/native_handle_impl.h
index 2ce2b73b13..370039e50e 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.h
+++ b/talk/app/webrtc/java/jni/native_handle_impl.h
@@ -31,33 +31,16 @@
 
 #include <jni.h>
 
-#include "webrtc/common_video/interface/video_frame_buffer.h"
-
 namespace webrtc_jni {
 
 // Wrapper for texture object.
-class NativeHandleImpl {
- public:
-  NativeHandleImpl();
+struct NativeHandleImpl {
+  NativeHandleImpl(JNIEnv* jni,
+                   jint j_oes_texture_id,
+                   jfloatArray j_transform_matrix);
 
-  void* GetHandle();
-  int GetTextureId();
-  void SetTextureObject(void* texture_object, int texture_id);
-
- private:
-  jobject texture_object_;
-  int32_t texture_id_;
-};
-
-class JniNativeHandleBuffer : public webrtc::NativeHandleBuffer {
- public:
-  JniNativeHandleBuffer(void* native_handle, int width, int height);
-
-  // TODO(pbos): Override destructor to release native handle, at the moment the
-  // native handle is not released based on refcount.
-
- private:
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> NativeToI420Buffer() override;
+  const int oes_texture_id;
+  float sampling_matrix[16];
 };
 
 }  // namespace webrtc_jni
diff --git a/talk/app/webrtc/java/jni/peerconnection_jni.cc b/talk/app/webrtc/java/jni/peerconnection_jni.cc
index fc6ce50c51..2d14deef34 100644
--- a/talk/app/webrtc/java/jni/peerconnection_jni.cc
+++ b/talk/app/webrtc/java/jni/peerconnection_jni.cc
@@ -771,7 +771,7 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
             jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;J)V")),
         j_texture_frame_ctor_id_(GetMethodID(
             jni, *j_frame_class_, "<init>",
-            "(IIILjava/lang/Object;IJ)V")),
+            "(IIII[FJ)V")),
         j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
     CHECK_EXCEPTION(jni);
   }
@@ -827,13 +827,13 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
   jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
     NativeHandleImpl* handle =
         reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
-    jobject texture_object = reinterpret_cast<jobject>(handle->GetHandle());
-    int texture_id = handle->GetTextureId();
+    jfloatArray sampling_matrix = jni()->NewFloatArray(16);
+    jni()->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
     return jni()->NewObject(
         *j_frame_class_, j_texture_frame_ctor_id_,
         frame->GetWidth(), frame->GetHeight(),
         static_cast<int>(frame->GetVideoRotation()),
-        texture_object, texture_id, javaShallowCopy(frame));
+        handle->oes_texture_id, sampling_matrix, javaShallowCopy(frame));
   }
 
   JNIEnv* jni() {
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
index ef2055645d..5312fe39a8 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -27,7 +27,6 @@
 
 package org.webrtc;
 
-import android.graphics.SurfaceTexture;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
@@ -42,8 +41,9 @@ import android.view.Surface;
 import org.webrtc.Logging;
 
 import java.nio.ByteBuffer;
-import java.util.List;
 import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
 
 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
 // This class is an implementation detail of the Java PeerConnection API.
@@ -93,10 +93,11 @@ public class MediaCodecVideoDecoder {
   private int stride;
   private int sliceHeight;
   private boolean useSurface;
-  private int textureID = 0;
-  private SurfaceTexture surfaceTexture = null;
+  // |isWaitingForTexture| is true when waiting for the transition:
+  // MediaCodec.releaseOutputBuffer() -> onTextureFrameAvailable().
+  private boolean isWaitingForTexture = false;
+  private TextureListener textureListener;
   private Surface surface = null;
-  private EglBase eglBase;
 
   private MediaCodecVideoDecoder() {
   }
@@ -180,12 +181,13 @@ public class MediaCodecVideoDecoder {
     }
   }
 
-  // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
-  private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
+  // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
+  private boolean initDecode(
+      VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
     if (mediaCodecThread != null) {
       throw new RuntimeException("Forgot to release()?");
     }
-    useSurface = (sharedContext != null);
+    useSurface = (surfaceTextureHelper != null);
     String mime = null;
     String[] supportedCodecPrefixes = null;
     if (type == VideoCodecType.VIDEO_CODEC_VP8) {
@@ -204,9 +206,6 @@
     Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
         ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
         ". Use Surface: " + useSurface);
-    if (sharedContext != null) {
-      Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
-    }
     mediaCodecThread = Thread.currentThread();
     try {
       this.width = width;
@@ -215,16 +214,8 @@
       sliceHeight = height;
 
       if (useSurface) {
-        // Create shared EGL context.
-        eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
-        eglBase.createDummyPbufferSurface();
-        eglBase.makeCurrent();
-
-        // Create output surface
-        textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
-        Logging.d(TAG, "Video decoder TextureID = " + textureID);
-        surfaceTexture = new SurfaceTexture(textureID);
-        surface = new Surface(surfaceTexture);
+        textureListener = new TextureListener(surfaceTextureHelper);
+        surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
       }
 
       MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
@@ -265,11 +256,7 @@
     if (useSurface) {
       surface.release();
       surface = null;
-      Logging.d(TAG, "Delete video decoder TextureID " + textureID);
-      GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
-      textureID = 0;
-      eglBase.release();
-      eglBase = null;
+      textureListener.release();
     }
   }
 
@@ -317,11 +304,72 @@
 
   private static class DecodedTextureBuffer {
     private final int textureID;
-    private final long presentationTimestampUs;
+    private final float[] transformMatrix;
+    private final long timestampNs;
 
-    public DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
+    public DecodedTextureBuffer(int textureID, float[] transformMatrix, long timestampNs) {
      this.textureID = textureID;
-      this.presentationTimestampUs = presentationTimestampUs;
+      this.transformMatrix = transformMatrix;
+      this.timestampNs = timestampNs;
+    }
+  }
+
+  // Poll based texture listener.
+  private static class TextureListener
+      implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+    private final SurfaceTextureHelper surfaceTextureHelper;
+    private DecodedTextureBuffer textureBuffer;
+    // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
+    private final Object newFrameLock = new Object();
+
+    public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
+      this.surfaceTextureHelper = surfaceTextureHelper;
+      surfaceTextureHelper.setListener(this);
+    }
+
+    // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
+    @Override
+    public void onTextureFrameAvailable(
+        int oesTextureId, float[] transformMatrix, long timestampNs) {
+      synchronized (newFrameLock) {
+        if (textureBuffer != null) {
+          Logging.e(TAG,
+              "Unexpected onTextureFrameAvailable() called while already holding a texture.");
+          throw new IllegalStateException("Already holding a texture.");
+        }
+        textureBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix, timestampNs);
+        newFrameLock.notifyAll();
+      }
+    }
+
+    // Dequeues and returns a texture buffer if available, or null otherwise.
+    public DecodedTextureBuffer dequeueTextureFrame(int timeoutMs) {
+      synchronized (newFrameLock) {
+        if (textureBuffer == null && timeoutMs > 0) {
+          try {
+            newFrameLock.wait(timeoutMs);
+          } catch(InterruptedException e) {
+            // Restore the interrupted status by reinterrupting the thread.
+            Thread.currentThread().interrupt();
+          }
+        }
+        final DecodedTextureBuffer textureBuffer = this.textureBuffer;
+        this.textureBuffer = null;
+        return textureBuffer;
+      }
+    }
+
+    public void release() {
+      // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
+      // progress is done. Therefore, the call to disconnect() must be outside any synchronized
+      // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
+      surfaceTextureHelper.disconnect();
+      synchronized (newFrameLock) {
+        if (textureBuffer != null) {
+          surfaceTextureHelper.returnTextureFrame();
+          textureBuffer = null;
+        }
+      }
     }
   }
 
@@ -330,14 +378,25 @@
   // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
   // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
   // upon codec error.
-  private Object dequeueOutputBuffer(int dequeueTimeoutUs)
+  private Object dequeueOutputBuffer(int dequeueTimeoutMs)
       throws IllegalStateException, MediaCodec.CodecException {
     checkOnMediaCodecThread();
+    // Calling multiple MediaCodec.releaseOutputBuffer() with render=true in a row will result in
+    // dropped texture frames. Therefore, wait for any pending onTextureFrameAvailable() before
+    // proceeding.
+    if (isWaitingForTexture) {
+      final DecodedTextureBuffer textureBuffer =
+          textureListener.dequeueTextureFrame(dequeueTimeoutMs);
+      isWaitingForTexture = (textureBuffer == null);
+      return textureBuffer;
+    }
+
     // Drain the decoder until receiving a decoded buffer or hitting
     // MediaCodec.INFO_TRY_AGAIN_LATER.
     final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
     while (true) {
-      final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
+      final int result = mediaCodec.dequeueOutputBuffer(
+          info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
       switch (result) {
         case MediaCodec.INFO_TRY_AGAIN_LATER:
           return null;
@@ -371,9 +430,10 @@
         // Output buffer decoded.
         if (useSurface) {
           mediaCodec.releaseOutputBuffer(result, true /* render */);
-          // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
-          // frame.
-          return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
+          final DecodedTextureBuffer textureBuffer =
+              textureListener.dequeueTextureFrame(dequeueTimeoutMs);
+          isWaitingForTexture = (textureBuffer == null);
+          return textureBuffer;
         } else {
           return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
         }
diff --git a/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java b/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
index afda84897c..4b51e63c5d 100644
--- a/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
+++ b/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
@@ -44,7 +44,11 @@ public class VideoRenderer {
     public final int[] yuvStrides;
     public ByteBuffer[] yuvPlanes;
     public final boolean yuvFrame;
-    public Object textureObject;
+    // Matrix that transforms standard coordinates to their proper sampling locations in
+    // the texture. This transform compensates for any properties of the video source that
+    // cause it to appear different from a normalized texture. This matrix does not take
+    // |rotationDegree| into account.
+    public final float[] samplingMatrix;
     public int textureId;
     // Frame pointer in C++.
     private long nativeFramePointer;
@@ -69,6 +73,15 @@
       if (rotationDegree % 90 != 0) {
         throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
       }
+      // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
+      // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
+      // bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
+      // matrix.
+      samplingMatrix = new float[] {
+          1, 0, 0, 0,
+          0, -1, 0, 0,
+          0, 0, 1, 0,
+          0, 1, 0, 1};
     }
 
     /**
@@ -76,12 +89,12 @@
      */
     private I420Frame(
         int width, int height, int rotationDegree,
-        Object textureObject, int textureId, long nativeFramePointer) {
+        int textureId, float[] samplingMatrix, long nativeFramePointer) {
       this.width = width;
       this.height = height;
       this.yuvStrides = null;
       this.yuvPlanes = null;
-      this.textureObject = textureObject;
+      this.samplingMatrix = samplingMatrix;
       this.textureId = textureId;
       this.yuvFrame = false;
       this.rotationDegree = rotationDegree;
@@ -124,7 +137,6 @@
    */
   public static void renderFrameDone(I420Frame frame) {
     frame.yuvPlanes = null;
-    frame.textureObject = null;
     frame.textureId = 0;
     if (frame.nativeFramePointer != 0) {
       releaseNativeFrame(frame.nativeFramePointer);