diff --git a/webrtc/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java b/webrtc/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
index 256f99e026..5f86e0a8ec 100644
--- a/webrtc/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
+++ b/webrtc/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
@@ -26,6 +26,22 @@ import android.os.Build;
 public class HardwareVideoDecoderFactory implements VideoDecoderFactory {
   private static final String TAG = "HardwareVideoDecoderFactory";
 
+  private final EglBase.Context sharedContext;
+
+  /** Creates a HardwareVideoDecoderFactory that does not use surface textures. */
+  @Deprecated // Not removed yet to avoid breaking callers.
+  public HardwareVideoDecoderFactory() {
+    this(null);
+  }
+
+  /**
+   * Creates a HardwareVideoDecoderFactory that supports surface texture rendering using the given
+   * shared context. The context may be null. If it is null, then surface support is disabled.
+   */
+  public HardwareVideoDecoderFactory(EglBase.Context sharedContext) {
+    this.sharedContext = sharedContext;
+  }
+
   @Override
   public VideoDecoder createDecoder(String codecType) {
     VideoCodecType type = VideoCodecType.valueOf(codecType);
@@ -37,7 +53,8 @@ public class HardwareVideoDecoderFactory implements VideoDecoderFactory {
 
     CodecCapabilities capabilities = info.getCapabilitiesForType(type.mimeType());
     return new HardwareVideoDecoder(info.getName(), type,
-        MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, capabilities));
+        MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, capabilities),
+        sharedContext);
   }
 
   private MediaCodecInfo findCodecForType(VideoCodecType type) {
diff --git a/webrtc/sdk/android/api/org/webrtc/RendererCommon.java b/webrtc/sdk/android/api/org/webrtc/RendererCommon.java
index 63ffb8e552..0708d262c0 100644
--- a/webrtc/sdk/android/api/org/webrtc/RendererCommon.java
+++ b/webrtc/sdk/android/api/org/webrtc/RendererCommon.java
@@ -258,6 +258,21 @@ public class RendererCommon {
     return matrix;
   }
 
+  /** Converts a float[16] matrix array to android.graphics.Matrix. */
+  public static android.graphics.Matrix convertMatrixToAndroidGraphicsMatrix(float[] matrix4x4) {
+    // clang-format off
+    float[] values = {
+        matrix4x4[0 * 4 + 0], matrix4x4[1 * 4 + 0], matrix4x4[3 * 4 + 0],
+        matrix4x4[0 * 4 + 1], matrix4x4[1 * 4 + 1], matrix4x4[3 * 4 + 1],
+        matrix4x4[0 * 4 + 3], matrix4x4[1 * 4 + 3], matrix4x4[3 * 4 + 3],
+    };
+    // clang-format on
+
+    android.graphics.Matrix matrix = new android.graphics.Matrix();
+    matrix.setValues(values);
+    return matrix;
+  }
+
   /**
    * Calculate display size based on scaling type, video aspect ratio, and maximum display size.
    */
diff --git a/webrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java b/webrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
index 1b6a124064..bd3d545a18 100644
--- a/webrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
+++ b/webrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
@@ -21,6 +21,8 @@ import java.nio.ByteBuffer;
 import java.nio.FloatBuffer;
 import java.util.concurrent.Callable;
 import java.util.concurrent.TimeUnit;
+import org.webrtc.VideoFrame.I420Buffer;
+import org.webrtc.VideoFrame.TextureBuffer;
 
 /**
  * Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
@@ -277,4 +279,95 @@ public class SurfaceTextureHelper {
     eglBase.release();
     handler.getLooper().quit();
   }
+
+  /**
+   * Creates a VideoFrame buffer backed by this helper's texture. The |width| and |height| should
+   * match the dimensions of the data placed in the texture. The correct |transformMatrix| may be
+   * obtained from callbacks to OnTextureFrameAvailableListener.
+   *
+   * The returned TextureBuffer holds a reference to the SurfaceTextureHelper that created it. The
+   * buffer calls returnTextureFrame() when it is released.
+   */
+  public TextureBuffer createTextureBuffer(int width, int height, float[] transformMatrix) {
+    return new OesTextureBuffer(oesTextureId, width, height, transformMatrix, this);
+  }
+
+  /**
+   * Android OES texture buffer backed by a SurfaceTextureHelper's texture. The buffer calls
+   * returnTextureFrame() when it is released.
+   */
+  private static class OesTextureBuffer implements TextureBuffer {
+    private final int id;
+    private final int width;
+    private final int height;
+    private final float[] transformMatrix;
+    private final SurfaceTextureHelper helper;
+    private int refCount;
+
+    OesTextureBuffer(
+        int id, int width, int height, float[] transformMatrix, SurfaceTextureHelper helper) {
+      this.id = id;
+      this.width = width;
+      this.height = height;
+      this.transformMatrix = transformMatrix;
+      this.helper = helper;
+      this.refCount = 1; // Creator implicitly holds a reference.
+    }
+
+    @Override
+    public TextureBuffer.Type getType() {
+      return TextureBuffer.Type.OES;
+    }
+
+    @Override
+    public int getTextureId() {
+      return id;
+    }
+
+    @Override
+    public int getWidth() {
+      return width;
+    }
+
+    @Override
+    public int getHeight() {
+      return height;
+    }
+
+    @Override
+    public I420Buffer toI420() {
+      // SurfaceTextureHelper requires a stride that is divisible by 8. Round width up.
+      // See SurfaceTextureHelper for details on the size and format.
+      int stride = ((width + 7) / 8) * 8;
+      int uvHeight = (height + 1) / 2;
+      // Due to the layout used by SurfaceTextureHelper, vPos + stride * uvHeight would overrun the
+      // buffer. Add one row at the bottom to compensate for this. There will never be data in the
+      // extra row, but now other code does not have to deal with v stride * v height exceeding the
+      // buffer's capacity.
+      int size = stride * (height + uvHeight + 1);
+      ByteBuffer buffer = ByteBuffer.allocateDirect(size);
+      helper.textureToYUV(buffer, width, height, stride, id, transformMatrix);
+
+      int yPos = 0;
+      int uPos = yPos + stride * height;
+      // Rows of U and V alternate in the buffer, so V data starts after the first row of U.
+      int vPos = uPos + stride / 2;
+
+      // SurfaceTextureHelper uses the same stride for Y, U, and V data.
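+      // For example, width = 640 and height = 480 give stride = 640, uvHeight = 240,
+      // yPos = 0, uPos = 307200, and vPos = 307520: each stride-wide chroma row holds a
+      // half-width row of U followed by a half-width row of V.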
+      return new I420BufferImpl(
+          buffer, width, height, yPos, stride, uPos, stride, vPos, stride, null);
+    }
+
+    @Override
+    public void retain() {
+      ++refCount;
+    }
+
+    @Override
+    public void release() {
+      if (--refCount == 0) {
+        helper.returnTextureFrame();
+      }
+    }
+  }
 }
diff --git a/webrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoDecoderTest.java b/webrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoDecoderTest.java
index bb889de3df..6c9d0b7b5c 100644
--- a/webrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoDecoderTest.java
+++ b/webrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoDecoderTest.java
@@ -11,6 +11,8 @@
 package org.webrtc;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
 import android.annotation.TargetApi;
 import android.graphics.Matrix;
@@ -44,13 +46,35 @@ public final class HardwareVideoDecoderTest {
       return;
     }
 
-    HardwareVideoDecoderFactory decoderFactory = new HardwareVideoDecoderFactory();
+    HardwareVideoDecoderFactory decoderFactory = new HardwareVideoDecoderFactory(null);
     VideoDecoder decoder = decoderFactory.createDecoder(supportedCodecs[0].name);
 
     assertEquals(decoder.initDecode(SETTINGS, null), VideoCodecStatus.OK);
     assertEquals(decoder.release(), VideoCodecStatus.OK);
   }
 
+  @Test
+  @MediumTest
+  public void testInitializeUsingTextures() {
+    HardwareVideoEncoderFactory encoderFactory =
+        new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
+    VideoCodecInfo[] supportedCodecs = encoderFactory.getSupportedCodecs();
+    if (supportedCodecs.length == 0) {
+      Log.i(TAG, "No hardware encoding support, skipping testInitializeUsingTextures");
+      return;
+    }
+
+    EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
+    HardwareVideoDecoderFactory decoderFactory =
+        new HardwareVideoDecoderFactory(eglBase.getEglBaseContext());
+
+    VideoDecoder decoder = decoderFactory.createDecoder(supportedCodecs[0].name);
+
+    assertEquals(decoder.initDecode(SETTINGS, null), VideoCodecStatus.OK);
+    assertEquals(decoder.release(), VideoCodecStatus.OK);
+
+    eglBase.release();
+  }
+
   @Test
   @MediumTest
   public void testDecode() throws InterruptedException {
@@ -63,7 +87,7 @@ public final class HardwareVideoDecoderTest {
     }
 
     // Set up the decoder.
-    HardwareVideoDecoderFactory decoderFactory = new HardwareVideoDecoderFactory();
+    HardwareVideoDecoderFactory decoderFactory = new HardwareVideoDecoderFactory(null);
     VideoDecoder decoder = decoderFactory.createDecoder(supportedCodecs[0].name);
 
     final long presentationTimestampUs = 20000;
@@ -74,6 +98,7 @@ public final class HardwareVideoDecoderTest {
     VideoDecoder.Callback decodeCallback = new VideoDecoder.Callback() {
       @Override
       public void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp) {
+        frame.retain();
         decoded.set(frame);
         decodeDone.countDown();
       }
@@ -97,7 +122,7 @@ public final class HardwareVideoDecoderTest {
         VideoCodecStatus.OK);
 
     // First, encode a frame.
-    VideoFrame.I420Buffer buffer = new I420BufferImpl(SETTINGS.width, SETTINGS.height);
+    VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
     VideoFrame frame =
         new VideoFrame(buffer, rotation, presentationTimestampUs * 1000, new Matrix());
     VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
@@ -120,7 +145,92 @@ public final class HardwareVideoDecoderTest {
     assertEquals(frame.getWidth(), SETTINGS.width);
     assertEquals(frame.getHeight(), SETTINGS.height);
 
+    frame.release();
     assertEquals(decoder.release(), VideoCodecStatus.OK);
     assertEquals(encoder.release(), VideoCodecStatus.OK);
   }
+
+  @Test
+  @MediumTest
+  public void testDecodeUsingTextures() throws InterruptedException {
+    HardwareVideoEncoderFactory encoderFactory =
+        new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
+    VideoCodecInfo[] supportedCodecs = encoderFactory.getSupportedCodecs();
+    if (supportedCodecs.length == 0) {
+      Log.i(TAG, "No hardware encoding support, skipping testDecodeUsingTextures");
+      return;
+    }
+
+    // Set up the decoder.
+    EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
+    HardwareVideoDecoderFactory decoderFactory =
+        new HardwareVideoDecoderFactory(eglBase.getEglBaseContext());
+    VideoDecoder decoder = decoderFactory.createDecoder(supportedCodecs[0].name);
+
+    final long presentationTimestampUs = 20000;
+    final int rotation = 270;
+
+    final CountDownLatch decodeDone = new CountDownLatch(1);
+    final AtomicReference<VideoFrame> decoded = new AtomicReference<>();
+    VideoDecoder.Callback decodeCallback = new VideoDecoder.Callback() {
+      @Override
+      public void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp) {
+        frame.retain();
+        decoded.set(frame);
+        decodeDone.countDown();
+      }
+    };
+
+    assertEquals(decoder.initDecode(SETTINGS, decodeCallback), VideoCodecStatus.OK);
+
+    // Set up an encoder to produce a valid encoded frame.
+    VideoEncoder encoder = encoderFactory.createEncoder(supportedCodecs[0]);
+    final CountDownLatch encodeDone = new CountDownLatch(1);
+    final AtomicReference<EncodedImage> encoded = new AtomicReference<>();
+    VideoEncoder.Callback encodeCallback = new VideoEncoder.Callback() {
+      @Override
+      public void onEncodedFrame(EncodedImage image, VideoEncoder.CodecSpecificInfo info) {
+        encoded.set(image);
+        encodeDone.countDown();
+      }
+    };
+    assertEquals(
+        encoder.initEncode(
+            new VideoEncoder.Settings(1, SETTINGS.width, SETTINGS.height, 300, 30), encodeCallback),
+        VideoCodecStatus.OK);
+
+    // First, encode a frame.
+    VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
+    VideoFrame frame =
+        new VideoFrame(buffer, rotation, presentationTimestampUs * 1000, new Matrix());
+    VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
+        new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
+
+    assertEquals(encoder.encode(frame, info), VideoCodecStatus.OK);
+
+    ThreadUtils.awaitUninterruptibly(encodeDone);
+
+    // Now decode the frame.
+    assertEquals(
+        decoder.decode(encoded.get(), new VideoDecoder.DecodeInfo(false, 0)), VideoCodecStatus.OK);
+
+    ThreadUtils.awaitUninterruptibly(decodeDone);
+
+    frame = decoded.get();
+    assertEquals(frame.getRotation(), rotation);
+    assertEquals(frame.getTimestampNs(), presentationTimestampUs * 1000);
+    // TODO(mellem): Compare the matrix to whatever we expect to get back?
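+    // The transform matrix comes from SurfaceTexture.getTransformMatrix() and varies by
+    // device, so the test only verifies that a matrix was attached to the frame.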
+    assertNotNull(frame.getTransformMatrix());
+    assertEquals(frame.getWidth(), SETTINGS.width);
+    assertEquals(frame.getHeight(), SETTINGS.height);
+
+    assertTrue(frame.getBuffer() instanceof VideoFrame.TextureBuffer);
+    VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) frame.getBuffer();
+    assertEquals(textureBuffer.getType(), VideoFrame.TextureBuffer.Type.OES);
+
+    assertEquals(decoder.release(), VideoCodecStatus.OK);
+    assertEquals(encoder.release(), VideoCodecStatus.OK);
+
+    frame.release();
+    eglBase.release();
+  }
 }
diff --git a/webrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java b/webrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java
index 87fe6ef7bd..f5b255119c 100644
--- a/webrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java
+++ b/webrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java
@@ -81,7 +81,7 @@ public class HardwareVideoEncoderTest {
 
     assertEquals(encoder.initEncode(SETTINGS, callback), VideoCodecStatus.OK);
 
-    VideoFrame.I420Buffer buffer = new I420BufferImpl(SETTINGS.width, SETTINGS.height);
+    VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
     VideoFrame frame =
         new VideoFrame(buffer, 0 /* rotation */, presentationTimestampUs * 1000, new Matrix());
     VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
diff --git a/webrtc/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java b/webrtc/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java
index 99a0a4b06f..13ccedc26d 100644
--- a/webrtc/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java
+++ b/webrtc/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java
@@ -16,6 +16,7 @@ import android.media.MediaCodec;
 import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaFormat;
 import android.os.SystemClock;
+import android.view.Surface;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
@@ -27,7 +28,8 @@ import org.webrtc.ThreadUtils.ThreadChecker;
 /** Android hardware video decoder. */
 @TargetApi(16)
 @SuppressWarnings("deprecation") // Cannot support API 16 without using deprecated methods.
-class HardwareVideoDecoder implements VideoDecoder {
+class HardwareVideoDecoder
+    implements VideoDecoder, SurfaceTextureHelper.OnTextureFrameAvailableListener {
   private static final String TAG = "HardwareVideoDecoder";
 
   // TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API.
@@ -100,18 +102,45 @@ class HardwareVideoDecoder implements VideoDecoder {
   // Whether the decoder has seen a key frame. The first frame must be a key frame.
   private boolean keyFrameRequired;
 
+  private final EglBase.Context sharedContext;
+  private SurfaceTextureHelper surfaceTextureHelper;
+  private Surface surface = null;
+
+  private static class DecodedTextureMetadata {
+    final int width;
+    final int height;
+    final int rotation;
+    final long presentationTimestampUs;
+    final Integer decodeTimeMs;
+
+    DecodedTextureMetadata(
+        int width, int height, int rotation, long presentationTimestampUs, Integer decodeTimeMs) {
+      this.width = width;
+      this.height = height;
+      this.rotation = rotation;
+      this.presentationTimestampUs = presentationTimestampUs;
+      this.decodeTimeMs = decodeTimeMs;
+    }
+  }
+
+  // Metadata for the last frame rendered to the texture. Only accessed on the texture helper's
+  // thread.
+  private DecodedTextureMetadata renderedTextureMetadata;
+
   // Decoding proceeds asynchronously. This callback returns decoded frames to the caller.
   private Callback callback;
 
   private MediaCodec codec = null;
 
-  HardwareVideoDecoder(String codecName, VideoCodecType codecType, int colorFormat) {
+  HardwareVideoDecoder(
+      String codecName, VideoCodecType codecType, int colorFormat, EglBase.Context sharedContext) {
     if (!isSupportedColorFormat(colorFormat)) {
       throw new IllegalArgumentException("Unsupported color format: " + colorFormat);
     }
     this.codecName = codecName;
     this.codecType = codecType;
     this.colorFormat = colorFormat;
+    this.sharedContext = sharedContext;
     this.frameInfos = new LinkedBlockingDeque<>();
   }
 
@@ -147,8 +176,14 @@ class HardwareVideoDecoder implements VideoDecoder {
     }
     try {
       MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
-      format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
-      codec.configure(format, null, null, 0);
+      if (sharedContext == null) {
+        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
+      } else {
+        surfaceTextureHelper = SurfaceTextureHelper.create("decoder-texture-thread", sharedContext);
+        surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+        surfaceTextureHelper.startListening(this);
+      }
+      codec.configure(format, surface, null, 0);
       codec.start();
     } catch (IllegalStateException e) {
       Logging.e(TAG, "initDecode failed", e);
@@ -209,7 +244,6 @@ class HardwareVideoDecoder implements VideoDecoder {
       }
     }
 
-    // TODO(mellem): Support textures.
     int index;
     try {
       index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US);
@@ -288,6 +322,13 @@ class HardwareVideoDecoder implements VideoDecoder {
       callback = null;
       outputThread = null;
       frameInfos.clear();
+      if (surface != null) {
+        surface.release();
+        surface = null;
+        surfaceTextureHelper.stopListening();
+        surfaceTextureHelper.dispose();
+        surfaceTextureHelper = null;
+      }
     }
     return VideoCodecStatus.OK;
   }
@@ -343,64 +384,106 @@ class HardwareVideoDecoder implements VideoDecoder {
 
     hasDecodedFirstFrame = true;
 
-    // Load dimensions from shared memory under the dimension lock.
-    int width, height, stride, sliceHeight;
-    synchronized (dimensionLock) {
-      width = this.width;
-      height = this.height;
-      stride = this.stride;
-      sliceHeight = this.sliceHeight;
-    }
-
-    // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2)
-    // bytes for each of the U and V channels.
-    if (info.size < width * height * 3 / 2) {
-      Logging.e(TAG, "Insufficient output buffer size: " + info.size);
-      return;
-    }
-
-    if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) {
-      // Some codecs (Exynos) report an incorrect stride. Correct it here.
-      // Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as
-      // 2 * size / (3 * height).
-      stride = info.size * 2 / (height * 3);
-    }
-
-    ByteBuffer buffer = codec.getOutputBuffers()[result];
-    buffer.position(info.offset);
-    buffer.limit(info.size);
-
-    final VideoFrame.I420Buffer frameBuffer;
-
-    // TODO(mellem): As an optimization, use libyuv via JNI to copy/reformatting data.
-    if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
-      if (sliceHeight % 2 == 0) {
-        frameBuffer =
-            createBufferFromI420(buffer, result, info.offset, stride, sliceHeight, width, height);
-      } else {
-        frameBuffer = new I420BufferImpl(width, height);
-        // Optimal path is not possible because we have to copy the last rows of U- and V-planes.
-        copyI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height);
-        codec.releaseOutputBuffer(result, false);
-      }
+    if (surfaceTextureHelper != null) {
+      deliverTextureFrame(result, info, rotation, decodeTimeMs);
     } else {
-      frameBuffer = new I420BufferImpl(width, height);
-      // All other supported color formats are NV12.
-      nv12ToI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height);
-      codec.releaseOutputBuffer(result, false);
+      deliverByteFrame(result, info, rotation, decodeTimeMs);
     }
-
-    long presentationTimeNs = info.presentationTimeUs * 1000;
-    VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs, new Matrix());
-
-    // Note that qp is parsed on the C++ side.
-    callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
-    frame.release();
     } catch (IllegalStateException e) {
       Logging.e(TAG, "deliverDecodedFrame failed", e);
     }
   }
 
+  private void deliverTextureFrame(final int index, final MediaCodec.BufferInfo info,
+      final int rotation, final Integer decodeTimeMs) {
+    // Load dimensions from shared memory under the dimension lock.
+    final int width, height;
+    synchronized (dimensionLock) {
+      width = this.width;
+      height = this.height;
+    }
+
+    surfaceTextureHelper.getHandler().post(new Runnable() {
+      @Override
+      public void run() {
+        renderedTextureMetadata = new DecodedTextureMetadata(
+            width, height, rotation, info.presentationTimeUs, decodeTimeMs);
+        codec.releaseOutputBuffer(index, true);
+      }
+    });
+  }
+
+  @Override
+  public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
+    VideoFrame.TextureBuffer oesBuffer = surfaceTextureHelper.createTextureBuffer(
+        renderedTextureMetadata.width, renderedTextureMetadata.height, transformMatrix);
+
+    Matrix matrix = RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix);
+
+    VideoFrame frame = new VideoFrame(oesBuffer, renderedTextureMetadata.rotation,
+        renderedTextureMetadata.presentationTimestampUs * 1000, matrix);
+    callback.onDecodedFrame(frame, renderedTextureMetadata.decodeTimeMs, null /* qp */);
+    frame.release();
+  }
+
+  private void deliverByteFrame(
+      int result, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs) {
+    // Load dimensions from shared memory under the dimension lock.
+    int width, height, stride, sliceHeight;
+    synchronized (dimensionLock) {
+      width = this.width;
+      height = this.height;
+      stride = this.stride;
+      sliceHeight = this.sliceHeight;
+    }
+
+    // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2)
+    // bytes for each of the U and V channels.
+    if (info.size < width * height * 3 / 2) {
+      Logging.e(TAG, "Insufficient output buffer size: " + info.size);
+      return;
+    }
+
+    if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) {
+      // Some codecs (Exynos) report an incorrect stride. Correct it here.
+      // Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as
+      // 2 * size / (3 * height).
+      stride = info.size * 2 / (height * 3);
+    }
+
+    ByteBuffer buffer = codec.getOutputBuffers()[result];
+    buffer.position(info.offset);
+    buffer.limit(info.size);
+
+    final VideoFrame.I420Buffer frameBuffer;
+
+    // TODO(mellem): As an optimization, use libyuv via JNI to copy/reformat data.
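+    // COLOR_FormatYUV420Planar means separate Y, U, and V planes; the remaining formats in
+    // MediaCodecUtils.DECODER_COLOR_FORMATS are NV12-style semi-planar layouts, handled by
+    // nv12ToI420() below.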
+    if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
+      if (sliceHeight % 2 == 0) {
+        frameBuffer =
+            createBufferFromI420(buffer, result, info.offset, stride, sliceHeight, width, height);
+      } else {
+        frameBuffer = I420BufferImpl.allocate(width, height);
+        // Optimal path is not possible because we have to copy the last rows of U- and V-planes.
+        copyI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height);
+        codec.releaseOutputBuffer(result, false);
+      }
+    } else {
+      frameBuffer = I420BufferImpl.allocate(width, height);
+      // All other supported color formats are NV12.
+      nv12ToI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height);
+      codec.releaseOutputBuffer(result, false);
+    }
+
+    long presentationTimeNs = info.presentationTimeUs * 1000;
+    VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs, new Matrix());
+
+    // Note that qp is parsed on the C++ side.
+    callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
+    frame.release();
+  }
+
   private void reformat(MediaFormat format) {
     outputThreadChecker.checkIsOnValidThread();
     Logging.d(TAG, "Decoder format changed: " + format.toString());
@@ -429,7 +512,9 @@ class HardwareVideoDecoder implements VideoDecoder {
       height = newHeight;
     }
 
-    if (format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
+    // Note: texture mode ignores colorFormat. Hence, if the texture helper is non-null, skip
+    // color format updates.
+    if (surfaceTextureHelper == null && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
       colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
       Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
       if (!isSupportedColorFormat(colorFormat)) {
@@ -519,81 +604,20 @@ class HardwareVideoDecoder implements VideoDecoder {
     synchronized (activeOutputBuffersLock) {
       activeOutputBuffers++;
     }
-    return new VideoFrame.I420Buffer() {
-      private int refCount = 1;
+    I420BufferImpl.ReleaseCallback callback = new I420BufferImpl.ReleaseCallback() {
       @Override
-      public ByteBuffer getDataY() {
-        ByteBuffer data = buffer.slice();
-        data.position(yPos);
-        data.limit(yPos + getStrideY() * height);
-        return data;
-      }
-
-      @Override
-      public ByteBuffer getDataU() {
-        ByteBuffer data = buffer.slice();
-        data.position(uPos);
-        data.limit(uPos + getStrideU() * chromaHeight);
-        return data;
-      }
-
-      @Override
-      public ByteBuffer getDataV() {
-        ByteBuffer data = buffer.slice();
-        data.position(vPos);
-        data.limit(vPos + getStrideV() * chromaHeight);
-        return data;
-      }
-
-      @Override
-      public int getStrideY() {
-        return stride;
-      }
-
-      @Override
-      public int getStrideU() {
-        return uvStride;
-      }
-
-      @Override
-      public int getStrideV() {
-        return uvStride;
-      }
-
-      @Override
-      public int getWidth() {
-        return width;
-      }
-
-      @Override
-      public int getHeight() {
-        return height;
-      }
-
-      @Override
-      public VideoFrame.I420Buffer toI420() {
-        return this;
-      }
-
-      @Override
-      public void retain() {
-        refCount++;
-      }
-
-      @Override
-      public void release() {
-        refCount--;
-
-        if (refCount == 0) {
-          codec.releaseOutputBuffer(outputBufferIndex, false);
-          synchronized (activeOutputBuffersLock) {
-            activeOutputBuffers--;
-            activeOutputBuffersLock.notifyAll();
-          }
+      public void onRelease() {
+        codec.releaseOutputBuffer(outputBufferIndex, false);
+        synchronized (activeOutputBuffersLock) {
+          activeOutputBuffers--;
+          activeOutputBuffersLock.notifyAll();
         }
       }
     };
+
+    return new I420BufferImpl(
+        buffer, width, height, yPos, stride, uPos, uvStride, vPos, uvStride, callback);
   }
 
   private static void copyI420(ByteBuffer src, int offset, VideoFrame.I420Buffer frameBuffer,
diff --git a/webrtc/sdk/android/src/java/org/webrtc/I420BufferImpl.java b/webrtc/sdk/android/src/java/org/webrtc/I420BufferImpl.java
index 87fc202ece..09c0782271 100644
--- a/webrtc/sdk/android/src/java/org/webrtc/I420BufferImpl.java
+++ b/webrtc/sdk/android/src/java/org/webrtc/I420BufferImpl.java
@@ -15,21 +15,48 @@ import org.webrtc.VideoFrame.I420Buffer;
 
 /** Implementation of an I420 VideoFrame buffer. */
 class I420BufferImpl implements VideoFrame.I420Buffer {
+  private final ByteBuffer buffer;
   private final int width;
   private final int height;
-  private final int strideUV;
-  private final ByteBuffer y;
-  private final ByteBuffer u;
-  private final ByteBuffer v;
+  private final int chromaHeight;
+  private final int yPos;
+  private final int strideY;
+  private final int uPos;
+  private final int strideU;
+  private final int vPos;
+  private final int strideV;
+  private final ReleaseCallback releaseCallback;
 
-  I420BufferImpl(int width, int height) {
+  private int refCount;
+
+  /** Constructs an I420Buffer backed by existing data. */
+  I420BufferImpl(ByteBuffer buffer, int width, int height, int yPos, int strideY, int uPos,
+      int strideU, int vPos, int strideV, ReleaseCallback releaseCallback) {
+    this.buffer = buffer;
     this.width = width;
     this.height = height;
-    this.strideUV = (width + 1) / 2;
-    int halfHeight = (height + 1) / 2;
-    this.y = ByteBuffer.allocateDirect(width * height);
-    this.u = ByteBuffer.allocateDirect(strideUV * halfHeight);
-    this.v = ByteBuffer.allocateDirect(strideUV * halfHeight);
+    this.chromaHeight = (height + 1) / 2;
+    this.yPos = yPos;
+    this.strideY = strideY;
+    this.uPos = uPos;
+    this.strideU = strideU;
+    this.vPos = vPos;
+    this.strideV = strideV;
+    this.releaseCallback = releaseCallback;
+
+    this.refCount = 1;
+  }
+
+  /** Allocates an empty I420Buffer suitable for an image of the given dimensions. */
+  static I420BufferImpl allocate(int width, int height) {
+    int chromaHeight = (height + 1) / 2;
+    int strideUV = (width + 1) / 2;
+    int yPos = 0;
+    int uPos = yPos + width * height;
+    int vPos = uPos + strideUV * chromaHeight;
+    ByteBuffer buffer = ByteBuffer.allocateDirect(width * height + 2 * strideUV * chromaHeight);
+    return new I420BufferImpl(
+        buffer, width, height, yPos, width, uPos, strideUV, vPos, strideUV, null);
   }
 
   @Override
@@ -44,32 +71,41 @@ class I420BufferImpl implements VideoFrame.I420Buffer {
 
   @Override
   public ByteBuffer getDataY() {
-    return y;
+    ByteBuffer data = buffer.slice();
+    data.position(yPos);
+    data.limit(yPos + getStrideY() * height);
+    return data;
   }
 
   @Override
   public ByteBuffer getDataU() {
-    return u;
+    ByteBuffer data = buffer.slice();
+    data.position(uPos);
+    data.limit(uPos + strideU * chromaHeight);
+    return data;
   }
 
   @Override
   public ByteBuffer getDataV() {
-    return v;
+    ByteBuffer data = buffer.slice();
+    data.position(vPos);
+    data.limit(vPos + strideV * chromaHeight);
+    return data;
   }
 
   @Override
   public int getStrideY() {
-    return width;
+    return strideY;
   }
 
   @Override
   public int getStrideU() {
-    return strideUV;
+    return strideU;
  }
 
   @Override
   public int getStrideV() {
-    return strideUV;
+    return strideV;
   }
 
   @Override
@@ -78,8 +114,20 @@ class I420BufferImpl implements VideoFrame.I420Buffer {
   }
 
   @Override
-  public void retain() {}
+  public void retain() {
+    ++refCount;
+  }
 
   @Override
-  public void release() {}
+  public void release() {
+    if (--refCount == 0 && releaseCallback != null) {
+      releaseCallback.onRelease();
+    }
+  }
+
+  // Callback called when the frame is no longer referenced.
+  interface ReleaseCallback {
+    void onRelease();
+  }
 }
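---
Usage sketch (illustrative only, not part of the patch): a minimal example of wiring the
texture-backed decode path end to end, mirroring the instrumentation tests above. The codec
name "VP8", the `settings` object, and `render()` are placeholders rather than APIs introduced
by this change.

    EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
    HardwareVideoDecoderFactory factory =
        new HardwareVideoDecoderFactory(eglBase.getEglBaseContext());
    VideoDecoder decoder = factory.createDecoder("VP8"); // Placeholder codec name.

    VideoDecoder.Callback callback = new VideoDecoder.Callback() {
      @Override
      public void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp) {
        // With a shared EGL context the buffer is an OES TextureBuffer. Retain the frame
        // while it is in use; releasing the last reference returns the texture to the
        // SurfaceTextureHelper via returnTextureFrame().
        frame.retain();
        render(frame); // Placeholder for application rendering.
        frame.release();
      }
    };
    decoder.initDecode(settings, callback); // settings: a VideoDecoder.Settings instance.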