Account for stride when calculating buffer size
https://webrtc-review.googlesource.com/c/src/+/240680 made the encoder aware of the stride and slice height of the input buffer, but the calculation of the buffer size passed to queueInputBuffer() was not updated.

Bug: webrtc:13427
Change-Id: Iba8687f56eda148ac67b331d35c45317a4ec5c59
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/301321
Reviewed-by: Mirta Dvornicic <mirtad@webrtc.org>
Commit-Queue: Sergey Silkin <ssilkin@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#39895}
This commit is contained in:
parent f9ffd68d8e
commit 88429d572b
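The failure mode, in short: MediaCodec may report a row stride and slice height larger than the visible frame, so a stride-aware copy fills more than width * height * 3 / 2 bytes, while queueInputBuffer() was still told only the tight size was valid. A minimal sketch of the two size formulas (illustration only, not code from this CL; the 704/480 numbers are made up):

public final class BufferSizeSketch {
  // Tight I420 size: what the encoder passed to queueInputBuffer() before this CL.
  static int tightSize(int width, int height) {
    return width * height * 3 / 2;
  }

  // Stride-aware planar size, mirroring updateInputFormat() introduced below.
  static int strideAwareSize(int stride, int sliceHeight) {
    int chromaStride = (stride + 1) / 2;
    int chromaSliceHeight = (sliceHeight + 1) / 2;
    return sliceHeight * stride + chromaSliceHeight * chromaStride * 2;
  }

  public static void main(String[] args) {
    System.out.println(tightSize(640, 480));       // 460800
    System.out.println(strideAwareSize(704, 480)); // 506880: the extra bytes were cut off
  }
}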
YuvHelper.java

@@ -36,8 +36,8 @@ public class YuvHelper {
     final int dstStartU = dstStartY + dstStrideY * dstSliceHeightY;
     final int dstEndU = dstStartU + dstStrideU * chromaHeight;
     final int dstStartV = dstStartU + dstStrideU * dstSliceHeightU;
-    // The last line doesn't need any padding, so use chromaWidth
-    // to calculate the exact end position.
+    // The last line doesn't need any padding, so use chromaWidth to calculate the exact end
+    // position.
     final int dstEndV = dstStartV + dstStrideU * (chromaHeight - 1) + chromaWidth;
     if (dst.capacity() < dstEndV) {
       throw new IllegalArgumentException("Expected destination buffer capacity to be at least "

@@ -65,6 +65,14 @@ public class YuvHelper {
         dstWidth, dstHeight, (dstWidth + 1) / 2, (dstHeight + 1) / 2);
   }

+  /** Helper method for copying I420 to buffer with the given stride and slice height. */
+  public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+      ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight, int dstStride,
+      int dstSliceHeight) {
+    I420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dst, dstWidth, dstHeight,
+        dstStride, dstSliceHeight, (dstStride + 1) / 2, (dstSliceHeight + 1) / 2);
+  }
+
   /**
    * Copy I420 Buffer to a contiguously allocated buffer.
    * @param dstStrideY the stride of output buffers' Y plane.
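The new overload derives the chroma geometry from the Y plane. A caller-side usage sketch (assumed code, not from this CL; `i420`, `stride` and `sliceHeight` are supplied by the caller):

// Sketch: copying a frame into an encoder-sized buffer with the new overload.
static ByteBuffer copyForEncoder(VideoFrame.I420Buffer i420, int stride, int sliceHeight) {
  // Chroma geometry is derived exactly as the overload does internally.
  int chromaStride = (stride + 1) / 2;
  int chromaSliceHeight = (sliceHeight + 1) / 2;
  ByteBuffer dst =
      ByteBuffer.allocateDirect(sliceHeight * stride + chromaSliceHeight * chromaStride * 2);
  YuvHelper.I420Copy(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
      i420.getDataV(), i420.getStrideV(), dst, i420.getWidth(), i420.getHeight(), stride,
      sliceHeight);
  return dst;
}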
YuvHelperTest.java

@@ -101,6 +101,22 @@ public class YuvHelperTest {
   @SmallTest
   @Test
+  public void testI420CopyStride() {
+    final int dstStrideY = 4;
+    final int dstSliceHeightY = 4;
+    final int dstSize = dstStrideY * dstStrideY * 3 / 2;
+
+    final ByteBuffer dst = ByteBuffer.allocateDirect(dstSize);
+    YuvHelper.I420Copy(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_V,
+        TEST_I420_V, TEST_I420_STRIDE_U, dst, TEST_WIDTH, TEST_HEIGHT, dstStrideY, dstSliceHeightY);
+
+    assertByteBufferContentEquals(new byte[] {1, 2, 3, 0, 4, 5, 6, 0, 7, 8, 9, 0, 0, 0, 0, 0, 51,
+                                      52, 53, 54, 101, 102, 105, 106},
+        dst);
+  }
+
+  @SmallTest
+  @Test
   public void testI420CopyChromaStride() {
     final int dstStrideY = 4;
     final int dstSliceHeightY = 4;
     final int dstStrideU = dstStrideY / 2;
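For readers decoding the expected bytes: the test frame appears to be 3x3 (inferred from the Y values 1..9; the dimensions themselves are not shown on this page), copied into a Y plane with stride 4 and slice height 4. A worked size check:

int dstStrideY = 4;
int dstSliceHeightY = 4;
int dstSize = dstStrideY * dstStrideY * 3 / 2; // 24 bytes = 16 (Y) + 4 (U) + 4 (V)
// Padded Y plane (0 = stride / slice-height padding):
//   1 2 3 0 | 4 5 6 0 | 7 8 9 0 | 0 0 0 0
// U plane, stride 2, slice height 2:  51 52 | 53 54
// V plane, stride 2:                  101 102 | 105 106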
HardwareVideoEncoder.java

@@ -100,7 +100,6 @@ class HardwareVideoEncoder implements VideoEncoder {
   private final VideoCodecMimeType codecType;
   private final Integer surfaceColorFormat;
   private final Integer yuvColorFormat;
-  private final YuvFormat yuvFormat;
   private final Map<String, String> params;
   private final int keyFrameIntervalSec; // Base interval for generating key frames.
   // Interval at which to force a key frame. Used to reduce color distortions caused by some
@@ -143,6 +142,10 @@ class HardwareVideoEncoder implements VideoEncoder {
   private int stride;
   // Y-plane slice-height in the encoder's input
   private int sliceHeight;
+  // True if encoder input color format is semi-planar (NV12).
+  private boolean isSemiPlanar;
+  // Size of frame for current color format and stride, in bytes.
+  private int frameSizeBytes;
   private boolean useSurfaceMode;

   // --- Only accessed from the encoding thread.
@@ -190,7 +193,6 @@ class HardwareVideoEncoder implements VideoEncoder {
     this.codecType = codecType;
     this.surfaceColorFormat = surfaceColorFormat;
     this.yuvColorFormat = yuvColorFormat;
-    this.yuvFormat = YuvFormat.valueOf(yuvColorFormat);
     this.params = params;
     this.keyFrameIntervalSec = keyFrameIntervalSec;
     this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs);
@@ -282,9 +284,7 @@ class HardwareVideoEncoder implements VideoEncoder {
         textureEglBase.makeCurrent();
       }

-      MediaFormat inputFormat = codec.getInputFormat();
-      stride = getStride(inputFormat, width);
-      sliceHeight = getSliceHeight(inputFormat, height);
+      updateInputFormat(codec.getInputFormat());

       codec.start();
     } catch (IllegalStateException e) {
@@ -351,8 +351,7 @@ class HardwareVideoEncoder implements VideoEncoder {
       return VideoCodecStatus.UNINITIALIZED;
     }

-    final VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
-    final boolean isTextureBuffer = videoFrameBuffer instanceof VideoFrame.TextureBuffer;
+    final boolean isTextureBuffer = videoFrame.getBuffer() instanceof VideoFrame.TextureBuffer;

     // If input resolution changed, restart the codec with the new resolution.
     final int frameWidth = videoFrame.getBuffer().getWidth();
@@ -382,9 +381,6 @@ class HardwareVideoEncoder implements VideoEncoder {
       requestKeyFrame(videoFrame.getTimestampNs());
     }

-    // Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are
-    // subsampled at one byte per four pixels.
-    int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2;
     EncodedImage.Builder builder = EncodedImage.builder()
                                        .setCaptureTimeNs(videoFrame.getTimestampNs())
                                        .setEncodedWidth(videoFrame.getBuffer().getWidth())
@@ -402,8 +398,7 @@ class HardwareVideoEncoder implements VideoEncoder {
     if (useSurfaceMode) {
       returnValue = encodeTextureBuffer(videoFrame, presentationTimestampUs);
     } else {
-      returnValue =
-          encodeByteBuffer(videoFrame, presentationTimestampUs, videoFrameBuffer, bufferSize);
+      returnValue = encodeByteBuffer(videoFrame, presentationTimestampUs);
     }

     // Check if the queue was successful.
@@ -434,8 +429,7 @@ class HardwareVideoEncoder implements VideoEncoder {
     return VideoCodecStatus.OK;
   }

-  private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame, long presentationTimestampUs,
-      VideoFrame.Buffer videoFrameBuffer, int bufferSize) {
+  private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame, long presentationTimestampUs) {
     encodeThreadChecker.checkIsOnValidThread();
     // No timeout. Don't block for an input buffer, drop frames if the encoder falls behind.
     int index;
@@ -459,11 +453,19 @@ class HardwareVideoEncoder implements VideoEncoder {
       Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e);
       return VideoCodecStatus.ERROR;
     }
-    fillInputBuffer(buffer, videoFrameBuffer);
+
+    if (buffer.capacity() < frameSizeBytes) {
+      Logging.e(TAG,
+          "Input buffer size: " + buffer.capacity()
+              + " is smaller than frame size: " + frameSizeBytes);
+      return VideoCodecStatus.ERROR;
+    }
+
+    fillInputBuffer(buffer, videoFrame.getBuffer());

     try {
       codec.queueInputBuffer(
-          index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */);
+          index, 0 /* offset */, frameSizeBytes, presentationTimestampUs, 0 /* flags */);
     } catch (IllegalStateException e) {
       Logging.e(TAG, "queueInputBuffer failed", e);
       // IllegalStateException thrown when the codec is in the wrong state.
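Putting the pieces together, the post-CL byte-buffer path is roughly the following (condensed editorial sketch; error handling and the exact frame-drop status are paraphrased, see encodeByteBuffer() above for the real flow):

int index = codec.dequeueInputBuffer(0 /* timeoutUs: don't block, drop instead */);
ByteBuffer buffer = codec.getInputBuffer(index);
if (buffer.capacity() < frameSizeBytes) {
  return VideoCodecStatus.ERROR; // the buffer can't hold a stride-padded frame
}
fillInputBuffer(buffer, videoFrame.getBuffer());
// The fix: claim frameSizeBytes (stride-aware), not width * height * 3 / 2.
codec.queueInputBuffer(
    index, 0 /* offset */, frameSizeBytes, presentationTimestampUs, 0 /* flags */);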
@@ -700,20 +702,37 @@ class HardwareVideoEncoder implements VideoEncoder {
     return sharedContext != null && surfaceColorFormat != null;
   }

-  private static int getStride(MediaFormat inputFormat, int width) {
-    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && inputFormat != null
-        && inputFormat.containsKey(MediaFormat.KEY_STRIDE)) {
-      return inputFormat.getInteger(MediaFormat.KEY_STRIDE);
-    }
-    return width;
-  }
-
-  private static int getSliceHeight(MediaFormat inputFormat, int height) {
-    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && inputFormat != null
-        && inputFormat.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) {
-      return inputFormat.getInteger(MediaFormat.KEY_SLICE_HEIGHT);
-    }
-    return height;
-  }
+  /** Fetches stride and slice height from input media format */
+  private void updateInputFormat(MediaFormat format) {
+    stride = width;
+    sliceHeight = height;
+
+    if (format != null) {
+      if (format.containsKey(MediaFormat.KEY_STRIDE)) {
+        stride = format.getInteger(MediaFormat.KEY_STRIDE);
+        stride = Math.max(stride, width);
+      }
+
+      if (format.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) {
+        sliceHeight = format.getInteger(MediaFormat.KEY_SLICE_HEIGHT);
+        sliceHeight = Math.max(sliceHeight, height);
+      }
+    }
+
+    isSemiPlanar = isSemiPlanar(yuvColorFormat);
+    if (isSemiPlanar) {
+      int chromaHeight = (height + 1) / 2;
+      frameSizeBytes = sliceHeight * stride + chromaHeight * stride;
+    } else {
+      int chromaStride = (stride + 1) / 2;
+      int chromaSliceHeight = (sliceHeight + 1) / 2;
+      frameSizeBytes = sliceHeight * stride + chromaSliceHeight * chromaStride * 2;
+    }
+
+    Logging.d(TAG,
+        "updateInputFormat format: " + format + " stride: " + stride
+            + " sliceHeight: " + sliceHeight + " isSemiPlanar: " + isSemiPlanar
+            + " frameSizeBytes: " + frameSizeBytes);
+  }

   protected boolean isEncodingStatisticsSupported() {
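A worked example of the two frameSizeBytes branches (hypothetical codec reporting stride 704 and slice height 512 for a 640x480 stream; the formulas are the ones above):

int width = 640, height = 480;
int stride = 704, sliceHeight = 512;

// Semi-planar (NV12): padded Y plane plus full-stride interleaved UV rows.
int chromaHeight = (height + 1) / 2;                          // 240
int nv12Bytes = sliceHeight * stride + chromaHeight * stride; // 360448 + 168960 = 529408

// Planar (I420): padded Y plane plus two half-geometry chroma planes.
int chromaStride = (stride + 1) / 2;                          // 352
int chromaSliceHeight = (sliceHeight + 1) / 2;                // 256
int i420Bytes = sliceHeight * stride
    + chromaSliceHeight * chromaStride * 2;                   // 360448 + 180224 = 540672

// Both exceed the old tight estimate 640 * 480 * 3 / 2 = 460800.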
@@ -740,61 +759,30 @@ class HardwareVideoEncoder implements VideoEncoder {
   }

   // Visible for testing.
-  protected void fillInputBuffer(ByteBuffer buffer, VideoFrame.Buffer videoFrameBuffer) {
-    yuvFormat.fillBuffer(buffer, videoFrameBuffer, stride, sliceHeight);
+  protected void fillInputBuffer(ByteBuffer buffer, VideoFrame.Buffer frame) {
+    VideoFrame.I420Buffer i420 = frame.toI420();
+    if (isSemiPlanar) {
+      YuvHelper.I420ToNV12(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
+          i420.getDataV(), i420.getStrideV(), buffer, i420.getWidth(), i420.getHeight(), stride,
+          sliceHeight);
+    } else {
+      YuvHelper.I420Copy(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
+          i420.getDataV(), i420.getStrideV(), buffer, i420.getWidth(), i420.getHeight(), stride,
+          sliceHeight);
+    }
+    i420.release();
   }

-  /**
-   * Enumeration of supported YUV color formats used for MediaCodec's input.
-   */
-  private enum YuvFormat {
-    I420 {
-      @Override
-      void fillBuffer(
-          ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY) {
-        /*
-         * According to the docs in Android MediaCodec, the stride of the U and V planes can be
-         * calculated based on the color format, though it is generally undefined and depends on the
-         * device and release.
-         * <p/> Assuming the width and height, dstStrideY and dstSliceHeightY are
-         * even, it works fine when we define the stride and slice-height of the dst U/V plane to be
-         * half of the dst Y plane.
-         */
-        int dstStrideU = dstStrideY / 2;
-        int dstSliceHeight = dstSliceHeightY / 2;
-        VideoFrame.I420Buffer i420 = srcBuffer.toI420();
-        YuvHelper.I420Copy(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
-            i420.getDataV(), i420.getStrideV(), dstBuffer, i420.getWidth(), i420.getHeight(),
-            dstStrideY, dstSliceHeightY, dstStrideU, dstSliceHeight);
-        i420.release();
-      }
-    },
-    NV12 {
-      @Override
-      void fillBuffer(
-          ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY) {
-        VideoFrame.I420Buffer i420 = srcBuffer.toI420();
-        YuvHelper.I420ToNV12(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
-            i420.getDataV(), i420.getStrideV(), dstBuffer, i420.getWidth(), i420.getHeight(),
-            dstStrideY, dstSliceHeightY);
-        i420.release();
-      }
-    };
-
-    abstract void fillBuffer(
-        ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY);
-
-    static YuvFormat valueOf(int colorFormat) {
-      switch (colorFormat) {
-        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
-          return I420;
-        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
-        case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
-        case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
-          return NV12;
-        default:
-          throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat);
-      }
+  protected boolean isSemiPlanar(int colorFormat) {
+    switch (colorFormat) {
+      case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
+        return false;
+      case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
+      case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
+      case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
+        return true;
+      default:
+        throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat);
     }
   }
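For reference, the two memory layouts fillInputBuffer() now selects between (editorial illustration; S = stride, H = slice height reported by the codec):

// I420 (planar):      [ Y: H rows x S bytes ][ U: ceil(H/2) x ceil(S/2) ][ V: ceil(H/2) x ceil(S/2) ]
// NV12 (semi-planar): [ Y: H rows x S bytes ][ UV interleaved: ceil(H/2) rows x S bytes ]
// These correspond one-to-one with the two frameSizeBytes formulas in updateInputFormat() above.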
MediaCodecWrapper.java

@@ -46,6 +46,8 @@ interface MediaCodecWrapper {

   MediaFormat getOutputFormat();

+  MediaFormat getOutputFormat(int index);
+
   ByteBuffer getInputBuffer(int index);

   ByteBuffer getOutputBuffer(int index);

@@ -55,6 +57,4 @@ interface MediaCodecWrapper {
   void setParameters(Bundle params);

   MediaCodecInfo getCodecInfo();
-
-  MediaFormat getOutputFormat(int index);
 }
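These two hunks just move the declaration next to its sibling; the wrapper mirrors android.media.MediaCodec, which has offered a per-buffer getOutputFormat(int) overload since API 21. In direct MediaCodec code the pair looks like this (sketch; codec setup and buffer dequeueing elided):

MediaFormat streamFormat = mediaCodec.getOutputFormat();      // current stream-level format
MediaFormat bufferFormat = mediaCodec.getOutputFormat(index); // format of one dequeued buffer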
MediaCodecWrapperFactoryImpl.java

@@ -88,6 +88,11 @@ class MediaCodecWrapperFactoryImpl implements MediaCodecWrapperFactory {
       return mediaCodec.getOutputFormat();
     }

+    @Override
+    public MediaFormat getOutputFormat(int index) {
+      return mediaCodec.getOutputFormat(index);
+    }
+
     @Override
     public ByteBuffer getInputBuffer(int index) {
       return mediaCodec.getInputBuffer(index);

@@ -112,11 +117,6 @@ class MediaCodecWrapperFactoryImpl implements MediaCodecWrapperFactory {
     public MediaCodecInfo getCodecInfo() {
       return mediaCodec.getCodecInfo();
     }
-
-    @Override
-    public MediaFormat getOutputFormat(int index) {
-      return mediaCodec.getOutputFormat(index);
-    }
   }

   @Override
AndroidVideoDecoderTest.java

@@ -39,6 +39,7 @@ import org.mockito.ArgumentCaptor;
 import org.mockito.InOrder;
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
+import org.mockito.Spy;
 import org.robolectric.annotation.Config;
 import org.webrtc.EncodedImage.FrameType;
 import org.webrtc.FakeMediaCodecWrapper.State;

@@ -193,7 +194,7 @@ public class AndroidVideoDecoderTest {
   @Mock private EglBase.Context mockEglBaseContext;
   @Mock private SurfaceTextureHelper mockSurfaceTextureHelper;
   @Mock private VideoDecoder.Callback mockDecoderCallback;
-  private FakeMediaCodecWrapper fakeMediaCodecWrapper;
+  @Spy private FakeMediaCodecWrapper fakeMediaCodecWrapper;
   private FakeDecoderCallback fakeDecoderCallback;

   @Before

@@ -201,10 +202,6 @@ public class AndroidVideoDecoderTest {
     MockitoAnnotations.initMocks(this);
     when(mockSurfaceTextureHelper.getSurfaceTexture())
         .thenReturn(new SurfaceTexture(/*texName=*/0));
-    MediaFormat inputFormat = new MediaFormat();
-    MediaFormat outputFormat = new MediaFormat();
-    // TODO(sakal): Add more details to output format as needed.
-    fakeMediaCodecWrapper = spy(new FakeMediaCodecWrapper(inputFormat, outputFormat));
     fakeDecoderCallback = new FakeDecoderCallback();
   }
FakeMediaCodecWrapper.java

@@ -105,18 +105,13 @@ public class FakeMediaCodecWrapper implements MediaCodecWrapper {
   private State state = State.STOPPED_UNINITIALIZED;
   private @Nullable MediaFormat configuredFormat;
   private int configuredFlags;
-  private final MediaFormat inputFormat;
-  private final MediaFormat outputFormat;
   private final ByteBuffer[] inputBuffers = new ByteBuffer[NUM_INPUT_BUFFERS];
   private final ByteBuffer[] outputBuffers = new ByteBuffer[NUM_OUTPUT_BUFFERS];
   private final boolean[] inputBufferReserved = new boolean[NUM_INPUT_BUFFERS];
   private final boolean[] outputBufferReserved = new boolean[NUM_OUTPUT_BUFFERS];
   private final List<QueuedOutputBufferInfo> queuedOutputBuffers = new ArrayList<>();

-  public FakeMediaCodecWrapper(MediaFormat inputFormat, MediaFormat outputFormat) {
-    this.inputFormat = inputFormat;
-    this.outputFormat = outputFormat;
-  }
+  public FakeMediaCodecWrapper() {}

   /** Returns the current simulated state of MediaCodec. */
   public State getState() {

@@ -304,12 +299,17 @@ public class FakeMediaCodecWrapper implements MediaCodecWrapper {

   @Override
   public MediaFormat getInputFormat() {
-    return inputFormat;
+    return new MediaFormat();
   }

   @Override
   public MediaFormat getOutputFormat() {
-    return outputFormat;
+    return new MediaFormat();
   }

+  @Override
+  public MediaFormat getOutputFormat(int index) {
+    return new MediaFormat();
+  }
+
   @Override

@@ -324,9 +324,4 @@ public class FakeMediaCodecWrapper implements MediaCodecWrapper {
   public MediaCodecInfo getCodecInfo() {
     return null;
   }
-
-  @Override
-  public MediaFormat getOutputFormat(int index) {
-    return outputFormat;
-  }
 }
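With the constructor-injected formats gone, tests that need specific format values stub the @Spy instead of building a bespoke fake; the HardwareVideoEncoderTest changes below use exactly this Mockito pattern:

MediaFormat inputFormat = new MediaFormat();
inputFormat.setInteger(MediaFormat.KEY_STRIDE, 1280);      // example values
inputFormat.setInteger(MediaFormat.KEY_SLICE_HEIGHT, 480);
doReturn(inputFormat).when(fakeMediaCodecWrapper).getInputFormat();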
HardwareVideoEncoderTest.java

@@ -12,6 +12,8 @@ package org.webrtc;

 import static android.media.MediaCodec.BUFFER_FLAG_CODEC_CONFIG;
 import static android.media.MediaCodec.BUFFER_FLAG_SYNC_FRAME;
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
 import static com.google.common.truth.Truth.assertThat;
 import static java.util.concurrent.TimeUnit.SECONDS;
 import static org.mockito.ArgumentMatchers.any;
@@ -32,9 +34,7 @@ import static org.webrtc.VideoCodecMimeType.VP8;
 import static org.webrtc.VideoCodecMimeType.VP9;

 import android.media.MediaCodec;
-import android.media.MediaCodecInfo;
 import android.media.MediaFormat;
-import android.os.Build.VERSION_CODES;
 import android.os.Bundle;
 import androidx.test.runner.AndroidJUnit4;
 import java.nio.ByteBuffer;
@@ -48,10 +48,12 @@ import org.mockito.ArgumentCaptor;
 import org.mockito.InOrder;
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
+import org.mockito.Spy;
 import org.robolectric.annotation.Config;
+import org.webrtc.EncodedImage;
 import org.webrtc.EncodedImage.FrameType;
 import org.webrtc.FakeMediaCodecWrapper.State;
 import org.webrtc.Logging;
 import org.webrtc.VideoCodecStatus;
 import org.webrtc.VideoEncoder;
 import org.webrtc.VideoEncoder.BitrateAllocation;
@@ -65,10 +67,10 @@ import org.webrtc.VideoFrame.I420Buffer;
 @RunWith(AndroidJUnit4.class)
 @Config(manifest = Config.NONE)
 public class HardwareVideoEncoderTest {
+  private static final int WIDTH = 640;
+  private static final int HEIGHT = 480;
   private static final VideoEncoder.Settings TEST_ENCODER_SETTINGS = new Settings(
-      /* numberOfCores= */ 1,
-      /* width= */ 640,
-      /* height= */ 480,
+      /* numberOfCores= */ 1, WIDTH, HEIGHT,
       /* startBitrate= */ 10000,
       /* maxFramerate= */ 30,
       /* numberOfSimulcastStreams= */ 1,
@@ -147,6 +149,7 @@ public class HardwareVideoEncoderTest {
     private VideoCodecMimeType codecType = VP8;
     private BitrateAdjuster bitrateAdjuster = new BaseBitrateAdjuster();
     private boolean isEncodingStatisticsSupported;
+    private int colorFormat = COLOR_FormatYUV420Planar;

     public TestEncoderBuilder setCodecType(VideoCodecMimeType codecType) {
       this.codecType = codecType;
@@ -158,18 +161,22 @@ public class HardwareVideoEncoderTest {
       return this;
     }

-    public TestEncoderBuilder SetIsEncodingStatisticsSupported(
+    public TestEncoderBuilder setIsEncodingStatisticsSupported(
         boolean isEncodingStatisticsSupported) {
       this.isEncodingStatisticsSupported = isEncodingStatisticsSupported;
       return this;
     }

+    public TestEncoderBuilder setColorFormat(int colorFormat) {
+      this.colorFormat = colorFormat;
+      return this;
+    }
+
     public TestEncoder build() {
       return new TestEncoder((String name)
                                  -> fakeMediaCodecWrapper,
           "org.webrtc.testencoder", codecType,
-          /* surfaceColorFormat= */ null,
-          /* yuvColorFormat= */ MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
+          /* surfaceColorFormat= */ null, colorFormat,
           /* params= */ new HashMap<>(),
           /* keyFrameIntervalSec= */ 0,
           /* forceKeyFrameIntervalMs= */ 0, bitrateAdjuster,
@@ -185,16 +192,12 @@ public class HardwareVideoEncoderTest {
     return new VideoFrame(testBuffer, /* rotation= */ 0, timestampNs);
   }

-  @Mock VideoEncoder.Callback mockEncoderCallback;
-  private FakeMediaCodecWrapper fakeMediaCodecWrapper;
+  @Mock private VideoEncoder.Callback mockEncoderCallback;
+  @Spy private FakeMediaCodecWrapper fakeMediaCodecWrapper;

   @Before
   public void setUp() {
     MockitoAnnotations.initMocks(this);
-    MediaFormat inputFormat = new MediaFormat();
-    MediaFormat outputFormat = new MediaFormat();
-    // TODO(sakal): Add more details to output format as needed.
-    fakeMediaCodecWrapper = spy(new FakeMediaCodecWrapper(inputFormat, outputFormat));
   }

   @Test
@@ -223,7 +226,7 @@ public class HardwareVideoEncoderTest {

   @Test
   public void encodingStatistics_unsupported_disabled() throws InterruptedException {
-    TestEncoder encoder = new TestEncoderBuilder().SetIsEncodingStatisticsSupported(false).build();
+    TestEncoder encoder = new TestEncoderBuilder().setIsEncodingStatisticsSupported(false).build();

     assertThat(encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback))
         .isEqualTo(VideoCodecStatus.OK);

@@ -257,7 +260,7 @@ public class HardwareVideoEncoderTest {

   @Test
   public void encodingStatistics_supported_enabled() throws InterruptedException {
-    TestEncoder encoder = new TestEncoderBuilder().SetIsEncodingStatisticsSupported(true).build();
+    TestEncoder encoder = new TestEncoderBuilder().setIsEncodingStatisticsSupported(true).build();

     assertThat(encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback))
         .isEqualTo(VideoCodecStatus.OK);

@@ -294,7 +297,7 @@ public class HardwareVideoEncoderTest {
   @Test
   public void encodingStatistics_fetchedBeforeFrameBufferIsReleased() throws InterruptedException {
     TestEncoder encoder =
-        new TestEncoderBuilder().setCodecType(H264).SetIsEncodingStatisticsSupported(true).build();
+        new TestEncoderBuilder().setCodecType(H264).setIsEncodingStatisticsSupported(true).build();
     assertThat(encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback))
         .isEqualTo(VideoCodecStatus.OK);
@@ -569,4 +572,84 @@ public class HardwareVideoEncoderTest {
   public void encode_h265KeyFrame_emptyConfig_configNotPrepended() throws InterruptedException {
     encodeWithConfigBuffer(H265, /*keyFrame=*/true, /* emptyConfig= */ true, "frame");
   }
+
+  private void encodeWithStride(int colorFormat, int stride, int sliceHeight,
+      int expectedBufferSize) throws InterruptedException {
+    MediaFormat inputFormat = new MediaFormat();
+    inputFormat.setInteger(MediaFormat.KEY_STRIDE, stride);
+    inputFormat.setInteger(MediaFormat.KEY_SLICE_HEIGHT, sliceHeight);
+    doReturn(inputFormat).when(fakeMediaCodecWrapper).getInputFormat();
+
+    ByteBuffer inputBuffer = ByteBuffer.allocateDirect(calcBufferSize(
+        colorFormat, HEIGHT, Math.max(stride, WIDTH), Math.max(sliceHeight, HEIGHT)));
+    doReturn(inputBuffer).when(fakeMediaCodecWrapper).getInputBuffer(anyInt());
+
+    TestEncoder encoder = new TestEncoderBuilder().setColorFormat(colorFormat).build();
+    encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback);
+    encoder.encode(createTestVideoFrame(/* timestampNs= */ 0), ENCODE_INFO_DELTA_FRAME);
+
+    verify(fakeMediaCodecWrapper)
+        .queueInputBuffer(
+            /*index=*/anyInt(), /*offset=*/eq(0), /*size=*/eq(expectedBufferSize),
+            /*presentationTimeUs=*/anyLong(), /*flags=*/anyInt());
+  }
+
+  @Test
+  public void encode_invalidStride_planar_ignored() throws InterruptedException {
+    encodeWithStride(/*colorFormat=*/COLOR_FormatYUV420Planar,
+        /*stride=*/WIDTH / 2,
+        /*sliceHeight=*/HEIGHT,
+        /*expectedBufferSize=*/WIDTH * HEIGHT * 3 / 2);
+  }
+
+  @Test
+  public void encode_invalidSliceHeight_planar_ignored() throws InterruptedException {
+    encodeWithStride(/*colorFormat=*/COLOR_FormatYUV420Planar,
+        /*stride=*/WIDTH,
+        /*sliceHeight=*/HEIGHT / 2,
+        /*expectedBufferSize=*/WIDTH * HEIGHT * 3 / 2);
+  }
+
+  @Test
+  public void encode_validStride_planar_applied() throws InterruptedException {
+    encodeWithStride(/*colorFormat=*/COLOR_FormatYUV420Planar,
+        /*stride=*/WIDTH * 2,
+        /*sliceHeight=*/HEIGHT,
+        /*expectedBufferSize=*/WIDTH * 2 * HEIGHT * 3 / 2);
+  }
+
+  @Test
+  public void encode_validSliceHeight_planar_applied() throws InterruptedException {
+    encodeWithStride(/*colorFormat=*/COLOR_FormatYUV420Planar,
+        /*stride=*/WIDTH,
+        /*sliceHeight=*/HEIGHT * 2,
+        /*expectedBufferSize=*/WIDTH * HEIGHT * 2 * 3 / 2);
+  }
+
+  @Test
+  public void encode_validStride_semiPlanar_applied() throws InterruptedException {
+    encodeWithStride(/*colorFormat=*/COLOR_FormatYUV420SemiPlanar,
+        /*stride=*/WIDTH * 2,
+        /*sliceHeight=*/HEIGHT,
+        /*expectedBufferSize=*/WIDTH * 2 * HEIGHT * 3 / 2);
+  }
+
+  @Test
+  public void encode_validSliceHeight_semiPlanar_applied() throws InterruptedException {
+    encodeWithStride(/*colorFormat=*/COLOR_FormatYUV420SemiPlanar,
+        /*stride=*/WIDTH,
+        /*sliceHeight=*/HEIGHT * 2,
+        /*expectedBufferSize=*/WIDTH * HEIGHT * 2 + WIDTH * HEIGHT / 2);
+  }
+
+  /** Returns buffer size in bytes for the given color format and dimensions. */
+  private int calcBufferSize(int colorFormat, int height, int stride, int sliceHeight) {
+    if (colorFormat == COLOR_FormatYUV420SemiPlanar) {
+      int chromaHeight = (height + 1) / 2;
+      return sliceHeight * stride + chromaHeight * stride;
+    }
+    int chromaStride = (stride + 1) / 2;
+    int chromaSliceHeight = (sliceHeight + 1) / 2;
+    return sliceHeight * stride + chromaSliceHeight * chromaStride * 2;
+  }
 }
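A quick sanity check of the expectedBufferSize constants in the new tests (WIDTH = 640, HEIGHT = 480; arithmetic follows calcBufferSize() above):

// encode_validStride_planar_applied: stride 1280, slice height 480 (planar)
//   480 * 1280 + 240 * 640 * 2 = 921600 = WIDTH * 2 * HEIGHT * 3 / 2
// encode_validSliceHeight_semiPlanar_applied: stride 640, slice height 960 (semi-planar)
//   960 * 640 + 240 * 640 = 768000 = WIDTH * HEIGHT * 2 + WIDTH * HEIGHT / 2
// encode_invalidStride_planar_ignored: stride 320 is clamped up to WIDTH by
//   updateInputFormat(), so the size falls back to 640 * 480 * 3 / 2 = 460800.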