Expose functionality to convert TextureBuffer to I420.

Bug: webrtc:8392
Change-Id: I79682efbef3aecbba904aa5047b229833fae25e8
Reviewed-on: https://webrtc-review.googlesource.com/8940
Commit-Queue: Sami Kalliomäki <sakal@webrtc.org>
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20313}
Author: Sami Kalliomäki, 2017-10-16 11:20:26 +02:00 (committed by Commit Bot)
Parent: 6bf43d2818
Commit: cb98b11b71
11 changed files with 227 additions and 95 deletions
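
The change routes TextureBuffer.toI420() through SurfaceTextureHelper and YuvConverter, so callers no longer need GL code of their own. A minimal caller sketch (illustrative only; the buffer is assumed to come from SurfaceTextureHelper.createTextureBuffer()):

    // Convert a texture-backed frame to I420 and read the planes.
    VideoFrame.I420Buffer i420 = textureBuffer.toI420(); // delegates to textureToYuv()
    ByteBuffer y = i420.getDataY(); // likewise getDataU()/getDataV(), with getStride*()
    // ... consume the planes ...
    i420.release(); // frees the backing native ByteBuffer via the release callback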

View File

@@ -436,6 +436,7 @@ android_library("libjingle_peerconnection_java") {
     "api/org/webrtc/VideoSink.java",
     "api/org/webrtc/VideoSource.java",
     "api/org/webrtc/VideoTrack.java",
+    "api/org/webrtc/YuvConverter.java",
     "src/java/org/webrtc/AndroidVideoTrackSourceObserver.java",
     "src/java/org/webrtc/BaseBitrateAdjuster.java",
     "src/java/org/webrtc/BitrateAdjuster.java",
@@ -461,7 +462,6 @@ android_library("libjingle_peerconnection_java") {
     "src/java/org/webrtc/VideoDecoderWrapperCallback.java",
     "src/java/org/webrtc/VideoEncoderWrapperCallback.java",
     "src/java/org/webrtc/WrappedNativeI420Buffer.java",
-    "src/java/org/webrtc/YuvConverter.java",
   ]
   deps = [

View File

@@ -231,8 +231,11 @@ public class SurfaceTextureHelper {
     });
   }

-  public void textureToYUV(final ByteBuffer buf, final int width, final int height,
-      final int stride, final int textureId, final float[] transformMatrix) {
+  /** Deprecated, use textureToYuv. */
+  @Deprecated
+  @SuppressWarnings("deprecation") // yuvConverter.convert is deprecated
+  void textureToYUV(final ByteBuffer buf, final int width, final int height, final int stride,
+      final int textureId, final float[] transformMatrix) {
     if (textureId != oesTextureId) {
       throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");
     }
@@ -248,6 +251,25 @@ public class SurfaceTextureHelper {
     });
   }

+  /**
+   * Posts to the correct thread to convert |textureBuffer| to I420. Must only be called with
+   * textures generated by this SurfaceTextureHelper.
+   */
+  public VideoFrame.I420Buffer textureToYuv(final TextureBuffer textureBuffer) {
+    if (textureBuffer.getTextureId() != oesTextureId) {
+      throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");
+    }
+
+    final VideoFrame.I420Buffer[] result = new VideoFrame.I420Buffer[1];
+    ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
+      if (yuvConverter == null) {
+        yuvConverter = new YuvConverter();
+      }
+      result[0] = yuvConverter.convert(textureBuffer);
+    });
+    return result[0];
+  }
+
   private void updateTexImage() {
     // SurfaceTexture.updateTexImage apparently can compete and deadlock with eglSwapBuffers,
     // as observed on Nexus 5. Therefore, synchronize it with the EGL functions.
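
The one-element result array in textureToYuv is the usual idiom for getting a value out of a lambda: Java lambdas may only capture effectively final locals, and invokeAtFrontUninterruptibly blocks the caller until the posted task has run, so the holder is safe to read afterwards. The pattern in isolation (convertOnHelperThread is a hypothetical stand-in for the yuvConverter call):

    final VideoFrame.I420Buffer[] result = new VideoFrame.I420Buffer[1];
    ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
      result[0] = convertOnHelperThread(); // executes on the handler's thread
    });
    return result[0]; // safe: the call above does not return until the lambda ran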

View File

@@ -15,8 +15,8 @@ import android.os.HandlerThread;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.util.concurrent.CountDownLatch;
 import java.util.ArrayList;
+import java.util.concurrent.CountDownLatch;

 /**
  * Can be used to save the video frames to file.
@@ -94,7 +94,7 @@ public class VideoFileRenderer implements VideoRenderer.Callbacks {
       final float[] texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);

       try {
-        ByteBuffer buffer = nativeCreateNativeByteBuffer(outputFrameSize);
+        ByteBuffer buffer = JniCommon.allocateNativeByteBuffer(outputFrameSize);
         if (!frame.yuvFrame) {
           yuvConverter.convert(outputFrameBuffer, outputFileWidth, outputFileHeight, outputFileWidth,
               frame.textureId, texMatrix);
@@ -153,7 +153,7 @@ public class VideoFileRenderer implements VideoRenderer.Callbacks {
       videoOutFile.write(data);
-      nativeFreeNativeByteBuffer(buffer);
+      JniCommon.freeNativeByteBuffer(buffer);
     }
     videoOutFile.close();
     Logging.d(TAG, "Video written to disk as " + outputFileName + ". Number frames are "
@@ -167,8 +167,4 @@ public class VideoFileRenderer implements VideoRenderer.Callbacks {
   public static native void nativeI420Scale(ByteBuffer srcY, int strideY, ByteBuffer srcU,
       int strideU, ByteBuffer srcV, int strideV, int width, int height, ByteBuffer dst,
       int dstWidth, int dstHeight);
-
-  public static native ByteBuffer nativeCreateNativeByteBuffer(int size);
-
-  public static native void nativeFreeNativeByteBuffer(ByteBuffer buffer);
 }

View File

@@ -11,6 +11,8 @@
 package org.webrtc;

 import android.graphics.Matrix;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
 import java.nio.ByteBuffer;

 /**
@@ -88,7 +90,20 @@ public class VideoFrame {
    * Interface for buffers that are stored as a single texture, either in OES or RGB format.
    */
   public interface TextureBuffer extends Buffer {
-    enum Type { OES, RGB }
+    enum Type {
+      OES(GLES11Ext.GL_TEXTURE_EXTERNAL_OES),
+      RGB(GLES20.GL_TEXTURE_2D);
+
+      private final int glTarget;
+
+      private Type(final int glTarget) {
+        this.glTarget = glTarget;
+      }
+
+      public int getGlTarget() {
+        return glTarget;
+      }
+    }

     Type getType();
     int getTextureId();
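
Attaching the GL target to the enum lets call sites bind either texture type without branching, which is exactly what the YuvConverter hunks below do. The idiom, sketched:

    // Bind whatever target the buffer uses instead of hard-coding
    // GLES11Ext.GL_TEXTURE_EXTERNAL_OES or GLES20.GL_TEXTURE_2D.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(buffer.getType().getGlTarget(), buffer.getTextureId());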

View File

@@ -14,6 +14,8 @@ import android.opengl.GLES11Ext;
 import android.opengl.GLES20;
 import java.nio.ByteBuffer;
 import java.nio.FloatBuffer;
+import org.webrtc.VideoFrame.I420Buffer;
+import org.webrtc.VideoFrame.TextureBuffer;

 /**
  * Class for converting OES textures to a YUV ByteBuffer. It should be constructed on a thread with
@@ -50,12 +52,12 @@ class YuvConverter {
       + "  interp_tc = (texMatrix * in_tc).xy;\n"
       + "}\n";

-  private static final String FRAGMENT_SHADER =
+  private static final String OES_FRAGMENT_SHADER =
       "#extension GL_OES_EGL_image_external : require\n"
       + "precision mediump float;\n"
       + "varying vec2 interp_tc;\n"
       + "\n"
-      + "uniform samplerExternalOES oesTex;\n"
+      + "uniform samplerExternalOES tex;\n"
       // Difference in texture coordinate corresponding to one
       // sub-pixel in the x direction.
       + "uniform vec2 xUnit;\n"
@@ -70,22 +72,51 @@ class YuvConverter {
       // try to do it as a vec3 x mat3x4, followed by an add in of a
       // constant vector.
       + " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
-      + "     texture2D(oesTex, interp_tc - 1.5 * xUnit).rgb);\n"
+      + "     texture2D(tex, interp_tc - 1.5 * xUnit).rgb);\n"
       + " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
-      + "     texture2D(oesTex, interp_tc - 0.5 * xUnit).rgb);\n"
+      + "     texture2D(tex, interp_tc - 0.5 * xUnit).rgb);\n"
       + " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
-      + "     texture2D(oesTex, interp_tc + 0.5 * xUnit).rgb);\n"
+      + "     texture2D(tex, interp_tc + 0.5 * xUnit).rgb);\n"
       + " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
-      + "     texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
+      + "     texture2D(tex, interp_tc + 1.5 * xUnit).rgb);\n"
+      + "}\n";
+
+  private static final String RGB_FRAGMENT_SHADER =
+      "precision mediump float;\n"
+      + "varying vec2 interp_tc;\n"
+      + "\n"
+      + "uniform sampler2D tex;\n"
+      // Difference in texture coordinate corresponding to one
+      // sub-pixel in the x direction.
+      + "uniform vec2 xUnit;\n"
+      // Color conversion coefficients, including constant term
+      + "uniform vec4 coeffs;\n"
+      + "\n"
+      + "void main() {\n"
+      // Since the alpha read from the texture is always 1, this could
+      // be written as a mat4 x vec4 multiply. However, that seems to
+      // give a worse framerate, possibly because the additional
+      // multiplies by 1.0 consume resources. TODO(nisse): Could also
+      // try to do it as a vec3 x mat3x4, followed by an add in of a
+      // constant vector.
+      + " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+      + "     texture2D(tex, interp_tc - 1.5 * xUnit).rgb);\n"
+      + " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+      + "     texture2D(tex, interp_tc - 0.5 * xUnit).rgb);\n"
+      + " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+      + "     texture2D(tex, interp_tc + 0.5 * xUnit).rgb);\n"
+      + " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+      + "     texture2D(tex, interp_tc + 1.5 * xUnit).rgb);\n"
       + "}\n";
   // clang-format on

-  private final GlTextureFrameBuffer textureFrameBuffer;
-  private final GlShader shader;
-  private final int texMatrixLoc;
-  private final int xUnitLoc;
-  private final int coeffsLoc;
   private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+  private final GlTextureFrameBuffer textureFrameBuffer;
+  private TextureBuffer.Type shaderTextureType;
+  private GlShader shader;
+  private int texMatrixLoc;
+  private int xUnitLoc;
+  private int coeffsLoc;
   private boolean released = false;

   /**
@@ -94,12 +125,80 @@ class YuvConverter {
   public YuvConverter() {
     threadChecker.checkIsOnValidThread();
     textureFrameBuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
-    shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
+  }
+
+  /** Converts the texture buffer to I420. */
+  public I420Buffer convert(TextureBuffer textureBuffer) {
+    final int width = textureBuffer.getWidth();
+    final int height = textureBuffer.getHeight();
+
+    // SurfaceTextureHelper requires a stride that is divisible by 8. Round width up.
+    // See SurfaceTextureHelper for details on the size and format.
+    final int stride = ((width + 7) / 8) * 8;
+    final int uvHeight = (height + 1) / 2;
+    // Due to the layout used by SurfaceTextureHelper, vPos + stride * uvHeight would overrun the
+    // buffer. Add one row at the bottom to compensate for this. There will never be data in the
+    // extra row, but now other code does not have to deal with v stride * v height exceeding the
+    // buffer's capacity.
+    final int size = stride * (height + uvHeight + 1);
+    ByteBuffer buffer = JniCommon.allocateNativeByteBuffer(size);
+    convert(buffer, width, height, stride, textureBuffer.getTextureId(),
+        RendererCommon.convertMatrixFromAndroidGraphicsMatrix(textureBuffer.getTransformMatrix()),
+        textureBuffer.getType());
+
+    final int yPos = 0;
+    final int uPos = yPos + stride * height;
+    // Rows of U and V alternate in the buffer, so V data starts after the first row of U.
+    final int vPos = uPos + stride / 2;
+
+    buffer.position(yPos);
+    buffer.limit(yPos + stride * height);
+    ByteBuffer dataY = buffer.slice();
+
+    buffer.position(uPos);
+    buffer.limit(uPos + stride * uvHeight);
+    ByteBuffer dataU = buffer.slice();
+
+    buffer.position(vPos);
+    buffer.limit(vPos + stride * uvHeight);
+    ByteBuffer dataV = buffer.slice();
+
+    // SurfaceTextureHelper uses the same stride for Y, U, and V data.
+    return JavaI420Buffer.wrap(width, height, dataY, stride, dataU, stride, dataV, stride,
+        () -> { JniCommon.freeNativeByteBuffer(buffer); });
+  }
+
+  /** Deprecated, use convert(TextureBuffer). */
+  @Deprecated
+  void convert(ByteBuffer buf, int width, int height, int stride, int srcTextureId,
+      float[] transformMatrix) {
+    convert(buf, width, height, stride, srcTextureId, transformMatrix, TextureBuffer.Type.OES);
+  }
+
+  private void initShader(TextureBuffer.Type textureType) {
+    if (shader != null) {
+      shader.release();
+    }
+
+    final String fragmentShader;
+    switch (textureType) {
+      case OES:
+        fragmentShader = OES_FRAGMENT_SHADER;
+        break;
+      case RGB:
+        fragmentShader = RGB_FRAGMENT_SHADER;
+        break;
+      default:
+        throw new IllegalArgumentException("Unsupported texture type.");
+    }
+
+    shaderTextureType = textureType;
+    shader = new GlShader(VERTEX_SHADER, fragmentShader);
     shader.useProgram();
     texMatrixLoc = shader.getUniformLocation("texMatrix");
     xUnitLoc = shader.getUniformLocation("xUnit");
     coeffsLoc = shader.getUniformLocation("coeffs");
-    GLES20.glUniform1i(shader.getUniformLocation("oesTex"), 0);
+    GLES20.glUniform1i(shader.getUniformLocation("tex"), 0);
     GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
     // Initialize vertex shader attributes.
     shader.setVertexAttribArray("in_pos", 2, DEVICE_RECTANGLE);
@@ -108,12 +207,15 @@ class YuvConverter {
     shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
   }

-  public void convert(ByteBuffer buf, int width, int height, int stride, int srcTextureId,
-      float[] transformMatrix) {
+  private void convert(ByteBuffer buf, int width, int height, int stride, int srcTextureId,
+      float[] transformMatrix, TextureBuffer.Type textureType) {
     threadChecker.checkIsOnValidThread();
     if (released) {
       throw new IllegalStateException("YuvConverter.convert called on released object");
     }
+    if (textureType != shaderTextureType) {
+      initShader(textureType);
+    }

     // We draw into a buffer laid out like
     //
@@ -174,7 +276,7 @@ class YuvConverter {
     GlUtil.checkNoGLES2Error("glBindFramebuffer");

     GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
-    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, srcTextureId);
+    GLES20.glBindTexture(textureType.getGlTarget(), srcTextureId);
     GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0);

     // Draw Y
@@ -211,13 +313,15 @@ class YuvConverter {
     // Unbind texture. Reportedly needed on some devices to get
     // the texture updated from the camera.
-    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+    GLES20.glBindTexture(textureType.getGlTarget(), 0);
   }

   public void release() {
     threadChecker.checkIsOnValidThread();
     released = true;
-    shader.release();
+    if (shader != null) {
+      shader.release();
+    }
     textureFrameBuffer.release();
   }
 }
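
To make the layout arithmetic in convert(TextureBuffer) concrete, here are the values for a hypothetical 640x480 texture:

    int width = 640, height = 480;
    int stride = ((width + 7) / 8) * 8;          // 640 (already a multiple of 8)
    int uvHeight = (height + 1) / 2;             // 240
    int size = stride * (height + uvHeight + 1); // 640 * 721 = 461440 bytes
    int yPos = 0;                                // Y rows occupy bytes 0..307199
    int uPos = yPos + stride * height;           // 307200
    int vPos = uPos + stride / 2;                // 307520; U and V rows interleave,
                                                 // so V starts half a row after U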

View File

@@ -22,6 +22,7 @@ import android.support.test.filters.SmallTest;
 import java.nio.ByteBuffer;
 import java.util.concurrent.CountDownLatch;
 import org.chromium.base.test.BaseJUnit4ClassRunner;
+import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;

@@ -95,6 +96,12 @@ public class SurfaceTextureHelperTest {
             + actual);
   }

+  @Before
+  public void setUp() {
+    // Load the JNI library for textureToYuv.
+    NativeLibrary.initialize(new NativeLibrary.DefaultLoader());
+  }
+
   /**
    * Test normal use by receiving three uniform texture frames. Texture frames are returned as early
    * as possible. The texture pixel values are inspected by drawing the texture frame to a pixel
@@ -428,7 +435,7 @@ public class SurfaceTextureHelperTest {
   @Test
   @MediumTest
-  public void testTexturetoYUV() throws InterruptedException {
+  public void testTexturetoYuv() throws InterruptedException {
     final int width = 16;
     final int height = 16;
@@ -482,21 +489,33 @@ public class SurfaceTextureHelperTest {
       // ...
       // 368 UUUUUUUU VVVVVVVV
       // 384 buffer end
-      ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 3 / 2);
-      surfaceTextureHelper.textureToYUV(
-          buffer, width, height, width, listener.oesTextureId, listener.transformMatrix);
+      final VideoFrame.I420Buffer i420 =
+          surfaceTextureHelper.textureToYuv(surfaceTextureHelper.createTextureBuffer(width, height,
+              RendererCommon.convertMatrixToAndroidGraphicsMatrix(listener.transformMatrix)));

       surfaceTextureHelper.returnTextureFrame();

       // Allow off-by-one differences due to different rounding.
-      while (buffer.position() < width * height) {
-        assertClose(1, buffer.get() & 0xff, ref_y[i]);
+      final ByteBuffer dataY = i420.getDataY();
+      final int strideY = i420.getStrideY();
+      for (int y = 0; y < height; y++) {
+        for (int x = 0; x < width; x++) {
+          assertClose(1, ref_y[i], dataY.get(y * strideY + x) & 0xFF);
+        }
       }
-      while (buffer.hasRemaining()) {
-        if (buffer.position() % width < width / 2)
-          assertClose(1, buffer.get() & 0xff, ref_u[i]);
-        else
-          assertClose(1, buffer.get() & 0xff, ref_v[i]);
+
+      final int chromaWidth = width / 2;
+      final int chromaHeight = height / 2;
+
+      final ByteBuffer dataU = i420.getDataU();
+      final ByteBuffer dataV = i420.getDataV();
+      final int strideU = i420.getStrideU();
+      final int strideV = i420.getStrideV();
+      for (int y = 0; y < chromaHeight; y++) {
+        for (int x = 0; x < chromaWidth; x++) {
+          assertClose(1, ref_u[i], dataU.get(y * strideU + x) & 0xFF);
+          assertClose(1, ref_v[i], dataV.get(y * strideV + x) & 0xFF);
+        }
       }
     }

View File

@@ -10,9 +10,14 @@
 package org.webrtc;

+import java.nio.ByteBuffer;
+
 /** Class with static JNI helper functions that are used in many places. */
 class JniCommon {
   /** Functions to increment/decrement an rtc::RefCountInterface pointer. */
   static native void nativeAddRef(long nativeRefCountedPointer);
   static native void nativeReleaseRef(long nativeRefCountedPointer);
+
+  public static native ByteBuffer allocateNativeByteBuffer(int size);
+  public static native void freeNativeByteBuffer(ByteBuffer buffer);
 }
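
Unlike ByteBuffer.allocateDirect, memory wrapped via NewDirectByteBuffer is not garbage-collected, so every allocation must be paired with an explicit free, as VideoFileRenderer and the I420 release callback do. A usage sketch:

    ByteBuffer buffer = JniCommon.allocateNativeByteBuffer(size);
    try {
      // ... fill and consume the buffer ...
    } finally {
      JniCommon.freeNativeByteBuffer(buffer); // manual lifetime management
    }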

View File

@@ -67,41 +67,7 @@ class TextureBufferImpl implements VideoFrame.TextureBuffer {
   @Override
   public VideoFrame.I420Buffer toI420() {
-    if (type == Type.RGB) {
-      throw new RuntimeException("toI420 for RGB frames not implemented yet");
-    }
-
-    // SurfaceTextureHelper requires a stride that is divisible by 8. Round width up.
-    // See SurfaceTextureHelper for details on the size and format.
-    int stride = ((width + 7) / 8) * 8;
-    int uvHeight = (height + 1) / 2;
-    // Due to the layout used by SurfaceTextureHelper, vPos + stride * uvHeight would overrun the
-    // buffer. Add one row at the bottom to compensate for this. There will never be data in the
-    // extra row, but now other code does not have to deal with v stride * v height exceeding the
-    // buffer's capacity.
-    int size = stride * (height + uvHeight + 1);
-    ByteBuffer buffer = ByteBuffer.allocateDirect(size);
-    surfaceTextureHelper.textureToYUV(buffer, width, height, stride, id,
-        RendererCommon.convertMatrixFromAndroidGraphicsMatrix(transformMatrix));
-
-    int yPos = 0;
-    int uPos = yPos + stride * height;
-    // Rows of U and V alternate in the buffer, so V data starts after the first row of U.
-    int vPos = uPos + stride / 2;
-
-    buffer.position(yPos);
-    buffer.limit(yPos + stride * height);
-    ByteBuffer dataY = buffer.slice();
-
-    buffer.position(uPos);
-    buffer.limit(uPos + stride * uvHeight);
-    ByteBuffer dataU = buffer.slice();
-
-    buffer.position(vPos);
-    buffer.limit(vPos + stride * uvHeight);
-    ByteBuffer dataV = buffer.slice();
-
-    // SurfaceTextureHelper uses the same stride for Y, U, and V data.
-    return JavaI420Buffer.wrap(width, height, dataY, stride, dataU, stride, dataV, stride, null);
+    return surfaceTextureHelper.textureToYuv(this);
   }

   @Override

View File

@@ -11,7 +11,8 @@
 #include "rtc_base/refcount.h"
 #include "sdk/android/src/jni/jni_helpers.h"

-namespace webrtc_jni {
+namespace webrtc {
+namespace jni {

 JNI_FUNCTION_DECLARATION(void,
                          JniCommon_nativeAddRef,
@@ -31,4 +32,24 @@ JNI_FUNCTION_DECLARATION(void,
       ->Release();
 }

-}  // namespace webrtc_jni
+JNI_FUNCTION_DECLARATION(jobject,
+                         JniCommon_allocateNativeByteBuffer,
+                         JNIEnv* jni,
+                         jclass,
+                         jint size) {
+  void* new_data = ::operator new(size);
+  jobject byte_buffer = jni->NewDirectByteBuffer(new_data, size);
+  return byte_buffer;
+}
+
+JNI_FUNCTION_DECLARATION(void,
+                         JniCommon_freeNativeByteBuffer,
+                         JNIEnv* jni,
+                         jclass,
+                         jobject byte_buffer) {
+  void* data = jni->GetDirectBufferAddress(byte_buffer);
+  ::operator delete(data);
+}
+
+}  // namespace jni
+}  // namespace webrtc

View File

@@ -306,6 +306,8 @@ rtc::scoped_refptr<I420BufferInterface> AndroidTextureBuffer::ToI420() {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);

+  // TODO(sakal): This call to a deprecated method will be removed when
+  // AndroidTextureBuffer is removed.
   jmethodID transform_mid = GetMethodID(
       jni,
       GetObjectClass(jni, surface_texture_helper_),

View File

@@ -65,23 +65,5 @@ Java_org_webrtc_VideoFileRenderer_nativeI420Scale(JNIEnv* jni,
   }
 }

-extern "C" JNIEXPORT jobject JNICALL
-Java_org_webrtc_VideoFileRenderer_nativeCreateNativeByteBuffer(JNIEnv* jni,
-                                                               jclass,
-                                                               jint size) {
-  void* new_data = ::operator new(size);
-  jobject byte_buffer = jni->NewDirectByteBuffer(new_data, size);
-  return byte_buffer;
-}
-
-extern "C" JNIEXPORT void JNICALL
-Java_org_webrtc_VideoFileRenderer_nativeFreeNativeByteBuffer(
-    JNIEnv* jni,
-    jclass,
-    jobject byte_buffer) {
-  void* data = jni->GetDirectBufferAddress(byte_buffer);
-  ::operator delete(data);
-}
-
 }  // namespace jni
 }  // namespace webrtc