AppRTCDemo: Render each video in a separate SurfaceView

This CL introduces a new org.webrtc.VideoRenderer.Callbacks implementation called SurfaceViewRenderer that renders each video stream in its own SurfaceView. AppRTCDemo is updated to use this new rendering.

This CL also makes the following changes:
* Make the VideoRenderer.Callbacks interface asynchronous and require that renderFrameDone() is called for every renderFrame(). In JNI, this is implemented with cricket::VideoFrame::Copy()/delete. (A sketch of a conforming implementation follows this list.)
* Extract convertScalingTypeToVisibleFraction(), getDisplaySize(), and getTextureMatrix() from VideoRendererGui into public static helper functions in the new RendererCommon class.
* Introduce new helper functions surfaceWidth()/surfaceHeight() in EglBase that allow querying the surface size.
* Introduce PercentFrameLayout, which implements the percentage-based layout used by AppRTCDemo.
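
A minimal sketch of the new asynchronous contract for a VideoRenderer.Callbacks implementation (CountingRenderer is a hypothetical example, not part of this CL):

import org.webrtc.VideoRenderer;

class CountingRenderer implements VideoRenderer.Callbacks {
  private int framesReceived;

  @Override
  public void renderFrame(VideoRenderer.I420Frame frame) {
    ++framesReceived;
    // The callee owns |frame| from here on. A real renderer would typically post
    // the frame to a render thread first; either way, renderFrameDone() must be
    // called exactly once per frame so the native cricket::VideoFrame copy is deleted.
    VideoRenderer.renderFrameDone(frame);
  }

  // TODO(guoweis): Remove this once chrome code base is updated.
  @Override
  public boolean canApplyRotation() {
    return true;
  }
}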

BUG=webrtc:4742

Review URL: https://codereview.webrtc.org/1257043004

Cr-Commit-Position: refs/heads/master@{#9699}
Author: magjed, 2015-08-11 06:50:18 -07:00; committed by Commit bot
Commit: 05bfbe47ef, parent: fa301809b6
14 changed files with 834 additions and 129 deletions


@@ -48,6 +48,7 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
         ++framesRendered;
         frameLock.notify();
       }
+      VideoRenderer.renderFrameDone(frame);
     }

     // TODO(guoweis): Remove this once chrome code base is updated.


@@ -127,6 +127,18 @@ public final class EglBase {
     return eglSurface != EGL14.EGL_NO_SURFACE;
   }

+  public int surfaceWidth() {
+    final int widthArray[] = new int[1];
+    EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
+    return widthArray[0];
+  }
+
+  public int surfaceHeight() {
+    final int heightArray[] = new int[1];
+    EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
+    return heightArray[0];
+  }
+
   public void releaseSurface() {
     if (eglSurface != EGL14.EGL_NO_SURFACE) {
       EGL14.eglDestroySurface(eglDisplay, eglSurface);


@@ -0,0 +1,126 @@
/*
* libjingle
* Copyright 2015 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
import android.graphics.Point;
import android.opengl.Matrix;
/**
* Static helper functions for VideoRendererGui and SurfaceViewRenderer.
*/
public class RendererCommon {
// Types of video scaling:
// SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
// maintaining the aspect ratio (black borders may be displayed).
// SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
// maintaining the aspect ratio. Some portion of the video frame may be
// clipped.
// SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
// possible of the view while maintaining aspect ratio, under the constraint that at least
// |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
// The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
// This limits excessive cropping when adjusting display size.
private static float BALANCED_VISIBLE_FRACTION = 0.56f;
/**
* Calculates a texture transformation matrix based on rotation, mirror, and video vs display
* aspect ratio.
*/
public static void getTextureMatrix(float[] outputTextureMatrix, float rotationDegree,
boolean mirror, float videoAspectRatio, float displayAspectRatio) {
// The matrix stack is using post-multiplication, which means that matrix operations:
// A; B; C; will end up as A * B * C. When you apply this to a vertex, it will result in:
// v' = A * B * C * v, i.e. the last matrix operation is the first thing that affects the
// vertex. This is the opposite of what you might expect.
Matrix.setIdentityM(outputTextureMatrix, 0);
// Move coordinates back to [0,1]x[0,1].
Matrix.translateM(outputTextureMatrix, 0, 0.5f, 0.5f, 0.0f);
// Rotate frame clockwise in the XY-plane (around the Z-axis).
Matrix.rotateM(outputTextureMatrix, 0, -rotationDegree, 0, 0, 1);
// Scale one dimension until video and display size have same aspect ratio.
if (displayAspectRatio > videoAspectRatio) {
Matrix.scaleM(outputTextureMatrix, 0, 1, videoAspectRatio / displayAspectRatio, 1);
} else {
Matrix.scaleM(outputTextureMatrix, 0, displayAspectRatio / videoAspectRatio, 1, 1);
}
// TODO(magjed): We currently ignore the texture transform matrix from the SurfaceTexture.
// It contains a vertical flip that is hardcoded here instead.
Matrix.scaleM(outputTextureMatrix, 0, 1, -1, 1);
// Apply optional horizontal flip.
if (mirror) {
Matrix.scaleM(outputTextureMatrix, 0, -1, 1, 1);
}
// Center coordinates around origin.
Matrix.translateM(outputTextureMatrix, 0, -0.5f, -0.5f, 0.0f);
}
/**
* Calculate display size based on scaling type, video aspect ratio, and maximum display size.
*/
public static Point getDisplaySize(ScalingType scalingType, float videoAspectRatio,
int maxDisplayWidth, int maxDisplayHeight) {
return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
maxDisplayWidth, maxDisplayHeight);
}
/**
* Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video
* that must remain visible.
*/
private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
switch (scalingType) {
case SCALE_ASPECT_FIT:
return 1.0f;
case SCALE_ASPECT_FILL:
return 0.0f;
case SCALE_ASPECT_BALANCED:
return BALANCED_VISIBLE_FRACTION;
default:
throw new IllegalArgumentException();
}
}
/**
* Calculate display size based on minimum fraction of the video that must remain visible,
* video aspect ratio, and maximum display size.
*/
private static Point getDisplaySize(float minVisibleFraction, float videoAspectRatio,
int maxDisplayWidth, int maxDisplayHeight) {
// If there is no constraint on the amount of cropping, fill the allowed display area.
if (minVisibleFraction == 0 || videoAspectRatio == 0) {
return new Point(maxDisplayWidth, maxDisplayHeight);
}
// Each dimension is constrained on max display size and how much we are allowed to crop.
final int width = Math.min(maxDisplayWidth,
(int) (maxDisplayHeight / minVisibleFraction * videoAspectRatio));
final int height = Math.min(maxDisplayHeight,
(int) (maxDisplayWidth / minVisibleFraction / videoAspectRatio));
return new Point(width, height);
}
}
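
To illustrate the display-size math above (a hedged example, not part of the CL; DisplaySizeExample is a hypothetical class): a 16:9 video in a 720x1280 portrait view with SCALE_ASPECT_BALANCED yields width = min(720, (int) (1280 / 0.56 * 16/9)) = 720 and height = min(1280, (int) (720 / 0.56 / (16/9))) = 723, i.e. the frame fills the width and is cropped until only 56% of it remains visible.

import android.graphics.Point;
import org.webrtc.RendererCommon;

public class DisplaySizeExample {
  public static Point exampleSize() {
    final float videoAspectRatio = 16.0f / 9.0f;
    // A 16:9 frame in a 720x1280 portrait view: returns Point(720, 723), since
    // cropping is limited so that BALANCED_VISIBLE_FRACTION (56%) of the frame
    // content stays visible.
    return RendererCommon.getDisplaySize(
        RendererCommon.ScalingType.SCALE_ASPECT_BALANCED, videoAspectRatio, 720, 1280);
  }
}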


@@ -0,0 +1,474 @@
/*
* libjingle
* Copyright 2015 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
import java.nio.ByteBuffer;
import android.content.Context;
import android.graphics.Point;
import android.graphics.SurfaceTexture;
import android.opengl.EGLContext;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
* Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on a SurfaceView.
* renderFrame() is asynchronous to avoid blocking the calling thread. Instead, a shallow copy of
* the frame is posted to a dedicated render thread.
* This class is thread safe and handles access from potentially four different threads:
* Interaction from the main app in init, release, setMirror, and setScalingType.
* Interaction from C++ webrtc::VideoRendererInterface in renderFrame and canApplyRotation.
* Interaction from the Activity lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
* Interaction with the layout framework in onMeasure and onSizeChanged.
*/
public class SurfaceViewRenderer extends SurfaceView
implements SurfaceHolder.Callback, VideoRenderer.Callbacks {
private static final String TAG = "SurfaceViewRenderer";
// Dedicated render thread. Synchronized on |this|.
private HandlerThread renderThread;
// Handler for inter-thread communication. Synchronized on |this|.
private Handler renderThreadHandler;
// Pending frame to render. Serves as a queue with size 1. Synchronized on |this|.
private VideoRenderer.I420Frame pendingFrame;
// EGL and GL resources for drawing YUV/OES textures. After initialization, these are only accessed
// from the render thread.
private EglBase eglBase;
private GlRectDrawer drawer;
// Texture ids for YUV frames. Allocated on first arrival of a YUV frame.
private int[] yuvTextures = null;
// Intermediate copy buffers in case yuv frames are not packed, i.e. stride > plane width. One for
// Y, and one for U and V.
private final ByteBuffer[] copyBuffer = new ByteBuffer[2];
// These variables are synchronized on |layoutLock|.
private final Object layoutLock = new Object();
// Current surface size.
public int surfaceWidth;
public int surfaceHeight;
// Most recent measurement specification from onMeasure().
private int widthSpec;
private int heightSpec;
// Current size on screen in pixels.
public int layoutWidth;
public int layoutHeight;
// Desired layout size, or 0 if no frame has arrived yet. The desired size is updated before
// rendering a new frame, and is enforced in onMeasure(). Rendering is blocked until layout is
// updated to the desired size.
public int desiredLayoutWidth;
public int desiredLayoutHeight;
// |scalingType| determines how the video will fill the allowed layout area in onMeasure().
private RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_BALANCED;
// If true, mirrors the video stream horizontally.
private boolean mirror;
// These variables are synchronized on |statisticsLock|.
private final Object statisticsLock = new Object();
// Total number of video frames received in renderFrame() calls.
private int framesReceived;
// Number of video frames dropped by renderFrame() because previous frame has not been rendered
// yet.
private int framesDropped;
// Number of rendered video frames.
private int framesRendered;
// Time in ns when the first video frame was rendered.
private long firstFrameTimeNs;
// Time in ns spent in renderFrameOnRenderThread() function.
private long renderTimeNs;
// Runnable for posting frames to the render thread.
private final Runnable renderFrameRunnable = new Runnable() {
@Override public void run() {
renderFrameOnRenderThread();
}
};
/**
* Standard View constructor. In order to render something, you must first call init().
*/
public SurfaceViewRenderer(Context context) {
super(context);
}
/**
* Standard View constructor. In order to render something, you must first call init().
*/
public SurfaceViewRenderer(Context context, AttributeSet attrs) {
super(context, attrs);
}
/**
* Initialize this class, sharing resources with |sharedContext|.
*/
public synchronized void init(EGLContext sharedContext) {
if (renderThreadHandler != null) {
throw new IllegalStateException("Already initialized");
}
Log.d(TAG, "Initializing");
renderThread = new HandlerThread(TAG);
renderThread.start();
renderThreadHandler = new Handler(renderThread.getLooper());
eglBase = new EglBase(sharedContext, EglBase.ConfigType.PLAIN);
drawer = new GlRectDrawer();
getHolder().addCallback(this);
}
/**
* Release all resources. This needs to be done manually, otherwise the resources are leaked.
*/
public synchronized void release() {
if (renderThreadHandler == null) {
Log.d(TAG, "Already released");
return;
}
// Release EGL and GL resources on render thread.
renderThreadHandler.post(new Runnable() {
@Override public void run() {
drawer.release();
drawer = null;
if (yuvTextures != null) {
GLES20.glDeleteTextures(3, yuvTextures, 0);
yuvTextures = null;
}
eglBase.release();
eglBase = null;
}
});
// Don't accept any more messages to the render thread.
renderThreadHandler = null;
// Quit safely to make sure the EGL/GL cleanup posted above is executed.
renderThread.quitSafely();
renderThread = null;
getHolder().removeCallback(this);
if (pendingFrame != null) {
VideoRenderer.renderFrameDone(pendingFrame);
pendingFrame = null;
}
}
/**
* Set if the video stream should be mirrored or not.
*/
public void setMirror(final boolean mirror) {
synchronized (layoutLock) {
this.mirror = mirror;
}
}
/**
* Set how the video will fill the allowed layout area.
*/
public void setScalingType(RendererCommon.ScalingType scalingType) {
synchronized (layoutLock) {
this.scalingType = scalingType;
}
}
// VideoRenderer.Callbacks interface.
@Override
public void renderFrame(VideoRenderer.I420Frame frame) {
synchronized (statisticsLock) {
++framesReceived;
}
synchronized (this) {
if (renderThreadHandler == null) {
Log.d(TAG, "Dropping frame - SurfaceViewRenderer not initialized or already released.");
VideoRenderer.renderFrameDone(frame);
return;
}
if (pendingFrame != null) {
synchronized (statisticsLock) {
++framesDropped;
}
Log.d(TAG, "Dropping frame - previous frame has not been rendered yet.");
VideoRenderer.renderFrameDone(frame);
return;
}
pendingFrame = frame;
renderThreadHandler.post(renderFrameRunnable);
}
}
@Override
public boolean canApplyRotation() {
return true;
}
// View layout interface.
@Override
protected void onMeasure(int widthSpec, int heightSpec) {
synchronized (layoutLock) {
this.widthSpec = widthSpec;
this.heightSpec = heightSpec;
if (desiredLayoutWidth == 0 || desiredLayoutHeight == 0) {
super.onMeasure(widthSpec, heightSpec);
} else {
setMeasuredDimension(desiredLayoutWidth, desiredLayoutHeight);
}
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
synchronized (layoutLock) {
layoutWidth = right - left;
layoutHeight = bottom - top;
}
// Might have a pending frame waiting for a layout of correct size.
runOnRenderThread(renderFrameRunnable);
}
// SurfaceHolder.Callback interface.
@Override
public void surfaceCreated(final SurfaceHolder holder) {
Log.d(TAG, "Surface created");
runOnRenderThread(new Runnable() {
@Override public void run() {
eglBase.createSurface(holder.getSurface());
eglBase.makeCurrent();
// Necessary for YUV frames with odd width.
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
}
});
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.d(TAG, "Surface destroyed");
synchronized (layoutLock) {
surfaceWidth = 0;
surfaceHeight = 0;
}
runOnRenderThread(new Runnable() {
@Override public void run() {
eglBase.releaseSurface();
}
});
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.d(TAG, "Surface changed: " + width + "x" + height);
synchronized (layoutLock) {
surfaceWidth = width;
surfaceHeight = height;
}
// Might have a pending frame waiting for a surface of correct size.
runOnRenderThread(renderFrameRunnable);
}
/**
* Private helper function to post tasks safely.
*/
private synchronized void runOnRenderThread(Runnable runnable) {
if (renderThreadHandler != null) {
renderThreadHandler.post(runnable);
}
}
private synchronized void runOnRenderThreadDelayed(Runnable runnable, long ms) {
if (renderThreadHandler != null) {
renderThreadHandler.postDelayed(runnable, ms);
}
}
/**
* Renders and releases |pendingFrame|.
*/
private void renderFrameOnRenderThread() {
if (eglBase == null || !eglBase.hasSurface()) {
Log.d(TAG, "No surface to draw on");
return;
}
final float videoAspectRatio;
synchronized (this) {
if (pendingFrame == null) {
return;
}
videoAspectRatio = (float) pendingFrame.rotatedWidth() / pendingFrame.rotatedHeight();
}
// Request new layout if necessary. Don't continue until layout and surface size are in a good
// state.
synchronized (layoutLock) {
final int maxWidth = getDefaultSize(Integer.MAX_VALUE, widthSpec);
final int maxHeight = getDefaultSize(Integer.MAX_VALUE, heightSpec);
final Point suggestedSize =
RendererCommon.getDisplaySize(scalingType, videoAspectRatio, maxWidth, maxHeight);
desiredLayoutWidth =
MeasureSpec.getMode(widthSpec) == MeasureSpec.EXACTLY ? maxWidth : suggestedSize.x;
desiredLayoutHeight =
MeasureSpec.getMode(heightSpec) == MeasureSpec.EXACTLY ? maxHeight : suggestedSize.y;
if (desiredLayoutWidth != layoutWidth || desiredLayoutHeight != layoutHeight) {
Log.d(TAG, "Requesting new layout with size: "
+ desiredLayoutWidth + "x" + desiredLayoutHeight);
// Output an intermediate black frame while the layout is updated.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
eglBase.swapBuffers();
// Request layout update on UI thread.
post(new Runnable() {
@Override public void run() {
requestLayout();
}
});
return;
}
if (surfaceWidth != layoutWidth || surfaceHeight != layoutHeight) {
Log.d(TAG, "Postponing rendering until surface size is updated.");
return;
}
}
// The EGLSurface might have a buffer of the old size in the pipeline, even after
// surfaceChanged() has been called. Querying the EGLSurface will show if the underlying buffer
// dimensions haven't yet changed.
if (eglBase.surfaceWidth() != surfaceWidth || eglBase.surfaceHeight() != surfaceHeight) {
Log.d(TAG, "Flushing old egl surface buffer with incorrect size.");
// There is no way to display the old buffer correctly, so just make it black, and immediately
// render |pendingFrame| on the next buffer.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
eglBase.swapBuffers();
// In some rare cases, the next buffer is not updated either. In those cases, wait 1 ms and
// try again.
if (eglBase.surfaceWidth() != surfaceWidth || eglBase.surfaceHeight() != surfaceHeight) {
Log.e(TAG, "Unexpected buffer size even after swapBuffers() has been called.");
runOnRenderThreadDelayed(renderFrameRunnable, 1);
return;
}
}
// Finally, layout, surface, and EGLSurface are in a good state. Fetch and render |pendingFrame|.
final VideoRenderer.I420Frame frame;
synchronized (this) {
if (pendingFrame == null) {
return;
}
frame = pendingFrame;
pendingFrame = null;
}
final long startTimeNs = System.nanoTime();
final float[] texMatrix = new float[16];
synchronized (layoutLock) {
RendererCommon.getTextureMatrix(texMatrix, frame.rotationDegree, mirror, videoAspectRatio,
(float) layoutWidth / layoutHeight);
}
GLES20.glViewport(0, 0, surfaceWidth, surfaceHeight);
if (frame.yuvFrame) {
uploadYuvData(frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
drawer.drawYuv(frame.width, frame.height, yuvTextures, texMatrix);
} else {
SurfaceTexture surfaceTexture = (SurfaceTexture) frame.textureObject;
// TODO(magjed): Move updateTexImage() to the video source instead.
surfaceTexture.updateTexImage();
drawer.drawOes(frame.textureId, texMatrix);
}
eglBase.swapBuffers();
VideoRenderer.renderFrameDone(frame);
synchronized (statisticsLock) {
if (framesRendered == 0) {
firstFrameTimeNs = startTimeNs;
}
++framesRendered;
renderTimeNs += (System.nanoTime() - startTimeNs);
if (framesRendered % 300 == 0) {
logStatistics();
}
}
}
private void uploadYuvData(int width, int height, int[] strides, ByteBuffer[] planes) {
// Make sure YUV textures are allocated.
if (yuvTextures == null) {
yuvTextures = new int[3];
// Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
GLES20.glGenTextures(3, yuvTextures, 0);
for (int i = 0; i < 3; i++) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
}
GlUtil.checkNoGLES2Error("y/u/v glGenTextures");
}
// Upload each plane.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
final int planeWidth = (i == 0) ? width : width / 2;
final int planeHeight = (i == 0) ? height : height / 2;
final int bufferIndex = (i == 0) ? 0 : 1;
// GLES only accepts packed data, i.e. stride == planeWidth.
final ByteBuffer packedByteBuffer;
if (strides[i] == planeWidth) {
// Input is packed already.
packedByteBuffer = planes[i];
} else {
// Make an intermediate packed copy.
final int capacityNeeded = planeWidth * planeHeight;
if (copyBuffer[bufferIndex] == null
|| copyBuffer[bufferIndex].capacity() != capacityNeeded) {
copyBuffer[bufferIndex] = ByteBuffer.allocateDirect(capacityNeeded);
}
packedByteBuffer = copyBuffer[bufferIndex];
VideoRenderer.nativeCopyPlane(
planes[i], planeWidth, planeHeight, strides[i], packedByteBuffer, planeWidth);
}
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidth, planeHeight, 0,
GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
}
}
private void logStatistics() {
synchronized (statisticsLock) {
Log.d(TAG, "ID: " + getResources().getResourceEntryName(getId()) + ". Frames received: "
+ framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
if (framesReceived > 0 && framesRendered > 0) {
final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs;
Log.d(TAG, "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
" ms. FPS: " + (float) framesRendered * 1e9 / timeSinceFirstFrameNs);
Log.d(TAG, "Average render time: "
+ (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
}
}
}
}
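
For reference, a typical lifecycle from an Activity (a sketch assuming a hypothetical R.id.video_view entry in the layout; CallActivity below does essentially this):

// In onCreate(): create a shared EGL context and initialize the renderer with it.
EglBase eglBase = new EglBase();
SurfaceViewRenderer renderer = (SurfaceViewRenderer) findViewById(R.id.video_view);
renderer.init(eglBase.getContext());
renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_BALANCED);
renderer.setMirror(false);
// The renderer can now be attached to a video track, e.g.:
// videoTrack.addRenderer(new VideoRenderer(renderer));

// In onDestroy(): release the renderer before the shared EGL context.
renderer.release();
eglBase.release();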


@@ -42,7 +42,6 @@ import android.opengl.EGL14;
 import android.opengl.EGLContext;
 import android.opengl.GLES20;
 import android.opengl.GLSurfaceView;
-import android.opengl.Matrix;
 import android.util.Log;

 import org.webrtc.VideoRenderer.I420Frame;
@@ -70,20 +69,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
   // List of yuv renderers.
   private ArrayList<YuvImageRenderer> yuvImageRenderers;
   private GlRectDrawer drawer;
-  // The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
-  // This limits excessive cropping when adjusting display size.
-  private static float BALANCED_VISIBLE_FRACTION = 0.56f;
-  // Types of video scaling:
-  // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
-  //    maintaining the aspect ratio (black borders may be displayed).
-  // SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
-  //    maintaining the aspect ratio. Some portion of the video frame may be
-  //    clipped.
-  // SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
-  //    possible of the view while maintaining aspect ratio, under the constraint that at least
-  //    |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
-  public static enum ScalingType
-      { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
   private static final int EGL14_SDK_VERSION =
       android.os.Build.VERSION_CODES.JELLY_BEAN_MR1;
   // Current SDK version.
@@ -123,7 +108,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
     // Type of video frame used for recent frame rendering.
     private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE };
     private RendererType rendererType;
-    private ScalingType scalingType;
+    private RendererCommon.ScalingType scalingType;
     private boolean mirror;
     // Flag if renderFrame() was ever called.
     boolean seenFrame;
@@ -166,7 +151,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
     private YuvImageRenderer(
         GLSurfaceView surface, int id,
         int x, int y, int width, int height,
-        ScalingType scalingType, boolean mirror) {
+        RendererCommon.ScalingType scalingType, boolean mirror) {
       Log.d(TAG, "YuvImageRenderer.Create id: " + id);
       this.surface = surface;
       this.id = id;
@@ -199,33 +184,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       GlUtil.checkNoGLES2Error("y/u/v glGenTextures");
     }

-    private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
-      switch (scalingType) {
-        case SCALE_ASPECT_FIT:
-          return 1.0f;
-        case SCALE_ASPECT_FILL:
-          return 0.0f;
-        case SCALE_ASPECT_BALANCED:
-          return BALANCED_VISIBLE_FRACTION;
-        default:
-          throw new IllegalArgumentException();
-      }
-    }
-
-    private static Point getDisplaySize(float minVisibleFraction, float videoAspectRatio,
-        int maxDisplayWidth, int maxDisplayHeight) {
-      // If there is no constraint on the amount of cropping, fill the allowed display area.
-      if (minVisibleFraction == 0) {
-        return new Point(maxDisplayWidth, maxDisplayHeight);
-      }
-      // Each dimension is constrained on max display size and how much we are allowed to crop.
-      final int width = Math.min(maxDisplayWidth,
-          (int) (maxDisplayHeight / minVisibleFraction * videoAspectRatio));
-      final int height = Math.min(maxDisplayHeight,
-          (int) (maxDisplayWidth / minVisibleFraction / videoAspectRatio));
-      return new Point(width, height);
-    }
-
     private void checkAdjustTextureCoords() {
       synchronized(updateTextureLock) {
         if (!updateTextureProperties) {
@@ -245,38 +203,14 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
           ? (float) videoWidth / videoHeight
           : (float) videoHeight / videoWidth;
       // Adjust display size based on |scalingType|.
-      final float minVisibleFraction = convertScalingTypeToVisibleFraction(scalingType);
-      final Point displaySize = getDisplaySize(minVisibleFraction, videoAspectRatio,
-          displayLayout.width(), displayLayout.height());
+      final Point displaySize = RendererCommon.getDisplaySize(scalingType,
+          videoAspectRatio, displayLayout.width(), displayLayout.height());
       displayLayout.inset((displayLayout.width() - displaySize.x) / 2,
           (displayLayout.height() - displaySize.y) / 2);
       Log.d(TAG, "  Adjusted display size: " + displayLayout.width() + " x "
           + displayLayout.height());
-      // The matrix stack is using post-multiplication, which means that matrix operations:
-      // A; B; C; will end up as A * B * C. When you apply this to a vertex, it will result in:
-      // v' = A * B * C * v, i.e. the last matrix operation is the first thing that affects the
-      // vertex. This is the opposite of what you might expect.
-      Matrix.setIdentityM(texMatrix, 0);
-      // Move coordinates back to [0,1]x[0,1].
-      Matrix.translateM(texMatrix, 0, 0.5f, 0.5f, 0.0f);
-      // Rotate frame clockwise in the XY-plane (around the Z-axis).
-      Matrix.rotateM(texMatrix, 0, -rotationDegree, 0, 0, 1);
-      // Scale one dimension until video and display size have same aspect ratio.
-      final float displayAspectRatio = (float) displayLayout.width() / displayLayout.height();
-      if (displayAspectRatio > videoAspectRatio) {
-        Matrix.scaleM(texMatrix, 0, 1, videoAspectRatio / displayAspectRatio, 1);
-      } else {
-        Matrix.scaleM(texMatrix, 0, displayAspectRatio / videoAspectRatio, 1, 1);
-      }
-      // TODO(magjed): We currently ignore the texture transform matrix from the SurfaceTexture.
-      // It contains a vertical flip that is hardcoded here instead.
-      Matrix.scaleM(texMatrix, 0, 1, -1, 1);
-      // Apply optional horizontal flip.
-      if (mirror) {
-        Matrix.scaleM(texMatrix, 0, -1, 1, 1);
-      }
-      // Center coordinates around origin.
-      Matrix.translateM(texMatrix, 0, -0.5f, -0.5f, 0.0f);
+      RendererCommon.getTextureMatrix(texMatrix, rotationDegree, mirror, videoAspectRatio,
+          (float) displayLayout.width() / displayLayout.height());
       updateTextureProperties = false;
       Log.d(TAG, "  AdjustTextureCoords done");
     }
@@ -375,7 +309,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
     }

     public void setPosition(int x, int y, int width, int height,
-        ScalingType scalingType, boolean mirror) {
+        RendererCommon.ScalingType scalingType, boolean mirror) {
       final Rect layoutInPercentage =
           new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
       synchronized(updateTextureLock) {
@@ -415,9 +349,9 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
         frameToRenderQueue.poll();
       // Re-allocate / allocate the frame.
       yuvFrameToRender = new I420Frame(videoWidth, videoHeight, rotationDegree,
-          strides, null);
+          strides, null, 0);
       textureFrameToRender = new I420Frame(videoWidth, videoHeight, rotationDegree,
-          null, -1);
+          null, -1, 0);
       updateTextureProperties = true;
       Log.d(TAG, "  YuvImageRenderer.setSize done.");
     }
@@ -431,6 +365,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       // Skip rendering of this frame if setSize() was not called.
       if (yuvFrameToRender == null || textureFrameToRender == null) {
        framesDropped++;
+        VideoRenderer.renderFrameDone(frame);
        return;
       }
       // Check input frame parameters.
@@ -440,6 +375,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
           frame.yuvStrides[2] < frame.width / 2) {
         Log.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
             frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
+        VideoRenderer.renderFrameDone(frame);
         return;
       }
       // Check incoming frame dimensions.
@@ -453,6 +389,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       if (frameToRenderQueue.size() > 0) {
         // Skip rendering of this frame if previous frame was not rendered yet.
         framesDropped++;
+        VideoRenderer.renderFrameDone(frame);
         return;
       }
@@ -468,6 +405,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       }
       copyTimeNs += (System.nanoTime() - now);
       seenFrame = true;
+      VideoRenderer.renderFrameDone(frame);

       // Request rendering.
       surface.requestRender();
@@ -497,7 +435,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
    * (width, height). All parameters are in percentage of screen resolution.
    */
   public static VideoRenderer createGui(int x, int y, int width, int height,
-      ScalingType scalingType, boolean mirror) throws Exception {
+      RendererCommon.ScalingType scalingType, boolean mirror) throws Exception {
     YuvImageRenderer javaGuiRenderer = create(
         x, y, width, height, scalingType, mirror);
     return new VideoRenderer(javaGuiRenderer);
@@ -505,7 +443,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
   public static VideoRenderer.Callbacks createGuiRenderer(
       int x, int y, int width, int height,
-      ScalingType scalingType, boolean mirror) {
+      RendererCommon.ScalingType scalingType, boolean mirror) {
     return create(x, y, width, height, scalingType, mirror);
   }
@@ -515,7 +453,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
    * screen resolution.
    */
   public static YuvImageRenderer create(int x, int y, int width, int height,
-      ScalingType scalingType, boolean mirror) {
+      RendererCommon.ScalingType scalingType, boolean mirror) {
     // Check display region parameters.
     if (x < 0 || x > 100 || y < 0 || y > 100 ||
         width < 0 || width > 100 || height < 0 || height > 100 ||
@@ -559,7 +497,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
   public static void update(
       VideoRenderer.Callbacks renderer,
-      int x, int y, int width, int height, ScalingType scalingType, boolean mirror) {
+      int x, int y, int width, int height, RendererCommon.ScalingType scalingType, boolean mirror) {
     Log.d(TAG, "VideoRendererGui.update");
     if (instance == null) {
       throw new RuntimeException(


@@ -746,10 +746,10 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
       j_frame_class_(jni,
                      FindClass(jni, "org/webrtc/VideoRenderer$I420Frame")),
       j_i420_frame_ctor_id_(GetMethodID(
-          jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;)V")),
+          jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;J)V")),
       j_texture_frame_ctor_id_(GetMethodID(
           jni, *j_frame_class_, "<init>",
-          "(IIILjava/lang/Object;I)V")),
+          "(IIILjava/lang/Object;IJ)V")),
       j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")),
       can_apply_rotation_set_(false),
       can_apply_rotation_(false) {
@@ -767,6 +767,9 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
     const cricket::VideoFrame* frame =
         can_apply_rotation_ ? video_frame
                             : video_frame->GetCopyWithRotationApplied();
+    // Make a shallow copy. |j_callbacks_| is responsible for releasing the
+    // copy by calling VideoRenderer.renderFrameDone().
+    frame = frame->Copy();
     if (frame->GetNativeHandle() != NULL) {
       jobject j_frame = CricketToJavaTextureFrame(frame);
       jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
@@ -817,7 +820,7 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
         *j_frame_class_, j_i420_frame_ctor_id_,
         frame->GetWidth(), frame->GetHeight(),
         static_cast<int>(frame->GetVideoRotation()),
-        strides, planes);
+        strides, planes, frame);
   }

   // Return a VideoRenderer.I420Frame referring texture object in |frame|.
@@ -830,7 +833,7 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
         *j_frame_class_, j_texture_frame_ctor_id_,
         frame->GetWidth(), frame->GetHeight(),
         static_cast<int>(frame->GetVideoRotation()),
-        texture_object, texture_id);
+        texture_object, texture_id, frame);
   }

   JNIEnv* jni() {
@@ -954,6 +957,11 @@ JOW(void, VideoRenderer_freeWrappedVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
   delete reinterpret_cast<JavaVideoRendererWrapper*>(j_p);
 }

+JOW(void, VideoRenderer_releaseNativeFrame)(
+    JNIEnv* jni, jclass, jlong j_frame_ptr) {
+  delete reinterpret_cast<const cricket::VideoFrame*>(j_frame_ptr);
+}
+
 JOW(void, MediaStreamTrack_free)(JNIEnv*, jclass, jlong j_p) {
   CHECK_RELEASE(reinterpret_cast<MediaStreamTrackInterface*>(j_p));
 }


@@ -42,10 +42,12 @@ public class VideoRenderer {
     public final int width;
     public final int height;
     public final int[] yuvStrides;
-    public final ByteBuffer[] yuvPlanes;
+    public ByteBuffer[] yuvPlanes;
     public final boolean yuvFrame;
     public Object textureObject;
     public int textureId;
+    // If |nativeFramePointer| is non-zero, the memory is allocated on the C++ side.
+    private long nativeFramePointer;

     // rotationDegree is the degree that the frame must be rotated clockwisely
     // to be rendered correctly.
@@ -58,7 +60,7 @@ public class VideoRenderer {
      */
     public I420Frame(
         int width, int height, int rotationDegree,
-        int[] yuvStrides, ByteBuffer[] yuvPlanes) {
+        int[] yuvStrides, ByteBuffer[] yuvPlanes, long nativeFramePointer) {
       this.width = width;
       this.height = height;
       this.yuvStrides = yuvStrides;
@@ -71,6 +73,7 @@ public class VideoRenderer {
       this.yuvPlanes = yuvPlanes;
       this.yuvFrame = true;
       this.rotationDegree = rotationDegree;
+      this.nativeFramePointer = nativeFramePointer;
       if (rotationDegree % 90 != 0) {
         throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
       }
@@ -81,7 +84,7 @@ public class VideoRenderer {
      */
     public I420Frame(
         int width, int height, int rotationDegree,
-        Object textureObject, int textureId) {
+        Object textureObject, int textureId, long nativeFramePointer) {
       this.width = width;
       this.height = height;
       this.yuvStrides = null;
@@ -90,6 +93,7 @@ public class VideoRenderer {
       this.textureId = textureId;
       this.yuvFrame = false;
       this.rotationDegree = rotationDegree;
+      this.nativeFramePointer = nativeFramePointer;
       if (rotationDegree % 90 != 0) {
         throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
       }
@@ -164,18 +168,33 @@ public class VideoRenderer {
   }

   // Helper native function to do a video frame plane copying.
-  private static native void nativeCopyPlane(ByteBuffer src, int width,
+  public static native void nativeCopyPlane(ByteBuffer src, int width,
       int height, int srcStride, ByteBuffer dst, int dstStride);

   /** The real meat of VideoRendererInterface. */
   public static interface Callbacks {
     // |frame| might have pending rotation and implementation of Callbacks
-    // should handle that by applying rotation during rendering.
+    // should handle that by applying rotation during rendering. The callee
+    // is responsible for signaling when it is done with |frame| by calling
+    // renderFrameDone(frame).
     public void renderFrame(I420Frame frame);

     // TODO(guoweis): Remove this once chrome code base is updated.
     public boolean canApplyRotation();
   }

+  /**
+   * This must be called after every renderFrame() to release the frame.
+   */
+  public static void renderFrameDone(I420Frame frame) {
+    frame.yuvPlanes = null;
+    frame.textureObject = null;
+    frame.textureId = 0;
+    if (frame.nativeFramePointer != 0) {
+      releaseNativeFrame(frame.nativeFramePointer);
+      frame.nativeFramePointer = 0;
+    }
+  }
+
   // |this| either wraps a native (GUI) renderer or a client-supplied Callbacks
   // (Java) implementation; so exactly one of these will be non-0/null.
   final long nativeVideoRenderer;
@@ -212,4 +231,6 @@ public class VideoRenderer {
   private static native void freeGuiVideoRenderer(long nativeVideoRenderer);
   private static native void freeWrappedVideoRenderer(long nativeVideoRenderer);
+
+  private static native void releaseNativeFrame(long nativeFramePointer);
 }


@@ -136,6 +136,7 @@ public class PeerConnectionTest {
     public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
       setSize(frame.width, frame.height);
       --expectedFramesDelivered;
+      VideoRenderer.renderFrameDone(frame);
     }

     // TODO(guoweis): Remove this once chrome code base is updated.
@@ -453,6 +454,7 @@ public class PeerConnectionTest {
     @Override
     public void renderFrame(VideoRenderer.I420Frame frame) {
       ++numFramesDelivered;
+      VideoRenderer.renderFrameDone(frame);
     }

     // TODO(guoweis): Remove this once chrome code base is updated.


@@ -144,6 +144,8 @@
             'app/webrtc/java/android/org/webrtc/GlRectDrawer.java',
             'app/webrtc/java/android/org/webrtc/GlShader.java',
             'app/webrtc/java/android/org/webrtc/GlUtil.java',
+            'app/webrtc/java/android/org/webrtc/RendererCommon.java',
+            'app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java',
             'app/webrtc/java/android/org/webrtc/VideoRendererGui.java',
             'app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java',
             'app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java',


@@ -6,10 +6,25 @@
     android:layout_width="match_parent"
     android:layout_height="match_parent">

-  <android.opengl.GLSurfaceView
-      android:id="@+id/glview_call"
-      android:layout_width="match_parent"
-      android:layout_height="match_parent" />
+  <org.appspot.apprtc.PercentFrameLayout
+      android:id="@+id/remote_video_layout"
+      android:layout_width="match_parent"
+      android:layout_height="match_parent">
+    <org.webrtc.SurfaceViewRenderer
+        android:id="@+id/remote_video_view"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content" />
+  </org.appspot.apprtc.PercentFrameLayout>
+  <org.appspot.apprtc.PercentFrameLayout
+      android:id="@+id/local_video_layout"
+      android:layout_width="match_parent"
+      android:layout_height="match_parent">
+    <org.webrtc.SurfaceViewRenderer
+        android:id="@+id/local_video_view"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content" />
+  </org.appspot.apprtc.PercentFrameLayout>

   <FrameLayout
       android:id="@+id/call_fragment_container"


@@ -24,18 +24,19 @@ import android.content.pm.PackageManager;
 import android.net.Uri;
 import android.opengl.GLSurfaceView;
 import android.os.Bundle;
+import android.os.Handler;
 import android.util.Log;
 import android.view.View;
 import android.view.Window;
 import android.view.WindowManager.LayoutParams;
 import android.widget.Toast;

+import org.webrtc.EglBase;
 import org.webrtc.IceCandidate;
 import org.webrtc.SessionDescription;
 import org.webrtc.StatsReport;
-import org.webrtc.VideoRenderer;
-import org.webrtc.VideoRendererGui;
-import org.webrtc.VideoRendererGui.ScalingType;
+import org.webrtc.SurfaceViewRenderer;
+import org.webrtc.RendererCommon.ScalingType;

 /**
  * Activity for peer connection call setup, call waiting
@@ -109,8 +110,11 @@ public class CallActivity extends Activity
   private AppRTCClient appRtcClient;
   private SignalingParameters signalingParameters;
   private AppRTCAudioManager audioManager = null;
-  private VideoRenderer.Callbacks localRender;
-  private VideoRenderer.Callbacks remoteRender;
+  private EglBase rootEglBase;
+  private SurfaceViewRenderer localRender;
+  private SurfaceViewRenderer remoteRender;
+  private PercentFrameLayout localRenderLayout;
+  private PercentFrameLayout remoteRenderLayout;
   private ScalingType scalingType;
   private Toast logToast;
   private boolean commandLineRun;
@@ -124,7 +128,6 @@ public class CallActivity extends Activity
   private long callStartedTimeMs = 0;

   // Controls
-  private GLSurfaceView videoView;
   CallFragment callFragment;
   HudFragment hudFragment;
@@ -154,31 +157,29 @@ public class CallActivity extends Activity
     scalingType = ScalingType.SCALE_ASPECT_FILL;

     // Create UI controls.
-    videoView = (GLSurfaceView) findViewById(R.id.glview_call);
+    localRender = (SurfaceViewRenderer) findViewById(R.id.local_video_view);
+    remoteRender = (SurfaceViewRenderer) findViewById(R.id.remote_video_view);
+    localRenderLayout = (PercentFrameLayout) findViewById(R.id.local_video_layout);
+    remoteRenderLayout = (PercentFrameLayout) findViewById(R.id.remote_video_layout);
     callFragment = new CallFragment();
     hudFragment = new HudFragment();

-    // Create video renderers.
-    VideoRendererGui.setView(videoView, new Runnable() {
-      @Override
-      public void run() {
-        createPeerConnectionFactory();
-      }
-    });
-    remoteRender = VideoRendererGui.create(
-        REMOTE_X, REMOTE_Y,
-        REMOTE_WIDTH, REMOTE_HEIGHT, scalingType, false);
-    localRender = VideoRendererGui.create(
-        LOCAL_X_CONNECTING, LOCAL_Y_CONNECTING,
-        LOCAL_WIDTH_CONNECTING, LOCAL_HEIGHT_CONNECTING, scalingType, true);
-
     // Show/hide call control fragment on view click.
-    videoView.setOnClickListener(new View.OnClickListener() {
+    View.OnClickListener listener = new View.OnClickListener() {
       @Override
       public void onClick(View view) {
         toggleCallControlFragmentVisibility();
       }
-    });
+    };
+    localRender.setOnClickListener(listener);
+    remoteRender.setOnClickListener(listener);
+
+    // Create video renderers.
+    rootEglBase = new EglBase();
+    localRender.init(rootEglBase.getContext());
+    remoteRender.init(rootEglBase.getContext());
+    localRender.setZOrderMediaOverlay(true);
+    updateVideoView();

     // Check for mandatory permissions.
     for (String permission : MANDATORY_PERMISSIONS) {
@@ -242,19 +243,19 @@ public class CallActivity extends Activity
     // For command line execution run connection for <runTimeMs> and exit.
     if (commandLineRun && runTimeMs > 0) {
-      videoView.postDelayed(new Runnable() {
+      (new Handler()).postDelayed(new Runnable() {
         public void run() {
           disconnect();
         }
       }, runTimeMs);
     }
+
+    createPeerConnectionFactory();
   }

   // Activity interfaces
   @Override
   public void onPause() {
     super.onPause();
-    videoView.onPause();
     activityRunning = false;
     if (peerConnectionClient != null) {
       peerConnectionClient.stopVideoSource();
@@ -264,7 +265,6 @@ public class CallActivity extends Activity
   @Override
   public void onResume() {
     super.onResume();
-    videoView.onResume();
     activityRunning = true;
     if (peerConnectionClient != null) {
       peerConnectionClient.startVideoSource();
@@ -279,6 +279,12 @@ public class CallActivity extends Activity
       logToast.cancel();
     }
     activityRunning = false;
+    localRender.release();
+    localRender = null;
+    remoteRender.release();
+    remoteRender = null;
+    rootEglBase.release();
+    rootEglBase = null;
   }

   // CallFragment.OnCallEvents interface implementation.
@@ -320,19 +326,23 @@ public class CallActivity extends Activity
   }

   private void updateVideoView() {
-    VideoRendererGui.update(remoteRender,
-        REMOTE_X, REMOTE_Y,
-        REMOTE_WIDTH, REMOTE_HEIGHT, scalingType, false);
+    remoteRenderLayout.setPosition(REMOTE_X, REMOTE_Y, REMOTE_WIDTH, REMOTE_HEIGHT);
+    remoteRender.setScalingType(scalingType);
+    remoteRender.setMirror(false);
     if (iceConnected) {
-      VideoRendererGui.update(localRender,
-          LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
-          LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
-          ScalingType.SCALE_ASPECT_FIT, true);
+      localRenderLayout.setPosition(
+          LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED, LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED);
+      localRender.setScalingType(ScalingType.SCALE_ASPECT_FIT);
     } else {
-      VideoRendererGui.update(localRender,
-          LOCAL_X_CONNECTING, LOCAL_Y_CONNECTING,
-          LOCAL_WIDTH_CONNECTING, LOCAL_HEIGHT_CONNECTING, scalingType, true);
+      localRenderLayout.setPosition(
+          LOCAL_X_CONNECTING, LOCAL_Y_CONNECTING, LOCAL_WIDTH_CONNECTING, LOCAL_HEIGHT_CONNECTING);
+      localRender.setScalingType(scalingType);
     }
+    localRender.setMirror(true);
+    localRender.requestLayout();
+    remoteRender.requestLayout();
   }

   private void startCall() {
@@ -390,7 +400,7 @@ public class CallActivity extends Activity
     Log.d(TAG, "Creating peer connection factory, delay=" + delta + "ms");
     peerConnectionClient = PeerConnectionClient.getInstance();
     peerConnectionClient.createPeerConnectionFactory(CallActivity.this,
-        VideoRendererGui.getEGLContext(), peerConnectionParameters,
+        rootEglBase.getContext(), peerConnectionParameters,
         CallActivity.this);
   }
   if (signalingParameters != null) {


@@ -19,7 +19,7 @@ import android.view.ViewGroup;
 import android.widget.ImageButton;
 import android.widget.TextView;

-import org.webrtc.VideoRendererGui.ScalingType;
+import org.webrtc.RendererCommon.ScalingType;

 /**
  * Fragment for call control.


@@ -0,0 +1,95 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc;
import android.content.Context;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
/**
* Simple container that confines the children to a subrectangle specified as percentage values of
* the container size. The children are centered horizontally and vertically inside the confined
* space.
*/
public class PercentFrameLayout extends ViewGroup {
private int xPercent = 0;
private int yPercent = 0;
private int widthPercent = 100;
private int heightPercent = 100;
public PercentFrameLayout(Context context) {
super(context);
}
public PercentFrameLayout(Context context, AttributeSet attrs) {
super(context, attrs);
}
public PercentFrameLayout(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
public void setPosition(int xPercent, int yPercent, int widthPercent, int heightPercent) {
this.xPercent = xPercent;
this.yPercent = yPercent;
this.widthPercent = widthPercent;
this.heightPercent = heightPercent;
}
@Override
public boolean shouldDelayChildPressedState() {
return false;
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
final int width = getDefaultSize(Integer.MAX_VALUE, widthMeasureSpec);
final int height = getDefaultSize(Integer.MAX_VALUE, heightMeasureSpec);
setMeasuredDimension(
MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));
final int childWidthMeasureSpec =
MeasureSpec.makeMeasureSpec(width * widthPercent / 100, MeasureSpec.AT_MOST);
final int childHeightMeasureSpec =
MeasureSpec.makeMeasureSpec(height * heightPercent / 100, MeasureSpec.AT_MOST);
for (int i = 0; i < getChildCount(); ++i) {
final View child = getChildAt(i);
if (child.getVisibility() != GONE) {
child.measure(childWidthMeasureSpec, childHeightMeasureSpec);
}
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
final int width = right - left;
final int height = bottom - top;
// Sub-rectangle specified by percentage values.
final int subWidth = width * widthPercent / 100;
final int subHeight = height * heightPercent / 100;
final int subLeft = left + width * xPercent / 100;
final int subTop = top + height * yPercent / 100;
for (int i = 0; i < getChildCount(); ++i) {
final View child = getChildAt(i);
if (child.getVisibility() != GONE) {
final int childWidth = child.getMeasuredWidth();
final int childHeight = child.getMeasuredHeight();
// Center child both vertically and horizontally.
final int childLeft = subLeft + (subWidth - childWidth) / 2;
final int childTop = subTop + (subHeight - childHeight) / 2;
child.layout(childLeft, childTop, childLeft + childWidth, childTop + childHeight);
}
}
}
}
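
A usage sketch (hypothetical R.id.local_video_layout id; this mirrors how CallActivity positions the local preview):

PercentFrameLayout layout = (PercentFrameLayout) findViewById(R.id.local_video_layout);
// Confine children to a box whose top-left corner is at (72%, 72%) of the parent
// and whose size is 25% x 25%; each child is centered inside that box.
layout.setPosition(72, 72, 25, 25);
layout.requestLayout();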


@@ -100,6 +100,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
       }
       renderFrameCalled = true;
       doneRendering.countDown();
+      VideoRenderer.renderFrameDone(frame);
     }