diff --git a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
index 0b5bcf805f..2d6d13dc11 100644
--- a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
+++ b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
@@ -287,7 +287,7 @@ public class SurfaceTextureHelper {
    * The returned TextureBuffer holds a reference to the SurfaceTextureHelper that created it. The
    * buffer calls returnTextureFrame() when it is released.
    */
-  public TextureBuffer createTextureBuffer(int width, int height, Matrix transformMatrix) {
+  public TextureBufferImpl createTextureBuffer(int width, int height, Matrix transformMatrix) {
     return new TextureBufferImpl(width, height, TextureBuffer.Type.OES, oesTextureId,
         transformMatrix, handler, yuvConverter, this ::returnTextureFrame);
   }
diff --git a/sdk/android/api/org/webrtc/TextureBufferImpl.java b/sdk/android/api/org/webrtc/TextureBufferImpl.java
index c34728a3b4..96d7d4380e 100644
--- a/sdk/android/api/org/webrtc/TextureBufferImpl.java
+++ b/sdk/android/api/org/webrtc/TextureBufferImpl.java
@@ -84,15 +84,27 @@ public class TextureBufferImpl implements VideoFrame.TextureBuffer {
 
   @Override
   public VideoFrame.Buffer cropAndScale(
       int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
-    final Matrix newMatrix = new Matrix(transformMatrix);
+    final Matrix cropAndScaleMatrix = new Matrix();
     // In WebRTC, Y=0 is the top row, while in OpenGL Y=0 is the bottom row. This means that the Y
     // direction is effectively reversed.
     final int cropYFromBottom = height - (cropY + cropHeight);
-    newMatrix.preTranslate(cropX / (float) width, cropYFromBottom / (float) height);
-    newMatrix.preScale(cropWidth / (float) width, cropHeight / (float) height);
+    cropAndScaleMatrix.preTranslate(cropX / (float) width, cropYFromBottom / (float) height);
+    cropAndScaleMatrix.preScale(cropWidth / (float) width, cropHeight / (float) height);
+    return applyTransformMatrix(cropAndScaleMatrix, scaleWidth, scaleHeight);
+  }
+
+  /**
+   * Create a new TextureBufferImpl with an applied transform matrix and a new size. The
+   * existing buffer is unchanged. The given transform matrix is applied first when texture
+   * coordinates are still in the unmodified [0, 1] range.
+   */
+  public TextureBufferImpl applyTransformMatrix(
+      Matrix transformMatrix, int newWidth, int newHeight) {
+    final Matrix newMatrix = new Matrix(this.transformMatrix);
+    newMatrix.preConcat(transformMatrix);
     retain();
     return new TextureBufferImpl(
-        scaleWidth, scaleHeight, type, id, newMatrix, toI420Handler, yuvConverter, this ::release);
+        newWidth, newHeight, type, id, newMatrix, toI420Handler, yuvConverter, this ::release);
   }
 }
diff --git a/sdk/android/api/org/webrtc/VideoFrame.java b/sdk/android/api/org/webrtc/VideoFrame.java
index 6d19260471..4afe78266a 100644
--- a/sdk/android/api/org/webrtc/VideoFrame.java
+++ b/sdk/android/api/org/webrtc/VideoFrame.java
@@ -118,7 +118,7 @@ public class VideoFrame implements RefCounted {
      * homogeneous coordinates of the form (s, t, 1) with s and t in the inclusive range [0, 1] to
      * the coordinate that should be used to sample that location from the buffer.
      */
-    public Matrix getTransformMatrix();
+    Matrix getTransformMatrix();
   }
 
   private final Buffer buffer;
diff --git a/sdk/android/src/java/org/webrtc/Camera1Session.java b/sdk/android/src/java/org/webrtc/Camera1Session.java
index 08502dc630..03a5363ca7 100644
--- a/sdk/android/src/java/org/webrtc/Camera1Session.java
+++ b/sdk/android/src/java/org/webrtc/Camera1Session.java
@@ -14,13 +14,13 @@ import android.content.Context;
 import android.os.Handler;
 import android.os.SystemClock;
 import android.view.Surface;
-import android.view.WindowManager;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 import javax.annotation.Nullable;
 import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import android.graphics.Matrix;
 
 @SuppressWarnings("deprecation")
 class Camera1Session implements CameraSession {
@@ -253,9 +253,13 @@ class Camera1Session implements CameraSession {
           int oesTextureId, float[] transformMatrix, long timestampNs) {
         checkIsOnCameraThread();
 
+        final TextureBufferImpl buffer =
+            surfaceTextureHelper.createTextureBuffer(captureFormat.width, captureFormat.height,
+                RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
+
         if (state != SessionState.RUNNING) {
           Logging.d(TAG, "Texture frame captured but camera is no longer running.");
-          surfaceTextureHelper.returnTextureFrame();
+          buffer.release();
           return;
         }
 
@@ -266,17 +270,14 @@ class Camera1Session implements CameraSession {
           firstFrameReported = true;
         }
 
-        int rotation = getFrameOrientation();
-        if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
-          // Undo the mirror that the OS "helps" us with.
-          // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
-          transformMatrix = RendererCommon.multiplyMatrices(
-              transformMatrix, RendererCommon.horizontalFlipMatrix());
-        }
-        final VideoFrame.Buffer buffer =
-            surfaceTextureHelper.createTextureBuffer(captureFormat.width, captureFormat.height,
-                RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
-        final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
+        // Undo the mirror that the OS "helps" us with.
+        // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+        final VideoFrame frame = new VideoFrame(
+            CameraSession.createTextureBufferWithModifiedTransformMatrix(buffer,
+                /* mirror= */ info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT,
+                /* rotation= */ 0),
+            /* rotation= */ getFrameOrientation(), timestampNs);
+        buffer.release();
         events.onFrameCaptured(Camera1Session.this, frame);
         frame.release();
       }
@@ -321,30 +322,8 @@ class Camera1Session implements CameraSession {
     });
   }
 
-  private int getDeviceOrientation() {
-    int orientation = 0;
-
-    WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
-    switch (wm.getDefaultDisplay().getRotation()) {
-      case Surface.ROTATION_90:
-        orientation = 90;
-        break;
-      case Surface.ROTATION_180:
-        orientation = 180;
-        break;
-      case Surface.ROTATION_270:
-        orientation = 270;
-        break;
-      case Surface.ROTATION_0:
-      default:
-        orientation = 0;
-        break;
-    }
-    return orientation;
-  }
-
   private int getFrameOrientation() {
-    int rotation = getDeviceOrientation();
+    int rotation = CameraSession.getDeviceOrientation(applicationContext);
     if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
       rotation = 360 - rotation;
     }
diff --git a/sdk/android/src/java/org/webrtc/Camera2Session.java b/sdk/android/src/java/org/webrtc/Camera2Session.java
index 66e2e6e862..59494b1edd 100644
--- a/sdk/android/src/java/org/webrtc/Camera2Session.java
+++ b/sdk/android/src/java/org/webrtc/Camera2Session.java
@@ -26,7 +26,6 @@ import android.os.Handler;
 import javax.annotation.Nullable;
 import android.util.Range;
 import android.view.Surface;
-import android.view.WindowManager;
 import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
@@ -191,9 +190,13 @@ class Camera2Session implements CameraSession {
           int oesTextureId, float[] transformMatrix, long timestampNs) {
         checkIsOnCameraThread();
 
+        final TextureBufferImpl buffer = surfaceTextureHelper.createTextureBuffer(
+            captureFormat.width, captureFormat.height,
+            RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
+
         if (state != SessionState.RUNNING) {
           Logging.d(TAG, "Texture frame captured but camera is no longer running.");
-          surfaceTextureHelper.returnTextureFrame();
+          buffer.release();
           return;
         }
 
@@ -204,22 +207,15 @@ class Camera2Session implements CameraSession {
           camera2StartTimeMsHistogram.addSample(startTimeMs);
         }
 
-        int rotation = getFrameOrientation();
-        if (isCameraFrontFacing) {
-          // Undo the mirror that the OS "helps" us with.
-          // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
-          transformMatrix = RendererCommon.multiplyMatrices(
-              transformMatrix, RendererCommon.horizontalFlipMatrix());
-        }
-
-        // Undo camera orientation - we report it as rotation instead.
-        transformMatrix =
-            RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);
-
-        VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
-            captureFormat.width, captureFormat.height,
-            RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
-        final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
+        // Undo the mirror that the OS "helps" us with.
+        // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+        // Also, undo camera orientation, we report it as rotation instead.
+        final VideoFrame frame = new VideoFrame(
+            CameraSession.createTextureBufferWithModifiedTransformMatrix(buffer,
+                /* mirror= */ isCameraFrontFacing,
+                /* rotation= */ -cameraOrientation),
+            /* rotation= */ getFrameOrientation(), timestampNs);
+        buffer.release();
         events.onFrameCaptured(Camera2Session.this, frame);
         frame.release();
       }
@@ -421,30 +417,8 @@ class Camera2Session implements CameraSession {
     }
   }
 
-  private int getDeviceOrientation() {
-    int orientation = 0;
-
-    WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
-    switch (wm.getDefaultDisplay().getRotation()) {
-      case Surface.ROTATION_90:
-        orientation = 90;
-        break;
-      case Surface.ROTATION_180:
-        orientation = 180;
-        break;
-      case Surface.ROTATION_270:
-        orientation = 270;
-        break;
-      case Surface.ROTATION_0:
-      default:
-        orientation = 0;
-        break;
-    }
-    return orientation;
-  }
-
   private int getFrameOrientation() {
-    int rotation = getDeviceOrientation();
+    int rotation = CameraSession.getDeviceOrientation(applicationContext);
     if (!isCameraFrontFacing) {
       rotation = 360 - rotation;
     }
diff --git a/sdk/android/src/java/org/webrtc/CameraSession.java b/sdk/android/src/java/org/webrtc/CameraSession.java
index eec3ed2681..8d137854d8 100644
--- a/sdk/android/src/java/org/webrtc/CameraSession.java
+++ b/sdk/android/src/java/org/webrtc/CameraSession.java
@@ -10,17 +10,22 @@
 
 package org.webrtc;
 
+import android.content.Context;
+import android.graphics.Matrix;
+import android.view.WindowManager;
+import android.view.Surface;
+
 interface CameraSession {
   enum FailureType { ERROR, DISCONNECTED }
 
   // Callbacks are fired on the camera thread.
-  public interface CreateSessionCallback {
+  interface CreateSessionCallback {
     void onDone(CameraSession session);
     void onFailure(FailureType failureType, String error);
   }
 
   // Events are fired on the camera thread.
-  public interface Events {
+  interface Events {
     void onCameraOpening();
     void onCameraError(CameraSession session, String error);
     void onCameraDisconnected(CameraSession session);
@@ -33,4 +38,35 @@ interface CameraSession {
    * If waitCameraStop is true, also waits for the camera to stop.
    */
   void stop();
+
+  static int getDeviceOrientation(Context context) {
+    final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
+    switch (wm.getDefaultDisplay().getRotation()) {
+      case Surface.ROTATION_90:
+        return 90;
+      case Surface.ROTATION_180:
+        return 180;
+      case Surface.ROTATION_270:
+        return 270;
+      case Surface.ROTATION_0:
+      default:
+        return 0;
+    }
+  }
+
+  static VideoFrame.TextureBuffer createTextureBufferWithModifiedTransformMatrix(
+      TextureBufferImpl buffer, boolean mirror, int rotation) {
+    final Matrix transformMatrix = new Matrix();
+    // Perform mirror and rotation around (0.5, 0.5) since that is the center of the texture.
+    transformMatrix.preTranslate(/* dx= */ 0.5f, /* dy= */ 0.5f);
+    if (mirror) {
+      transformMatrix.preScale(/* sx= */ -1f, /* sy= */ 1f);
+    }
+    transformMatrix.preRotate(rotation);
+    transformMatrix.preTranslate(/* dx= */ -0.5f, /* dy= */ -0.5f);
+
+    // The width and height are not affected by rotation since Camera2Session has set them to the
+    // value they should be after undoing the rotation.
+    return buffer.applyTransformMatrix(transformMatrix, buffer.getWidth(), buffer.getHeight());
+  }
 }
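
Usage note (not part of the patch): after this change the per-frame matrix handling in both camera sessions reduces to the two new helpers. The sketch below is illustrative only and shows roughly how a texture-frame callback inside the org.webrtc package could combine them; the surrounding listener plus the surfaceTextureHelper, captureWidth, captureHeight, frontFacing, and getFrameOrientation() names are placeholders assumed for the example, not part of this CL.

// Illustrative sketch of a capture callback using the new helpers.
public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
  // createTextureBuffer() now returns TextureBufferImpl, so extra transforms can still be applied.
  final TextureBufferImpl buffer = surfaceTextureHelper.createTextureBuffer(captureWidth,
      captureHeight, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));

  // Mirror front-facing frames (and optionally undo sensor orientation) around the texture
  // center (0.5, 0.5), as the static helper in CameraSession does.
  final VideoFrame.TextureBuffer modifiedBuffer =
      CameraSession.createTextureBufferWithModifiedTransformMatrix(
          buffer, /* mirror= */ frontFacing, /* rotation= */ 0);

  // getFrameOrientation() stands in for the session logic that derives the frame rotation from
  // CameraSession.getDeviceOrientation(applicationContext) and the camera facing.
  final VideoFrame frame = new VideoFrame(modifiedBuffer, getFrameOrientation(), timestampNs);

  // The modified buffer carries its own reference (applyTransformMatrix() retains the source),
  // so the intermediate buffer can be released once the frame owns the modified one.
  buffer.release();
  // ... deliver the frame to the observer, then drop this reference.
  frame.release();
}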