Android: Make VideoCapturer an interface for all VideoCapturers to implement
This CL factors out the interface that AndroidVideoCapturerJni uses to communicate with its Java counterpart and moves it into VideoCapturer. The interface itself is left unchanged in this CL; a follow-up CL is planned to simplify and improve it. Another change is that the native part of VideoCapturer is now created in PeerConnectionFactory.createVideoSource() instead of immediately in the ctor.

BUG=webrtc:5519
R=perkj@webrtc.org

Review URL: https://codereview.webrtc.org/1696553003 .

Cr-Commit-Position: refs/heads/master@{#11606}
Parent: e78765bd4b
Commit: 5e7834e151
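For orientation, here is a minimal app-side sketch of the wiring this CL moves to: the Java VideoCapturerAndroid is now passed directly to PeerConnectionFactory.createVideoSource(), which creates the native capturer and hands ownership to the returned VideoSource. Everything not shown in the diff below is an assumption for illustration: the initializeAndroidGlobals() call, the create()/getNameOfFrontFacingDevice() helpers, the no-arg factory constructor and the "video0" track id.

    // Sketch only; assumes PeerConnectionFactory.initializeAndroidGlobals() has already been called.
    VideoCapturerAndroid capturer = VideoCapturerAndroid.create(
        CameraEnumerationAndroid.getNameOfFrontFacingDevice(), null /* eventsHandler */);
    PeerConnectionFactory factory = new PeerConnectionFactory();
    // createVideoSource() now creates the native capturer; the VideoSource takes ownership,
    // so the capturer is not released manually afterwards (see PeerConnectionFactory.java below).
    VideoSource videoSource = factory.createVideoSource(capturer, new MediaConstraints());
    VideoTrack videoTrack = factory.createVideoTrack("video0", videoSource);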

VideoCapturerAndroidTestFixtures.java
@@ -82,8 +82,7 @@ public class VideoCapturerAndroidTestFixtures {
     }
   }
 
-  static class FakeCapturerObserver implements
-      VideoCapturerAndroid.CapturerObserver {
+  static class FakeCapturerObserver implements VideoCapturer.CapturerObserver {
     private int framesCaptured = 0;
     private int frameSize = 0;
     private int frameWidth = 0;
@@ -224,7 +223,7 @@ public class VideoCapturerAndroidTestFixtures {
   static public void release(VideoCapturerAndroid capturer) {
     assertNotNull(capturer);
     capturer.dispose();
-    assertTrue(capturer.isReleased());
+    assertTrue(capturer.isDisposed());
   }
 
   static public void startCapturerAndRender(VideoCapturerAndroid capturer)
@@ -239,7 +238,7 @@ public class VideoCapturerAndroidTestFixtures {
     track.dispose();
     source.dispose();
     factory.dispose();
-    assertTrue(capturer.isReleased());
+    assertTrue(capturer.isDisposed());
   }
 
   static public void switchCamera(VideoCapturerAndroid capturer) throws InterruptedException {
@@ -279,7 +278,7 @@ public class VideoCapturerAndroidTestFixtures {
     track.dispose();
     source.dispose();
     factory.dispose();
-    assertTrue(capturer.isReleased());
+    assertTrue(capturer.isDisposed());
   }
 
   static public void cameraEventsInvoked(VideoCapturerAndroid capturer, CameraEvents events,
@@ -297,9 +296,8 @@ public class VideoCapturerAndroidTestFixtures {
     if (capturer.isCapturingToTexture()) {
      capturer.surfaceHelper.returnTextureFrame();
     }
-    capturer.dispose();
+    release(capturer);
 
-    assertTrue(capturer.isReleased());
     assertTrue(events.onCameraOpeningCalled);
     assertTrue(events.onFirstFrameAvailableCalled);
   }
@@ -324,8 +322,7 @@ public class VideoCapturerAndroidTestFixtures {
     capturer.onOutputFormatRequest(640, 480, 15);
     capturer.changeCaptureFormat(640, 480, 15);
 
-    capturer.dispose();
-    assertTrue(capturer.isReleased());
+    release(capturer);
   }
 
   static public void stopRestartVideoSource(VideoCapturerAndroid capturer)
@@ -348,7 +345,7 @@ public class VideoCapturerAndroidTestFixtures {
     track.dispose();
     source.dispose();
     factory.dispose();
-    assertTrue(capturer.isReleased());
+    assertTrue(capturer.isDisposed());
   }
 
   static public void startStopWithDifferentResolutions(VideoCapturerAndroid capturer,
@@ -384,8 +381,7 @@ public class VideoCapturerAndroidTestFixtures {
        capturer.surfaceHelper.returnTextureFrame();
      }
     }
-    capturer.dispose();
-    assertTrue(capturer.isReleased());
+    release(capturer);
   }
 
   static void waitUntilIdle(VideoCapturerAndroid capturer) throws InterruptedException {
@@ -400,10 +396,9 @@ public class VideoCapturerAndroidTestFixtures {
 
   static public void startWhileCameraIsAlreadyOpen(
      VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
-    Camera camera = Camera.open(capturer.getCurrentCameraId());
-
     final List<CaptureFormat> formats = capturer.getSupportedFormats();
     final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+    Camera camera = Camera.open(capturer.getCurrentCameraId());
 
     final FakeCapturerObserver observer = new FakeCapturerObserver();
     capturer.startCapture(format.width, format.height, format.maxFramerate,
@@ -417,16 +412,15 @@ public class VideoCapturerAndroidTestFixtures {
      assertFalse(observer.WaitForCapturerToStart());
     }
 
-    capturer.dispose();
+    release(capturer);
     camera.release();
   }
 
   static public void startWhileCameraIsAlreadyOpenAndCloseCamera(
      VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
-    Camera camera = Camera.open(capturer.getCurrentCameraId());
-
     final List<CaptureFormat> formats = capturer.getSupportedFormats();
     final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+    Camera camera = Camera.open(capturer.getCurrentCameraId());
 
     final FakeCapturerObserver observer = new FakeCapturerObserver();
     capturer.startCapture(format.width, format.height, format.maxFramerate,
@@ -442,22 +436,20 @@ public class VideoCapturerAndroidTestFixtures {
     if (capturer.isCapturingToTexture()) {
      capturer.surfaceHelper.returnTextureFrame();
     }
-    capturer.dispose();
-    assertTrue(capturer.isReleased());
+    release(capturer);
   }
 
   static public void startWhileCameraIsAlreadyOpenAndStop(
      VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
-    Camera camera = Camera.open(capturer.getCurrentCameraId());
     final List<CaptureFormat> formats = capturer.getSupportedFormats();
     final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+    Camera camera = Camera.open(capturer.getCurrentCameraId());
 
     final FakeCapturerObserver observer = new FakeCapturerObserver();
     capturer.startCapture(format.width, format.height, format.maxFramerate,
        appContext, observer);
     capturer.stopCapture();
-    capturer.dispose();
-    assertTrue(capturer.isReleased());
+    release(capturer);
     camera.release();
   }
 
@@ -493,8 +485,7 @@ public class VideoCapturerAndroidTestFixtures {
      capturer.surfaceHelper.returnTextureFrame();
     }
 
-    capturer.dispose();
-    assertTrue(capturer.isReleased());
+    release(capturer);
   }
 
   static public void returnBufferLateEndToEnd(VideoCapturerAndroid capturer)
@@ -514,7 +505,7 @@ public class VideoCapturerAndroidTestFixtures {
     track.dispose();
     source.dispose();
     factory.dispose();
-    assertTrue(capturer.isReleased());
+    assertTrue(capturer.isDisposed());
 
     // Return the frame(s), on a different thread out of spite.
     final List<I420Frame> pendingFrames = renderer.waitForPendingFrames();
@@ -553,8 +544,7 @@ public class VideoCapturerAndroidTestFixtures {
      capturer.surfaceHelper.returnTextureFrame();
     }
 
-    capturer.dispose();
-    assertTrue(capturer.isReleased());
+    release(capturer);
   }
 
   static public void scaleCameraOutput(VideoCapturerAndroid capturer) throws InterruptedException {
@@ -590,7 +580,7 @@ public class VideoCapturerAndroidTestFixtures {
     track.dispose();
     source.dispose();
     factory.dispose();
-    assertTrue(capturer.isReleased());
+    assertTrue(capturer.isDisposed());
 
     assertTrue(gotExpectedResolution);
   }

androidvideocapturer.cc
@@ -225,7 +225,7 @@ void AndroidVideoCapturer::OnOutputFormatRequest(
 bool AndroidVideoCapturer::GetBestCaptureFormat(
     const cricket::VideoFormat& desired,
     cricket::VideoFormat* best_format) {
-  // Delegate this choice to VideoCapturerAndroid.startCapture().
+  // Delegate this choice to VideoCapturer.startCapture().
   *best_format = desired;
   return true;
 }

CameraEnumerationAndroid.java
@@ -50,9 +50,8 @@ public class CameraEnumerationAndroid {
     public final int height;
     public final int maxFramerate;
     public final int minFramerate;
-    // TODO(hbos): If VideoCapturerAndroid.startCapture is updated to support
-    // other image formats then this needs to be updated and
-    // VideoCapturerAndroid.getSupportedFormats need to return CaptureFormats of
+    // TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
+    // needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
     // all imageFormats.
     public final int imageFormat = ImageFormat.NV21;
 

VideoCapturerAndroid.java
@@ -45,7 +45,8 @@ import java.util.concurrent.TimeUnit;
 // camera thread. The internal *OnCameraThread() methods must check |camera| for null to check if
 // the camera has been stopped.
 @SuppressWarnings("deprecation")
-public class VideoCapturerAndroid extends VideoCapturer implements
+public class VideoCapturerAndroid implements
+    VideoCapturer,
     android.hardware.Camera.PreviewCallback,
     SurfaceTextureHelper.OnTextureFrameAvailableListener {
   private final static String TAG = "VideoCapturerAndroid";
@@ -196,12 +197,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements
     if (cameraId == -1) {
       return null;
     }
-
-    final VideoCapturerAndroid capturer = new VideoCapturerAndroid(cameraId, eventsHandler,
-        sharedEglContext);
-    capturer.setNativeCapturer(
-        nativeCreateVideoCapturer(capturer, capturer.surfaceHelper));
-    return capturer;
+    return new VideoCapturerAndroid(cameraId, eventsHandler, sharedEglContext);
   }
 
   public void printStackTrace() {
@@ -297,14 +293,14 @@ public class VideoCapturerAndroid extends VideoCapturer implements
     return isCapturingToTexture;
   }
 
-  // Called from native code.
-  private String getSupportedFormatsAsJson() throws JSONException {
+  @Override
+  public String getSupportedFormatsAsJson() throws JSONException {
     return CameraEnumerationAndroid.getSupportedFormatsAsJson(getCurrentCameraId());
   }
 
-  // Called from native VideoCapturer_nativeCreateVideoCapturer.
-  private VideoCapturerAndroid(int cameraId) {
-    this(cameraId, null, null);
+  @Override
+  public SurfaceTextureHelper getSurfaceTextureHelper() {
+    return surfaceHelper;
   }
 
   private VideoCapturerAndroid(int cameraId, CameraEventsHandler eventsHandler,
@@ -347,11 +343,12 @@ public class VideoCapturerAndroid extends VideoCapturer implements
     return -1;
   }
 
-  // Called by native code to quit the camera thread. This needs to be done manually, otherwise the
-  // thread and handler will not be garbage collected.
-  private void release() {
+  // Quits the camera thread. This needs to be done manually, otherwise the thread and handler will
+  // not be garbage collected.
+  @Override
+  public void dispose() {
     Logging.d(TAG, "release");
-    if (isReleased()) {
+    if (isDisposed()) {
       throw new IllegalStateException("Already released");
     }
     ThreadUtils.invokeUninterruptibly(cameraThreadHandler, new Runnable() {
@@ -367,15 +364,14 @@ public class VideoCapturerAndroid extends VideoCapturer implements
   }
 
   // Used for testing purposes to check if release() has been called.
-  public boolean isReleased() {
+  public boolean isDisposed() {
     return (cameraThread == null);
   }
 
-  // Called by native code.
-  //
   // Note that this actually opens the camera, and Camera callbacks run on the
   // thread that calls open(), so this is done on the CameraThread.
-  void startCapture(
+  @Override
+  public void startCapture(
       final int width, final int height, final int framerate,
       final Context applicationContext, final CapturerObserver frameObserver) {
     Logging.d(TAG, "startCapture requested: " + width + "x" + height
@@ -548,8 +544,9 @@ public class VideoCapturerAndroid extends VideoCapturer implements
     camera.startPreview();
   }
 
-  // Called by native code. Returns true when camera is known to be stopped.
-  void stopCapture() throws InterruptedException {
+  // Blocks until camera is known to be stopped.
+  @Override
+  public void stopCapture() throws InterruptedException {
     Logging.d(TAG, "stopCapture");
     final CountDownLatch barrier = new CountDownLatch(1);
     cameraThreadHandler.post(new Runnable() {
@@ -702,75 +699,4 @@ public class VideoCapturerAndroid extends VideoCapturer implements
     frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
         transformMatrix, rotation, timestampNs);
   }
-
-  // Interface used for providing callbacks to an observer.
-  interface CapturerObserver {
-    // Notify if the camera have been started successfully or not.
-    // Called on a Java thread owned by VideoCapturerAndroid.
-    abstract void onCapturerStarted(boolean success);
-
-    // Delivers a captured frame. Called on a Java thread owned by
-    // VideoCapturerAndroid.
-    abstract void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
-        long timeStamp);
-
-    // Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
-    // owned by VideoCapturerAndroid.
-    abstract void onTextureFrameCaptured(
-        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
-        long timestamp);
-
-    // Requests an output format from the video capturer. Captured frames
-    // by the camera will be scaled/or dropped by the video capturer.
-    // Called on a Java thread owned by VideoCapturerAndroid.
-    abstract void onOutputFormatRequest(int width, int height, int framerate);
-  }
-
-  // An implementation of CapturerObserver that forwards all calls from
-  // Java to the C layer.
-  static class NativeObserver implements CapturerObserver {
-    private final long nativeCapturer;
-
-    public NativeObserver(long nativeCapturer) {
-      this.nativeCapturer = nativeCapturer;
-    }
-
-    @Override
-    public void onCapturerStarted(boolean success) {
-      nativeCapturerStarted(nativeCapturer, success);
-    }
-
-    @Override
-    public void onByteBufferFrameCaptured(byte[] data, int width, int height,
-        int rotation, long timeStamp) {
-      nativeOnByteBufferFrameCaptured(nativeCapturer, data, data.length, width, height, rotation,
-          timeStamp);
-    }
-
-    @Override
-    public void onTextureFrameCaptured(
-        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
-        long timestamp) {
-      nativeOnTextureFrameCaptured(nativeCapturer, width, height, oesTextureId, transformMatrix,
-          rotation, timestamp);
-    }
-
-    @Override
-    public void onOutputFormatRequest(int width, int height, int framerate) {
-      nativeOnOutputFormatRequest(nativeCapturer, width, height, framerate);
-    }
-
-    private native void nativeCapturerStarted(long nativeCapturer,
-        boolean success);
-    private native void nativeOnByteBufferFrameCaptured(long nativeCapturer,
-        byte[] data, int length, int width, int height, int rotation, long timeStamp);
-    private native void nativeOnTextureFrameCaptured(long nativeCapturer, int width, int height,
-        int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
-    private native void nativeOnOutputFormatRequest(long nativeCapturer,
-        int width, int height, int framerate);
-  }
-
-  private static native long nativeCreateVideoCapturer(
-      VideoCapturerAndroid videoCapturer,
-      SurfaceTextureHelper surfaceHelper);
 }

androidvideocapturer_jni.cc
@@ -36,11 +36,11 @@ AndroidVideoCapturerJni::AndroidVideoCapturerJni(
     jobject j_surface_texture_helper)
     : j_video_capturer_(jni, j_video_capturer),
       j_video_capturer_class_(
-          jni, FindClass(jni, "org/webrtc/VideoCapturerAndroid")),
+          jni, FindClass(jni, "org/webrtc/VideoCapturer")),
       j_observer_class_(
           jni,
          FindClass(jni,
-                    "org/webrtc/VideoCapturerAndroid$NativeObserver")),
+                    "org/webrtc/VideoCapturer$NativeObserver")),
       surface_texture_helper_(new rtc::RefCountedObject<SurfaceTextureHelper>(
           jni, j_surface_texture_helper)),
       capturer_(nullptr) {
@@ -52,8 +52,8 @@ AndroidVideoCapturerJni::~AndroidVideoCapturerJni() {
   LOG(LS_INFO) << "AndroidVideoCapturerJni dtor";
   jni()->CallVoidMethod(
       *j_video_capturer_,
-      GetMethodID(jni(), *j_video_capturer_class_, "release", "()V"));
-  CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.release()";
+      GetMethodID(jni(), *j_video_capturer_class_, "dispose", "()V"));
+  CHECK_EXCEPTION(jni()) << "error during VideoCapturer.dispose()";
 }
 
 void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
@@ -76,13 +76,13 @@ void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
   jmethodID m = GetMethodID(
       jni(), *j_video_capturer_class_, "startCapture",
       "(IIILandroid/content/Context;"
-      "Lorg/webrtc/VideoCapturerAndroid$CapturerObserver;)V");
+      "Lorg/webrtc/VideoCapturer$CapturerObserver;)V");
   jni()->CallVoidMethod(*j_video_capturer_,
                         m, width, height,
                         framerate,
                         application_context_,
                         j_frame_observer);
-  CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.startCapture";
+  CHECK_EXCEPTION(jni()) << "error during VideoCapturer.startCapture";
 }
 
 void AndroidVideoCapturerJni::Stop() {
@@ -97,7 +97,7 @@ void AndroidVideoCapturerJni::Stop() {
   jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
                             "stopCapture", "()V");
   jni()->CallVoidMethod(*j_video_capturer_, m);
-  CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.stopCapture";
+  CHECK_EXCEPTION(jni()) << "error during VideoCapturer.stopCapture";
   LOG(LS_INFO) << "AndroidVideoCapturerJni stop done";
 }
 
@@ -178,7 +178,7 @@ void AndroidVideoCapturerJni::OnOutputFormatRequest(int width,
 JNIEnv* AndroidVideoCapturerJni::jni() { return AttachCurrentThreadIfNeeded(); }
 
 JOW(void,
-    VideoCapturerAndroid_00024NativeObserver_nativeOnByteBufferFrameCaptured)
+    VideoCapturer_00024NativeObserver_nativeOnByteBufferFrameCaptured)
 (JNIEnv* jni, jclass, jlong j_capturer, jbyteArray j_frame, jint length,
  jint width, jint height, jint rotation, jlong timestamp) {
   jboolean is_copy = true;
@@ -188,7 +188,7 @@ JOW(void,
   jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
 }
 
-JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnTextureFrameCaptured)
+JOW(void, VideoCapturer_00024NativeObserver_nativeOnTextureFrameCaptured)
 (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
  jint j_oes_texture_id, jfloatArray j_transform_matrix,
  jint j_rotation, jlong j_timestamp) {
@@ -198,14 +198,14 @@ JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnTextureFrameCaptured)
                                        j_transform_matrix));
 }
 
-JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted)
+JOW(void, VideoCapturer_00024NativeObserver_nativeCapturerStarted)
 (JNIEnv* jni, jclass, jlong j_capturer, jboolean j_success) {
   LOG(LS_INFO) << "NativeObserver_nativeCapturerStarted";
   reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnCapturerStarted(
       j_success);
 }
 
-JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnOutputFormatRequest)
+JOW(void, VideoCapturer_00024NativeObserver_nativeOnOutputFormatRequest)
 (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
  jint j_fps) {
   LOG(LS_INFO) << "NativeObserver_nativeOnOutputFormatRequest";
@@ -213,16 +213,4 @@ JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnOutputFormatRequest)
       j_width, j_height, j_fps);
 }
 
-JOW(jlong, VideoCapturerAndroid_nativeCreateVideoCapturer)
-(JNIEnv* jni, jclass,
- jobject j_video_capturer, jobject j_surface_texture_helper) {
-  rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
-      new rtc::RefCountedObject<AndroidVideoCapturerJni>(
-          jni, j_video_capturer, j_surface_texture_helper);
-  rtc::scoped_ptr<cricket::VideoCapturer> capturer(
-      new webrtc::AndroidVideoCapturer(delegate));
-  // Caller takes ownership of the cricket::VideoCapturer* pointer.
-  return jlongFromPointer(capturer.release());
-}
-
 }  // namespace webrtc_jni

androidvideocapturer_jni.h
@@ -42,7 +42,7 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
 
   std::string GetSupportedFormats() override;
 
-  // Called from VideoCapturerAndroid::NativeObserver on a Java thread.
+  // Called from VideoCapturer::NativeObserver on a Java thread.
   void OnCapturerStarted(bool success);
   void OnMemoryBufferFrame(void* video_frame, int length, int width,
                            int height, int rotation, int64_t timestamp_ns);

classreferenceholder.cc
@@ -90,8 +90,8 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
   LoadClass(jni, "org/webrtc/StatsReport");
   LoadClass(jni, "org/webrtc/StatsReport$Value");
   LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
-  LoadClass(jni, "org/webrtc/VideoCapturerAndroid");
-  LoadClass(jni, "org/webrtc/VideoCapturerAndroid$NativeObserver");
+  LoadClass(jni, "org/webrtc/VideoCapturer");
+  LoadClass(jni, "org/webrtc/VideoCapturer$NativeObserver");
   LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame");
   LoadClass(jni, "org/webrtc/VideoTrack");
 }

peerconnection_jni.cc
@@ -910,10 +910,6 @@ JOW(void, MediaSource_free)(JNIEnv*, jclass, jlong j_p) {
   CHECK_RELEASE(reinterpret_cast<MediaSourceInterface*>(j_p));
 }
 
-JOW(void, VideoCapturer_free)(JNIEnv*, jclass, jlong j_p) {
-  delete reinterpret_cast<cricket::VideoCapturer*>(j_p);
-}
-
 JOW(void, VideoRenderer_freeWrappedVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
   delete reinterpret_cast<JavaVideoRendererWrapper*>(j_p);
 }
@@ -1216,16 +1212,27 @@ JOW(jlong, PeerConnectionFactory_nativeCreateLocalMediaStream)(
 }
 
 JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)(
-    JNIEnv* jni, jclass, jlong native_factory, jlong native_capturer,
+    JNIEnv* jni, jclass, jlong native_factory, jobject j_video_capturer,
     jobject j_constraints) {
+  // Create a cricket::VideoCapturer from |j_video_capturer|.
+  jobject j_surface_texture_helper = jni->CallObjectMethod(
+      j_video_capturer,
+      GetMethodID(jni, FindClass(jni, "org/webrtc/VideoCapturer"),
+                  "getSurfaceTextureHelper",
+                  "()Lorg/webrtc/SurfaceTextureHelper;"));
+  rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
+      new rtc::RefCountedObject<AndroidVideoCapturerJni>(
+          jni, j_video_capturer, j_surface_texture_helper);
+  rtc::scoped_ptr<cricket::VideoCapturer> capturer(
+      new webrtc::AndroidVideoCapturer(delegate));
+  // Create a webrtc::VideoSourceInterface from the cricket::VideoCapturer,
+  // native factory and constraints.
   scoped_ptr<ConstraintsWrapper> constraints(
       new ConstraintsWrapper(jni, j_constraints));
   rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
       factoryFromJava(native_factory));
   rtc::scoped_refptr<VideoSourceInterface> source(
-      factory->CreateVideoSource(
-          reinterpret_cast<cricket::VideoCapturer*>(native_capturer),
-          constraints.get()));
+      factory->CreateVideoSource(capturer.release(), constraints.get()));
   return (jlong)source.release();
 }
 

PeerConnectionFactory.java
@@ -108,10 +108,12 @@ public class PeerConnectionFactory {
         nativeCreateLocalMediaStream(nativeFactory, label));
   }
 
+  // The VideoSource takes ownership of |capturer|, so capturer.release() should not be called
+  // manually after this.
   public VideoSource createVideoSource(
       VideoCapturer capturer, MediaConstraints constraints) {
     return new VideoSource(nativeCreateVideoSource(
-        nativeFactory, capturer.takeNativeVideoCapturer(), constraints));
+        nativeFactory, capturer, constraints));
   }
 
   public VideoTrack createVideoTrack(String id, VideoSource source) {
@@ -221,8 +223,7 @@ public class PeerConnectionFactory {
       long nativeFactory, String label);
 
   private static native long nativeCreateVideoSource(
-      long nativeFactory, long nativeVideoCapturer,
-      MediaConstraints constraints);
+      long nativeFactory, VideoCapturer videoCapturer, MediaConstraints constraints);
 
   private static native long nativeCreateVideoTrack(
       long nativeFactory, String id, long nativeVideoSource);

VideoCapturer.java
@@ -10,36 +10,89 @@
 
 package org.webrtc;
 
-/** Java version of cricket::VideoCapturer. */
-// TODO(perkj): Merge VideoCapturer and VideoCapturerAndroid.
-public class VideoCapturer {
-  private long nativeVideoCapturer;
-
-  protected VideoCapturer() {
-  }
-
-  // Sets |nativeCapturer| to be owned by VideoCapturer.
-  protected void setNativeCapturer(long nativeCapturer) {
-    this.nativeVideoCapturer = nativeCapturer;
-  }
-
-  // Package-visible for PeerConnectionFactory.
-  long takeNativeVideoCapturer() {
-    if (nativeVideoCapturer == 0) {
-      throw new RuntimeException("Capturer can only be taken once!");
-    }
-    long ret = nativeVideoCapturer;
-    nativeVideoCapturer = 0;
-    return ret;
-  }
-
-  public void dispose() {
-    // No-op iff this capturer is owned by a source (see comment on
-    // PeerConnectionFactoryInterface::CreateVideoSource()).
-    if (nativeVideoCapturer != 0) {
-      free(nativeVideoCapturer);
-    }
-  }
-
-  private static native void free(long nativeVideoCapturer);
+import android.content.Context;
+
+import org.json.JSONException;
+
+// Base interface for all VideoCapturers to implement.
+// TODO(magjed): Simplify and improve this interface.
+public interface VideoCapturer {
+  // Interface used for providing callbacks to an observer.
+  public interface CapturerObserver {
+    // Notify if the camera have been started successfully or not.
+    // Called on a Java thread owned by VideoCapturer.
+    void onCapturerStarted(boolean success);
+
+    // Delivers a captured frame. Called on a Java thread owned by VideoCapturer.
+    void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
+        long timeStamp);
+
+    // Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
+    // owned by VideoCapturer.
+    void onTextureFrameCaptured(
+        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+        long timestamp);
+
+    // Requests an output format from the video capturer. Captured frames
+    // by the camera will be scaled/or dropped by the video capturer.
+    // Called on a Java thread owned by VideoCapturer.
+    void onOutputFormatRequest(int width, int height, int framerate);
+  }
+
+  // An implementation of CapturerObserver that forwards all calls from
+  // Java to the C layer.
+  static class NativeObserver implements CapturerObserver {
+    private final long nativeCapturer;
+
+    public NativeObserver(long nativeCapturer) {
+      this.nativeCapturer = nativeCapturer;
+    }
+
+    @Override
+    public void onCapturerStarted(boolean success) {
+      nativeCapturerStarted(nativeCapturer, success);
+    }
+
+    @Override
+    public void onByteBufferFrameCaptured(byte[] data, int width, int height,
+        int rotation, long timeStamp) {
+      nativeOnByteBufferFrameCaptured(nativeCapturer, data, data.length, width, height, rotation,
+          timeStamp);
+    }
+
+    @Override
+    public void onTextureFrameCaptured(
+        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+        long timestamp) {
+      nativeOnTextureFrameCaptured(nativeCapturer, width, height, oesTextureId, transformMatrix,
+          rotation, timestamp);
+    }
+
+    @Override
+    public void onOutputFormatRequest(int width, int height, int framerate) {
+      nativeOnOutputFormatRequest(nativeCapturer, width, height, framerate);
+    }
+
+    private native void nativeCapturerStarted(long nativeCapturer,
+        boolean success);
+    private native void nativeOnByteBufferFrameCaptured(long nativeCapturer,
+        byte[] data, int length, int width, int height, int rotation, long timeStamp);
+    private native void nativeOnTextureFrameCaptured(long nativeCapturer, int width, int height,
+        int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
+    private native void nativeOnOutputFormatRequest(long nativeCapturer,
+        int width, int height, int framerate);
+  }
+
+  String getSupportedFormatsAsJson() throws JSONException;
+
+  SurfaceTextureHelper getSurfaceTextureHelper();
+
+  void startCapture(
+      final int width, final int height, final int framerate,
+      final Context applicationContext, final CapturerObserver frameObserver);
+
+  // Blocks until capture is stopped.
+  void stopCapture() throws InterruptedException;
+
+  void dispose();
 }
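
As a usage illustration (not part of this CL), here is a standalone implementation of the new VideoCapturer.CapturerObserver callback interface, modeled on the FakeCapturerObserver in the test fixtures above; the class name, TAG and log messages are hypothetical.

    // Hypothetical observer that just logs the callbacks declared by VideoCapturer.CapturerObserver.
    class LoggingCapturerObserver implements VideoCapturer.CapturerObserver {
      private static final String TAG = "LoggingCapturerObserver";

      @Override
      public void onCapturerStarted(boolean success) {
        Logging.d(TAG, "Capturer started: " + success);
      }

      @Override
      public void onByteBufferFrameCaptured(
          byte[] data, int width, int height, int rotation, long timeStamp) {
        Logging.d(TAG, "Byte buffer frame " + width + "x" + height + ", rotation " + rotation);
      }

      @Override
      public void onTextureFrameCaptured(int width, int height, int oesTextureId,
          float[] transformMatrix, int rotation, long timestamp) {
        Logging.d(TAG, "Texture frame " + width + "x" + height + ", texture id " + oesTextureId);
      }

      @Override
      public void onOutputFormatRequest(int width, int height, int framerate) {
        Logging.d(TAG, "Output format request " + width + "x" + height + "@" + framerate);
      }
    }

Such an observer would be handed to VideoCapturer.startCapture(width, height, framerate, applicationContext, observer), the same way the NativeObserver is wired up from AndroidVideoCapturerJni above.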