Android VideoSource: Add adaptOutputFormat function

The Java VideoSource class wraps the C++ AndroidVideoTrackSource.
AndroidVideoTrackSource is the object actually owning the VideoAdapter.
We currently control the VideoAdapter through the Java VideoCapturer,
but it is more natural and direct to control it through the Java
VideoSource class. This CL adds the necessary function to do this, and
the function in VideoCapturer is deprecated.

BUG=webrtc:6391
R=sakal@webrtc.org

Review URL: https://codereview.webrtc.org/2350933006 .

Cr-Commit-Position: refs/heads/master@{#14332}
This commit is contained in:
Magnus Jedvert 2016-09-21 16:20:03 +02:00
parent e035e2d26f
commit 7640fcf6ed
9 changed files with 31 additions and 12 deletions

View File

@ -112,6 +112,10 @@ public interface VideoCapturer {
*/
void stopCapture() throws InterruptedException;
/**
* Use VideoSource.adaptOutputFormat() instead.
*/
@Deprecated
void onOutputFormatRequest(int width, int height, int framerate);
void changeCaptureFormat(int width, int height, int framerate);

View File

@ -12,14 +12,23 @@
package org.webrtc;
/**
* Java version of VideoSourceInterface, extended with stop/restart
* functionality to allow explicit control of the camera device on android,
* where there is no support for multiple open capture devices and the cost of
* holding a camera open (even if MediaStreamTrack.setEnabled(false) is muting
* its output to the encoder) can be too high to bear.
* Java wrapper of native AndroidVideoTrackSource.
*/
public class VideoSource extends MediaSource {
// Wraps an already-created native source; ownership/lifetime is managed by
// the MediaSource base class via the native pointer passed here.
public VideoSource(long nativeSource) {
super(nativeSource);
}
/**
* Calling this function will cause frames to be scaled down to the requested resolution. Also,
* frames will be cropped to match the requested aspect ratio, and frames will be dropped to match
* the requested fps. The requested aspect ratio is orientation agnostic and will be adjusted to
* maintain the input orientation, so it doesn't matter if e.g. 1280x720 or 720x1280 is requested.
*/
public void adaptOutputFormat(int width, int height, int fps) {
// Delegates to the native AndroidVideoTrackSource, which owns the VideoAdapter.
nativeAdaptOutputFormat(nativeSource, width, height, fps);
}
// Bound by JNI to VideoSource_nativeAdaptOutputFormat in the C++ layer;
// do not rename without updating the native registration.
private static native void nativeAdaptOutputFormat(
long nativeSource, int width, int height, int fps);
}

View File

@ -87,4 +87,12 @@ JOW_OBSERVER_METHOD(void, nativeOnOutputFormatRequest)
source->OnOutputFormatRequest(j_width, j_height, j_fps);
}
// JNI entry point for Java VideoSource.nativeAdaptOutputFormat(). Forwards the
// requested output resolution/fps to the native AndroidVideoTrackSource's
// OnOutputFormatRequest, which configures its VideoAdapter (scaling/cropping/
// frame dropping). NOTE(review): unlike the capturer path above, this may be
// invoked from an arbitrary Java thread — confirm OnOutputFormatRequest is
// safe off the camera thread.
JOW(void, VideoSource_nativeAdaptOutputFormat)
(JNIEnv* jni, jclass, jlong j_source, jint j_width, jint j_height, jint j_fps) {
LOG(LS_INFO) << "VideoSource_nativeAdaptOutputFormat";
// j_source is the long held by the Java MediaSource; resolve it to the
// underlying AndroidVideoTrackSource through the proxy lookup helper.
webrtc::AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->OnOutputFormatRequest(j_width, j_height, j_fps);
}
} // namespace webrtc_jni

View File

@ -143,7 +143,7 @@ public class Camera1CapturerUsingByteBufferTest extends InstrumentationTestCase
fixtures.returnBufferLateEndToEnd();
}
// This test that frames forwarded to a renderer is scaled if onOutputFormatRequest is
// This tests that frames forwarded to a renderer are scaled if adaptOutputFormat is
// called. This tests both the Java and C++ parts of the stack.
@MediumTest
public void testScaleCameraOutput() throws InterruptedException {

View File

@ -145,7 +145,7 @@ public class Camera1CapturerUsingTextureTest extends InstrumentationTestCase {
fixtures.cameraFreezedEventOnBufferStarvation();
}
// This test that frames forwarded to a renderer is scaled if onOutputFormatRequest is
// This tests that frames forwarded to a renderer are scaled if adaptOutputFormat is
// called. This tests both the Java and C++ parts of the stack.
@MediumTest
public void testScaleCameraOutput() throws InterruptedException {

View File

@ -269,7 +269,7 @@ public class Camera2CapturerTest extends InstrumentationTestCase {
fixtures.cameraFreezedEventOnBufferStarvation();
}
// This test that frames forwarded to a renderer is scaled if onOutputFormatRequest is
// This tests that frames forwarded to a renderer are scaled if adaptOutputFormat is
// called. This tests both the Java and C++ parts of the stack.
@MediumTest
public void testScaleCameraOutput() throws InterruptedException {

View File

@ -660,7 +660,7 @@ class CameraVideoCapturerTestFixtures {
final int scaledHeight = startHeight / 2;
// Request the captured frames to be scaled.
capturerInstance.capturer.onOutputFormatRequest(scaledWidth, scaledHeight, frameRate);
videoTrackWithRenderer.source.adaptOutputFormat(scaledWidth, scaledHeight, frameRate);
boolean gotExpectedResolution = false;
int numberOfInspectedFrames = 0;

View File

@ -228,8 +228,6 @@ void AndroidVideoTrackSource::OnFrame(const cricket::VideoFrame& frame,
void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
int height,
int fps) {
RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
cricket::VideoFormat format(width, height,
cricket::VideoFormat::FpsToInterval(fps), 0);
video_adapter_.OnOutputFormatRequest(format);

View File

@ -1058,7 +1058,7 @@ public class PeerConnectionClient {
return;
}
Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
videoCapturer.onOutputFormatRequest(width, height, framerate);
videoSource.adaptOutputFormat(width, height, framerate);
}
// Implementation detail: observe ICE & stream changes and react accordingly.