Bindings for injectable Java video encoders.

BUG=webrtc:7760

Review-Url: https://codereview.webrtc.org/3003873002
Cr-Commit-Position: refs/heads/master@{#19651}
This commit is contained in:
sakal 2017-09-04 03:57:21 -07:00 committed by Commit Bot
parent 75204c5ccd
commit 07a3bd7c4b
17 changed files with 966 additions and 29 deletions

View File

@ -108,6 +108,10 @@ rtc_static_library("video_jni") {
"src/jni/videodecoderfactorywrapper.h",
"src/jni/videodecoderwrapper.cc",
"src/jni/videodecoderwrapper.h",
"src/jni/videoencoderfactorywrapper.cc",
"src/jni/videoencoderfactorywrapper.h",
"src/jni/videoencoderwrapper.cc",
"src/jni/videoencoderwrapper.h",
"src/jni/videofilerenderer_jni.cc",
"src/jni/videoframe_jni.cc",
"src/jni/videotrack_jni.cc",
@ -145,6 +149,7 @@ rtc_static_library("video_jni") {
"../../common_video:common_video",
"../../media:rtc_audio_video",
"../../media:rtc_media_base",
"../../modules:module_api",
"../../modules/utility:utility",
"../../modules/video_coding:video_coding_utility",
"../../rtc_base:rtc_base",
@ -445,6 +450,7 @@ android_library("libjingle_peerconnection_java") {
"src/java/org/webrtc/TextureBufferImpl.java",
"src/java/org/webrtc/VideoCodecType.java",
"src/java/org/webrtc/VideoDecoderWrapperCallback.java",
"src/java/org/webrtc/VideoEncoderWrapperCallback.java",
"src/java/org/webrtc/WrappedNativeI420Buffer.java",
"src/java/org/webrtc/YuvConverter.java",
]

View File

@ -18,10 +18,30 @@ import java.util.concurrent.TimeUnit;
* encoders.
*/
public class EncodedImage {
// Must be kept in sync with common_types.h FrameType.
public enum FrameType {
EmptyFrame,
VideoFrameKey,
VideoFrameDelta,
EmptyFrame(0),
VideoFrameKey(3),
VideoFrameDelta(4);
private final int nativeIndex;
private FrameType(int nativeIndex) {
this.nativeIndex = nativeIndex;
}
public int getNative() {
return nativeIndex;
}
public static FrameType fromNative(int nativeIndex) {
for (FrameType type : FrameType.values()) {
if (type.nativeIndex == nativeIndex) {
return type;
}
}
throw new IllegalArgumentException("Unknown native frame type: " + nativeIndex);
}
}
public final ByteBuffer buffer;

View File

@ -22,13 +22,16 @@ public interface VideoEncoder {
public final int height;
public final int startBitrate; // Kilobits per second.
public final int maxFramerate;
public final boolean automaticResizeOn;
public Settings(int numberOfCores, int width, int height, int startBitrate, int maxFramerate) {
public Settings(int numberOfCores, int width, int height, int startBitrate, int maxFramerate,
boolean automaticResizeOn) {
this.numberOfCores = numberOfCores;
this.width = width;
this.height = height;
this.startBitrate = startBitrate;
this.maxFramerate = maxFramerate;
this.automaticResizeOn = automaticResizeOn;
}
}
@ -84,11 +87,22 @@ public interface VideoEncoder {
/** Settings for WebRTC quality based scaling. */
public class ScalingSettings {
public final boolean on;
public final int low;
public final int high;
public final Integer low;
public final Integer high;
/**
* Creates quality based scaling settings.
* Creates quality based scaling settings.
*
* @param on True if quality scaling is turned on.
*/
public ScalingSettings(boolean on) {
this.on = on;
this.low = null;
this.high = null;
}
/**
* Creates quality based scaling settings with custom thresholds.
*
* @param on True if quality scaling is turned on.
* @param low Average QP at which to scale up the resolution.
@ -129,6 +143,6 @@ public interface VideoEncoder {
VideoCodecStatus setRateAllocation(BitrateAllocation allocation, int framerate);
/** Any encoder that wants to use WebRTC provided quality scaler must implement this method. */
ScalingSettings getScalingSettings();
/** Should return a descriptive name for the implementation. */
/** Should return a descriptive name for the implementation. Gets called once and cached. */
String getImplementationName();
}

View File

@ -15,6 +15,9 @@ public interface VideoEncoderFactory {
/** Creates an encoder for the given video codec. */
public VideoEncoder createEncoder(VideoCodecInfo info);
/** Enumerates the list of supported video codecs. */
/**
* Enumerates the list of supported video codecs. This method will only be called once and the
* result will be cached.
*/
public VideoCodecInfo[] getSupportedCodecs();
}

View File

@ -116,9 +116,9 @@ public final class HardwareVideoDecoderTest {
encodeDone.countDown();
}
};
assertEquals(
encoder.initEncode(
new VideoEncoder.Settings(1, SETTINGS.width, SETTINGS.height, 300, 30), encodeCallback),
assertEquals(encoder.initEncode(new VideoEncoder.Settings(1, SETTINGS.width, SETTINGS.height,
300, 30, true /* automaticResizeOn */),
encodeCallback),
VideoCodecStatus.OK);
// First, encode a frame.
@ -191,9 +191,9 @@ public final class HardwareVideoDecoderTest {
encodeDone.countDown();
}
};
assertEquals(
encoder.initEncode(
new VideoEncoder.Settings(1, SETTINGS.width, SETTINGS.height, 300, 30), encodeCallback),
assertEquals(encoder.initEncode(new VideoEncoder.Settings(1, SETTINGS.width, SETTINGS.height,
300, 30, true /* automaticResizeOn */),
encodeCallback),
VideoCodecStatus.OK);
// First, encode a frame.

View File

@ -32,8 +32,9 @@ public class HardwareVideoEncoderTest {
private static final boolean ENABLE_INTEL_VP8_ENCODER = true;
private static final boolean ENABLE_H264_HIGH_PROFILE = true;
private static final VideoEncoder.Settings SETTINGS = new VideoEncoder.Settings(
1 /* core */, 640 /* width */, 480 /* height */, 300 /* kbps */, 30 /* fps */);
private static final VideoEncoder.Settings SETTINGS =
new VideoEncoder.Settings(1 /* core */, 640 /* width */, 480 /* height */, 300 /* kbps */,
30 /* fps */, true /* automaticResizeOn */);
@Test
@SmallTest

View File

@ -93,6 +93,7 @@ class HardwareVideoEncoder implements VideoEncoder {
private MediaCodec codec;
private Callback callback;
private boolean automaticResizeOn;
private int width;
private int height;
@ -134,6 +135,8 @@ class HardwareVideoEncoder implements VideoEncoder {
@Override
public VideoCodecStatus initEncode(Settings settings, Callback callback) {
automaticResizeOn = settings.automaticResizeOn;
return initEncodeInternal(
settings.width, settings.height, settings.startBitrate, settings.maxFramerate, callback);
}
@ -380,8 +383,7 @@ class HardwareVideoEncoder implements VideoEncoder {
@Override
public ScalingSettings getScalingSettings() {
// TODO(mellem): Implement scaling settings.
return null;
return new ScalingSettings(automaticResizeOn);
}
@Override

View File

@ -0,0 +1,35 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
/**
* VideoEncoder callback that forwards encoded frames to the native VideoEncoderWrapper.OnEncodedFrame.
*/
class VideoEncoderWrapperCallback implements VideoEncoder.Callback {
  // Raw pointer to the native webrtc::jni::VideoEncoderWrapper that owns this
  // callback; passed back on every call so the native side can route frames.
  private final long nativeEncoder;

  public VideoEncoderWrapperCallback(long nativeEncoder) {
    this.nativeEncoder = nativeEncoder;
  }

  @Override
  public void onEncodedFrame(EncodedImage frame, VideoEncoder.CodecSpecificInfo info) {
    // Unpacks the EncodedImage fields and forwards them over JNI. |info| is
    // not forwarded here; codec-specific info is reconstructed natively.
    nativeOnEncodedFrame(nativeEncoder, frame.buffer, frame.encodedWidth, frame.encodedHeight,
        frame.captureTimeNs, frame.frameType.getNative(), frame.rotation, frame.completeFrame,
        frame.qp);
  }

  // Implemented in videoencoderwrapper.cc. |qp| is a boxed Integer and may be
  // null when the encoder does not report a QP value.
  private native static void nativeOnEncodedFrame(long nativeEncoder, ByteBuffer buffer,
      int encodedWidth, int encodedHeight, long captureTimeNs, int frameType, int rotation,
      boolean completeFrame, Integer qp);
}

View File

@ -1,17 +1,14 @@
include_rules = [
"+third_party/libyuv",
"+webrtc/call/callfactoryinterface.h",
"+webrtc/common_video/h264/h264_bitstream_parser.h",
"+webrtc/common_video/include",
"+webrtc/common_video/libyuv/include/webrtc_libyuv.h",
"+webrtc/common_video",
"+webrtc/logging/rtc_event_log/rtc_event_log_factory_interface.h",
"+webrtc/media/base",
"+webrtc/media/engine",
"+webrtc/modules/audio_processing/include/audio_processing.h",
"+webrtc/modules/include",
"+webrtc/modules/utility/include/jvm_android.h",
"+webrtc/modules/video_coding/include/video_codec_interface.h",
"+webrtc/modules/video_coding/utility/vp8_header_parser.h",
"+webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h",
"+webrtc/modules/video_coding",
"+webrtc/pc",
"+webrtc/system_wrappers/include",
"+webrtc/voice_engine/include/voe_base.h",

View File

@ -106,9 +106,16 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
LoadClass(jni, "org/webrtc/StatsReport$Value");
LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
LoadClass(jni, "org/webrtc/VideoCapturer");
LoadClass(jni, "org/webrtc/VideoCodecInfo");
LoadClass(jni, "org/webrtc/VideoCodecStatus");
LoadClass(jni, "org/webrtc/VideoDecoder$Settings");
LoadClass(jni, "org/webrtc/VideoDecoderWrapperCallback");
LoadClass(jni, "org/webrtc/VideoEncoder");
LoadClass(jni, "org/webrtc/VideoEncoder$BitrateAllocation");
LoadClass(jni, "org/webrtc/VideoEncoder$EncodeInfo");
LoadClass(jni, "org/webrtc/VideoEncoder$ScalingSettings");
LoadClass(jni, "org/webrtc/VideoEncoder$Settings");
LoadClass(jni, "org/webrtc/VideoEncoderWrapperCallback");
LoadClass(jni, "org/webrtc/VideoFrame");
LoadClass(jni, "org/webrtc/VideoFrame$Buffer");
LoadClass(jni, "org/webrtc/VideoFrame$I420Buffer");

View File

@ -307,6 +307,41 @@ std::string GetJavaEnumName(JNIEnv* jni,
return JavaToStdString(jni, name);
}
// Converts a Java Map<String, String> into a std::map<std::string, std::string>
// by walking the map's entry set through JNI reflection.
std::map<std::string, std::string> JavaToStdMapStrings(JNIEnv* jni,
                                                       jobject j_map) {
  // Resolve the reflection handles needed to iterate the entry set. Method IDs
  // are looked up on every call; cache at the call site if this becomes hot.
  jclass map_class = jni->FindClass("java/util/Map");
  jclass set_class = jni->FindClass("java/util/Set");
  jclass iterator_class = jni->FindClass("java/util/Iterator");
  jclass entry_class = jni->FindClass("java/util/Map$Entry");
  jmethodID entry_set_method =
      jni->GetMethodID(map_class, "entrySet", "()Ljava/util/Set;");
  jmethodID iterator_method =
      jni->GetMethodID(set_class, "iterator", "()Ljava/util/Iterator;");
  jmethodID has_next_method =
      jni->GetMethodID(iterator_class, "hasNext", "()Z");
  jmethodID next_method =
      jni->GetMethodID(iterator_class, "next", "()Ljava/lang/Object;");
  jmethodID get_key_method =
      jni->GetMethodID(entry_class, "getKey", "()Ljava/lang/Object;");
  jmethodID get_value_method =
      jni->GetMethodID(entry_class, "getValue", "()Ljava/lang/Object;");

  jobject j_entry_set = jni->CallObjectMethod(j_map, entry_set_method);
  jobject j_iterator = jni->CallObjectMethod(j_entry_set, iterator_method);

  // NOTE(review): each iteration creates local references (entry, key, value)
  // that are not explicitly released here; callers are presumably expected to
  // wrap calls in a ScopedLocalRefFrame — confirm for very large maps.
  std::map<std::string, std::string> result;
  while (jni->CallBooleanMethod(j_iterator, has_next_method)) {
    jobject j_entry = jni->CallObjectMethod(j_iterator, next_method);
    jstring j_key =
        static_cast<jstring>(jni->CallObjectMethod(j_entry, get_key_method));
    jstring j_value =
        static_cast<jstring>(jni->CallObjectMethod(j_entry, get_value_method));
    result[JavaToStdString(jni, j_key)] = JavaToStdString(jni, j_value);
  }

  return result;
}
jobject NewGlobalRef(JNIEnv* jni, jobject o) {
jobject ret = jni->NewGlobalRef(o);
CHECK_EXCEPTION(jni) << "error during NewGlobalRef";

View File

@ -15,6 +15,7 @@
#define WEBRTC_SDK_ANDROID_SRC_JNI_JNI_HELPERS_H_
#include <jni.h>
#include <map>
#include <string>
#include <vector>
@ -114,6 +115,10 @@ jobject JavaEnumFromIndexAndClassName(JNIEnv* jni,
const std::string& state_class_fragment,
int index);
// Parses Map<String, String> to std::map<std::string, std::string>.
std::map<std::string, std::string> JavaToStdMapStrings(JNIEnv* jni,
jobject j_map);
// Returns the name of a Java enum.
std::string GetJavaEnumName(JNIEnv* jni,
const std::string& className,

View File

@ -21,6 +21,7 @@
#include "webrtc/sdk/android/src/jni/pc/ownedfactoryandthreads.h"
#include "webrtc/sdk/android/src/jni/surfacetexturehelper_jni.h"
#include "webrtc/sdk/android/src/jni/videodecoderfactorywrapper.h"
#include "webrtc/sdk/android/src/jni/videoencoderfactorywrapper.h"
namespace webrtc {
namespace jni {
@ -29,14 +30,19 @@ namespace jni {
// used and all applications inject their own codecs.
// This is semi broken if someone wants to create multiple peerconnection
// factories.
static bool use_media_codec_encoder_factory;
static bool use_media_codec_decoder_factory;
cricket::WebRtcVideoEncoderFactory* CreateVideoEncoderFactory(
JNIEnv* jni,
jobject j_encoder_factory) {
RTC_DCHECK(j_encoder_factory == nullptr)
<< "Injectable video encoders are not supported yet.";
return new MediaCodecVideoEncoderFactory();
use_media_codec_encoder_factory = j_encoder_factory == nullptr;
if (use_media_codec_encoder_factory) {
return new MediaCodecVideoEncoderFactory();
} else {
return new VideoEncoderFactoryWrapper(jni, j_encoder_factory);
}
}
cricket::WebRtcVideoDecoderFactory* CreateVideoDecoderFactory(
@ -111,7 +117,7 @@ JNI_FUNCTION_DECLARATION(
MediaCodecVideoEncoderFactory* encoder_factory =
static_cast<MediaCodecVideoEncoderFactory*>(
owned_factory->encoder_factory());
if (encoder_factory &&
if (use_media_codec_encoder_factory && encoder_factory &&
jni->IsInstanceOf(local_egl_context, j_eglbase14_context_class)) {
LOG(LS_INFO) << "Set EGL context for HW encoding.";
encoder_factory->SetEGLContext(jni, local_egl_context);

View File

@ -0,0 +1,103 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/videoencoderfactorywrapper.h"
#include "webrtc/api/video_codecs/video_encoder.h"
#include "webrtc/common_types.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/videoencoderwrapper.h"
namespace webrtc {
namespace jni {
VideoEncoderFactoryWrapper::VideoEncoderFactoryWrapper(JNIEnv* jni,
                                                       jobject encoder_factory)
    : video_codec_info_class_(jni, FindClass(jni, "org/webrtc/VideoCodecInfo")),
      hash_map_class_(jni, jni->FindClass("java/util/HashMap")),
      encoder_factory_(jni, encoder_factory) {
  // Resolve and cache the method/field IDs once; they stay valid for the
  // lifetime of the JVM, so per-call lookups would be wasted work.
  jclass encoder_factory_class = jni->GetObjectClass(*encoder_factory_);
  create_encoder_method_ = jni->GetMethodID(
      encoder_factory_class, "createEncoder",
      "(Lorg/webrtc/VideoCodecInfo;)Lorg/webrtc/VideoEncoder;");
  get_supported_codecs_method_ =
      jni->GetMethodID(encoder_factory_class, "getSupportedCodecs",
                       "()[Lorg/webrtc/VideoCodecInfo;");

  video_codec_info_constructor_ =
      jni->GetMethodID(*video_codec_info_class_, "<init>",
                       "(ILjava/lang/String;Ljava/util/Map;)V");
  payload_field_ = jni->GetFieldID(*video_codec_info_class_, "payload", "I");
  name_field_ =
      jni->GetFieldID(*video_codec_info_class_, "name", "Ljava/lang/String;");
  params_field_ =
      jni->GetFieldID(*video_codec_info_class_, "params", "Ljava/util/Map;");

  hash_map_constructor_ = jni->GetMethodID(*hash_map_class_, "<init>", "()V");
  put_method_ = jni->GetMethodID(
      *hash_map_class_, "put",
      "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;");

  // The Java API documents getSupportedCodecs() as called only once with the
  // result cached, so fetch and store the codec list now.
  supported_codecs_ = GetSupportedCodecs(jni);
}
// Asks the Java factory for an encoder matching |codec|. Returns nullptr when
// the factory declines; otherwise the Java encoder is wrapped so it can be
// driven through the native webrtc::VideoEncoder interface.
VideoEncoder* VideoEncoderFactoryWrapper::CreateVideoEncoder(
    const cricket::VideoCodec& codec) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jobject j_codec_info = ToJavaCodecInfo(jni, codec);
  jobject j_encoder = jni->CallObjectMethod(
      *encoder_factory_, create_encoder_method_, j_codec_info);
  if (j_encoder == nullptr) {
    return nullptr;
  }
  return new VideoEncoderWrapper(jni, j_encoder);
}
// Builds a Java org.webrtc.VideoCodecInfo for |codec|: the codec's string
// parameters go into a fresh java.util.HashMap, which is then passed to the
// VideoCodecInfo(int, String, Map) constructor together with id and name.
jobject VideoEncoderFactoryWrapper::ToJavaCodecInfo(
    JNIEnv* jni,
    const cricket::VideoCodec& codec) {
  jobject j_param_map = jni->NewObject(*hash_map_class_, hash_map_constructor_);
  for (const auto& entry : codec.params) {
    jstring j_key = JavaStringFromStdString(jni, entry.first);
    jstring j_value = JavaStringFromStdString(jni, entry.second);
    jni->CallObjectMethod(j_param_map, put_method_, j_key, j_value);
  }
  jstring j_name = JavaStringFromStdString(jni, codec.name);
  return jni->NewObject(*video_codec_info_class_, video_codec_info_constructor_,
                        codec.id, j_name, j_param_map);
}
// Queries the Java factory's getSupportedCodecs() and converts the returned
// VideoCodecInfo[] into cricket::VideoCodec values (payload id, name, and
// string parameter map).
std::vector<cricket::VideoCodec> VideoEncoderFactoryWrapper::GetSupportedCodecs(
    JNIEnv* jni) const {
  const jobjectArray j_supported_codecs = static_cast<jobjectArray>(
      jni->CallObjectMethod(*encoder_factory_, get_supported_codecs_method_));
  const jsize supported_codecs_count = jni->GetArrayLength(j_supported_codecs);

  std::vector<cricket::VideoCodec> supported_codecs;
  // reserve + emplace_back constructs each codec exactly once, instead of the
  // previous resize + assignment which default-constructed every element and
  // then overwrote it.
  supported_codecs.reserve(supported_codecs_count);
  for (jsize i = 0; i < supported_codecs_count; i++) {
    jobject j_supported_codec =
        jni->GetObjectArrayElement(j_supported_codecs, i);
    const int payload = jni->GetIntField(j_supported_codec, payload_field_);
    jstring j_name = static_cast<jstring>(
        jni->GetObjectField(j_supported_codec, name_field_));
    jobject j_params = jni->GetObjectField(j_supported_codec, params_field_);

    supported_codecs.emplace_back(payload, JavaToStdString(jni, j_name));
    supported_codecs.back().params = JavaToStdMapStrings(jni, j_params);
  }
  return supported_codecs;
}
void VideoEncoderFactoryWrapper::DestroyVideoEncoder(VideoEncoder* encoder) {
  // Encoders returned by CreateVideoEncoder() are heap-allocated
  // VideoEncoderWrapper instances; ownership is returned here for deletion.
  delete encoder;
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,65 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_VIDEOENCODERFACTORYWRAPPER_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_VIDEOENCODERFACTORYWRAPPER_H_
#include <jni.h>
#include <vector>
#include "webrtc/media/engine/webrtcvideoencoderfactory.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
// Wrapper for Java VideoEncoderFactory class. Delegates method calls through
// JNI and wraps the encoder inside VideoEncoderWrapper.
class VideoEncoderFactoryWrapper : public cricket::WebRtcVideoEncoderFactory {
 public:
  VideoEncoderFactoryWrapper(JNIEnv* jni, jobject encoder_factory);

  // Caller takes the ownership of the returned object and it should be released
  // by calling DestroyVideoEncoder().
  VideoEncoder* CreateVideoEncoder(const cricket::VideoCodec& codec) override;

  // Returns a list of supported codecs in order of preference.
  // The list is queried from the Java factory once (in the constructor) and
  // cached in |supported_codecs_|.
  const std::vector<cricket::VideoCodec>& supported_codecs() const override {
    return supported_codecs_;
  }

  void DestroyVideoEncoder(VideoEncoder* encoder) override;

 private:
  // Fetches getSupportedCodecs() from Java and converts the result.
  std::vector<cricket::VideoCodec> GetSupportedCodecs(JNIEnv* jni) const;
  // Converts |codec| into a Java org.webrtc.VideoCodecInfo object.
  jobject ToJavaCodecInfo(JNIEnv* jni, const cricket::VideoCodec& codec);

  // Global references held for the lifetime of this wrapper.
  const ScopedGlobalRef<jclass> video_codec_info_class_;
  const ScopedGlobalRef<jclass> hash_map_class_;
  const ScopedGlobalRef<jobject> encoder_factory_;

  // JNI method/field IDs, resolved once in the constructor.
  jmethodID create_encoder_method_;
  jmethodID get_supported_codecs_method_;

  jmethodID video_codec_info_constructor_;
  jfieldID payload_field_;
  jfieldID name_field_;
  jfieldID params_field_;

  jmethodID hash_map_constructor_;
  jmethodID put_method_;

  // Cached result of GetSupportedCodecs(), filled in the constructor.
  std::vector<cricket::VideoCodec> supported_codecs_;
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_VIDEOENCODERFACTORYWRAPPER_H_

View File

@ -0,0 +1,490 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/videoencoderwrapper.h"
#include <utility>
#include "webrtc/common_video/h264/h264_common.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/include/video_error_codes.h"
#include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
#include "webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/rtc_base/random.h"
#include "webrtc/rtc_base/timeutils.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
namespace webrtc {
namespace jni {
static const int kMaxJavaEncoderResets = 3;
VideoEncoderWrapper::VideoEncoderWrapper(JNIEnv* jni, jobject j_encoder)
    : encoder_(jni, j_encoder),
      settings_class_(jni, FindClass(jni, "org/webrtc/VideoEncoder$Settings")),
      encode_info_class_(jni,
                         FindClass(jni, "org/webrtc/VideoEncoder$EncodeInfo")),
      frame_type_class_(jni,
                        FindClass(jni, "org/webrtc/EncodedImage$FrameType")),
      bitrate_allocation_class_(
          jni,
          FindClass(jni, "org/webrtc/VideoEncoder$BitrateAllocation")),
      int_array_class_(jni, jni->FindClass("[I")),
      video_frame_factory_(jni) {
  // Resolve and cache every method/field ID the wrapper uses; IDs remain
  // valid for the lifetime of the JVM.
  jclass encoder_class = FindClass(jni, "org/webrtc/VideoEncoder");

  init_encode_method_ =
      jni->GetMethodID(encoder_class, "initEncode",
                       "(Lorg/webrtc/VideoEncoder$Settings;Lorg/webrtc/"
                       "VideoEncoder$Callback;)Lorg/webrtc/VideoCodecStatus;");
  release_method_ = jni->GetMethodID(encoder_class, "release",
                                     "()Lorg/webrtc/VideoCodecStatus;");
  encode_method_ = jni->GetMethodID(
      encoder_class, "encode",
      "(Lorg/webrtc/VideoFrame;Lorg/webrtc/"
      "VideoEncoder$EncodeInfo;)Lorg/webrtc/VideoCodecStatus;");
  set_channel_parameters_method_ =
      jni->GetMethodID(encoder_class, "setChannelParameters",
                       "(SJ)Lorg/webrtc/VideoCodecStatus;");
  set_rate_allocation_method_ =
      jni->GetMethodID(encoder_class, "setRateAllocation",
                       "(Lorg/webrtc/VideoEncoder$BitrateAllocation;I)Lorg/"
                       "webrtc/VideoCodecStatus;");
  get_scaling_settings_method_ =
      jni->GetMethodID(encoder_class, "getScalingSettings",
                       "()Lorg/webrtc/VideoEncoder$ScalingSettings;");
  get_implementation_name_method_ = jni->GetMethodID(
      encoder_class, "getImplementationName", "()Ljava/lang/String;");

  settings_constructor_ =
      jni->GetMethodID(*settings_class_, "<init>", "(IIIIIZ)V");

  encode_info_constructor_ = jni->GetMethodID(
      *encode_info_class_, "<init>", "([Lorg/webrtc/EncodedImage$FrameType;)V");

  frame_type_from_native_method_ =
      jni->GetStaticMethodID(*frame_type_class_, "fromNative",
                             "(I)Lorg/webrtc/EncodedImage$FrameType;");

  bitrate_allocation_constructor_ =
      jni->GetMethodID(*bitrate_allocation_class_, "<init>", "([[I)V");

  jclass video_codec_status_class =
      FindClass(jni, "org/webrtc/VideoCodecStatus");
  get_number_method_ =
      jni->GetMethodID(video_codec_status_class, "getNumber", "()I");

  jclass integer_class = jni->FindClass("java/lang/Integer");
  int_value_method_ = jni->GetMethodID(integer_class, "intValue", "()I");

  // ScalingSettings.low/high are boxed Integers so that null can signal
  // "no custom thresholds".
  jclass scaling_settings_class =
      FindClass(jni, "org/webrtc/VideoEncoder$ScalingSettings");
  scaling_settings_on_field_ =
      jni->GetFieldID(scaling_settings_class, "on", "Z");
  scaling_settings_low_field_ =
      jni->GetFieldID(scaling_settings_class, "low", "Ljava/lang/Integer;");
  scaling_settings_high_field_ =
      jni->GetFieldID(scaling_settings_class, "high", "Ljava/lang/Integer;");

  // The Java getImplementationName() is documented as called once and cached;
  // query it here.
  implementation_name_ = GetImplementationName(jni);

  // Remember the queue the wrapper was created on; encoded frames are posted
  // back to it in OnEncodedFrame().
  encoder_queue_ = rtc::TaskQueue::Current();

  initialized_ = false;
  num_resets_ = 0;

  // Randomize the initial VP8/VP9 picture id (15-bit) and TL0 index.
  Random random(rtc::TimeMicros());
  picture_id_ = random.Rand<uint16_t>() & 0x7FFF;
  tl0_pic_idx_ = random.Rand<uint8_t>();
}
// Initializes the Java encoder. |max_payload_size| is not used by the Java
// API. The settings are stored so InitEncodeInternal() can be re-run if the
// Java encoder later needs a reset (see HandleReturnCode()).
int32_t VideoEncoderWrapper::InitEncode(const VideoCodec* codec_settings,
                                        int32_t number_of_cores,
                                        size_t max_payload_size) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  number_of_cores_ = number_of_cores;
  codec_settings_ = *codec_settings;
  num_resets_ = 0;

  return InitEncodeInternal(jni);
}
int32_t VideoEncoderWrapper::InitEncodeInternal(JNIEnv* jni) {
  // Derive the automatic-resize flag from the codec-specific settings; codecs
  // without such a flag default to enabled.
  bool automatic_resize_on;
  switch (codec_settings_.codecType) {
    case kVideoCodecVP8:
      automatic_resize_on = codec_settings_.VP8()->automaticResizeOn;
      break;
    case kVideoCodecVP9:
      automatic_resize_on = codec_settings_.VP9()->automaticResizeOn;
      break;
    default:
      automatic_resize_on = true;
  }

  // Build the Java VideoEncoder.Settings object.
  jobject settings =
      jni->NewObject(*settings_class_, settings_constructor_, number_of_cores_,
                     codec_settings_.width, codec_settings_.height,
                     codec_settings_.startBitrate, codec_settings_.maxFramerate,
                     automatic_resize_on);

  // The Java callback carries a raw pointer back to |this| and forwards
  // encoded frames to OnEncodedFrame().
  jclass callback_class =
      FindClass(jni, "org/webrtc/VideoEncoderWrapperCallback");
  jmethodID callback_constructor =
      jni->GetMethodID(callback_class, "<init>", "(J)V");
  jobject callback = jni->NewObject(callback_class, callback_constructor,
                                    jlongFromPointer(this));

  jobject ret =
      jni->CallObjectMethod(*encoder_, init_encode_method_, settings, callback);
  if (jni->CallIntMethod(ret, get_number_method_) == WEBRTC_VIDEO_CODEC_OK) {
    initialized_ = true;
  }

  return HandleReturnCode(jni, ret);
}
// Stores the callback that receives encoded images. The pointer is stored,
// not owned; frames are delivered to it from the task posted in
// OnEncodedFrame().
int32_t VideoEncoderWrapper::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}
int32_t VideoEncoderWrapper::Release() {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jobject ret = jni->CallObjectMethod(*encoder_, release_method_);
  // Drop bookkeeping for frames still in flight; no further encoded frames
  // are expected for them after release.
  frame_extra_infos_.clear();
  initialized_ = false;
  return HandleReturnCode(jni, ret);
}
int32_t VideoEncoderWrapper::Encode(
    const VideoFrame& frame,
    const CodecSpecificInfo* /* codec_specific_info */,
    const std::vector<FrameType>* frame_types) {
  if (!initialized_) {
    // Most likely initializing the codec failed.
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  }

  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  // Construct encode info: translate the requested native frame types into
  // Java EncodedImage.FrameType values via FrameType.fromNative().
  jobjectArray j_frame_types =
      jni->NewObjectArray(frame_types->size(), *frame_type_class_, nullptr);
  for (size_t i = 0; i < frame_types->size(); ++i) {
    jobject j_frame_type = jni->CallStaticObjectMethod(
        *frame_type_class_, frame_type_from_native_method_,
        static_cast<jint>((*frame_types)[i]));
    jni->SetObjectArrayElement(j_frame_types, i, j_frame_type);
  }
  jobject encode_info = jni->NewObject(*encode_info_class_,
                                       encode_info_constructor_, j_frame_types);

  // Record per-frame metadata keyed by capture time so that OnEncodedFrame()
  // can restore the RTP timestamp when the encoded frame comes back.
  FrameExtraInfo info;
  info.capture_time_ns = frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec;
  info.timestamp_rtp = frame.timestamp();
  frame_extra_infos_.push_back(info);

  jobject ret = jni->CallObjectMethod(
      *encoder_, encode_method_, video_frame_factory_.ToJavaFrame(jni, frame),
      encode_info);
  return HandleReturnCode(jni, ret);
}
// Forwards packet loss and RTT estimates to the Java encoder's
// setChannelParameters(short, long).
int32_t VideoEncoderWrapper::SetChannelParameters(uint32_t packet_loss,
                                                  int64_t rtt) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  // Use named casts instead of C-style casts when narrowing to the JNI types.
  jobject ret = jni->CallObjectMethod(*encoder_, set_channel_parameters_method_,
                                      static_cast<jshort>(packet_loss),
                                      static_cast<jlong>(rtt));
  return HandleReturnCode(jni, ret);
}
// Converts |allocation| to a Java VideoEncoder.BitrateAllocation and forwards
// it with the target framerate to the Java encoder.
int32_t VideoEncoderWrapper::SetRateAllocation(
    const BitrateAllocation& allocation,
    uint32_t framerate) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  jobject j_bitrate_allocation = ToJavaBitrateAllocation(jni, allocation);
  // Use a named cast instead of a C-style cast when narrowing to jint.
  jobject ret = jni->CallObjectMethod(*encoder_, set_rate_allocation_method_,
                                      j_bitrate_allocation,
                                      static_cast<jint>(framerate));
  return HandleReturnCode(jni, ret);
}
VideoEncoderWrapper::ScalingSettings VideoEncoderWrapper::GetScalingSettings()
    const {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jobject j_scaling_settings =
      jni->CallObjectMethod(*encoder_, get_scaling_settings_method_);
  bool on =
      jni->GetBooleanField(j_scaling_settings, scaling_settings_on_field_);
  // |low| and |high| are boxed Integers on the Java side; null means the
  // encoder supplied no custom QP thresholds. They must be set together.
  jobject j_low =
      jni->GetObjectField(j_scaling_settings, scaling_settings_low_field_);
  jobject j_high =
      jni->GetObjectField(j_scaling_settings, scaling_settings_high_field_);

  if (j_low != nullptr || j_high != nullptr) {
    RTC_DCHECK(j_low != nullptr);
    RTC_DCHECK(j_high != nullptr);
    int low = jni->CallIntMethod(j_low, int_value_method_);
    int high = jni->CallIntMethod(j_high, int_value_method_);
    return ScalingSettings(on, low, high);
  } else {
    return ScalingSettings(on);
  }
}
const char* VideoEncoderWrapper::ImplementationName() const {
  // Fetched from Java once in the constructor and cached, matching the Java
  // API contract that getImplementationName() is called a single time.
  return implementation_name_.c_str();
}
// Called over JNI (via VideoEncoderWrapperCallback.nativeOnEncodedFrame) when
// the Java encoder delivers an encoded frame. The payload is copied and the
// actual delivery to |callback_| is posted to |encoder_queue_|, the queue the
// wrapper was constructed on.
void VideoEncoderWrapper::OnEncodedFrame(JNIEnv* jni,
                                         jobject j_buffer,
                                         jint encoded_width,
                                         jint encoded_height,
                                         jlong capture_time_ns,
                                         jint frame_type,
                                         jint rotation,
                                         jboolean complete_frame,
                                         jobject j_qp) {
  // Copy the payload out of the direct buffer so it can outlive this JNI call
  // (presumably the Java side reuses the buffer afterwards — confirm).
  const uint8_t* buffer =
      static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_buffer));
  const size_t buffer_size = jni->GetDirectBufferCapacity(j_buffer);
  std::vector<uint8_t> buffer_copy(buffer_size);
  memcpy(buffer_copy.data(), buffer, buffer_size);

  // |j_qp| is a boxed Integer; null means the encoder reported no QP, in
  // which case it is parsed from the bitstream below (ParseQp).
  int qp = -1;
  if (j_qp != nullptr) {
    qp = jni->CallIntMethod(j_qp, int_value_method_);
  }

  encoder_queue_->PostTask(
      [
        this, task_buffer = std::move(buffer_copy), qp, encoded_width,
        encoded_height, capture_time_ns, frame_type, rotation, complete_frame
      ]() {
        // Match this encoded frame with the metadata queued by Encode(),
        // using the capture timestamp as the key.
        FrameExtraInfo frame_extra_info;
        do {
          if (frame_extra_infos_.empty()) {
            LOG(LS_WARNING)
                << "Java encoder produced an unexpected frame with timestamp: "
                << capture_time_ns;
            return;
          }

          frame_extra_info = frame_extra_infos_.front();
          frame_extra_infos_.pop_front();
          // The encoder might drop frames so iterate through the queue until
          // we find a matching timestamp.
        } while (frame_extra_info.capture_time_ns != capture_time_ns);

        RTPFragmentationHeader header = ParseFragmentationHeader(task_buffer);

        // The EncodedImage points into |task_buffer|, which is owned by this
        // task and stays alive until the callback returns.
        EncodedImage frame(const_cast<uint8_t*>(task_buffer.data()),
                           task_buffer.size(), task_buffer.size());
        frame._encodedWidth = encoded_width;
        frame._encodedHeight = encoded_height;
        frame._timeStamp = frame_extra_info.timestamp_rtp;
        frame.capture_time_ms_ = capture_time_ns / rtc::kNumNanosecsPerMillisec;
        frame._frameType = (FrameType)frame_type;
        frame.rotation_ = (VideoRotation)rotation;
        frame._completeFrame = complete_frame;
        if (qp == -1) {
          frame.qp_ = ParseQp(task_buffer);
        } else {
          frame.qp_ = qp;
        }

        CodecSpecificInfo info(ParseCodecSpecificInfo(frame));

        callback_->OnEncodedImage(frame, &info, &header);
      });
}
// Maps a Java VideoCodecStatus to a native return code. On error, tries to
// reset the Java encoder up to kMaxJavaEncoderResets times before requesting
// fallback to a software encoder.
int32_t VideoEncoderWrapper::HandleReturnCode(JNIEnv* jni, jobject code) {
  int32_t value = jni->CallIntMethod(code, get_number_method_);
  if (value < 0) {  // Any errors are represented by negative values.
    // Try resetting the codec.
    if (++num_resets_ <= kMaxJavaEncoderResets &&
        Release() == WEBRTC_VIDEO_CODEC_OK) {
      LOG(LS_WARNING) << "Reset Java encoder: " << num_resets_;
      return InitEncodeInternal(jni);
    }

    // Fixed copy-paste from the decoder wrapper: this class wraps an encoder.
    LOG(LS_WARNING) << "Falling back to software encoder.";
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  } else {
    return value;
  }
}
// Builds an RTPFragmentationHeader for |buffer|. For H.264 the fragments are
// the NAL units found by start-code search; all other codecs are described as
// a single fragment covering the whole buffer.
RTPFragmentationHeader VideoEncoderWrapper::ParseFragmentationHeader(
    const std::vector<uint8_t>& buffer) {
  RTPFragmentationHeader header;
  if (codec_settings_.codecType == kVideoCodecH264) {
    h264_bitstream_parser_.ParseBitstream(buffer.data(), buffer.size());

    // For H.264 search for start codes.
    const std::vector<H264::NaluIndex> nalu_idxs =
        H264::FindNaluIndices(buffer.data(), buffer.size());
    if (nalu_idxs.empty()) {
      LOG(LS_ERROR) << "Start code is not found!";
      // Only dump the first bytes when they exist; the previous code indexed
      // buffer[0..5] unconditionally, which reads out of bounds for buffers
      // shorter than six bytes.
      if (buffer.size() >= 6) {
        LOG(LS_ERROR) << "Data:" << buffer[0] << " " << buffer[1] << " "
                      << buffer[2] << " " << buffer[3] << " " << buffer[4]
                      << " " << buffer[5];
      } else {
        LOG(LS_ERROR) << "Data too short to dump: " << buffer.size()
                      << " bytes";
      }
    }
    header.VerifyAndAllocateFragmentationHeader(nalu_idxs.size());
    for (size_t i = 0; i < nalu_idxs.size(); i++) {
      header.fragmentationOffset[i] = nalu_idxs[i].payload_start_offset;
      header.fragmentationLength[i] = nalu_idxs[i].payload_size;
      header.fragmentationPlType[i] = 0;
      header.fragmentationTimeDiff[i] = 0;
    }
  } else {
    // Generate a header describing a single fragment.
    header.VerifyAndAllocateFragmentationHeader(1);
    header.fragmentationOffset[0] = 0;
    header.fragmentationLength[0] = buffer.size();
    header.fragmentationPlType[0] = 0;
    header.fragmentationTimeDiff[0] = 0;
  }
  return header;
}
// Extracts the frame QP by parsing the encoded bitstream with the parser that
// matches the configured codec. Returns -1 when the QP cannot be determined.
int VideoEncoderWrapper::ParseQp(const std::vector<uint8_t>& buffer) {
  int qp = -1;
  bool parsed = false;
  switch (codec_settings_.codecType) {
    case kVideoCodecVP8:
      parsed = vp8::GetQp(buffer.data(), buffer.size(), &qp);
      break;
    case kVideoCodecVP9:
      parsed = vp9::GetQp(buffer.data(), buffer.size(), &qp);
      break;
    case kVideoCodecH264:
      parsed = h264_bitstream_parser_.GetLastSliceQp(&qp);
      break;
    default:
      // No QP parser for this codec.
      break;
  }
  return parsed ? qp : -1;  // -1 means unknown QP.
}
// Fills in codec-specific metadata for |frame|. The Java encoder API does not
// expose temporal/spatial layer information, so single-layer defaults are
// reported, with picture ids maintained by this wrapper.
CodecSpecificInfo VideoEncoderWrapper::ParseCodecSpecificInfo(
    const EncodedImage& frame) {
  const bool key_frame = frame._frameType == kVideoFrameKey;

  CodecSpecificInfo info;
  memset(&info, 0, sizeof(info));
  info.codecType = codec_settings_.codecType;
  info.codec_name = implementation_name_.c_str();

  switch (codec_settings_.codecType) {
    case kVideoCodecVP8:
      info.codecSpecific.VP8.pictureId = picture_id_;
      info.codecSpecific.VP8.nonReference = false;
      info.codecSpecific.VP8.simulcastIdx = 0;
      info.codecSpecific.VP8.temporalIdx = kNoTemporalIdx;
      info.codecSpecific.VP8.layerSync = false;
      info.codecSpecific.VP8.tl0PicIdx = kNoTl0PicIdx;
      info.codecSpecific.VP8.keyIdx = kNoKeyIdx;
      break;
    case kVideoCodecVP9:
      if (key_frame) {
        gof_idx_ = 0;
      }
      info.codecSpecific.VP9.picture_id = picture_id_;
      // Key frames never use inter-picture prediction, and SS data is only
      // attached to key frames. (Replaces redundant `cond ? true : false`.)
      info.codecSpecific.VP9.inter_pic_predicted = !key_frame;
      info.codecSpecific.VP9.flexible_mode = false;
      info.codecSpecific.VP9.ss_data_available = key_frame;
      info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++;
      info.codecSpecific.VP9.temporal_idx = kNoTemporalIdx;
      info.codecSpecific.VP9.spatial_idx = kNoSpatialIdx;
      info.codecSpecific.VP9.temporal_up_switch = true;
      info.codecSpecific.VP9.inter_layer_predicted = false;
      info.codecSpecific.VP9.gof_idx =
          static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
      info.codecSpecific.VP9.num_spatial_layers = 1;
      info.codecSpecific.VP9.spatial_layer_resolution_present = false;
      if (info.codecSpecific.VP9.ss_data_available) {
        info.codecSpecific.VP9.spatial_layer_resolution_present = true;
        info.codecSpecific.VP9.width[0] = frame._encodedWidth;
        info.codecSpecific.VP9.height[0] = frame._encodedHeight;
        info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
      }
      break;
    default:
      break;
  }

  // Picture ids are 15-bit and wrap around.
  picture_id_ = (picture_id_ + 1) & 0x7FFF;

  return info;
}
// Converts |allocation| into a Java int[kMaxSpatialLayers][] array holding
// the target bitrate for every (spatial, temporal) layer pair.
jobject VideoEncoderWrapper::ToJavaBitrateAllocation(
    JNIEnv* jni,
    const BitrateAllocation& allocation) {
  jobjectArray j_allocation_array = jni->NewObjectArray(
      kMaxSpatialLayers, *int_array_class_, nullptr /* initial */);
  for (int spatial_i = 0; spatial_i < kMaxSpatialLayers; ++spatial_i) {
    jintArray j_array_spatial_layer = jni->NewIntArray(kMaxTemporalStreams);
    jint* array_spatial_layer =
        jni->GetIntArrayElements(j_array_spatial_layer, nullptr /* isCopy */);
    for (int temporal_i = 0; temporal_i < kMaxTemporalStreams; ++temporal_i) {
      array_spatial_layer[temporal_i] =
          allocation.GetBitrate(spatial_i, temporal_i);
    }
    // Mode 0 copies the data back to the Java array AND frees the native
    // buffer. The previous JNI_COMMIT mode copies back but never releases
    // the buffer, leaking it whenever the VM handed out a copy.
    jni->ReleaseIntArrayElements(j_array_spatial_layer, array_spatial_layer,
                                 0 /* mode */);
    jni->SetObjectArrayElement(j_allocation_array, spatial_i,
                               j_array_spatial_layer);
    // Drop the per-iteration local reference; the object array keeps the
    // row alive.
    jni->DeleteLocalRef(j_array_spatial_layer);
  }
  return jni->NewObject(*bitrate_allocation_class_,
                        bitrate_allocation_constructor_, j_allocation_array);
}
// Queries the Java encoder for its human-readable implementation name.
std::string VideoEncoderWrapper::GetImplementationName(JNIEnv* jni) const {
  jobject j_name =
      jni->CallObjectMethod(*encoder_, get_implementation_name_method_);
  return JavaToStdString(jni, static_cast<jstring>(j_name));
}
// JNI entry point: forwards an encoded frame produced by the Java encoder to
// the C++ wrapper instance whose address is packed in |j_native_encoder|.
JNI_FUNCTION_DECLARATION(void,
                         VideoEncoderWrapperCallback_nativeOnEncodedFrame,
                         JNIEnv* jni,
                         jclass,
                         jlong j_native_encoder,
                         jobject buffer,
                         jint encoded_width,
                         jint encoded_height,
                         jlong capture_time_ns,
                         jint frame_type,
                         jint rotation,
                         jboolean complete_frame,
                         jobject qp) {
  auto* wrapper = reinterpret_cast<VideoEncoderWrapper*>(j_native_encoder);
  wrapper->OnEncodedFrame(jni, buffer, encoded_width, encoded_height,
                          capture_time_ns, frame_type, rotation,
                          complete_frame, qp);
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,148 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_VIDEOENCODERWRAPPER_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_VIDEOENCODERWRAPPER_H_
#include <jni.h>
#include <deque>
#include <string>
#include <vector>
#include "webrtc/api/video_codecs/video_encoder.h"
#include "webrtc/common_video/h264/h264_bitstream_parser.h"
#include "webrtc/modules/video_coding/codecs/vp9/include/vp9_globals.h"
#include "webrtc/rtc_base/task_queue.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/sdk/android/src/jni/native_handle_impl.h"
namespace webrtc {
namespace jni {
// Wraps a Java encoder and delegates all calls to it. Passes
// VideoEncoderWrapperCallback to the encoder on InitEncode and forwards the
// encoded frames it delivers to the registered EncodedImageCallback.
class VideoEncoderWrapper : public VideoEncoder {
 public:
  VideoEncoderWrapper(JNIEnv* jni, jobject j_encoder);

  // VideoEncoder implementation. All calls are delegated to the Java encoder.
  int32_t InitEncode(const VideoCodec* codec_settings,
                     int32_t number_of_cores,
                     size_t max_payload_size) override;
  int32_t RegisterEncodeCompleteCallback(
      EncodedImageCallback* callback) override;
  int32_t Release() override;
  int32_t Encode(const VideoFrame& frame,
                 const CodecSpecificInfo* codec_specific_info,
                 const std::vector<FrameType>* frame_types) override;
  int32_t SetChannelParameters(uint32_t packet_loss, int64_t rtt) override;
  int32_t SetRateAllocation(const BitrateAllocation& allocation,
                            uint32_t framerate) override;
  ScalingSettings GetScalingSettings() const override;
  bool SupportsNativeHandle() const override { return true; }

  // Should only be called by JNI. Delivers an encoded frame produced by the
  // Java encoder back to the registered EncodedImageCallback.
  // NOTE: renamed capture_time_ms -> capture_time_ns; the definition in
  // videoencoderwrapper.cc and FrameExtraInfo both use nanoseconds.
  void OnEncodedFrame(JNIEnv* jni,
                      jobject j_buffer,
                      jint encoded_width,
                      jint encoded_height,
                      jlong capture_time_ns,
                      jint frame_type,
                      jint rotation,
                      jboolean complete_frame,
                      jobject j_qp);

  const char* ImplementationName() const override;

 private:
  // Bookkeeping stored per in-flight frame, matched to the encoded result by
  // its capture timestamp.
  struct FrameExtraInfo {
    uint64_t capture_time_ns;  // Used as an identifier of the frame.
    uint32_t timestamp_rtp;
  };

  int32_t InitEncodeInternal(JNIEnv* jni);

  // Takes Java VideoCodecStatus, handles it and returns WEBRTC_VIDEO_CODEC_*
  // status code.
  int32_t HandleReturnCode(JNIEnv* jni, jobject code);

  RTPFragmentationHeader ParseFragmentationHeader(
      const std::vector<uint8_t>& buffer);
  int ParseQp(const std::vector<uint8_t>& buffer);
  CodecSpecificInfo ParseCodecSpecificInfo(const EncodedImage& frame);
  jobject ToJavaBitrateAllocation(JNIEnv* jni,
                                  const BitrateAllocation& allocation);
  std::string GetImplementationName(JNIEnv* jni) const;

  // Global references to the Java encoder object and the Java classes used
  // when marshalling arguments across JNI.
  const ScopedGlobalRef<jobject> encoder_;
  const ScopedGlobalRef<jclass> settings_class_;
  const ScopedGlobalRef<jclass> encode_info_class_;
  const ScopedGlobalRef<jclass> frame_type_class_;
  const ScopedGlobalRef<jclass> bitrate_allocation_class_;
  const ScopedGlobalRef<jclass> int_array_class_;

  // Cached method/field ids; looked up once in the constructor.
  jmethodID init_encode_method_;
  jmethodID release_method_;
  jmethodID encode_method_;
  jmethodID set_channel_parameters_method_;
  jmethodID set_rate_allocation_method_;
  jmethodID get_scaling_settings_method_;
  jmethodID get_implementation_name_method_;
  jmethodID settings_constructor_;
  jmethodID encode_info_constructor_;
  jmethodID frame_type_from_native_method_;
  jmethodID bitrate_allocation_constructor_;
  jfieldID scaling_settings_on_field_;
  jfieldID scaling_settings_low_field_;
  jfieldID scaling_settings_high_field_;
  jmethodID get_number_method_;
  jmethodID int_value_method_;

  std::string implementation_name_;
  rtc::TaskQueue* encoder_queue_;
  JavaVideoFrameFactory video_frame_factory_;
  std::deque<FrameExtraInfo> frame_extra_infos_;
  EncodedImageCallback* callback_;
  bool initialized_;
  int num_resets_;
  int number_of_cores_;
  VideoCodec codec_settings_;
  H264BitstreamParser h264_bitstream_parser_;

  // RTP state.
  uint16_t picture_id_;
  uint8_t tl0_pic_idx_;

  // VP9 variables to populate codec specific structure.
  GofInfoVP9 gof_;  // Contains each frame's temporal information for
                    // non-flexible VP9 mode.
  size_t gof_idx_;
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_VIDEOENCODERWRAPPER_H_