New class AdaptedVideoTrackSource.

BUG=webrtc:6353

Review-Url: https://codereview.webrtc.org/2328333002
Cr-Commit-Position: refs/heads/master@{#14345}
nisse 2016-09-22 01:25:59 -07:00 committed by Commit bot
parent bc77ed7657
commit 6f5a6c3188
8 changed files with 234 additions and 161 deletions
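
Note: the new base class factors the adaptation, broadcasting, stats and rotation plumbing out of AndroidVideoTrackSource. A minimal sketch of how a capture source is expected to build on it follows; it is not part of this commit, and the class name, callback and the second include path are illustrative assumptions.

#include "webrtc/media/base/adaptedvideotracksource.h"
#include "webrtc/media/engine/webrtcvideoframe.h"  // Assumed path for cricket::WebRtcVideoFrame.

// Hypothetical capture source built on the new base class.
class MyCapturerSource : public rtc::AdaptedVideoTrackSource {
 public:
  // Remaining VideoTrackSourceInterface methods; trivial stubs for the sketch.
  SourceState state() const override { return kLive; }
  bool remote() const override { return false; }
  bool is_screencast() const override { return false; }
  rtc::Optional<bool> needs_denoising() const override {
    return rtc::Optional<bool>(false);
  }

  // Called on the capture thread with an I420 buffer.
  void OnCapturedFrame(
      const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
      webrtc::VideoRotation rotation,
      int64_t time_us) {
    int adapted_width;
    int adapted_height;
    int crop_width;
    int crop_height;
    int crop_x;
    int crop_y;
    // Ask the base class whether the frame is wanted and how to crop/scale it.
    if (!AdaptFrame(buffer->width(), buffer->height(), time_us,
                    &adapted_width, &adapted_height,
                    &crop_width, &crop_height, &crop_x, &crop_y)) {
      return;  // No interested sinks, or the VideoAdapter dropped the frame.
    }
    // Cropping to (crop_x, crop_y, crop_width, crop_height) and scaling to
    // adapted_width x adapted_height is the subclass's job; elided here.
    // OnFrame() applies any pending rotation for non-native buffers.
    OnFrame(cricket::WebRtcVideoFrame(buffer, rotation, time_us));
  }
};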

View File

@@ -12,8 +12,6 @@
#include <utility>
#include "third_party/libyuv/include/libyuv/rotate.h"
namespace webrtc {
AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
@@ -27,21 +25,9 @@ AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
j_egl_context)),
is_screencast_(is_screencast) {
LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
worker_thread_checker_.DetachFromThread();
camera_thread_checker_.DetachFromThread();
}
bool AndroidVideoTrackSource::GetStats(AndroidVideoTrackSource::Stats* stats) {
rtc::CritScope lock(&stats_crit_);
if (!stats_) {
return false;
}
*stats = *stats_;
return true;
}
void AndroidVideoTrackSource::SetState(SourceState state) {
if (rtc::Thread::Current() != signaling_thread_) {
invoker_.AsyncInvoke<void>(
@@ -56,34 +42,6 @@ void AndroidVideoTrackSource::SetState(SourceState state) {
}
}
void AndroidVideoTrackSource::AddOrUpdateSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
broadcaster_.AddOrUpdateSink(sink, wants);
OnSinkWantsChanged(broadcaster_.wants());
}
void AndroidVideoTrackSource::RemoveSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
broadcaster_.RemoveSink(sink);
OnSinkWantsChanged(broadcaster_.wants());
}
void AndroidVideoTrackSource::OnSinkWantsChanged(
const rtc::VideoSinkWants& wants) {
{
rtc::CritScope lock(&apply_rotation_crit_);
apply_rotation_ = wants.rotation_applied;
}
video_adapter_.OnResolutionRequest(wants.max_pixel_count,
wants.max_pixel_count_step_up);
}
void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
int length,
int width,
@@ -94,17 +52,20 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
rotation == 270);
int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
int64_t translated_camera_time_us =
timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
int adapted_width;
int adapted_height;
int crop_width;
int crop_height;
int crop_x;
int crop_y;
int64_t translated_camera_time_us;
if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec,
if (!AdaptFrame(width, height, camera_time_us,
&adapted_width, &adapted_height, &crop_width, &crop_height,
&crop_x, &crop_y, &translated_camera_time_us)) {
&crop_x, &crop_y)) {
return;
}
@@ -134,33 +95,9 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
buffer->MutableDataU(), buffer->StrideU(),
buffer->width(), buffer->height());
// Applying rotation is only supported for legacy reasons, and the performance
// for this path is not critical.
rtc::CritScope lock(&apply_rotation_crit_);
if (apply_rotation_ && rotation != 0) {
rtc::scoped_refptr<I420Buffer> rotated_buffer =
rotation == 180 ? I420Buffer::Create(buffer->width(), buffer->height())
: I420Buffer::Create(buffer->height(), buffer->width());
libyuv::I420Rotate(
buffer->DataY(), buffer->StrideY(),
buffer->DataU(), buffer->StrideU(),
buffer->DataV(), buffer->StrideV(),
rotated_buffer->MutableDataY(), rotated_buffer->StrideY(),
rotated_buffer->MutableDataU(), rotated_buffer->StrideU(),
rotated_buffer->MutableDataV(), rotated_buffer->StrideV(),
buffer->width(), buffer->height(),
static_cast<libyuv::RotationMode>(rotation));
buffer = rotated_buffer;
}
OnFrame(cricket::WebRtcVideoFrame(
buffer,
apply_rotation_ ? webrtc::kVideoRotation_0
: static_cast<webrtc::VideoRotation>(rotation),
translated_camera_time_us, 0),
width, height);
buffer, static_cast<webrtc::VideoRotation>(rotation),
translated_camera_time_us, 0));
}
void AndroidVideoTrackSource::OnTextureFrameCaptured(
@@ -173,17 +110,20 @@ void AndroidVideoTrackSource::OnTextureFrameCaptured(
RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
rotation == 270);
int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
int64_t translated_camera_time_us =
timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
int adapted_width;
int adapted_height;
int crop_width;
int crop_height;
int crop_x;
int crop_y;
int64_t translated_camera_time_us;
if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec,
if (!AdaptFrame(width, height, camera_time_us,
&adapted_width, &adapted_height, &crop_width, &crop_height,
&crop_x, &crop_y, &translated_camera_time_us)) {
&crop_x, &crop_y)) {
surface_texture_helper_->ReturnTextureFrame();
return;
}
@@ -195,8 +135,11 @@ void AndroidVideoTrackSource::OnTextureFrameCaptured(
crop_x / static_cast<float>(width),
crop_y / static_cast<float>(height));
rtc::CritScope lock(&apply_rotation_crit_);
if (apply_rotation_) {
// Make a local copy, since the value of apply_rotation() may change
// under our feet.
bool do_rotate = apply_rotation();
if (do_rotate) {
if (rotation == webrtc::kVideoRotation_90 ||
rotation == webrtc::kVideoRotation_270) {
std::swap(adapted_width, adapted_height);
@@ -208,21 +151,9 @@ void AndroidVideoTrackSource::OnTextureFrameCaptured(
surface_texture_helper_->CreateTextureFrame(
adapted_width, adapted_height,
webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)),
apply_rotation_ ? webrtc::kVideoRotation_0
: static_cast<webrtc::VideoRotation>(rotation),
translated_camera_time_us, 0),
width, height);
}
void AndroidVideoTrackSource::OnFrame(const cricket::VideoFrame& frame,
int width,
int height) {
{
rtc::CritScope lock(&stats_crit_);
stats_ = rtc::Optional<AndroidVideoTrackSource::Stats>({width, height});
}
broadcaster_.OnFrame(frame);
do_rotate ? webrtc::kVideoRotation_0
: static_cast<webrtc::VideoRotation>(rotation),
translated_camera_time_us, 0));
}
void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
@@ -230,39 +161,7 @@ void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
int fps) {
cricket::VideoFormat format(width, height,
cricket::VideoFormat::FpsToInterval(fps), 0);
video_adapter_.OnOutputFormatRequest(format);
}
bool AndroidVideoTrackSource::AdaptFrame(int width,
int height,
int64_t camera_time_us,
int* out_width,
int* out_height,
int* crop_width,
int* crop_height,
int* crop_x,
int* crop_y,
int64_t* translated_camera_time_us) {
RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
int64_t system_time_us = rtc::TimeMicros();
*translated_camera_time_us =
timestamp_aligner_.TranslateTimestamp(camera_time_us, system_time_us);
if (!broadcaster_.frame_wanted()) {
return false;
}
if (!video_adapter_.AdaptFrameResolution(
width, height, camera_time_us * rtc::kNumNanosecsPerMicrosec,
crop_width, crop_height, out_width, out_height)) {
// VideoAdapter dropped the frame.
return false;
}
*crop_x = (width - *crop_width) / 2;
*crop_y = (height - *crop_height) / 2;
return true;
video_adapter()->OnOutputFormatRequest(format);
}
} // namespace webrtc
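
Note: AdaptFrame() no longer returns a translated capture time; timestamp alignment now stays in the capturer, as in the two call sites above. Condensed from those call sites (timestamp_aligner_ is the member used in this file):

int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
// Translate the camera clock to the rtc::TimeMicros() timebase before
// adaptation; the translated value is what ends up in the delivered frame.
int64_t translated_camera_time_us =
    timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());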

View File

@@ -13,21 +13,17 @@
#include "webrtc/api/android/jni/native_handle_impl.h"
#include "webrtc/api/android/jni/surfacetexturehelper_jni.h"
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/notifier.h"
#include "webrtc/base/asyncinvoker.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/base/timestampaligner.h"
#include "webrtc/common_video/include/i420_buffer_pool.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/media/base/videoadapter.h"
#include "webrtc/media/base/videobroadcaster.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/adaptedvideotracksource.h"
namespace webrtc {
class AndroidVideoTrackSource : public Notifier<VideoTrackSourceInterface> {
class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
public:
AndroidVideoTrackSource(rtc::Thread* signaling_thread,
JNIEnv* jni,
@@ -43,11 +39,6 @@ class AndroidVideoTrackSource : public Notifier<VideoTrackSourceInterface> {
return rtc::Optional<bool>(false);
}
// Returns false if no stats are available, e.g, for a remote
// source, or a source which has not seen its first frame yet.
// Should avoid blocking.
bool GetStats(Stats* stats) override;
// Called by the native capture observer
void SetState(SourceState state);
@@ -55,10 +46,6 @@ class AndroidVideoTrackSource : public Notifier<VideoTrackSourceInterface> {
bool remote() const override { return false; }
void AddOrUpdateSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override;
void OnByteBufferFrameCaptured(const void* frame_data,
int length,
int width,
@@ -82,35 +69,14 @@ class AndroidVideoTrackSource : public Notifier<VideoTrackSourceInterface> {
private:
rtc::Thread* signaling_thread_;
rtc::AsyncInvoker invoker_;
rtc::ThreadChecker worker_thread_checker_;
rtc::ThreadChecker camera_thread_checker_;
rtc::CriticalSection stats_crit_;
rtc::Optional<Stats> stats_ GUARDED_BY(stats_crit_);
SourceState state_;
rtc::VideoBroadcaster broadcaster_;
rtc::TimestampAligner timestamp_aligner_;
cricket::VideoAdapter video_adapter_;
rtc::CriticalSection apply_rotation_crit_;
bool apply_rotation_ GUARDED_BY(apply_rotation_crit_);
webrtc::NV12ToI420Scaler nv12toi420_scaler_;
webrtc::I420BufferPool buffer_pool_;
rtc::scoped_refptr<webrtc_jni::SurfaceTextureHelper> surface_texture_helper_;
const bool is_screencast_;
void OnFrame(const cricket::VideoFrame& frame, int width, int height);
void OnSinkWantsChanged(const rtc::VideoSinkWants& wants);
bool AdaptFrame(int width,
int height,
int64_t camera_time_us,
int* out_width,
int* out_height,
int* crop_width,
int* crop_height,
int* crop_x,
int* crop_y,
int64_t* translated_camera_time_us);
};
} // namespace webrtc

View File

@@ -47,6 +47,8 @@ rtc_source_set("rtc_media") {
libs = []
deps = []
sources = [
"base/adaptedvideotracksource.cc",
"base/adaptedvideotracksource.h",
"base/audiosource.h",
"base/codec.cc",
"base/codec.h",

View File

@@ -0,0 +1,111 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/media/base/adaptedvideotracksource.h"
namespace rtc {
AdaptedVideoTrackSource::AdaptedVideoTrackSource() {
thread_checker_.DetachFromThread();
}
bool AdaptedVideoTrackSource::GetStats(Stats* stats) {
rtc::CritScope lock(&stats_crit_);
if (!stats_) {
return false;
}
*stats = *stats_;
return true;
}
void AdaptedVideoTrackSource::OnFrame(const cricket::VideoFrame& frame) {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
frame.video_frame_buffer());
/* Note that this is a "best effort" approach to
wants.rotation_applied; apply_rotation_ can change from false to
true between the check of apply_rotation() and the call to
broadcaster_.OnFrame(), in which case we generate a frame with
pending rotation even though a sink with wants.rotation_applied ==
true was just added. The VideoBroadcaster enforces
synchronization for us in this case, by not passing the frame on
to sinks which don't want it. */
if (apply_rotation() &&
frame.rotation() != webrtc::kVideoRotation_0 &&
!buffer->native_handle()) {
/* Apply pending rotation. */
broadcaster_.OnFrame(cricket::WebRtcVideoFrame(
webrtc::I420Buffer::Rotate(buffer, frame.rotation()),
webrtc::kVideoRotation_0, frame.timestamp_us()));
} else {
broadcaster_.OnFrame(frame);
}
}
void AdaptedVideoTrackSource::AddOrUpdateSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
broadcaster_.AddOrUpdateSink(sink, wants);
OnSinkWantsChanged(broadcaster_.wants());
}
void AdaptedVideoTrackSource::RemoveSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
broadcaster_.RemoveSink(sink);
OnSinkWantsChanged(broadcaster_.wants());
}
bool AdaptedVideoTrackSource::apply_rotation() {
return broadcaster_.wants().rotation_applied;
}
void AdaptedVideoTrackSource::OnSinkWantsChanged(
const rtc::VideoSinkWants& wants) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
video_adapter_.OnResolutionRequest(wants.max_pixel_count,
wants.max_pixel_count_step_up);
}
bool AdaptedVideoTrackSource::AdaptFrame(int width,
int height,
int64_t time_us,
int* out_width,
int* out_height,
int* crop_width,
int* crop_height,
int* crop_x,
int* crop_y) {
{
rtc::CritScope lock(&stats_crit_);
stats_ = rtc::Optional<Stats>({width, height});
}
if (!broadcaster_.frame_wanted()) {
return false;
}
if (!video_adapter_.AdaptFrameResolution(
width, height, time_us * rtc::kNumNanosecsPerMicrosec,
crop_width, crop_height, out_width, out_height)) {
// VideoAdapter dropped the frame.
return false;
}
*crop_x = (width - *crop_width) / 2;
*crop_y = (height - *crop_height) / 2;
return true;
}
} // namespace rtc
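
Note: the crop rectangle produced by AdaptFrame() is always centered in the input. A standalone illustration of that arithmetic with assumed numbers (the real crop_width/crop_height come from the VideoAdapter, not from this snippet):

#include <cstdio>

int main() {
  // Assumed example: a 1280x720 capture cropped to 4:3 (960x720) before
  // scaling; the offsets are computed exactly as at the end of AdaptFrame().
  const int width = 1280, height = 720;
  const int crop_width = 960, crop_height = 720;
  const int crop_x = (width - crop_width) / 2;    // 160
  const int crop_y = (height - crop_height) / 2;  // 0
  std::printf("crop %dx%d at (%d,%d)\n",
              crop_width, crop_height, crop_x, crop_y);
  return 0;
}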

View File

@@ -0,0 +1,81 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MEDIA_BASE_ADAPTEDVIDEOTRACKSOURCE_H_
#define WEBRTC_MEDIA_BASE_ADAPTEDVIDEOTRACKSOURCE_H_
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/notifier.h"
#include "webrtc/media/base/videoadapter.h"
#include "webrtc/media/base/videobroadcaster.h"
namespace rtc {
// Base class for sources which need video adaptation, e.g., video
// capture sources. Sinks must be added and removed on one and only
// one thread, while AdaptFrame and OnFrame may be called on any
// thread.
class AdaptedVideoTrackSource
: public webrtc::Notifier<webrtc::VideoTrackSourceInterface> {
public:
AdaptedVideoTrackSource();
protected:
// Checks the apply_rotation() flag. If the frame needs rotation, and it is a
// plain memory frame, it is rotated. Subclasses producing native frames must
// handle apply_rotation() themselves.
void OnFrame(const cricket::VideoFrame& frame);
// Reports the appropriate frame size after adaptation. Returns true
// if a frame is wanted. Returns false if there are no interested
// sinks, or if the VideoAdapter decides to drop the frame.
bool AdaptFrame(int width,
int height,
int64_t time_us,
int* out_width,
int* out_height,
int* crop_width,
int* crop_height,
int* crop_x,
int* crop_y);
// Returns the current value of the apply_rotation flag, derived
// from the VideoSinkWants of registered sinks. The value is updated
// in AddOrUpdateSink and RemoveSink. Beware that when this method is
// called from a different thread, the value may become stale before
// it is used.
bool apply_rotation();
cricket::VideoAdapter* video_adapter() { return &video_adapter_; }
private:
// Implements rtc::VideoSourceInterface.
void AddOrUpdateSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override;
// Part of VideoTrackSourceInterface.
bool GetStats(Stats* stats) override;
void OnSinkWantsChanged(const rtc::VideoSinkWants& wants);
rtc::ThreadChecker thread_checker_;
cricket::VideoAdapter video_adapter_;
rtc::CriticalSection stats_crit_;
rtc::Optional<Stats> stats_ GUARDED_BY(stats_crit_);
VideoBroadcaster broadcaster_;
};
} // namespace rtc
#endif // WEBRTC_MEDIA_BASE_ADAPTEDVIDEOTRACKSOURCE_H_
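
Note: for native buffers the base class cannot rotate in OnFrame(), so a subclass samples apply_rotation() once and bakes the result into the frame it builds, as AndroidVideoTrackSource::OnTextureFrameCaptured() does above. A condensed sketch of that pattern; MyNativeSource and CreateRotatedNativeBuffer() are hypothetical names, and std::swap needs <utility>:

void MyNativeSource::DeliverTextureFrame(int adapted_width,
                                         int adapted_height,
                                         webrtc::VideoRotation rotation,
                                         int64_t time_us) {
  // Local copy: the flag can change under our feet when sinks are added
  // or removed on another thread.
  const bool do_rotate = apply_rotation();
  if (do_rotate && (rotation == webrtc::kVideoRotation_90 ||
                    rotation == webrtc::kVideoRotation_270)) {
    std::swap(adapted_width, adapted_height);
  }
  // If rotating, the rotation must be baked into the native buffer itself
  // (on Android this is done through the texture transform matrix).
  OnFrame(cricket::WebRtcVideoFrame(
      CreateRotatedNativeBuffer(adapted_width, adapted_height,
                                do_rotate ? rotation
                                          : webrtc::kVideoRotation_0),
      do_rotate ? webrtc::kVideoRotation_0 : rotation,
      time_us));
}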

View File

@@ -45,7 +45,6 @@ bool VideoBroadcaster::frame_wanted() const {
}
VideoSinkWants VideoBroadcaster::wants() const {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
rtc::CritScope cs(&sinks_and_wants_lock_);
return current_wants_;
}
@@ -53,6 +52,15 @@ VideoSinkWants VideoBroadcaster::wants() const {
void VideoBroadcaster::OnFrame(const cricket::VideoFrame& frame) {
rtc::CritScope cs(&sinks_and_wants_lock_);
for (auto& sink_pair : sink_pairs()) {
if (sink_pair.wants.rotation_applied &&
frame.rotation() != webrtc::kVideoRotation_0) {
// Calls to OnFrame are not synchronized with changes to the sink wants.
// When rotation_applied is set to true, one or a few frames may get here
// with rotation still pending. Protect sinks that don't expect any
// pending rotation.
LOG(LS_VERBOSE) << "Discarding frame with unexpected rotation.";
continue;
}
if (sink_pair.wants.black_frames) {
sink_pair.sink->OnFrame(cricket::WebRtcVideoFrame(
GetBlackFrameBuffer(frame.width(), frame.height()), frame.rotation(),
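
Note: the discard above only triggers for sinks that asked for applied rotation. A sink opts in through its VideoSinkWants when it registers; a short sketch, where video_source and renderer_sink are assumed to exist:

// Request that this sink never sees a frame with pending rotation; the
// AdaptedVideoTrackSource/VideoBroadcaster machinery then either rotates
// the frame or drops it for this sink.
rtc::VideoSinkWants wants;
wants.rotation_applied = true;
video_source->AddOrUpdateSink(renderer_sink, wants);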

View File

@@ -45,6 +45,10 @@ class VideoBroadcaster : public VideoSourceBase,
// aggregated by all VideoSinkWants from all sinks.
VideoSinkWants wants() const;
// This method ensures that if a sink sets rotation_applied == true,
// it will never receive a frame with pending rotation. Our caller
// may pass in frames without precise synchronization with changes
// to the VideoSinkWants.
void OnFrame(const cricket::VideoFrame& frame) override;
protected:

View File

@@ -26,6 +26,8 @@
],
},
'sources': [
'base/adaptedvideotracksource.cc',
'base/adaptedvideotracksource.h',
'base/audiosource.h',
'base/codec.cc',
'base/codec.h',