Let WebRtcVideoChannel2::WebRtcVideoSendStream::InputFrame carry the input frame's timestamp to the output frame.

Essentially, we carry the capture timestamp over to the encoded frame that is sent out, so the inter-frame intervals (frame lengths) carry no added timing noise.
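
In sketch form, the change anchors the capturer's nanosecond delta to the webrtc millisecond wall clock once, then derives every later render time from that anchor. The snippet below is a simplified standalone version of the logic added to InputFrame in webrtcvideoengine2.cc further down; AlignCaptureTimestampMs, delta_ns and now_ms are illustrative stand-ins, not names from the patch.

#include <cstdint>

int64_t first_frame_timestamp_ms = 0;  // set on the first frame only
int64_t last_frame_timestamp_ms = 0;   // reused for the final black frame

// delta_ns: frame->GetTimeStamp(), an offset relative to the first captured frame.
// now_ms: rtc::Time() at the moment the frame reaches InputFrame.
int64_t AlignCaptureTimestampMs(int64_t delta_ns, int64_t now_ms) {
  const int64_t delta_ms = delta_ns / 1000000;  // rtc::kNumNanosecsPerMillisec
  if (first_frame_timestamp_ms == 0) {
    // Anchor the capturer's delta clock to the wall clock exactly once.
    first_frame_timestamp_ms = now_ms - delta_ms;
  }
  last_frame_timestamp_ms = first_frame_timestamp_ms + delta_ms;
  return last_frame_timestamp_ms;  // becomes video_frame.set_render_time_ms()
}

When the capturer is removed, the synthesized black frame reuses last_frame_timestamp_ms bumped by one frame interval, so the timestamp-order check in IncomingCapturedFrame does not drop it.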

Review URL: https://codereview.webrtc.org/1225153002

Cr-Commit-Position: refs/heads/master@{#9597}
qiangchen 2015-07-16 10:27:16 -07:00 committed by Commit bot
parent c5d0d95fd8
commit c27d89fdc6
6 changed files with 111 additions and 2 deletions

@@ -64,6 +64,8 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
formats.push_back(cricket::VideoFormat(160, 120,
cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
formats.push_back(cricket::VideoFormat(1280, 720,
cricket::VideoFormat::FpsToInterval(60), cricket::FOURCC_I420));
ResetSupportedFormats(formats);
}
~FakeVideoCapturer() {
@@ -79,9 +81,17 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
}
return CaptureCustomFrame(GetCaptureFormat()->width,
GetCaptureFormat()->height,
GetCaptureFormat()->interval,
GetCaptureFormat()->fourcc);
}
bool CaptureCustomFrame(int width, int height, uint32 fourcc) {
// default to 30fps
return CaptureCustomFrame(width, height, 33333333, fourcc);
}
bool CaptureCustomFrame(int width,
int height,
int64_t timestamp_interval,
uint32 fourcc) {
if (!running_) {
return false;
}
@@ -106,7 +116,7 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
frame.data_size = size;
frame.elapsed_time = next_timestamp_;
frame.time_stamp = initial_unix_timestamp_ + next_timestamp_;
next_timestamp_ += 33333333; // 30 fps
next_timestamp_ += timestamp_interval;
rtc::scoped_ptr<char[]> data(new char[size]);
frame.data = data.get();

@@ -112,6 +112,11 @@ int FakeVideoSendStream::GetLastHeight() const {
return last_frame_.height();
}
int64_t FakeVideoSendStream::GetLastTimestamp() const {
DCHECK(last_frame_.ntp_time_ms() == 0);
return last_frame_.render_time_ms();
}
void FakeVideoSendStream::IncomingCapturedFrame(
const webrtc::VideoFrame& frame) {
++num_swapped_frames_;

@@ -82,6 +82,7 @@ class FakeVideoSendStream : public webrtc::VideoSendStream,
int GetNumberOfSwappedFrames() const;
int GetLastWidth() const;
int GetLastHeight() const;
int64_t GetLastTimestamp() const;
void SetStats(const webrtc::VideoSendStream::Stats& stats);
private:

@@ -42,6 +42,7 @@
#include "webrtc/base/buffer.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/call.h"
#include "webrtc/modules/video_coding/codecs/h264/include/h264.h"
#include "webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h"
@@ -1670,7 +1671,9 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
capturer_(NULL),
sending_(false),
muted_(false),
old_adapt_changes_(0) {
old_adapt_changes_(0),
first_frame_timestamp_ms_(0),
last_frame_timestamp_ms_(0) {
parameters_.config.rtp.max_packet_size = kVideoMtu;
sp.GetPrimarySsrcs(&parameters_.config.rtp.ssrcs);
@@ -1734,6 +1737,15 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::InputFrame(
static_cast<int>(frame->GetWidth()),
static_cast<int>(frame->GetHeight()));
}
int64_t frame_delta_ms = frame->GetTimeStamp() / rtc::kNumNanosecsPerMillisec;
// frame->GetTimeStamp() is essentially a delta, align to webrtc time
if (first_frame_timestamp_ms_ == 0) {
first_frame_timestamp_ms_ = rtc::Time() - frame_delta_ms;
}
last_frame_timestamp_ms_ = first_frame_timestamp_ms_ + frame_delta_ms;
video_frame.set_render_time_ms(last_frame_timestamp_ms_);
// Reconfigure codec if necessary.
SetDimensions(
video_frame.width(), video_frame.height(), capturer->IsScreencast());
@@ -1762,6 +1774,15 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer(
CreateBlackFrame(&black_frame, last_dimensions_.width,
last_dimensions_.height);
// Force this black frame not to be dropped due to timestamp order
// check. As IncomingCapturedFrame will drop the frame if this frame's
// timestamp is less than or equal to last frame's timestamp, it is
// necessary to give this black frame a larger timestamp than the
// previous one.
last_frame_timestamp_ms_ +=
format_.interval / rtc::kNumNanosecsPerMillisec;
black_frame.set_render_time_ms(last_frame_timestamp_ms_);
stream_->Input()->IncomingCapturedFrame(black_frame);
}
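
As a quick sanity check on the bump above (a hypothetical standalone snippet, assuming the 30 fps interval of 33333333 ns that FakeVideoCapturer uses): dividing the format interval by rtc::kNumNanosecsPerMillisec stamps the black frame 33 ms after the last real frame, strictly greater than the previous timestamp.

#include <cassert>
#include <cstdint>

int main() {
  const int64_t kNumNanosecsPerMillisec = 1000000;
  const int64_t interval_ns = 33333333;   // FakeVideoCapturer's 30 fps interval
  int64_t last_frame_timestamp_ms = 100;  // arbitrary previous timestamp
  // Same arithmetic as the black-frame path: advance by one frame interval.
  last_frame_timestamp_ms += interval_ns / kNumNanosecsPerMillisec;
  assert(last_frame_timestamp_ms == 133);  // 33 ms later, so the frame is kept
  return 0;
}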

@@ -393,6 +393,14 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
bool muted_ GUARDED_BY(lock_);
VideoFormat format_ GUARDED_BY(lock_);
int old_adapt_changes_ GUARDED_BY(lock_);
// The timestamp of the first frame received
// Used to generate the timestamps of subsequent frames
int64_t first_frame_timestamp_ms_ GUARDED_BY(lock_);
// The timestamp of the last frame received
// Used to generate timestamp for the black frame when capturer is removed
int64_t last_frame_timestamp_ms_ GUARDED_BY(lock_);
};
// Wrapper for the receiver part, contains configs etc. that are needed to

@@ -419,6 +419,70 @@ TEST_F(WebRtcVideoEngine2Test, CanConstructDecoderForVp9EncoderFactory) {
channel->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc)));
}
TEST_F(WebRtcVideoEngine2Test, PropagatesInputFrameTimestamp) {
cricket::FakeWebRtcVideoEncoderFactory encoder_factory;
encoder_factory.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
std::vector<cricket::VideoCodec> codecs;
codecs.push_back(kVp8Codec);
FakeCallFactory factory;
engine_.SetCallFactory(&factory);
rtc::scoped_ptr<VideoMediaChannel> channel(
SetUpForExternalEncoderFactory(&encoder_factory, codecs));
EXPECT_TRUE(
channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
FakeVideoCapturer capturer;
EXPECT_TRUE(channel->SetCapturer(kSsrc, &capturer));
capturer.Start(cricket::VideoFormat(1280, 720,
cricket::VideoFormat::FpsToInterval(60),
cricket::FOURCC_I420));
channel->SetSend(true);
FakeCall* call = factory.GetCall();
std::vector<FakeVideoSendStream*> streams = call->GetVideoSendStreams();
FakeVideoSendStream* stream = streams[0];
int64_t timestamp;
int64_t last_timestamp;
EXPECT_TRUE(capturer.CaptureFrame());
last_timestamp = stream->GetLastTimestamp();
for (int i = 0; i < 10; i++) {
EXPECT_TRUE(capturer.CaptureFrame());
timestamp = stream->GetLastTimestamp();
int64_t interval = timestamp - last_timestamp;
// Precision changes from nanosecond to millisecond.
// Allow error to be no more than 1.
EXPECT_NEAR(cricket::VideoFormat::FpsToInterval(60) / 1E6, interval, 1);
last_timestamp = timestamp;
}
capturer.Start(cricket::VideoFormat(1280, 720,
cricket::VideoFormat::FpsToInterval(30),
cricket::FOURCC_I420));
EXPECT_TRUE(capturer.CaptureFrame());
last_timestamp = stream->GetLastTimestamp();
for (int i = 0; i < 10; i++) {
EXPECT_TRUE(capturer.CaptureFrame());
timestamp = stream->GetLastTimestamp();
int64_t interval = timestamp - last_timestamp;
// Precision changes from nanosecond to millisecond.
// Allow error to be no more than 1.
EXPECT_NEAR(cricket::VideoFormat::FpsToInterval(30) / 1E6, interval, 1);
last_timestamp = timestamp;
}
// Remove stream previously added to free the external encoder instance.
EXPECT_TRUE(channel->RemoveSendStream(kSsrc));
}
VideoMediaChannel* WebRtcVideoEngine2Test::SetUpForExternalEncoderFactory(
cricket::WebRtcVideoEncoderFactory* encoder_factory,
const std::vector<VideoCodec>& codecs) {