Cleanup video frame metadata copying

In several places VideoFrame::Builder is used to create a new VideoFrame
when the intent is only to change one or two fields of a const VideoFrame&.

This approach is error-prone because whenever a new metadata field is
added to VideoFrame, it has to be propagated to every one of those
places by hand.

Instead, this CL adds the missing setters and refactors the code to make
a full copy of the VideoFrame and then update only the required fields.

Along the way a few actual bugs are fixed, e.g. the ColorSpace not being
copied when a frame is rotated or a buffer is cropped or converted.

Bug: webrtc:10460
Change-Id: I2895a473ca938b150eed2916c689060bdf58cb25
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/140102
Reviewed-by: Niels Moller <nisse@webrtc.org>
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#28170}
Ilya Nikolaevskiy 2019-06-05 15:59:12 +02:00 committed by Commit Bot
parent b64af4b168
commit 4fc0855a39
6 changed files with 25 additions and 39 deletions
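
The core of the change is easiest to see in the AdaptedVideoTrackSource
hunk further down: instead of rebuilding a frame field by field with
VideoFrame::Builder (and silently losing any metadata that was not listed,
such as ColorSpace), the frame is copy-constructed and only the changed
fields are overwritten. A condensed before/after sketch of that pattern,
trimmed from the diff below:

// Before: every metadata field must be listed explicitly; anything omitted
// (ColorSpace, ntp_time_ms, update_rect, ...) is silently dropped.
webrtc::VideoFrame rotated_frame =
    webrtc::VideoFrame::Builder()
        .set_video_frame_buffer(
            webrtc::I420Buffer::Rotate(*buffer->GetI420(), frame.rotation()))
        .set_rotation(webrtc::kVideoRotation_0)
        .set_timestamp_us(frame.timestamp_us())
        .set_id(frame.id())
        .build();

// After: copy the whole frame so all metadata is preserved, then update
// only the fields that actually change.
webrtc::VideoFrame rotated_frame(frame);
rotated_frame.set_video_frame_buffer(
    webrtc::I420Buffer::Rotate(*buffer->GetI420(), frame.rotation()));
rotated_frame.set_rotation(webrtc::kVideoRotation_0);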


@@ -196,6 +196,12 @@ rtc::scoped_refptr<VideoFrameBuffer> VideoFrame::video_frame_buffer() const {
return video_frame_buffer_;
}
void VideoFrame::set_video_frame_buffer(
const rtc::scoped_refptr<VideoFrameBuffer>& buffer) {
RTC_CHECK(buffer);
video_frame_buffer_ = buffer;
}
int64_t VideoFrame::render_time_ms() const {
return timestamp_us() / rtc::kNumMicrosecsPerMillisec;
}
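
The RTC_CHECK above turns a null buffer into an immediate failure instead
of a crash at some later use. A minimal usage sketch of the new setter
(buffer sizes and variable names here are illustrative, not part of the
diff; the includes are assumed to be api/video/video_frame.h and
api/video/i420_buffer.h):

// Build a frame once, then swap only its pixel buffer; timestamps,
// rotation, color space and id on |frame| are left untouched.
rtc::scoped_refptr<webrtc::I420Buffer> original =
    webrtc::I420Buffer::Create(640, 360);
webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
                               .set_video_frame_buffer(original)
                               .set_rotation(webrtc::kVideoRotation_0)
                               .set_timestamp_us(0)
                               .build();
rtc::scoped_refptr<webrtc::I420Buffer> replacement =
    webrtc::I420Buffer::Create(320, 180);
frame.set_video_frame_buffer(replacement);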


@@ -160,6 +160,9 @@ class RTC_EXPORT VideoFrame {
// initialized VideoFrame.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer() const;
void set_video_frame_buffer(
const rtc::scoped_refptr<VideoFrameBuffer>& buffer);
// TODO(nisse): Deprecated.
// Return true if the frame is stored in a texture.
bool is_texture() const {


@@ -51,14 +51,10 @@ void AdaptedVideoTrackSource::OnFrame(const webrtc::VideoFrame& frame) {
if (apply_rotation() && frame.rotation() != webrtc::kVideoRotation_0 &&
buffer->type() == webrtc::VideoFrameBuffer::Type::kI420) {
/* Apply pending rotation. */
webrtc::VideoFrame rotated_frame =
webrtc::VideoFrame::Builder()
.set_video_frame_buffer(webrtc::I420Buffer::Rotate(
*buffer->GetI420(), frame.rotation()))
.set_rotation(webrtc::kVideoRotation_0)
.set_timestamp_us(frame.timestamp_us())
.set_id(frame.id())
.build();
webrtc::VideoFrame rotated_frame(frame);
rotated_frame.set_video_frame_buffer(
webrtc::I420Buffer::Rotate(*buffer->GetI420(), frame.rotation()));
rotated_frame.set_rotation(webrtc::kVideoRotation_0);
broadcaster_.OnFrame(rotated_frame);
} else {
broadcaster_.OnFrame(frame);


@@ -420,12 +420,11 @@ int SimulcastEncoderAdapter::Encode(
// UpdateRect is not propagated to lower simulcast layers currently.
// TODO(ilnik): Consider scaling UpdateRect together with the buffer.
VideoFrame frame = VideoFrame::Builder()
.set_video_frame_buffer(dst_buffer)
.set_timestamp_rtp(input_image.timestamp())
.set_rotation(webrtc::kVideoRotation_0)
.set_timestamp_ms(input_image.render_time_ms())
.build();
VideoFrame frame(input_image);
frame.set_video_frame_buffer(dst_buffer);
frame.set_rotation(webrtc::kVideoRotation_0);
frame.set_update_rect(
VideoFrame::UpdateRect{0, 0, frame.width(), frame.height()});
int ret =
streaminfos_[stream_idx].encoder->Encode(frame, &stream_frame_types);
if (ret != WEBRTC_VIDEO_CODEC_OK) {


@@ -280,15 +280,9 @@ void VideoStreamDecoderImpl::Decoded(VideoFrame& decoded_image,
timing_.StopDecodeTimer(0, *casted_decode_time_ms, decode_stop_time_ms,
frame_timestamps->render_time_us / 1000);
callbacks_->OnDecodedFrame(
VideoFrame::Builder()
.set_video_frame_buffer(decoded_image.video_frame_buffer())
.set_rotation(decoded_image.rotation())
.set_timestamp_us(frame_timestamps->render_time_us)
.set_timestamp_rtp(decoded_image.timestamp())
.set_id(decoded_image.id())
.build(),
casted_decode_time_ms, casted_qp);
VideoFrame copy = decoded_image;
copy.set_timestamp_us(frame_timestamps->render_time_us);
callbacks_->OnDecodedFrame(copy, casted_decode_time_ms, casted_qp);
});
}


@@ -1240,14 +1240,8 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
VideoFrame::UpdateRect{0, 0, cropped_width, cropped_height};
}
}
out_frame = VideoFrame::Builder()
.set_video_frame_buffer(cropped_buffer)
.set_timestamp_rtp(video_frame.timestamp())
.set_timestamp_ms(video_frame.render_time_ms())
.set_rotation(video_frame.rotation())
.set_id(video_frame.id())
.set_update_rect(update_rect)
.build();
out_frame.set_video_frame_buffer(cropped_buffer);
out_frame.set_update_rect(update_rect);
out_frame.set_ntp_time_ms(video_frame.ntp_time_ms());
// Since accumulated_update_rect_ is constructed before cropping,
// we can't trust it. If any changes were pending, we invalidate whole
@@ -1322,14 +1316,8 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
VideoFrame::UpdateRect{0, 0, out_frame.width(), out_frame.height()};
}
out_frame = VideoFrame::Builder()
.set_video_frame_buffer(converted_buffer)
.set_timestamp_rtp(out_frame.timestamp())
.set_timestamp_ms(out_frame.render_time_ms())
.set_rotation(out_frame.rotation())
.set_id(out_frame.id())
.set_update_rect(update_rect)
.build();
out_frame.set_video_frame_buffer(converted_buffer);
out_frame.set_update_rect(update_rect);
}
TRACE_EVENT1("webrtc", "VCMGenericEncoder::Encode", "timestamp",