Rename capture_time_identifier to presentation_timestamp

After landing this change, we can change the corresponding usage in blink to
start using presentation_timestamp as well and then delete the remaining usage
of capture_time_identifier.

Bug: webrtc:373365537
Change-Id: I0c4f2b6b3822df42d6e3387df2c243c3684d8a41
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/365640
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Guido Urdaneta <guidou@webrtc.org>
Commit-Queue: Palak Agarwal <agpalak@google.com>
Cr-Commit-Position: refs/heads/main@{#43317}

commit c4f61fbde3 (parent b52416eccf)
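For reference, a minimal sketch of what a caller of the renamed accessor looks like after this CL. The helper function below is a hypothetical illustration, not part of the change; only the accessors GetPresentationTimestamp()/GetCaptureTimeIdentifier(), std::optional, and webrtc::Timestamp come from the interface touched in the first hunk.

// Sketch only: prefer the new accessor, fall back to the deprecated one
// while blink still populates it (https://bugs.webrtc.org/373365537).
#include <optional>

#include "api/frame_transformer_interface.h"
#include "api/units/timestamp.h"

namespace webrtc {

// Hypothetical helper; not part of this CL.
std::optional<Timestamp> PresentationTimestampOf(
    const TransformableFrameInterface& frame) {
  std::optional<Timestamp> ts = frame.GetPresentationTimestamp();
  if (!ts.has_value()) {
    // Deprecated accessor, kept only until blink stops calling it.
    ts = frame.GetCaptureTimeIdentifier();
  }
  return ts;
}

}  // namespace webrtc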
@@ -54,9 +54,15 @@ class TransformableFrameInterface {
   virtual uint32_t GetTimestamp() const = 0;
   virtual void SetRTPTimestamp(uint32_t timestamp) = 0;
 
+  // TODO(https://bugs.webrtc.org/373365537): Remove this once its usage is
+  // removed from blink.
+  virtual std::optional<Timestamp> GetCaptureTimeIdentifier() const {
+    return std::nullopt;
+  }
+
   // TODO(https://bugs.webrtc.org/14878): Change this to pure virtual after it
   // is implemented everywhere.
-  virtual std::optional<Timestamp> GetCaptureTimeIdentifier() const {
+  virtual std::optional<Timestamp> GetPresentationTimestamp() const {
     return std::nullopt;
   }
 
@@ -35,7 +35,7 @@ class MockTransformableFrame : public TransformableFrameInterface {
   MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override));
   MOCK_METHOD(void, SetRTPTimestamp, (uint32_t), (override));
   MOCK_METHOD(std::optional<webrtc::Timestamp>,
-              GetCaptureTimeIdentifier,
+              GetPresentationTimestamp,
              (),
              (const, override));
   MOCK_METHOD(std::string, GetMimeType, (), (const, override));

@@ -46,7 +46,7 @@ class MockTransformableVideoFrame
   MOCK_METHOD(std::string, GetMimeType, (), (const, override));
   MOCK_METHOD(VideoFrameMetadata, Metadata, (), (const, override));
   MOCK_METHOD(std::optional<Timestamp>,
-              GetCaptureTimeIdentifier,
+              GetPresentationTimestamp,
              (),
              (const, override));
 };
@@ -107,12 +107,12 @@ class RTC_EXPORT EncodedImage {
     simulcast_index_ = simulcast_index;
   }
 
-  const std::optional<Timestamp>& CaptureTimeIdentifier() const {
-    return capture_time_identifier_;
+  const std::optional<Timestamp>& PresentationTimestamp() const {
+    return presentation_timestamp_;
   }
-  void SetCaptureTimeIdentifier(
-      const std::optional<Timestamp>& capture_time_identifier) {
-    capture_time_identifier_ = capture_time_identifier;
+  void SetPresentationTimestamp(
+      const std::optional<Timestamp>& presentation_timestamp) {
+    presentation_timestamp_ = presentation_timestamp;
   }
 
   // Encoded images can have dependencies between spatial and/or temporal

@@ -264,7 +264,7 @@ class RTC_EXPORT EncodedImage {
   size_t size_ = 0;  // Size of encoded frame data.
   uint32_t timestamp_rtp_ = 0;
   std::optional<int> simulcast_index_;
-  std::optional<Timestamp> capture_time_identifier_;
+  std::optional<Timestamp> presentation_timestamp_;
   std::optional<int> spatial_index_;
   std::optional<int> temporal_index_;
   std::map<int, size_t> spatial_layer_frame_size_bytes_;
@@ -172,7 +172,7 @@ VideoFrame::Builder::~Builder() = default;
 VideoFrame VideoFrame::Builder::build() {
   RTC_CHECK(video_frame_buffer_ != nullptr);
   return VideoFrame(id_, video_frame_buffer_, timestamp_us_,
-                    capture_time_identifier_, reference_time_, timestamp_rtp_,
+                    presentation_timestamp_, reference_time_, timestamp_rtp_,
                     ntp_time_ms_, rotation_, color_space_, render_parameters_,
                     update_rect_, packet_infos_);
 }

@@ -196,8 +196,14 @@ VideoFrame::Builder& VideoFrame::Builder::set_timestamp_us(
 }
 
 VideoFrame::Builder& VideoFrame::Builder::set_capture_time_identifier(
-    const std::optional<Timestamp>& capture_time_identifier) {
-  capture_time_identifier_ = capture_time_identifier;
+    const std::optional<Timestamp>& presentation_timestamp) {
+  presentation_timestamp_ = presentation_timestamp;
+  return *this;
+}
+
+VideoFrame::Builder& VideoFrame::Builder::set_presentation_timestamp(
+    const std::optional<Timestamp>& presentation_timestamp) {
+  presentation_timestamp_ = presentation_timestamp;
   return *this;
 }
 

@@ -282,7 +288,7 @@ VideoFrame::VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
 VideoFrame::VideoFrame(uint16_t id,
                        const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
                        int64_t timestamp_us,
-                       const std::optional<Timestamp>& capture_time_identifier,
+                       const std::optional<Timestamp>& presentation_timestamp,
                        const std::optional<Timestamp>& reference_time,
                        uint32_t timestamp_rtp,
                        int64_t ntp_time_ms,

@@ -296,7 +302,7 @@ VideoFrame::VideoFrame(uint16_t id,
       timestamp_rtp_(timestamp_rtp),
       ntp_time_ms_(ntp_time_ms),
       timestamp_us_(timestamp_us),
-      capture_time_identifier_(capture_time_identifier),
+      presentation_timestamp_(presentation_timestamp),
       reference_time_(reference_time),
       rotation_(rotation),
       color_space_(color_space),
@@ -109,7 +109,9 @@ class RTC_EXPORT VideoFrame {
     Builder& set_timestamp_ms(int64_t timestamp_ms);
     Builder& set_timestamp_us(int64_t timestamp_us);
     Builder& set_capture_time_identifier(
-        const std::optional<Timestamp>& capture_time_identifier);
+        const std::optional<Timestamp>& presentation_timestamp);
+    Builder& set_presentation_timestamp(
+        const std::optional<Timestamp>& presentation_timestamp);
     Builder& set_reference_time(const std::optional<Timestamp>& reference_time);
     Builder& set_rtp_timestamp(uint32_t rtp_timestamp);
     // TODO(https://bugs.webrtc.org/13756): Deprecate and use set_rtp_timestamp.

@@ -126,7 +128,7 @@ class RTC_EXPORT VideoFrame {
     uint16_t id_ = kNotSetId;
     rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
     int64_t timestamp_us_ = 0;
-    std::optional<Timestamp> capture_time_identifier_;
+    std::optional<Timestamp> presentation_timestamp_;
     std::optional<Timestamp> reference_time_;
     uint32_t timestamp_rtp_ = 0;
     int64_t ntp_time_ms_ = 0;

@@ -174,12 +176,18 @@ class RTC_EXPORT VideoFrame {
   int64_t timestamp_us() const { return timestamp_us_; }
   void set_timestamp_us(int64_t timestamp_us) { timestamp_us_ = timestamp_us; }
 
+  // TODO(https://bugs.webrtc.org/373365537): Remove this once its usage is
+  // removed from blink.
   const std::optional<Timestamp>& capture_time_identifier() const {
-    return capture_time_identifier_;
+    return presentation_timestamp_;
   }
-  void set_capture_time_identifier(
-      const std::optional<Timestamp>& capture_time_identifier) {
-    capture_time_identifier_ = capture_time_identifier;
+
+  const std::optional<Timestamp>& presentation_timestamp() const {
+    return presentation_timestamp_;
+  }
+  void set_presentation_timestamp(
+      const std::optional<Timestamp>& presentation_timestamp) {
+    presentation_timestamp_ = presentation_timestamp;
   }
 
   const std::optional<Timestamp>& reference_time() const {

@@ -279,7 +287,7 @@ class RTC_EXPORT VideoFrame {
   VideoFrame(uint16_t id,
              const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
             int64_t timestamp_us,
-             const std::optional<Timestamp>& capture_time_identifier,
+             const std::optional<Timestamp>& presentation_timestamp,
             const std::optional<Timestamp>& reference_time,
             uint32_t timestamp_rtp,
             int64_t ntp_time_ms,

@@ -295,7 +303,7 @@ class RTC_EXPORT VideoFrame {
   uint32_t timestamp_rtp_;
   int64_t ntp_time_ms_;
   int64_t timestamp_us_;
-  std::optional<Timestamp> capture_time_identifier_;
+  std::optional<Timestamp> presentation_timestamp_;
   // Contains a monotonically increasing clock time and represents the time
   // when the frame was captured. Not all platforms provide the "true" sample
   // capture time in |reference_time| but might instead use a somewhat delayed
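As a usage note, here is a minimal sketch of feeding the renamed field through VideoFrame::Builder as declared above. The helper function and the I420 buffer setup are illustrative assumptions, not part of this CL; the builder setters themselves are the ones shown in the hunks above.

#include "api/units/timestamp.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_frame.h"

namespace webrtc {

// Hypothetical helper showing the new setter; any VideoFrameBuffer works.
VideoFrame MakeFrameWithPresentationTimestamp(Timestamp presentation_timestamp) {
  rtc::scoped_refptr<I420Buffer> buffer =
      I420Buffer::Create(/*width=*/320, /*height=*/180);
  return VideoFrame::Builder()
      .set_video_frame_buffer(buffer)
      .set_rtp_timestamp(90000)
      // Replaces the deprecated set_capture_time_identifier().
      .set_presentation_timestamp(presentation_timestamp)
      .build();
}

}  // namespace webrtc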
@@ -65,7 +65,7 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
         codec_type_(codec_type),
         timestamp_(rtp_timestamp),
         capture_time_(encoded_image.CaptureTime()),
-        capture_time_identifier_(encoded_image.CaptureTimeIdentifier()),
+        presentation_timestamp_(encoded_image.PresentationTimestamp()),
         expected_retransmission_time_(expected_retransmission_time),
         ssrc_(ssrc),
         csrcs_(csrcs) {

@@ -115,7 +115,10 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
   std::optional<VideoCodecType> GetCodecType() const { return codec_type_; }
   Timestamp GetCaptureTime() const { return capture_time_; }
   std::optional<Timestamp> GetCaptureTimeIdentifier() const override {
-    return capture_time_identifier_;
+    return presentation_timestamp_;
+  }
+  std::optional<Timestamp> GetPresentationTimestamp() const override {
+    return presentation_timestamp_;
   }
 
   TimeDelta GetExpectedRetransmissionTime() const {

@@ -140,7 +143,7 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
   const std::optional<VideoCodecType> codec_type_ = std::nullopt;
   uint32_t timestamp_;
   const Timestamp capture_time_;
-  const std::optional<Timestamp> capture_time_identifier_;
+  const std::optional<Timestamp> presentation_timestamp_;
   const TimeDelta expected_retransmission_time_;
 
   uint32_t ssrc_;
@@ -1743,7 +1743,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest,
   std::unique_ptr<RTPSenderVideo> rtp_sender_video =
       CreateSenderWithFrameTransformer(mock_frame_transformer);
   auto encoded_image = CreateDefaultEncodedImage();
-  encoded_image->SetCaptureTimeIdentifier(Timestamp::Millis(1));
+  encoded_image->SetPresentationTimestamp(Timestamp::Millis(1));
   RTPVideoHeader video_header;
 
   EXPECT_CALL(*mock_frame_transformer, Transform)

@@ -1752,8 +1752,8 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest,
         auto* frame = static_cast<TransformableVideoFrameInterface*>(
             transformable_frame.get());
         ASSERT_TRUE(frame);
-        EXPECT_EQ(frame->GetCaptureTimeIdentifier(),
-                  encoded_image->CaptureTimeIdentifier());
+        EXPECT_EQ(frame->GetPresentationTimestamp(),
+                  encoded_image->PresentationTimestamp());
       });
   rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp,
                                      *encoded_image, video_header,
@@ -710,7 +710,7 @@ int32_t LibaomAv1Encoder::Encode(
                                        ? VideoFrameType::kVideoFrameKey
                                        : VideoFrameType::kVideoFrameDelta;
       encoded_image.SetRtpTimestamp(frame.rtp_timestamp());
-      encoded_image.SetCaptureTimeIdentifier(frame.capture_time_identifier());
+      encoded_image.SetPresentationTimestamp(frame.presentation_timestamp());
      encoded_image.capture_time_ms_ = frame.render_time_ms();
      encoded_image.rotation_ = frame.rotation();
      encoded_image.content_type_ = VideoContentType::UNSPECIFIED;
@@ -356,10 +356,10 @@ TEST(LibaomAv1EncoderTest, RtpTimestampWrap) {
               Eq(VideoFrameType::kVideoFrameDelta));
 }
 
-TEST(LibaomAv1EncoderTest, TestCaptureTimeId) {
+TEST(LibaomAv1EncoderTest, TestPresentationTimestamp) {
   std::unique_ptr<VideoEncoder> encoder =
       CreateLibaomAv1Encoder(CreateEnvironment());
-  const Timestamp capture_time_id = Timestamp::Micros(2000);
+  const Timestamp presentation_timestamp = Timestamp::Micros(2000);
   VideoCodec codec_settings = DefaultCodecSettings();
   codec_settings.SetScalabilityMode(ScalabilityMode::kL2T1);
   ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()),

@@ -376,17 +376,17 @@ TEST(LibaomAv1EncoderTest, TestCaptureTimeId) {
   std::vector<EncodedVideoFrameProducer::EncodedFrame> encoded_frames =
       EncodedVideoFrameProducer(*encoder)
           .SetNumInputFrames(1)
-          .SetCaptureTimeIdentifier(capture_time_id)
+          .SetPresentationTimestamp(presentation_timestamp)
           .Encode();
   ASSERT_THAT(encoded_frames, SizeIs(2));
   ASSERT_TRUE(
-      encoded_frames[0].encoded_image.CaptureTimeIdentifier().has_value());
+      encoded_frames[0].encoded_image.PresentationTimestamp().has_value());
   ASSERT_TRUE(
-      encoded_frames[1].encoded_image.CaptureTimeIdentifier().has_value());
-  EXPECT_EQ(encoded_frames[0].encoded_image.CaptureTimeIdentifier()->us(),
-            capture_time_id.us());
-  EXPECT_EQ(encoded_frames[1].encoded_image.CaptureTimeIdentifier()->us(),
-            capture_time_id.us());
+      encoded_frames[1].encoded_image.PresentationTimestamp().has_value());
+  EXPECT_EQ(encoded_frames[0].encoded_image.PresentationTimestamp()->us(),
+            presentation_timestamp.us());
+  EXPECT_EQ(encoded_frames[1].encoded_image.PresentationTimestamp()->us(),
+            presentation_timestamp.us());
 }
 
 TEST(LibaomAv1EncoderTest, AdheresToTargetBitrateDespiteUnevenFrameTiming) {
@@ -62,7 +62,7 @@ EncodedVideoFrameProducer::Encode() {
         VideoFrame::Builder()
             .set_video_frame_buffer(frame_buffer_generator->NextFrame().buffer)
             .set_rtp_timestamp(rtp_timestamp_)
-            .set_capture_time_identifier(capture_time_identifier_)
+            .set_presentation_timestamp(presentation_timestamp_)
             .build();
     rtp_timestamp_ += rtp_tick;
     RTC_CHECK_EQ(encoder_.Encode(frame, &next_frame_type_),
@@ -49,7 +49,7 @@ class EncodedVideoFrameProducer {
 
   EncodedVideoFrameProducer& SetRtpTimestamp(uint32_t value);
 
-  EncodedVideoFrameProducer& SetCaptureTimeIdentifier(Timestamp value);
+  EncodedVideoFrameProducer& SetPresentationTimestamp(Timestamp value);
 
   // Generates input video frames and encodes them with `encoder` provided
   // in the constructor. Returns frame passed to the `OnEncodedImage` by

@@ -60,7 +60,7 @@ class EncodedVideoFrameProducer {
   VideoEncoder& encoder_;
 
   uint32_t rtp_timestamp_ = 1000;
-  Timestamp capture_time_identifier_ = Timestamp::Micros(1000);
+  Timestamp presentation_timestamp_ = Timestamp::Micros(1000);
   int num_input_frames_ = 1;
   int framerate_fps_ = 30;
   RenderResolution resolution_ = {320, 180};

@@ -100,8 +100,8 @@ inline EncodedVideoFrameProducer& EncodedVideoFrameProducer::SetRtpTimestamp(
 }
 
 inline EncodedVideoFrameProducer&
-EncodedVideoFrameProducer::SetCaptureTimeIdentifier(Timestamp value) {
-  capture_time_identifier_ = value;
+EncodedVideoFrameProducer::SetPresentationTimestamp(Timestamp value) {
+  presentation_timestamp_ = value;
   return *this;
 }
 }  // namespace webrtc
@@ -1240,8 +1240,8 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image,
       }
     }
     encoded_images_[encoder_idx].SetRtpTimestamp(input_image.rtp_timestamp());
-    encoded_images_[encoder_idx].SetCaptureTimeIdentifier(
-        input_image.capture_time_identifier());
+    encoded_images_[encoder_idx].SetPresentationTimestamp(
+        input_image.presentation_timestamp());
     encoded_images_[encoder_idx].SetColorSpace(input_image.color_space());
     encoded_images_[encoder_idx].SetRetransmissionAllowed(
         retransmission_allowed);
@@ -249,12 +249,12 @@ TEST_F(TestVp8Impl, Configure) {
 }
 
 TEST_F(TestVp8Impl, OnEncodedImageReportsInfo) {
-  constexpr Timestamp kCaptureTimeIdentifier = Timestamp::Micros(1000);
+  constexpr Timestamp kPresentationTimestamp = Timestamp::Micros(1000);
   VideoFrame input_frame = NextInputFrame();
   input_frame.set_rtp_timestamp(kInitialTimestampRtp);
   input_frame.set_timestamp_us(kInitialTimestampMs *
                                rtc::kNumMicrosecsPerMillisec);
-  input_frame.set_capture_time_identifier(kCaptureTimeIdentifier);
+  input_frame.set_presentation_timestamp(kPresentationTimestamp);
   EncodedImage encoded_frame;
   CodecSpecificInfo codec_specific_info;
   EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info);

@@ -262,9 +262,9 @@ TEST_F(TestVp8Impl, OnEncodedImageReportsInfo) {
   EXPECT_EQ(kInitialTimestampRtp, encoded_frame.RtpTimestamp());
   EXPECT_EQ(kWidth, static_cast<int>(encoded_frame._encodedWidth));
   EXPECT_EQ(kHeight, static_cast<int>(encoded_frame._encodedHeight));
-  ASSERT_TRUE(encoded_frame.CaptureTimeIdentifier().has_value());
-  EXPECT_EQ(kCaptureTimeIdentifier.us(),
-            encoded_frame.CaptureTimeIdentifier()->us());
+  ASSERT_TRUE(encoded_frame.PresentationTimestamp().has_value());
+  EXPECT_EQ(kPresentationTimestamp.us(),
+            encoded_frame.PresentationTimestamp()->us());
 }
 
 TEST_F(TestVp8Impl,
@@ -1731,8 +1731,8 @@ void LibvpxVp9Encoder::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
 
   TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_.size());
   encoded_image_.SetRtpTimestamp(input_image_->rtp_timestamp());
-  encoded_image_.SetCaptureTimeIdentifier(
-      input_image_->capture_time_identifier());
+  encoded_image_.SetPresentationTimestamp(
+      input_image_->presentation_timestamp());
   encoded_image_.SetColorSpace(input_image_->color_space());
   encoded_image_._encodedHeight =
       pkt->data.frame.height[layer_id.spatial_layer_id];
@@ -220,17 +220,17 @@ TEST_P(TestVp9ImplForPixelFormat, DecodedQpEqualsEncodedQp) {
   EXPECT_EQ(encoded_frame.qp_, *decoded_qp);
 }
 
-TEST_P(TestVp9ImplForPixelFormat, CheckCaptureTimeID) {
-  constexpr Timestamp kCaptureTimeIdentifier = Timestamp::Micros(1000);
+TEST_P(TestVp9ImplForPixelFormat, CheckPresentationTimestamp) {
+  constexpr Timestamp kPresentationTimestamp = Timestamp::Micros(1000);
   VideoFrame input_frame = NextInputFrame();
-  input_frame.set_capture_time_identifier(kCaptureTimeIdentifier);
+  input_frame.set_presentation_timestamp(kPresentationTimestamp);
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(input_frame, nullptr));
   EncodedImage encoded_frame;
   CodecSpecificInfo codec_specific_info;
   ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
-  ASSERT_TRUE(encoded_frame.CaptureTimeIdentifier().has_value());
-  EXPECT_EQ(kCaptureTimeIdentifier.us(),
-            encoded_frame.CaptureTimeIdentifier()->us());
+  ASSERT_TRUE(encoded_frame.PresentationTimestamp().has_value());
+  EXPECT_EQ(kPresentationTimestamp.us(),
+            encoded_frame.PresentationTimestamp()->us());
 }
 
 TEST_F(TestVp9Impl, SwitchInputPixelFormatsWithoutReconfigure) {
@@ -1523,8 +1523,8 @@ void VideoStreamEncoder::OnFrame(Timestamp post_time,
 
   // Identifier should remain the same for newly produced incoming frame and the
   // received |video_frame|.
-  incoming_frame.set_capture_time_identifier(
-      video_frame.capture_time_identifier());
+  incoming_frame.set_presentation_timestamp(
+      video_frame.presentation_timestamp());
 
   if (incoming_frame.ntp_time_ms() <= last_captured_timestamp_) {
     // We don't allow the same capture time for two frames, drop this one.

@@ -1978,8 +1978,7 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
   out_frame.set_video_frame_buffer(cropped_buffer);
   out_frame.set_update_rect(update_rect);
   out_frame.set_ntp_time_ms(video_frame.ntp_time_ms());
-  out_frame.set_capture_time_identifier(
-      video_frame.capture_time_identifier());
+  out_frame.set_presentation_timestamp(video_frame.presentation_timestamp());
   // Since accumulated_update_rect_ is constructed before cropping,
   // we can't trust it. If any changes were pending, we invalidate whole
   // frame here.