Rename EncodedImage property Timestamp to RtpTimestamp
To avoid name collision with the Timestamp type, and to avoid confusion with capture time represented as a Timestamp.

Bug: webrtc:9378
Change-Id: I8438a9cf4316e5f81d98c2af9dc9454c21c78e70
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/320601
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
Commit-Queue: Danil Chapovalov <danilchap@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#40796}
commit 9c58483b5a
parent bbf27e0081
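For call sites the change is a mechanical rename. A minimal sketch of the migration, using a hypothetical image and timestamp value (the variable names and the value 90000 are illustrative, not from this CL); the old accessors remain as forwarding shims in the new header until the cleanup tracked in webrtc:9378:

    webrtc::EncodedImage image;
    // New accessors: the value is an RTP timestamp on the 90 kHz clock.
    image.SetRtpTimestamp(90000);
    uint32_t ts = image.RtpTimestamp();
    // Deprecated shims kept during the transition; per the diff below,
    // SetTimestamp(t) forwards to SetRtpTimestamp(t) and
    // Timestamp() forwards to RtpTimestamp().
    image.SetTimestamp(90000);

Because the shims forward to the new names, old and new call sites can coexist while downstream code migrates.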
@@ -79,17 +79,20 @@ class RTC_EXPORT EncodedImage {
   EncodedImage& operator=(EncodedImage&&);
   EncodedImage& operator=(const EncodedImage&);
 
-  // TODO(bugs.webrtc.org/9378): Change style to timestamp(), set_timestamp(),
-  // for consistency with the VideoFrame class. Set frame timestamp (90kHz).
-  void SetTimestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; }
+  // Frame capture time in RTP timestamp representation (90kHz).
+  void SetRtpTimestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; }
+  uint32_t RtpTimestamp() const { return timestamp_rtp_; }
 
-  // Get frame timestamp (90kHz).
-  uint32_t Timestamp() const { return timestamp_rtp_; }
+  // TODO(bugs.webrtc.org/9378): Delete two functions below.
+  void SetTimestamp(uint32_t timestamp) { SetRtpTimestamp(timestamp); }
+  uint32_t Timestamp() const { return RtpTimestamp(); }
 
   void SetEncodeTime(int64_t encode_start_ms, int64_t encode_finish_ms);
 
   // Frame capture time in local time.
   webrtc::Timestamp CaptureTime() const;
 
   // Frame capture time in ntp epoch time, i.e. time since 1st Jan 1900
   int64_t NtpTimeMs() const { return ntp_time_ms_; }
 
   // Every simulcast layer (= encoding) has its own encoder and RTP stream.
@@ -50,7 +50,7 @@ int64_t GetFrameId(const FrameIteratorT& it) {
 
 template <typename FrameIteratorT>
 uint32_t GetTimestamp(const FrameIteratorT& it) {
-  return it->second.encoded_frame->Timestamp();
+  return it->second.encoded_frame->RtpTimestamp();
 }
 
 template <typename FrameIteratorT>

@@ -76,7 +76,7 @@ bool FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
 
   if (frame->Id() <= decoded_frame_history_.GetLastDecodedFrameId()) {
     if (legacy_frame_id_jump_behavior_ && frame->is_keyframe() &&
-        AheadOf(frame->Timestamp(),
+        AheadOf(frame->RtpTimestamp(),
                 *decoded_frame_history_.GetLastDecodedFrameTimestamp())) {
       RTC_DLOG(LS_WARNING)
           << "Keyframe " << frame->Id()
@@ -575,7 +575,7 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage(
   RTC_DCHECK_LT(simulcast_index, rtp_streams_.size());
 
   uint32_t rtp_timestamp =
-      encoded_image.Timestamp() +
+      encoded_image.RtpTimestamp() +
       rtp_streams_[simulcast_index].rtp_rtcp->StartTimestamp();
 
   // RTCPSender has it's own copy of the timestamp offset, added in

@@ -583,7 +583,7 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage(
   // TODO(nisse): Delete RTCPSender:timestamp_offset_, and see if we can confine
   // knowledge of the offset to a single place.
   if (!rtp_streams_[simulcast_index].rtp_rtcp->OnSendingRtpFrame(
-          encoded_image.Timestamp(), encoded_image.capture_time_ms_,
+          encoded_image.RtpTimestamp(), encoded_image.capture_time_ms_,
          rtp_config_.payload_type,
          encoded_image._frameType == VideoFrameType::kVideoFrameKey)) {
    // The payload router could be active but this module isn't sending.

@@ -218,7 +218,7 @@ BitrateAllocationUpdate CreateBitrateAllocationUpdate(int target_bitrate_bps) {
 TEST(RtpVideoSenderTest, SendOnOneModule) {
   constexpr uint8_t kPayload = 'a';
   EncodedImage encoded_image;
-  encoded_image.SetTimestamp(1);
+  encoded_image.SetRtpTimestamp(1);
   encoded_image.capture_time_ms_ = 2;
   encoded_image._frameType = VideoFrameType::kVideoFrameKey;
   encoded_image.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1));

@@ -243,7 +243,7 @@ TEST(RtpVideoSenderTest, SendOnOneModule) {
 TEST(RtpVideoSenderTest, SendSimulcastSetActive) {
   constexpr uint8_t kPayload = 'a';
   EncodedImage encoded_image_1;
-  encoded_image_1.SetTimestamp(1);
+  encoded_image_1.SetRtpTimestamp(1);
   encoded_image_1.capture_time_ms_ = 2;
   encoded_image_1._frameType = VideoFrameType::kVideoFrameKey;
   encoded_image_1.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1));

@@ -278,7 +278,7 @@ TEST(RtpVideoSenderTest, SendSimulcastSetActive) {
 TEST(RtpVideoSenderTest, SendSimulcastSetActiveModules) {
   constexpr uint8_t kPayload = 'a';
   EncodedImage encoded_image_1;
-  encoded_image_1.SetTimestamp(1);
+  encoded_image_1.SetRtpTimestamp(1);
   encoded_image_1.capture_time_ms_ = 2;
   encoded_image_1._frameType = VideoFrameType::kVideoFrameKey;
   encoded_image_1.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1));

@@ -314,7 +314,7 @@ TEST(RtpVideoSenderTest,
      DiscardsHigherSimulcastFramesAfterLayerDisabledInVideoLayersAllocation) {
   constexpr uint8_t kPayload = 'a';
   EncodedImage encoded_image_1;
-  encoded_image_1.SetTimestamp(1);
+  encoded_image_1.SetRtpTimestamp(1);
   encoded_image_1.capture_time_ms_ = 2;
   encoded_image_1._frameType = VideoFrameType::kVideoFrameKey;
   encoded_image_1.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1));

@@ -399,7 +399,7 @@ TEST(RtpVideoSenderTest, FrameCountCallbacks) {
 
   constexpr uint8_t kPayload = 'a';
   EncodedImage encoded_image;
-  encoded_image.SetTimestamp(1);
+  encoded_image.SetRtpTimestamp(1);
   encoded_image.capture_time_ms_ = 2;
   encoded_image._frameType = VideoFrameType::kVideoFrameKey;
   encoded_image.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1));

@@ -445,7 +445,7 @@ TEST(RtpVideoSenderTest, DoesNotRetrasmitAckedPackets) {
 
   constexpr uint8_t kPayload = 'a';
   EncodedImage encoded_image;
-  encoded_image.SetTimestamp(1);
+  encoded_image.SetRtpTimestamp(1);
   encoded_image.capture_time_ms_ = 2;
   encoded_image._frameType = VideoFrameType::kVideoFrameKey;
   encoded_image.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1));

@@ -466,7 +466,7 @@ TEST(RtpVideoSenderTest, DoesNotRetrasmitAckedPackets) {
       });
   EXPECT_EQ(EncodedImageCallback::Result::OK,
             test.router()->OnEncodedImage(encoded_image, nullptr).error);
-  encoded_image.SetTimestamp(2);
+  encoded_image.SetRtpTimestamp(2);
   encoded_image.capture_time_ms_ = 3;
   EXPECT_EQ(EncodedImageCallback::Result::OK,
             test.router()->OnEncodedImage(encoded_image, nullptr).error);

@@ -610,7 +610,7 @@ TEST(RtpVideoSenderTest, EarlyRetransmits) {
 
   const uint8_t kPayload[1] = {'a'};
   EncodedImage encoded_image;
-  encoded_image.SetTimestamp(1);
+  encoded_image.SetRtpTimestamp(1);
   encoded_image.capture_time_ms_ = 2;
   encoded_image._frameType = VideoFrameType::kVideoFrameKey;
   encoded_image.SetEncodedData(

@@ -717,7 +717,7 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) {
 
   const uint8_t kPayload[1] = {'a'};
   EncodedImage encoded_image;
-  encoded_image.SetTimestamp(1);
+  encoded_image.SetRtpTimestamp(1);
   encoded_image.capture_time_ms_ = 2;
   encoded_image.SetEncodedData(
       EncodedImageBuffer::Create(kPayload, sizeof(kPayload)));

@@ -823,7 +823,7 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptorForVp9) {
 
   const uint8_t kPayload[1] = {'a'};
   EncodedImage encoded_image;
-  encoded_image.SetTimestamp(1);
+  encoded_image.SetRtpTimestamp(1);
   encoded_image.capture_time_ms_ = 2;
   encoded_image._frameType = VideoFrameType::kVideoFrameKey;
   encoded_image.SetEncodedData(

@@ -879,7 +879,7 @@ TEST(RtpVideoSenderTest,
 
   const uint8_t kPayload[1] = {'a'};
   EncodedImage encoded_image;
-  encoded_image.SetTimestamp(1);
+  encoded_image.SetRtpTimestamp(1);
   encoded_image.capture_time_ms_ = 2;
   encoded_image._frameType = VideoFrameType::kVideoFrameKey;
   encoded_image._encodedWidth = 320;

@@ -901,7 +901,7 @@ TEST(RtpVideoSenderTest,
 
   // Send in 2nd picture.
   encoded_image._frameType = VideoFrameType::kVideoFrameDelta;
-  encoded_image.SetTimestamp(3000);
+  encoded_image.SetRtpTimestamp(3000);
   codec_specific.codecSpecific.VP9.inter_pic_predicted = true;
   codec_specific.codecSpecific.VP9.num_ref_pics = 1;
   codec_specific.codecSpecific.VP9.p_diff[0] = 1;

@@ -934,7 +934,7 @@ TEST(RtpVideoSenderTest, GenerateDependecyDescriptorForGenericCodecs) {
 
   const uint8_t kPayload[1] = {'a'};
   EncodedImage encoded_image;
-  encoded_image.SetTimestamp(1);
+  encoded_image.SetRtpTimestamp(1);
   encoded_image.capture_time_ms_ = 2;
   encoded_image._frameType = VideoFrameType::kVideoFrameKey;
   encoded_image._encodedWidth = 320;

@@ -952,7 +952,7 @@ TEST(RtpVideoSenderTest, GenerateDependecyDescriptorForGenericCodecs) {
 
   // Send in 2nd picture.
   encoded_image._frameType = VideoFrameType::kVideoFrameDelta;
-  encoded_image.SetTimestamp(3000);
+  encoded_image.SetRtpTimestamp(3000);
   EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error,
             EncodedImageCallback::Result::OK);
 

@@ -980,7 +980,7 @@ TEST(RtpVideoSenderTest, SupportsStoppingUsingDependencyDescriptor) {
 
   const uint8_t kPayload[1] = {'a'};
   EncodedImage encoded_image;
-  encoded_image.SetTimestamp(1);
+  encoded_image.SetRtpTimestamp(1);
   encoded_image.capture_time_ms_ = 2;
   encoded_image.SetEncodedData(
       EncodedImageBuffer::Create(kPayload, sizeof(kPayload)));

@@ -1099,7 +1099,7 @@ TEST(RtpVideoSenderTest, ClearsPendingPacketsOnInactivation) {
   const size_t kImageSizeBytes = 10000;
   constexpr uint8_t kPayload[kImageSizeBytes] = {'a'};
   EncodedImage encoded_image;
-  encoded_image.SetTimestamp(1);
+  encoded_image.SetRtpTimestamp(1);
   encoded_image.capture_time_ms_ = 2;
   encoded_image._frameType = VideoFrameType::kVideoFrameKey;
   encoded_image.SetEncodedData(

@@ -1138,7 +1138,7 @@ TEST(RtpVideoSenderTest, ClearsPendingPacketsOnInactivation) {
   EXPECT_TRUE(sent_packets.empty());
 
   // Send a new frame.
-  encoded_image.SetTimestamp(3);
+  encoded_image.SetRtpTimestamp(3);
   encoded_image.capture_time_ms_ = 4;
   EXPECT_EQ(test.router()
                 ->OnEncodedImage(encoded_image, /*codec_specific=*/nullptr)

@@ -1161,7 +1161,7 @@ TEST(RtpVideoSenderTest, RetransmitsBaseLayerOnly) {
   test.router()->SetRetransmissionMode(kRetransmitBaseLayer);
   constexpr uint8_t kPayload = 'a';
   EncodedImage encoded_image;
-  encoded_image.SetTimestamp(1);
+  encoded_image.SetRtpTimestamp(1);
   encoded_image.capture_time_ms_ = 2;
   encoded_image._frameType = VideoFrameType::kVideoFrameKey;
   encoded_image.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1));

@@ -1187,7 +1187,7 @@ TEST(RtpVideoSenderTest, RetransmitsBaseLayerOnly) {
   EXPECT_EQ(EncodedImageCallback::Result::OK,
             test.router()->OnEncodedImage(
                 encoded_image, &key_codec_info).error);
-  encoded_image.SetTimestamp(2);
+  encoded_image.SetRtpTimestamp(2);
   encoded_image.capture_time_ms_ = 3;
   encoded_image._frameType = VideoFrameType::kVideoFrameDelta;
   CodecSpecificInfo delta_codec_info;
@@ -464,7 +464,7 @@ class TestSimulcastEncoderAdapterFake : public ::testing::Test,
     last_encoded_image_height_ = encoded_image._encodedHeight;
     last_encoded_image_simulcast_index_ = encoded_image.SimulcastIndex();
 
-    return Result(Result::OK, encoded_image.Timestamp());
+    return Result(Result::OK, encoded_image.RtpTimestamp());
   }
 
   bool GetLastEncodedImageInfo(absl::optional<int>* out_width,

@@ -51,7 +51,7 @@ RtpFrameObject::RtpFrameObject(
   // VCMEncodedFrame members
   CopyCodecSpecific(&rtp_video_header_);
   _payloadType = payload_type;
-  SetTimestamp(rtp_timestamp);
+  SetRtpTimestamp(rtp_timestamp);
   ntp_time_ms_ = ntp_time_ms;
   _frameType = rtp_video_header_.frame_type;

@@ -47,9 +47,9 @@ class TransformableVideoReceiverFrame
 
   uint8_t GetPayloadType() const override { return frame_->PayloadType(); }
   uint32_t GetSsrc() const override { return Metadata().GetSsrc(); }
-  uint32_t GetTimestamp() const override { return frame_->Timestamp(); }
+  uint32_t GetTimestamp() const override { return frame_->RtpTimestamp(); }
   void SetRTPTimestamp(uint32_t timestamp) override {
-    frame_->SetTimestamp(timestamp);
+    frame_->SetRtpTimestamp(timestamp);
   }
 
   bool IsKeyFrame() const override {
@@ -181,9 +181,10 @@ int32_t Dav1dDecoder::Decode(const EncodedImage& encoded_image,
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
 
-  VideoFrame decoded_frame = VideoFrame::Builder()
+  VideoFrame decoded_frame =
+      VideoFrame::Builder()
           .set_video_frame_buffer(wrapped_buffer)
-          .set_timestamp_rtp(encoded_image.Timestamp())
+          .set_timestamp_rtp(encoded_image.RtpTimestamp())
          .set_ntp_time_ms(encoded_image.ntp_time_ms_)
          .set_color_space(encoded_image.ColorSpace())
          .build();

@@ -704,7 +704,7 @@ int32_t LibaomAv1Encoder::Encode(
     encoded_image._frameType = layer_frame->IsKeyframe()
                                    ? VideoFrameType::kVideoFrameKey
                                    : VideoFrameType::kVideoFrameDelta;
-    encoded_image.SetTimestamp(frame.timestamp());
+    encoded_image.SetRtpTimestamp(frame.timestamp());
     encoded_image.SetCaptureTimeIdentifier(frame.capture_time_identifier());
     encoded_image.capture_time_ms_ = frame.render_time_ms();
     encoded_image.rotation_ = frame.rotation();

@@ -612,7 +612,7 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
 
   VideoFrame decoded_frame = VideoFrame::Builder()
                                  .set_video_frame_buffer(cropped_buffer)
-                                 .set_timestamp_rtp(input_image.Timestamp())
+                                 .set_timestamp_rtp(input_image.RtpTimestamp())
                                  .set_color_space(color_space)
                                  .build();
 

@@ -533,7 +533,7 @@ int32_t H264EncoderImpl::Encode(
 
     encoded_images_[i]._encodedWidth = configurations_[i].width;
     encoded_images_[i]._encodedHeight = configurations_[i].height;
-    encoded_images_[i].SetTimestamp(input_frame.timestamp());
+    encoded_images_[i].SetRtpTimestamp(input_frame.timestamp());
     encoded_images_[i].SetColorSpace(input_frame.color_space());
     encoded_images_[i]._frameType = ConvertToVideoFrameType(info.eFrameType);
     encoded_images_[i].SetSimulcastIndex(configurations_[i].simulcast_idx);
@@ -129,20 +129,20 @@ int32_t MultiplexDecoderAdapter::Decode(const EncodedImage& input_image,
   MultiplexImage image = MultiplexEncodedImagePacker::Unpack(input_image);
 
   if (supports_augmenting_data_) {
-    RTC_DCHECK(decoded_augmenting_data_.find(input_image.Timestamp()) ==
+    RTC_DCHECK(decoded_augmenting_data_.find(input_image.RtpTimestamp()) ==
               decoded_augmenting_data_.end());
    decoded_augmenting_data_.emplace(
        std::piecewise_construct,
-        std::forward_as_tuple(input_image.Timestamp()),
+        std::forward_as_tuple(input_image.RtpTimestamp()),
        std::forward_as_tuple(std::move(image.augmenting_data),
                              image.augmenting_data_size));
  }
 
  if (image.component_count == 1) {
-    RTC_DCHECK(decoded_data_.find(input_image.Timestamp()) ==
+    RTC_DCHECK(decoded_data_.find(input_image.RtpTimestamp()) ==
               decoded_data_.end());
    decoded_data_.emplace(std::piecewise_construct,
-                          std::forward_as_tuple(input_image.Timestamp()),
+                          std::forward_as_tuple(input_image.RtpTimestamp()),
                          std::forward_as_tuple(kAXXStream));
  }
  int32_t rv = 0;

@@ -260,7 +260,7 @@ MultiplexImage MultiplexEncodedImagePacker::Unpack(
     image_component.codec_type = frame_headers[i].codec_type;
 
     EncodedImage encoded_image = combined_image;
-    encoded_image.SetTimestamp(combined_image.Timestamp());
+    encoded_image.SetRtpTimestamp(combined_image.RtpTimestamp());
     encoded_image._frameType = frame_headers[i].frame_type;
     encoded_image.SetEncodedData(EncodedImageBuffer::Create(
         combined_image.data() + frame_headers[i].bitstream_offset,

@@ -321,7 +321,7 @@ EncodedImageCallback::Result MultiplexEncoderAdapter::OnEncodedImage(
 
   MutexLock lock(&mutex_);
   const auto& stashed_image_itr =
-      stashed_images_.find(encodedImage.Timestamp());
+      stashed_images_.find(encodedImage.RtpTimestamp());
   const auto& stashed_image_next_itr = std::next(stashed_image_itr, 1);
   RTC_DCHECK(stashed_image_itr != stashed_images_.end());
   MultiplexImage& stashed_image = stashed_image_itr->second;
@@ -79,7 +79,7 @@ void VideoCodecAnalyzer::StartEncode(const VideoFrame& input_frame) {
 void VideoCodecAnalyzer::FinishEncode(const EncodedImage& frame) {
   int64_t encode_finished_us = rtc::TimeMicros();
 
-  task_queue_.PostTask([this, timestamp_rtp = frame.Timestamp(),
+  task_queue_.PostTask([this, timestamp_rtp = frame.RtpTimestamp(),
                         spatial_idx = frame.SpatialIndex().value_or(0),
                         temporal_idx = frame.TemporalIndex().value_or(0),
                         width = frame._encodedWidth,

@@ -114,7 +114,7 @@ void VideoCodecAnalyzer::FinishEncode(const EncodedImage& frame) {
 
 void VideoCodecAnalyzer::StartDecode(const EncodedImage& frame) {
   int64_t decode_start_us = rtc::TimeMicros();
-  task_queue_.PostTask([this, timestamp_rtp = frame.Timestamp(),
+  task_queue_.PostTask([this, timestamp_rtp = frame.RtpTimestamp(),
                         spatial_idx = frame.SpatialIndex().value_or(0),
                         frame_size_bytes = frame.size(), decode_start_us]() {
     RTC_DCHECK_RUN_ON(&sequence_checker_);

@@ -52,7 +52,7 @@ VideoFrame CreateVideoFrame(uint32_t timestamp_rtp,
 
 EncodedImage CreateEncodedImage(uint32_t timestamp_rtp, int spatial_idx = 0) {
   EncodedImage encoded_image;
-  encoded_image.SetTimestamp(timestamp_rtp);
+  encoded_image.SetRtpTimestamp(timestamp_rtp);
   encoded_image.SetSpatialIndex(spatial_idx);
   return encoded_image;
 }

@@ -255,7 +255,7 @@ class TestEncoder : public VideoCodecTester::Encoder,
   Result OnEncodedImage(const EncodedImage& encoded_image,
                         const CodecSpecificInfo* codec_specific_info) override {
     MutexLock lock(&mutex_);
-    auto cb = callbacks_.find(encoded_image.Timestamp());
+    auto cb = callbacks_.find(encoded_image.RtpTimestamp());
     RTC_CHECK(cb != callbacks_.end());
     cb->second(encoded_image);
 

@@ -352,7 +352,7 @@ class TestDecoder : public VideoCodecTester::Decoder,
   void Decode(const EncodedImage& frame, DecodeCallback callback) override {
     {
       MutexLock lock(&mutex_);
-      callbacks_[frame.Timestamp()] = std::move(callback);
+      callbacks_[frame.RtpTimestamp()] = std::move(callback);
     }
 
     decoder_->Decode(frame, /*render_time_ms=*/0);

@@ -255,7 +255,7 @@ class TesterDecoder {
 
   void Decode(const EncodedImage& input_frame) {
     Timestamp timestamp =
-        Timestamp::Micros((input_frame.Timestamp() / k90kHz).us());
+        Timestamp::Micros((input_frame.RtpTimestamp() / k90kHz).us());
 
     task_queue_.PostScheduledTask(
         [this, input_frame] {

@@ -64,7 +64,7 @@ VideoFrame CreateVideoFrame(uint32_t timestamp_rtp) {
 
 EncodedImage CreateEncodedImage(uint32_t timestamp_rtp) {
   EncodedImage encoded_image;
-  encoded_image.SetTimestamp(timestamp_rtp);
+  encoded_image.SetRtpTimestamp(timestamp_rtp);
   return encoded_image;
 }

@@ -387,7 +387,7 @@ void VideoProcessor::FrameEncoded(
   size_t temporal_idx = GetTemporalLayerIndex(codec_specific);
 
   FrameStatistics* frame_stat =
-      stats_->GetFrameWithTimestamp(encoded_image.Timestamp(), stream_idx);
+      stats_->GetFrameWithTimestamp(encoded_image.RtpTimestamp(), stream_idx);
   const size_t frame_number = frame_stat->frame_number;
 
   // Ensure that the encode order is monotonically increasing, within this

@@ -466,7 +466,7 @@ void VideoProcessor::FrameEncoded(
       if (!layer_dropped) {
         base_image = &merged_encoded_frames_[i];
         base_stat =
-            stats_->GetFrameWithTimestamp(encoded_image.Timestamp(), i);
+            stats_->GetFrameWithTimestamp(encoded_image.RtpTimestamp(), i);
       } else if (base_image && !base_stat->non_ref_for_inter_layer_pred) {
         DecodeFrame(*base_image, i);
       }

@@ -634,7 +634,7 @@ void VideoProcessor::DecodeFrame(const EncodedImage& encoded_image,
                                  size_t spatial_idx) {
   RTC_DCHECK_RUN_ON(&sequence_checker_);
   FrameStatistics* frame_stat =
-      stats_->GetFrameWithTimestamp(encoded_image.Timestamp(), spatial_idx);
+      stats_->GetFrameWithTimestamp(encoded_image.RtpTimestamp(), spatial_idx);
 
   frame_stat->decode_start_ns = rtc::TimeNanos();
   frame_stat->decode_return_code =

@@ -659,7 +659,7 @@ const webrtc::EncodedImage* VideoProcessor::BuildAndStoreSuperframe(
   for (int base_idx = static_cast<int>(spatial_idx) - 1; base_idx >= 0;
        --base_idx) {
     EncodedImage lower_layer = merged_encoded_frames_.at(base_idx);
-    if (lower_layer.Timestamp() == encoded_image.Timestamp()) {
+    if (lower_layer.RtpTimestamp() == encoded_image.RtpTimestamp()) {
       base_image = lower_layer;
       break;
     }
@@ -249,8 +249,8 @@ int LibvpxVp8Decoder::Decode(const EncodedImage& input_image,
   vpx_codec_err_t vpx_ret =
       vpx_codec_control(decoder_, VPXD_GET_LAST_QUANTIZER, &qp);
   RTC_DCHECK_EQ(vpx_ret, VPX_CODEC_OK);
-  int ret =
-      ReturnFrame(img, input_image.Timestamp(), qp, input_image.ColorSpace());
+  int ret = ReturnFrame(img, input_image.RtpTimestamp(), qp,
+                        input_image.ColorSpace());
   if (ret != 0) {
     return ret;
   }

@@ -1179,7 +1179,7 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image,
        break;
      }
    }
-    encoded_images_[encoder_idx].SetTimestamp(input_image.timestamp());
+    encoded_images_[encoder_idx].SetRtpTimestamp(input_image.timestamp());
    encoded_images_[encoder_idx].SetCaptureTimeIdentifier(
        input_image.capture_time_identifier());
    encoded_images_[encoder_idx].SetColorSpace(input_image.color_space());

@@ -259,7 +259,7 @@ TEST_F(TestVp8Impl, OnEncodedImageReportsInfo) {
   CodecSpecificInfo codec_specific_info;
   EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info);
 
-  EXPECT_EQ(kInitialTimestampRtp, encoded_frame.Timestamp());
+  EXPECT_EQ(kInitialTimestampRtp, encoded_frame.RtpTimestamp());
   EXPECT_EQ(kWidth, static_cast<int>(encoded_frame._encodedWidth));
   EXPECT_EQ(kHeight, static_cast<int>(encoded_frame._encodedHeight));
   ASSERT_TRUE(encoded_frame.CaptureTimeIdentifier().has_value());

@@ -246,8 +246,8 @@ int LibvpxVp9Decoder::Decode(const EncodedImage& input_image,
   vpx_codec_err_t vpx_ret =
       vpx_codec_control(decoder_, VPXD_GET_LAST_QUANTIZER, &qp);
   RTC_DCHECK_EQ(vpx_ret, VPX_CODEC_OK);
-  int ret =
-      ReturnFrame(img, input_image.Timestamp(), qp, input_image.ColorSpace());
+  int ret = ReturnFrame(img, input_image.RtpTimestamp(), qp,
+                        input_image.ColorSpace());
   if (ret != 0) {
     return ret;
   }

@@ -1733,7 +1733,7 @@ void LibvpxVp9Encoder::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
   UpdateReferenceBuffers(*pkt, pics_since_key_);
 
   TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_.size());
-  encoded_image_.SetTimestamp(input_image_->timestamp());
+  encoded_image_.SetRtpTimestamp(input_image_->timestamp());
   encoded_image_.SetCaptureTimeIdentifier(
       input_image_->capture_time_identifier());
   encoded_image_.SetColorSpace(input_image_->color_space());

@@ -1768,7 +1768,7 @@ void LibvpxVp9Encoder::DeliverBufferedFrame(bool end_of_picture) {
   if (codec_.mode == VideoCodecMode::kScreensharing) {
     const uint8_t spatial_idx = encoded_image_.SpatialIndex().value_or(0);
     const uint32_t frame_timestamp_ms =
-        1000 * encoded_image_.Timestamp() / kVideoPayloadTypeFrequency;
+        1000 * encoded_image_.RtpTimestamp() / kVideoPayloadTypeFrequency;
     framerate_controller_[spatial_idx].AddFrame(frame_timestamp_ms);
 
     const size_t steady_state_size = SteadyStateSize(
@@ -58,7 +58,7 @@ bool VCMDecodingState::IsOldFrame(const VCMFrameBuffer* frame) const {
   RTC_DCHECK(frame);
   if (in_initial_state_)
     return false;
-  return !IsNewerTimestamp(frame->Timestamp(), time_stamp_);
+  return !IsNewerTimestamp(frame->RtpTimestamp(), time_stamp_);
 }
 
 bool VCMDecodingState::IsOldPacket(const VCMPacket* packet) const {

@@ -74,7 +74,7 @@ void VCMDecodingState::SetState(const VCMFrameBuffer* frame) {
   if (!UsingFlexibleMode(frame))
     UpdateSyncState(frame);
   sequence_num_ = static_cast<uint16_t>(frame->GetHighSeqNum());
-  time_stamp_ = frame->Timestamp();
+  time_stamp_ = frame->RtpTimestamp();
   picture_id_ = frame->PictureId();
   temporal_id_ = frame->TemporalId();
   tl0_pic_id_ = frame->Tl0PicId();

@@ -144,7 +144,7 @@ bool VCMDecodingState::UpdateEmptyFrame(const VCMFrameBuffer* frame) {
     // Continuous empty packets or continuous frames can be dropped if we
     // advance the sequence number.
     sequence_num_ = frame->GetHighSeqNum();
-    time_stamp_ = frame->Timestamp();
+    time_stamp_ = frame->RtpTimestamp();
     return true;
   }
   return false;

@@ -81,7 +81,7 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(const VCMPacket& packet,
   if (kStateEmpty == _state) {
     // First packet (empty and/or media) inserted into this frame.
     // store some info and set some initial values.
-    SetTimestamp(packet.timestamp);
+    SetRtpTimestamp(packet.timestamp);
     // We only take the ntp timestamp of the first packet of a frame.
     ntp_time_ms_ = packet.ntp_time_ms_;
     _codec = packet.codec();

@@ -38,7 +38,7 @@ bool HasNonEmptyState(FrameListPair pair) {
 }
 
 void FrameList::InsertFrame(VCMFrameBuffer* frame) {
-  insert(rbegin().base(), FrameListPair(frame->Timestamp(), frame));
+  insert(rbegin().base(), FrameListPair(frame->RtpTimestamp(), frame));
 }
 
 VCMFrameBuffer* FrameList::PopFrame(uint32_t timestamp) {

@@ -286,7 +286,7 @@ VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) {
       // Wait for this one to get complete.
       waiting_for_completion_.frame_size = frame->size();
       waiting_for_completion_.latest_packet_time = frame->LatestPacketTimeMs();
-      waiting_for_completion_.timestamp = frame->Timestamp();
+      waiting_for_completion_.timestamp = frame->RtpTimestamp();
     }
   }
 

@@ -521,7 +521,8 @@ bool VCMJitterBuffer::IsContinuous(const VCMFrameBuffer& frame) const {
   for (FrameList::const_iterator it = decodable_frames_.begin();
        it != decodable_frames_.end(); ++it) {
     VCMFrameBuffer* decodable_frame = it->second;
-    if (IsNewerTimestamp(decodable_frame->Timestamp(), frame.Timestamp())) {
+    if (IsNewerTimestamp(decodable_frame->RtpTimestamp(),
+                         frame.RtpTimestamp())) {
       break;
     }
     decoding_state.SetState(decodable_frame);

@@ -555,7 +556,7 @@ void VCMJitterBuffer::FindAndInsertContinuousFramesWithState(
        it != incomplete_frames_.end();) {
     VCMFrameBuffer* frame = it->second;
     if (IsNewerTimestamp(original_decoded_state.time_stamp(),
-                         frame->Timestamp())) {
+                         frame->RtpTimestamp())) {
       ++it;
       continue;
     }

@@ -592,11 +593,11 @@ int VCMJitterBuffer::NonContinuousOrIncompleteDuration() {
   if (incomplete_frames_.empty()) {
     return 0;
   }
-  uint32_t start_timestamp = incomplete_frames_.Front()->Timestamp();
+  uint32_t start_timestamp = incomplete_frames_.Front()->RtpTimestamp();
   if (!decodable_frames_.empty()) {
-    start_timestamp = decodable_frames_.Back()->Timestamp();
+    start_timestamp = decodable_frames_.Back()->RtpTimestamp();
   }
-  return incomplete_frames_.Back()->Timestamp() - start_timestamp;
+  return incomplete_frames_.Back()->RtpTimestamp() - start_timestamp;
 }
 
 uint16_t VCMJitterBuffer::EstimatedLowSequenceNumber(

@@ -861,7 +862,7 @@ void VCMJitterBuffer::UpdateJitterEstimate(const VCMFrameBuffer& frame,
   }
   // No retransmitted frames should be a part of the jitter
   // estimate.
-  UpdateJitterEstimate(frame.LatestPacketTimeMs(), frame.Timestamp(),
+  UpdateJitterEstimate(frame.LatestPacketTimeMs(), frame.RtpTimestamp(),
                        frame.size(), incomplete_frame);
 }
 

@@ -70,7 +70,7 @@ class TestBasicJitterBuffer : public ::testing::Test {
     VCMEncodedFrame* found_frame = jitter_buffer_->NextCompleteFrame(10);
     if (!found_frame)
       return nullptr;
-    return jitter_buffer_->ExtractAndSetDecode(found_frame->Timestamp());
+    return jitter_buffer_->ExtractAndSetDecode(found_frame->RtpTimestamp());
   }
 
   void CheckOutFrame(VCMEncodedFrame* frame_out,

@@ -203,7 +203,7 @@ class TestRunningJitterBuffer : public ::testing::Test {
      return false;
 
    VCMEncodedFrame* frame =
-        jitter_buffer_->ExtractAndSetDecode(found_frame->Timestamp());
+        jitter_buffer_->ExtractAndSetDecode(found_frame->RtpTimestamp());
    bool ret = (frame != NULL);
    jitter_buffer_->ReleaseFrame(frame);
    return ret;

@@ -691,12 +691,12 @@ TEST_F(TestBasicJitterBuffer, TestSkipForwardVp9) {
   EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
 
   VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-  EXPECT_EQ(1000U, frame_out->Timestamp());
+  EXPECT_EQ(1000U, frame_out->RtpTimestamp());
   EXPECT_EQ(VideoFrameType::kVideoFrameKey, frame_out->FrameType());
   jitter_buffer_->ReleaseFrame(frame_out);
 
   frame_out = DecodeCompleteFrame();
-  EXPECT_EQ(13000U, frame_out->Timestamp());
+  EXPECT_EQ(13000U, frame_out->RtpTimestamp());
   EXPECT_EQ(VideoFrameType::kVideoFrameDelta, frame_out->FrameType());
   jitter_buffer_->ReleaseFrame(frame_out);
 }

@@ -755,7 +755,7 @@ TEST_F(TestBasicJitterBuffer, ReorderedVp9SsData_3TlLayers) {
   EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
 
   VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-  EXPECT_EQ(3000U, frame_out->Timestamp());
+  EXPECT_EQ(3000U, frame_out->RtpTimestamp());
   EXPECT_EQ(VideoFrameType::kVideoFrameKey, frame_out->FrameType());
   EXPECT_EQ(0, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx);
   EXPECT_FALSE(

@@ -763,14 +763,14 @@ TEST_F(TestBasicJitterBuffer, ReorderedVp9SsData_3TlLayers) {
   jitter_buffer_->ReleaseFrame(frame_out);
 
   frame_out = DecodeCompleteFrame();
-  EXPECT_EQ(6000U, frame_out->Timestamp());
+  EXPECT_EQ(6000U, frame_out->RtpTimestamp());
   EXPECT_EQ(VideoFrameType::kVideoFrameDelta, frame_out->FrameType());
   EXPECT_EQ(2, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx);
   EXPECT_TRUE(frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch);
   jitter_buffer_->ReleaseFrame(frame_out);
 
   frame_out = DecodeCompleteFrame();
-  EXPECT_EQ(9000U, frame_out->Timestamp());
+  EXPECT_EQ(9000U, frame_out->RtpTimestamp());
   EXPECT_EQ(VideoFrameType::kVideoFrameDelta, frame_out->FrameType());
   EXPECT_EQ(1, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx);
   EXPECT_TRUE(frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch);

@@ -848,7 +848,7 @@ TEST_F(TestBasicJitterBuffer, ReorderedVp9SsData_2Tl2SLayers) {
   EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
 
   VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-  EXPECT_EQ(3000U, frame_out->Timestamp());
+  EXPECT_EQ(3000U, frame_out->RtpTimestamp());
   EXPECT_EQ(VideoFrameType::kVideoFrameKey, frame_out->FrameType());
   EXPECT_EQ(0, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx);
   EXPECT_FALSE(

@@ -856,7 +856,7 @@ TEST_F(TestBasicJitterBuffer, ReorderedVp9SsData_2Tl2SLayers) {
   jitter_buffer_->ReleaseFrame(frame_out);
 
   frame_out = DecodeCompleteFrame();
-  EXPECT_EQ(6000U, frame_out->Timestamp());
+  EXPECT_EQ(6000U, frame_out->RtpTimestamp());
   EXPECT_EQ(VideoFrameType::kVideoFrameDelta, frame_out->FrameType());
   EXPECT_EQ(1, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx);
   EXPECT_TRUE(frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch);

@@ -1089,7 +1089,7 @@ TEST_F(TestBasicJitterBuffer, TestInsertOldFrame) {
             jitter_buffer_->InsertPacket(*packet_, &retransmitted));
 
   VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-  EXPECT_EQ(3000u, frame_out->Timestamp());
+  EXPECT_EQ(3000u, frame_out->RtpTimestamp());
   CheckOutFrame(frame_out, size_, false);
   EXPECT_EQ(VideoFrameType::kVideoFrameKey, frame_out->FrameType());
   jitter_buffer_->ReleaseFrame(frame_out);

@@ -1124,7 +1124,7 @@ TEST_F(TestBasicJitterBuffer, TestInsertOldFrameWithSeqNumWrap) {
             jitter_buffer_->InsertPacket(*packet_, &retransmitted));
 
   VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-  EXPECT_EQ(timestamp_, frame_out->Timestamp());
+  EXPECT_EQ(timestamp_, frame_out->RtpTimestamp());
 
   CheckOutFrame(frame_out, size_, false);
 

@@ -1234,13 +1234,13 @@ TEST_F(TestBasicJitterBuffer, 2FrameWithTimestampWrap) {
             jitter_buffer_->InsertPacket(*packet_, &retransmitted));
 
   VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-  EXPECT_EQ(0xffffff00, frame_out->Timestamp());
+  EXPECT_EQ(0xffffff00, frame_out->RtpTimestamp());
   CheckOutFrame(frame_out, size_, false);
   EXPECT_EQ(VideoFrameType::kVideoFrameKey, frame_out->FrameType());
   jitter_buffer_->ReleaseFrame(frame_out);
 
   VCMEncodedFrame* frame_out2 = DecodeCompleteFrame();
-  EXPECT_EQ(2700u, frame_out2->Timestamp());
+  EXPECT_EQ(2700u, frame_out2->RtpTimestamp());
   CheckOutFrame(frame_out2, size_, false);
   EXPECT_EQ(VideoFrameType::kVideoFrameDelta, frame_out2->FrameType());
   jitter_buffer_->ReleaseFrame(frame_out2);

@@ -1277,13 +1277,13 @@ TEST_F(TestBasicJitterBuffer, Insert2FramesReOrderedWithTimestampWrap) {
             jitter_buffer_->InsertPacket(*packet_, &retransmitted));
 
   VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-  EXPECT_EQ(0xffffff00, frame_out->Timestamp());
+  EXPECT_EQ(0xffffff00, frame_out->RtpTimestamp());
   CheckOutFrame(frame_out, size_, false);
   EXPECT_EQ(VideoFrameType::kVideoFrameKey, frame_out->FrameType());
   jitter_buffer_->ReleaseFrame(frame_out);
 
   VCMEncodedFrame* frame_out2 = DecodeCompleteFrame();
-  EXPECT_EQ(2700u, frame_out2->Timestamp());
+  EXPECT_EQ(2700u, frame_out2->RtpTimestamp());
   CheckOutFrame(frame_out2, size_, false);
   EXPECT_EQ(VideoFrameType::kVideoFrameDelta, frame_out2->FrameType());
   jitter_buffer_->ReleaseFrame(frame_out2);

@@ -1377,7 +1377,7 @@ TEST_F(TestBasicJitterBuffer, ExceedNumOfFrameWithSeqNumWrap) {
             jitter_buffer_->InsertPacket(*packet_, &retransmitted));
 
   VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-  EXPECT_EQ(first_key_frame_timestamp, frame_out->Timestamp());
+  EXPECT_EQ(first_key_frame_timestamp, frame_out->RtpTimestamp());
   CheckOutFrame(frame_out, size_, false);
   EXPECT_EQ(VideoFrameType::kVideoFrameKey, frame_out->FrameType());
   jitter_buffer_->ReleaseFrame(frame_out);
 

@@ -88,7 +88,7 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms,
   if (found_frame == nullptr) {
     return nullptr;
   }
-  uint32_t frame_timestamp = found_frame->Timestamp();
+  uint32_t frame_timestamp = found_frame->RtpTimestamp();
 
   if (absl::optional<VideoPlayoutDelay> playout_delay =
           found_frame->EncodedImage().PlayoutDelay()) {

@@ -161,8 +161,8 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms,
     return NULL;
   }
   frame->SetRenderTime(render_time_ms);
-  TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", frame->Timestamp(), "SetRenderTS",
-                          "render_time", frame->RenderTimeMs());
+  TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", frame->RtpTimestamp(),
+                          "SetRenderTS", "render_time", frame->RenderTimeMs());
   return frame;
 }
 
@@ -36,7 +36,7 @@ VCMEncodedFrame::~VCMEncodedFrame() {
 }
 
 void VCMEncodedFrame::Reset() {
-  SetTimestamp(0);
+  SetRtpTimestamp(0);
   SetSpatialIndex(absl::nullopt);
   _renderTimeMs = -1;
   _payloadType = 0;

@@ -293,9 +293,9 @@ int32_t VCMGenericDecoder::Decode(const EncodedImage& frame,
                                   Timestamp now,
                                   int64_t render_time_ms) {
   TRACE_EVENT1("webrtc", "VCMGenericDecoder::Decode", "timestamp",
-               frame.Timestamp());
+               frame.RtpTimestamp());
   FrameInfo frame_info;
-  frame_info.rtp_timestamp = frame.Timestamp();
+  frame_info.rtp_timestamp = frame.RtpTimestamp();
   frame_info.decode_start = now;
   frame_info.render_time =
       render_time_ms >= 0

@@ -335,7 +335,7 @@ int32_t VCMGenericDecoder::Decode(const EncodedImage& frame,
            ? absl::make_optional(frame_info.packet_infos[0].ssrc())
            : absl::nullopt;
    RTC_LOG(LS_WARNING) << "Failed to decode frame with timestamp "
-                        << frame.Timestamp() << ", ssrc "
+                        << frame.RtpTimestamp() << ", ssrc "
                        << (ssrc ? rtc::ToString(*ssrc) : "<not set>")
                        << ", error code: " << ret;
    _callback->ClearTimestampMap();
@@ -157,7 +157,7 @@ absl::optional<EncodedImage> IvfFileReader::NextFrame() {
 
   EncodedImage image;
   image.capture_time_ms_ = current_timestamp;
-  image.SetTimestamp(
+  image.SetRtpTimestamp(
       static_cast<uint32_t>(current_timestamp * kRtpClockRateHz / time_scale_));
   image.SetEncodedData(payload);
   image.SetSpatialIndex(static_cast<int>(layer_sizes.size()) - 1);

@@ -58,7 +58,7 @@ class IvfFileReaderTest : public ::testing::Test {
       if (use_capture_tims_ms) {
         frame.capture_time_ms_ = i;
       } else {
-        frame.SetTimestamp(i);
+        frame.SetRtpTimestamp(i);
       }
       if (!file_writer->WriteFrame(frame, codec_type))
         return false;

@@ -86,9 +86,9 @@ class IvfFileReaderTest : public ::testing::Test {
     EXPECT_EQ(frame->SpatialIndex(), spatial_layers_count - 1);
     if (use_capture_tims_ms) {
       EXPECT_EQ(frame->capture_time_ms_, static_cast<int64_t>(frame_index));
-      EXPECT_EQ(frame->Timestamp(), static_cast<int64_t>(90 * frame_index));
+      EXPECT_EQ(frame->RtpTimestamp(), static_cast<int64_t>(90 * frame_index));
     } else {
-      EXPECT_EQ(frame->Timestamp(), static_cast<int64_t>(frame_index));
+      EXPECT_EQ(frame->RtpTimestamp(), static_cast<int64_t>(frame_index));
     }
     ASSERT_EQ(frame->size(), sizeof(kDummyPayload) * spatial_layers_count);
     for (int i = 0; i < spatial_layers_count; ++i) {

@@ -141,7 +141,7 @@ bool IvfFileWriter::InitFromFirstFrame(const EncodedImage& encoded_image,
     height_ = encoded_image._encodedHeight;
   }
 
-  using_capture_timestamps_ = encoded_image.Timestamp() == 0;
+  using_capture_timestamps_ = encoded_image.RtpTimestamp() == 0;
 
   codec_type_ = codec_type;
 

@@ -168,7 +168,7 @@ bool IvfFileWriter::WriteFrame(const EncodedImage& encoded_image,
 
   int64_t timestamp = using_capture_timestamps_
                           ? encoded_image.capture_time_ms_
-                          : wrap_handler_.Unwrap(encoded_image.Timestamp());
+                          : wrap_handler_.Unwrap(encoded_image.RtpTimestamp());
   if (last_timestamp_ != -1 && timestamp < last_timestamp_) {
     RTC_LOG(LS_WARNING) << "Timestamp not increasing: " << last_timestamp_
                         << " -> " << timestamp;

@@ -54,7 +54,7 @@ class IvfFileWriterTest : public ::testing::Test {
       if (use_capture_tims_ms) {
        frame.capture_time_ms_ = i;
      } else {
-        frame.SetTimestamp(i);
+        frame.SetRtpTimestamp(i);
      }
      if (!file_writer_->WriteFrame(frame, codec_type))
        return false;
@@ -101,7 +101,7 @@ class SimulcastTestFixtureImpl::TestEncodedImageCallback
       temporal_layer_[encoded_image.SimulcastIndex().value_or(0)] =
           codec_specific_info->codecSpecific.H264.temporal_idx;
     }
-    return Result(Result::OK, encoded_image.Timestamp());
+    return Result(Result::OK, encoded_image.RtpTimestamp());
   }
   // This method only makes sense for VP8.
   void GetLastEncodedFrameInfo(int* temporal_layer,

@@ -95,7 +95,7 @@ std::string ToString(const EncodedImage& encoded_image) {
 
   ss << VideoFrameTypeToString(encoded_image._frameType)
      << ", size=" << encoded_image.size() << ", qp=" << encoded_image.qp_
-     << ", timestamp=" << encoded_image.Timestamp();
+     << ", timestamp=" << encoded_image.RtpTimestamp();
 
   if (encoded_image.SimulcastIndex()) {
     ss << ", SimulcastIndex=" << *encoded_image.SimulcastIndex();

@@ -53,7 +53,7 @@ int32_t ConfigurableFrameSizeEncoder::Encode(
   encodedImage._encodedHeight = inputImage.height();
   encodedImage._encodedWidth = inputImage.width();
   encodedImage._frameType = VideoFrameType::kVideoFrameKey;
-  encodedImage.SetTimestamp(inputImage.timestamp());
+  encodedImage.SetRtpTimestamp(inputImage.timestamp());
   encodedImage.capture_time_ms_ = inputImage.render_time_ms();
   CodecSpecificInfo specific{};
   specific.codecType = codec_type_;
@@ -54,7 +54,7 @@ int32_t FakeDecoder::Decode(const EncodedImage& input,
           .set_rotation(webrtc::kVideoRotation_0)
           .set_timestamp_ms(render_time_ms)
           .build();
-  frame.set_timestamp(input.Timestamp());
+  frame.set_timestamp(input.RtpTimestamp());
   frame.set_ntp_time_ms(input.ntp_time_ms_);
 
   if (decode_delay_ms_ == 0 || !task_queue_) {

@@ -18,8 +18,9 @@ namespace webrtc {
 
 void PrintTo(const EncodedFrame& frame,
              std::ostream* os) /* no-presubmit-check TODO(webrtc:8982) */ {
-  *os << "EncodedFrame with id=" << frame.Id() << " rtp=" << frame.Timestamp()
-      << " size=" << frame.size() << " refs=[";
+  *os << "EncodedFrame with id=" << frame.Id()
+      << " rtp=" << frame.RtpTimestamp() << " size=" << frame.size()
+      << " refs=[";
   for (size_t ref = 0; ref < frame.num_references; ++ref) {
     *os << frame.references[ref] << ",";
   }

@@ -94,7 +95,7 @@ std::unique_ptr<FakeEncodedFrame> FakeFrameBuilder::Build() {
   frame->SetEncodedData(EncodedImageBuffer::Create(size_));
 
   if (rtp_timestamp_)
-    frame->SetTimestamp(*rtp_timestamp_);
+    frame->SetRtpTimestamp(*rtp_timestamp_);
   if (frame_id_)
     frame->SetId(*frame_id_);
   if (playout_delay_)

@@ -51,7 +51,7 @@ MATCHER_P(FrameWithSize, id, "") {
 }
 
 MATCHER_P(RtpTimestamp, ts, "") {
-  return ts == arg.Timestamp();
+  return ts == arg.RtpTimestamp();
 }
 
 class FakeFrameBuilder {

@@ -141,7 +141,7 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image,
     EncodedImage encoded;
     encoded.SetEncodedData(buffer);
 
-    encoded.SetTimestamp(input_image.timestamp());
+    encoded.SetRtpTimestamp(input_image.timestamp());
     encoded._frameType = frame_info.keyframe ? VideoFrameType::kVideoFrameKey
                                              : VideoFrameType::kVideoFrameDelta;
     encoded._encodedWidth = simulcast_streams[i].width;

@@ -57,7 +57,7 @@ int32_t FakeVp8Decoder::Decode(const EncodedImage& input,
           .set_rotation(webrtc::kVideoRotation_0)
          .set_timestamp_ms(render_time_ms)
          .build();
-  frame.set_timestamp(input.Timestamp());
+  frame.set_timestamp(input.RtpTimestamp());
   frame.set_ntp_time_ms(input.ntp_time_ms_);
 
   callback_->Decoded(frame, /*decode_time_ms=*/absl::nullopt,

@@ -96,10 +96,10 @@ CodecSpecificInfo FakeVp8Encoder::EncodeHook(
   RTC_DCHECK_RUN_ON(&sequence_checker_);
   uint8_t simulcast_index = encoded_image.SimulcastIndex().value_or(0);
   frame_buffer_controller_->NextFrameConfig(simulcast_index,
-                                            encoded_image.Timestamp());
+                                            encoded_image.RtpTimestamp());
   CodecSpecificInfo codec_specific =
       PopulateCodecSpecific(encoded_image.size(), encoded_image._frameType,
-                            simulcast_index, encoded_image.Timestamp());
+                            simulcast_index, encoded_image.RtpTimestamp());
 
   // Write width and height to the payload the same way as the real encoder
   // does.

@@ -64,7 +64,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
     }
     case 5: {
       auto frame = std::make_unique<FuzzyFrameObject>();
-      frame->SetTimestamp(helper.ReadOrDefaultValue<uint32_t>(0));
+      frame->SetRtpTimestamp(helper.ReadOrDefaultValue<uint32_t>(0));
       int64_t wire_id =
           helper.ReadOrDefaultValue<uint16_t>(0) & (kFrameIdLength - 1);
       frame->SetId(unwrapper.Unwrap(wire_id));
@@ -75,18 +75,19 @@ int32_t QualityAnalyzingVideoDecoder::Decode(const EncodedImage& input_image,
     //
     // For more details see QualityAnalyzingVideoEncoder.
     return analyzing_callback_->IrrelevantSimulcastStreamDecoded(
-        out.id.value_or(VideoFrame::kNotSetId), input_image.Timestamp());
+        out.id.value_or(VideoFrame::kNotSetId), input_image.RtpTimestamp());
   }
 
   EncodedImage* origin_image;
   {
     MutexLock lock(&mutex_);
     // Store id to be able to retrieve it in analyzing callback.
-    timestamp_to_frame_id_.insert({input_image.Timestamp(), out.id});
+    timestamp_to_frame_id_.insert({input_image.RtpTimestamp(), out.id});
     // Store encoded image to prevent its destruction while it is used in
     // decoder.
-    origin_image = &(
-        decoding_images_.insert({input_image.Timestamp(), std::move(out.image)})
+    origin_image =
+        &(decoding_images_
+              .insert({input_image.RtpTimestamp(), std::move(out.image)})
             .first->second);
   }
   // We can safely dereference `origin_image`, because it can be removed from

@@ -101,8 +102,8 @@ int32_t QualityAnalyzingVideoDecoder::Decode(const EncodedImage& input_image,
     VideoQualityAnalyzerInterface::DecoderStats stats;
     {
       MutexLock lock(&mutex_);
-      timestamp_to_frame_id_.erase(input_image.Timestamp());
-      decoding_images_.erase(input_image.Timestamp());
+      timestamp_to_frame_id_.erase(input_image.RtpTimestamp());
+      decoding_images_.erase(input_image.RtpTimestamp());
       stats.decoder_name = codec_name_;
     }
     analyzer_->OnDecoderError(
@@ -49,7 +49,7 @@ TEST(SingleProcessEncodedImageDataInjectorTest, InjectExtractDiscardFalse) {
 
   EncodedImage source =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
-  source.SetTimestamp(123456789);
+  source.SetRtpTimestamp(123456789);
 
   EncodedImageExtractionResult out =
       injector.ExtractData(injector.InjectData(512, false, source));

@@ -68,7 +68,7 @@ TEST(SingleProcessEncodedImageDataInjectorTest, InjectExtractDiscardTrue) {
 
   EncodedImage source =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
-  source.SetTimestamp(123456789);
+  source.SetRtpTimestamp(123456789);
 
   EncodedImageExtractionResult out =
       injector.ExtractData(injector.InjectData(512, true, source));

@@ -85,7 +85,7 @@ TEST(SingleProcessEncodedImageDataInjectorTest,
 
   EncodedImage source =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
-  source.SetTimestamp(123456789);
+  source.SetRtpTimestamp(123456789);
 
   EncodedImage intermediate = injector.InjectData(512, false, source);
   intermediate.SetSpatialIndex(2);

@@ -110,7 +110,7 @@ TEST(SingleProcessEncodedImageDataInjectorTest,
 
   EncodedImage source =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
-  source.SetTimestamp(123456789);
+  source.SetRtpTimestamp(123456789);
 
   EncodedImage intermediate = injector.InjectData(512, false, source);
   intermediate.SetSpatialIndex(2);

@@ -138,15 +138,15 @@ TEST(SingleProcessEncodedImageDataInjectorTest, Inject3Extract3) {
   // 1st frame
   EncodedImage source1 =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
-  source1.SetTimestamp(123456710);
+  source1.SetRtpTimestamp(123456710);
   // 2nd frame 1st spatial layer
   EncodedImage source2 =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/11);
-  source2.SetTimestamp(123456720);
+  source2.SetRtpTimestamp(123456720);
   // 2nd frame 2nd spatial layer
   EncodedImage source3 =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/21);
-  source3.SetTimestamp(123456720);
+  source3.SetRtpTimestamp(123456720);
 
   EncodedImage intermediate1 = injector.InjectData(510, false, source1);
   EncodedImage intermediate2 = injector.InjectData(520, true, source2);

@@ -183,13 +183,13 @@ TEST(SingleProcessEncodedImageDataInjectorTest, InjectExtractFromConcatenated) {
 
   EncodedImage source1 =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
-  source1.SetTimestamp(123456710);
+  source1.SetRtpTimestamp(123456710);
   EncodedImage source2 =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/11);
-  source2.SetTimestamp(123456710);
+  source2.SetRtpTimestamp(123456710);
   EncodedImage source3 =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/21);
-  source3.SetTimestamp(123456710);
+  source3.SetRtpTimestamp(123456710);
 
   // Inject id into 3 images with same frame id.
   EncodedImage intermediate1 = injector.InjectData(512, false, source1);

@@ -235,13 +235,13 @@ TEST(SingleProcessEncodedImageDataInjector,
 
   EncodedImage source1 =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
-  source1.SetTimestamp(123456710);
+  source1.SetRtpTimestamp(123456710);
   EncodedImage source2 =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/11);
-  source2.SetTimestamp(123456710);
+  source2.SetRtpTimestamp(123456710);
   EncodedImage source3 =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/21);
-  source3.SetTimestamp(123456710);
+  source3.SetRtpTimestamp(123456710);
 
   // Inject id into 3 images with same frame id.
   EncodedImage intermediate1 = injector.InjectData(512, true, source1);

@@ -282,7 +282,7 @@ TEST(SingleProcessEncodedImageDataInjectorTest, InjectOnceExtractTwice) {
 
   EncodedImage source =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
-  source.SetTimestamp(123456789);
+  source.SetRtpTimestamp(123456789);
 
   EncodedImageExtractionResult out = injector.ExtractData(
       injector.InjectData(/*id=*/512, /*discard=*/false, source));

@@ -310,7 +310,7 @@ TEST(SingleProcessEncodedImageDataInjectorTest, Add1stReceiverAfterStart) {
 
   EncodedImage source =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
-  source.SetTimestamp(123456789);
+  source.SetRtpTimestamp(123456789);
   EncodedImage modified_image = injector.InjectData(
       /*id=*/512, /*discard=*/false, source);
 

@@ -332,7 +332,7 @@ TEST(SingleProcessEncodedImageDataInjectorTest, Add3rdReceiverAfterStart) {
 
   EncodedImage source =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
-  source.SetTimestamp(123456789);
+  source.SetRtpTimestamp(123456789);
   EncodedImage modified_image = injector.InjectData(
       /*id=*/512, /*discard=*/false, source);
   injector.ExtractData(modified_image);

@@ -357,10 +357,10 @@ TEST(SingleProcessEncodedImageDataInjectorTest,
 
   EncodedImage source1 =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
-  source1.SetTimestamp(10);
+  source1.SetRtpTimestamp(10);
   EncodedImage source2 =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
-  source2.SetTimestamp(20);
+  source2.SetRtpTimestamp(20);
 
   EncodedImage modified_image1 = injector.InjectData(
       /*id=*/512, /*discard=*/false, source1);

@@ -399,7 +399,7 @@ TEST(SingleProcessEncodedImageDataInjectorTestDeathTest,
 
   EncodedImage source =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
-  source.SetTimestamp(123456789);
+  source.SetRtpTimestamp(123456789);
 
   EncodedImage modified =
       injector.InjectData(/*id=*/512, /*discard=*/false, source);

@@ -417,10 +417,10 @@ TEST(SingleProcessEncodedImageDataInjectorTestDeathTest,
 
   EncodedImage source1 =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
-  source1.SetTimestamp(10);
+  source1.SetRtpTimestamp(10);
   EncodedImage source2 =
       CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
-  source2.SetTimestamp(20);
+  source2.SetRtpTimestamp(20);
 
   EncodedImage modified_image1 = injector.InjectData(
       /*id=*/512, /*discard=*/false, source1);
 
@ -507,7 +507,7 @@ void VideoStreamEncoderResourceManager::OnEncodeCompleted(
|
||||
DataSize frame_size) {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue_);
|
||||
// Inform `encode_usage_resource_` of the encode completed event.
|
||||
uint32_t timestamp = encoded_image.Timestamp();
|
||||
uint32_t timestamp = encoded_image.RtpTimestamp();
|
||||
int64_t capture_time_us =
|
||||
encoded_image.capture_time_ms_ * rtc::kNumMicrosecsPerMillisec;
|
||||
encode_usage_resource_->OnEncodeCompleted(
|
||||
|
||||
@@ -236,7 +236,7 @@ FrameEncodeMetadataWriter::ExtractEncodeStartTimeAndFillMetadata(
   // Because some hardware encoders don't preserve capture timestamp we
   // use RTP timestamps here.
   while (!metadata_list->empty() &&
-         IsNewerTimestamp(encoded_image->Timestamp(),
+         IsNewerTimestamp(encoded_image->RtpTimestamp(),
                           metadata_list->front().rtp_timestamp)) {
     frame_drop_callback_->OnDroppedFrame(
         EncodedImageCallback::DropReason::kDroppedByEncoder);

@@ -249,7 +249,7 @@ FrameEncodeMetadataWriter::ExtractEncodeStartTimeAndFillMetadata(
           : VideoContentType::UNSPECIFIED;
 
   if (!metadata_list->empty() &&
-      metadata_list->front().rtp_timestamp == encoded_image->Timestamp()) {
+      metadata_list->front().rtp_timestamp == encoded_image->RtpTimestamp()) {
     result.emplace(metadata_list->front().encode_start_time_ms);
     encoded_image->capture_time_ms_ =
         metadata_list->front().timestamp_us / 1000;

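The matching logic above is worth spelling out: the writer queues one metadata record per frame handed to the encoder, keyed by RTP timestamp, and any record still queued in front of a completed frame must belong to a frame the encoder silently dropped. A minimal sketch of that bookkeeping, with a simplified record and an illustrative wraparound-aware comparator (the real writer tracks more fields):

    #include <cstdint>
    #include <list>

    struct PendingFrameMetadata {  // Simplified; illustrative only.
      uint32_t rtp_timestamp;
      int64_t encode_start_time_ms;
      int64_t capture_time_us;
    };

    // True if `a` is newer than `b` on the wrapping 32-bit RTP clock.
    bool IsNewerRtpTimestamp(uint32_t a, uint32_t b) {
      return a != b && static_cast<uint32_t>(a - b) < 0x80000000u;
    }

    void OnFrameEncoded(uint32_t encoded_rtp_timestamp,
                        std::list<PendingFrameMetadata>& pending) {
      // Entries queued before this frame never came back: report as dropped.
      while (!pending.empty() &&
             IsNewerRtpTimestamp(encoded_rtp_timestamp,
                                 pending.front().rtp_timestamp)) {
        pending.pop_front();  // Signal the drop to a callback here.
      }
      // Exact match: restore encode start and capture time from the record.
      if (!pending.empty() &&
          pending.front().rtp_timestamp == encoded_rtp_timestamp) {
        pending.pop_front();
      }
    }
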
@@ -105,7 +105,7 @@ std::vector<std::vector<FrameType>> GetTimingFrames(
       image.SetEncodedData(EncodedImageBuffer::Create(max_frame_size));
       image.set_size(FrameSize(min_frame_size, max_frame_size, si, i));
       image.capture_time_ms_ = current_timestamp;
-      image.SetTimestamp(static_cast<uint32_t>(current_timestamp * 90));
+      image.SetRtpTimestamp(static_cast<uint32_t>(current_timestamp * 90));
       image.SetSpatialIndex(si);
 
       if (dropped) {

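The `* 90` factor in these tests is the 90 kHz RTP video clock: one millisecond of capture time equals 90 RTP ticks. A one-liner sketch of the conversion (helper names are illustrative, not part of the API):

    #include <cstdint>

    constexpr int64_t kRtpTicksPerMs = 90;  // 90 kHz video clock.

    uint32_t MsToRtpTicks(int64_t time_ms) {
      return static_cast<uint32_t>(time_ms * kRtpTicksPerMs);
    }

    int64_t RtpTicksToMs(uint32_t ticks) {
      return ticks / kRtpTicksPerMs;  // Truncating; matches whole-ms test inputs.
    }
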
@@ -198,7 +198,7 @@ TEST(FrameEncodeMetadataWriterTest, NoTimingFrameIfNoEncodeStartTime) {
   EncodedImage image;
   image.SetEncodedData(EncodedImageBuffer::Create(kFrameSize));
   image.capture_time_ms_ = timestamp;
-  image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
+  image.SetRtpTimestamp(static_cast<uint32_t>(timestamp * 90));
 
   FakeEncodedImageCallback sink;
   FrameEncodeMetadataWriter encode_timer(&sink);

@@ -222,7 +222,7 @@ TEST(FrameEncodeMetadataWriterTest, NoTimingFrameIfNoEncodeStartTime) {
 
   // New frame, now skip OnEncodeStarted. Should not result in timing frame.
   image.capture_time_ms_ = ++timestamp;
-  image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
+  image.SetRtpTimestamp(static_cast<uint32_t>(timestamp * 90));
   image.timing_ = EncodedImage::Timing();
   encode_timer.FillTimingInfo(0, &image);
   EXPECT_FALSE(IsTimingFrame(image));

@@ -250,7 +250,7 @@ TEST(FrameEncodeMetadataWriterTest, NotifiesAboutDroppedFrames) {
                          .build();
 
   image.capture_time_ms_ = kTimestampMs1;
-  image.SetTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90));
+  image.SetRtpTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90));
   frame.set_timestamp(image.capture_time_ms_ * 90);
   frame.set_timestamp_us(image.capture_time_ms_ * 1000);
   encode_timer.OnEncodeStarted(frame);

@@ -259,7 +259,7 @@ TEST(FrameEncodeMetadataWriterTest, NotifiesAboutDroppedFrames) {
   encode_timer.FillTimingInfo(0, &image);
 
   image.capture_time_ms_ = kTimestampMs2;
-  image.SetTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90));
+  image.SetRtpTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90));
   image.timing_ = EncodedImage::Timing();
   frame.set_timestamp(image.capture_time_ms_ * 90);
   frame.set_timestamp_us(image.capture_time_ms_ * 1000);

@@ -269,7 +269,7 @@ TEST(FrameEncodeMetadataWriterTest, NotifiesAboutDroppedFrames) {
   EXPECT_EQ(0u, sink.GetNumFramesDropped());
 
   image.capture_time_ms_ = kTimestampMs3;
-  image.SetTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90));
+  image.SetRtpTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90));
   image.timing_ = EncodedImage::Timing();
   frame.set_timestamp(image.capture_time_ms_ * 90);
   frame.set_timestamp_us(image.capture_time_ms_ * 1000);

@@ -278,7 +278,7 @@ TEST(FrameEncodeMetadataWriterTest, NotifiesAboutDroppedFrames) {
   EXPECT_EQ(1u, sink.GetNumFramesDropped());
 
   image.capture_time_ms_ = kTimestampMs4;
-  image.SetTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90));
+  image.SetRtpTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90));
   image.timing_ = EncodedImage::Timing();
   frame.set_timestamp(image.capture_time_ms_ * 90);
   frame.set_timestamp_us(image.capture_time_ms_ * 1000);

@@ -300,7 +300,7 @@ TEST(FrameEncodeMetadataWriterTest, RestoresCaptureTimestamps) {
   encode_timer.OnSetRates(bitrate_allocation, 30);
 
   image.capture_time_ms_ = kTimestampMs;  // Correct timestamp.
-  image.SetTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90));
+  image.SetRtpTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90));
   VideoFrame frame = VideoFrame::Builder()
                          .set_timestamp_ms(image.capture_time_ms_)
                          .set_timestamp_rtp(image.capture_time_ms_ * 90)

@@ -324,7 +324,7 @@ TEST(FrameEncodeMetadataWriterTest, CopiesRotation) {
   bitrate_allocation.SetBitrate(0, 0, 500000);
   encode_timer.OnSetRates(bitrate_allocation, 30);
 
-  image.SetTimestamp(static_cast<uint32_t>(kTimestampMs * 90));
+  image.SetRtpTimestamp(static_cast<uint32_t>(kTimestampMs * 90));
   VideoFrame frame = VideoFrame::Builder()
                          .set_timestamp_ms(kTimestampMs)
                          .set_timestamp_rtp(kTimestampMs * 90)

@@ -350,7 +350,7 @@ TEST(FrameEncodeMetadataWriterTest, SetsContentType) {
   bitrate_allocation.SetBitrate(0, 0, 500000);
   encode_timer.OnSetRates(bitrate_allocation, 30);
 
-  image.SetTimestamp(static_cast<uint32_t>(kTimestampMs * 90));
+  image.SetRtpTimestamp(static_cast<uint32_t>(kTimestampMs * 90));
   VideoFrame frame = VideoFrame::Builder()
                          .set_timestamp_ms(kTimestampMs)
                          .set_timestamp_rtp(kTimestampMs * 90)

@@ -376,7 +376,7 @@ TEST(FrameEncodeMetadataWriterTest, CopiesColorSpace) {
 
   webrtc::ColorSpace color_space =
       CreateTestColorSpace(/*with_hdr_metadata=*/true);
-  image.SetTimestamp(static_cast<uint32_t>(kTimestampMs * 90));
+  image.SetRtpTimestamp(static_cast<uint32_t>(kTimestampMs * 90));
   VideoFrame frame = VideoFrame::Builder()
                          .set_timestamp_ms(kTimestampMs)
                          .set_timestamp_rtp(kTimestampMs * 90)

@@ -402,7 +402,7 @@ TEST(FrameEncodeMetadataWriterTest, CopiesPacketInfos) {
   encode_timer.OnSetRates(bitrate_allocation, 30);
 
   RtpPacketInfos packet_infos = CreatePacketInfos(3);
-  image.SetTimestamp(static_cast<uint32_t>(kTimestampMs * 90));
+  image.SetRtpTimestamp(static_cast<uint32_t>(kTimestampMs * 90));
   VideoFrame frame = VideoFrame::Builder()
                          .set_timestamp_ms(kTimestampMs)
                          .set_timestamp_rtp(kTimestampMs * 90)

@@ -881,7 +881,7 @@ void RtpVideoStreamReceiver2::OnAssembledFrame(
   // Reset `reference_finder_` if `frame` is new and the codec have changed.
   if (current_codec_) {
     bool frame_is_newer =
-        AheadOf(frame->Timestamp(), last_assembled_frame_rtp_timestamp_);
+        AheadOf(frame->RtpTimestamp(), last_assembled_frame_rtp_timestamp_);
 
     if (frame->codec_type() != current_codec_) {
       if (frame_is_newer) {

@@ -899,11 +899,11 @@ void RtpVideoStreamReceiver2::OnAssembledFrame(
     }
 
     if (frame_is_newer) {
-      last_assembled_frame_rtp_timestamp_ = frame->Timestamp();
+      last_assembled_frame_rtp_timestamp_ = frame->RtpTimestamp();
     }
   } else {
     current_codec_ = frame->codec_type();
-    last_assembled_frame_rtp_timestamp_ = frame->Timestamp();
+    last_assembled_frame_rtp_timestamp_ = frame->RtpTimestamp();
   }
 
   if (buffered_frame_decryptor_ != nullptr) {

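`AheadOf` decides ordering on the wrapping 32-bit RTP clock, so a frame whose timestamp has just wrapped past zero still compares as newer. A minimal sketch of the idea (the real helper in rtc_base is templated and more general):

    #include <cstdint>

    // True if the forward distance b -> a is shorter than a -> b, i.e. `a`
    // is ahead of `b` modulo 2^32.
    bool AheadOf(uint32_t a, uint32_t b) {
      return a != b && static_cast<uint32_t>(a - b) < 0x80000000u;
    }

    // Example across the wrap point: AheadOf(0x00000140u, 0xffff0000u) is
    // true, even though 0x140 is numerically smaller.
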
@@ -271,7 +271,7 @@ bool SendStatisticsProxy::UmaSamplesContainer::InsertEncodedFrame(
   // Check for jump in timestamp.
   if (!encoded_frames_.empty()) {
     uint32_t oldest_timestamp = encoded_frames_.begin()->first;
-    if (ForwardDiff(oldest_timestamp, encoded_frame.Timestamp()) >
+    if (ForwardDiff(oldest_timestamp, encoded_frame.RtpTimestamp()) >
         kMaxEncodedFrameTimestampDiff) {
       // Gap detected, clear frames to have a sequence where newest timestamp
       // is not too far away from oldest in order to distinguish old and new.

@@ -279,11 +279,11 @@ bool SendStatisticsProxy::UmaSamplesContainer::InsertEncodedFrame(
     }
   }
 
-  auto it = encoded_frames_.find(encoded_frame.Timestamp());
+  auto it = encoded_frames_.find(encoded_frame.RtpTimestamp());
   if (it == encoded_frames_.end()) {
     // First frame with this timestamp.
     encoded_frames_.insert(
-        std::make_pair(encoded_frame.Timestamp(),
+        std::make_pair(encoded_frame.RtpTimestamp(),
                        Frame(now_ms, encoded_frame._encodedWidth,
                              encoded_frame._encodedHeight, simulcast_idx)));
     sent_fps_counter_.Add(1);

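`ForwardDiff(a, b)` is the companion distance function: how far `b` lies ahead of `a`, modulo 2^32. That is what lets the gap check above work even when timestamps wrap. A sketch:

    #include <cstdint>

    // Forward distance from `from` to `to` on the wrapping RTP clock.
    uint32_t ForwardDiff(uint32_t from, uint32_t to) {
      return to - from;  // Unsigned subtraction wraps exactly like the clock.
    }

    // E.g. ForwardDiff(0xffff0000u, 0x00000140u) == 0x10140: the true tick
    // distance across the wrap, not a huge bogus gap.
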
@@ -471,7 +471,7 @@ TEST_F(SendStatisticsProxyTest,
   fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs);
   fake_global_clock.SetTime(
       Timestamp::Millis(fake_clock_.TimeInMilliseconds()));
-  encoded_image.SetTimestamp(encoded_image.Timestamp() +
-                             90 * kInterframeDelayMs);
+  encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() +
+                                90 * kInterframeDelayMs);
   statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
 
@@ -498,7 +498,7 @@ TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStream) {
   fake_global_clock.SetTime(
       Timestamp::Millis(fake_clock_.TimeInMilliseconds()));
   // Second frame
-  encoded_image.SetTimestamp(encoded_image.Timestamp() +
-                             90 * kInterframeDelayMs);
+  encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() +
+                                90 * kInterframeDelayMs);
   statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
   fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs);

@@ -519,7 +519,7 @@ TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStreamsVp8Simulcast) {
   codec_info.codecType = kVideoCodecVP8;
 
   for (int i = 0; i < 10; ++i) {
-    encoded_image.SetTimestamp(encoded_image.Timestamp() +
-                               90 * kInterframeDelayMs);
+    encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() +
+                                  90 * kInterframeDelayMs);
     encoded_image.SetSimulcastIndex(0);
     statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);

@@ -537,7 +537,7 @@ TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStreamsVp8Simulcast) {
 
   // Stop encoding second stream, expect framerate to be zero.
   for (int i = 0; i < 10; ++i) {
-    encoded_image.SetTimestamp(encoded_image.Timestamp() +
-                               90 * kInterframeDelayMs);
+    encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() +
+                                  90 * kInterframeDelayMs);
     encoded_image.SetSimulcastIndex(0);
     statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);

@@ -553,7 +553,7 @@ TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStreamsVp8Simulcast) {
 
   // Start encoding second stream.
   for (int i = 0; i < 10; ++i) {
-    encoded_image.SetTimestamp(encoded_image.Timestamp() +
-                               90 * kInterframeDelayMs);
+    encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() +
+                                  90 * kInterframeDelayMs);
     encoded_image.SetSimulcastIndex(0);
     statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);

@@ -580,7 +580,7 @@ TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStreamsVp9Svc) {
   codec_info.codecType = kVideoCodecVP9;
 
   for (int i = 0; i < 10; ++i) {
-    encoded_image.SetTimestamp(encoded_image.Timestamp() +
-                               90 * kInterframeDelayMs);
+    encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() +
+                                  90 * kInterframeDelayMs);
     encoded_image.SetSpatialIndex(0);
     codec_info.end_of_picture = false;

@@ -1653,7 +1653,8 @@ TEST_F(SendStatisticsProxyTest, SentResolutionHistogramsAreUpdated) {
   // Not enough samples, stats should not be updated.
   for (int i = 0; i < kMinSamples - 1; ++i) {
     fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
-    encoded_image.SetTimestamp(encoded_image.Timestamp() + 90 * 1000 / kFps);
+    encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() +
+                                  90 * 1000 / kFps);
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
   }
   SetUp();  // Reset stats proxy also causes histograms to be reported.

@@ -1661,10 +1662,11 @@ TEST_F(SendStatisticsProxyTest, SentResolutionHistogramsAreUpdated) {
   EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.SentHeightInPixels"));
 
   // Enough samples, max resolution per frame should be reported.
-  encoded_image.SetTimestamp(0xffff0000);  // Will wrap.
+  encoded_image.SetRtpTimestamp(0xffff0000);  // Will wrap.
   for (int i = 0; i < kMinSamples; ++i) {
     fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
-    encoded_image.SetTimestamp(encoded_image.Timestamp() + 90 * 1000 / kFps);
+    encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() +
+                                  90 * 1000 / kFps);
     encoded_image._encodedWidth = kWidth;
     encoded_image._encodedHeight = kHeight;
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);

@@ -1703,7 +1705,7 @@ TEST_F(SendStatisticsProxyTest, SentFpsHistogramIsUpdated) {
   int frames = kMinPeriodicSamples * kFpsPeriodicIntervalMs * kFps / 1000 + 1;
   for (int i = 0; i < frames; ++i) {
     fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
-    encoded_image.SetTimestamp(encoded_image.Timestamp() + 1);
+    encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() + 1);
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
     // Frame with same timestamp should not be counted.
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);

@@ -1746,7 +1748,7 @@ TEST_F(SendStatisticsProxyTest, SentFpsHistogramExcludesSuspendedTime) {
   int frames = kMinPeriodicSamples * kFpsPeriodicIntervalMs * kFps / 1000;
   for (int i = 0; i < frames; ++i) {
     fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
-    encoded_image.SetTimestamp(i + 1);
+    encoded_image.SetRtpTimestamp(i + 1);
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
   }
   // Suspend.

@@ -1755,7 +1757,7 @@ TEST_F(SendStatisticsProxyTest, SentFpsHistogramExcludesSuspendedTime) {
 
   for (int i = 0; i < frames; ++i) {
     fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
-    encoded_image.SetTimestamp(i + 1);
+    encoded_image.SetRtpTimestamp(i + 1);
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
   }
   // Suspended time interval should not affect the framerate.

@@ -2067,7 +2069,7 @@ TEST_F(SendStatisticsProxyTest,
   encoded_image._encodedHeight = kHeight;
   for (int i = 0; i < kMinSamples; ++i) {
     fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
-    encoded_image.SetTimestamp(encoded_image.Timestamp() +
-                               (kRtpClockRateHz / kFps));
+    encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() +
+                                  (kRtpClockRateHz / kFps));
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
   }

@@ -2103,7 +2105,7 @@ TEST_F(SendStatisticsProxyTest,
   EncodedImage encoded_image;
   for (int i = 0; i < kMinSamples; ++i) {
     fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
-    encoded_image.SetTimestamp(encoded_image.Timestamp() +
-                               (kRtpClockRateHz / kFps));
+    encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() +
+                                  (kRtpClockRateHz / kFps));
     encoded_image._encodedWidth = kWidth;
     encoded_image._encodedHeight = kHeight;

@@ -2150,7 +2152,7 @@ TEST_F(SendStatisticsProxyTest,
   encoded_image._encodedHeight = kHeight / 2;
   for (int i = 0; i < kMinSamples; ++i) {
     fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
-    encoded_image.SetTimestamp(encoded_image.Timestamp() +
-                               (kRtpClockRateHz / kFps));
+    encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() +
+                                  (kRtpClockRateHz / kFps));
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
   }

@@ -246,7 +246,7 @@ class QualityTestVideoEncoder : public VideoEncoder,
     RTC_DCHECK_GE(simulcast_index, 0);
     if (analyzer_) {
       analyzer_->PostEncodeOnFrame(simulcast_index,
-                                   encoded_image.Timestamp());
+                                   encoded_image.RtpTimestamp());
     }
     if (static_cast<size_t>(simulcast_index) < writers_.size()) {
       writers_[simulcast_index]->WriteFrame(encoded_image,

@@ -2915,7 +2915,7 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) {
       auto buffer = EncodedImageBuffer::Create(16);
       memset(buffer->data(), 0, 16);
       encoded.SetEncodedData(buffer);
-      encoded.SetTimestamp(input_image.timestamp());
+      encoded.SetRtpTimestamp(input_image.timestamp());
       encoded.capture_time_ms_ = input_image.render_time_ms();
 
       for (size_t i = 0; i < kNumStreams; ++i) {

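For a test encoder like the ones above, a deliverable frame needs little more than a payload buffer plus RTP and capture timestamps copied from the input. A condensed sketch of that setup (a standalone helper distilled from the test, not code from this change):

    EncodedImage MakeFakeEncodedFrame(const VideoFrame& input_image) {
      EncodedImage encoded;
      auto buffer = EncodedImageBuffer::Create(16);  // Placeholder payload.
      memset(buffer->data(), 0, 16);
      encoded.SetEncodedData(buffer);
      // Media time comes straight from the input frame: 90 kHz RTP ticks
      // plus the wall-clock render time as the capture timestamp.
      encoded.SetRtpTimestamp(input_image.timestamp());
      encoded.capture_time_ms_ = input_image.render_time_ms();
      return encoded;
    }
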
@@ -56,7 +56,7 @@ struct FrameMetadata {
         size(frame.size()),
         contentType(frame.contentType()),
         delayed_by_retransmission(frame.delayed_by_retransmission()),
-        rtp_timestamp(frame.Timestamp()),
+        rtp_timestamp(frame.RtpTimestamp()),
         receive_time(frame.ReceivedTimestamp()) {}
 
   const bool is_last_spatial_layer;

@@ -224,10 +224,10 @@ void VideoStreamBufferController::OnFrameReady(
       TargetVideoDelayIsTooLarge(timing_->TargetVideoDelay())) {
     RTC_LOG(LS_WARNING) << "Resetting jitter estimator and timing module due "
                            "to bad render timing for rtp_timestamp="
-                        << first_frame.Timestamp();
+                        << first_frame.RtpTimestamp();
     jitter_estimator_.Reset();
     timing_->Reset();
-    render_time = timing_->RenderTime(first_frame.Timestamp(), now);
+    render_time = timing_->RenderTime(first_frame.RtpTimestamp(), now);
   }
 
   for (std::unique_ptr<EncodedFrame>& frame : frames) {

@@ -241,7 +241,8 @@ void VideoStreamBufferController::OnFrameReady(
 
   if (!superframe_delayed_by_retransmission) {
     absl::optional<TimeDelta> inter_frame_delay_variation =
-        ifdv_calculator_.Calculate(first_frame.Timestamp(), max_receive_time);
+        ifdv_calculator_.Calculate(first_frame.RtpTimestamp(),
+                                   max_receive_time);
     if (inter_frame_delay_variation) {
       jitter_estimator_.UpdateEstimate(*inter_frame_delay_variation,
                                        superframe_size);

@@ -380,7 +381,7 @@ void VideoStreamBufferController::ForceKeyFrameReleaseImmediately()
     }
     // Found keyframe - decode right away.
     if (next_frame.front()->is_keyframe()) {
-      auto render_time = timing_->RenderTime(next_frame.front()->Timestamp(),
-                                             clock_->CurrentTime());
+      auto render_time = timing_->RenderTime(next_frame.front()->RtpTimestamp(),
+                                             clock_->CurrentTime());
       OnFrameReady(std::move(next_frame), render_time);
       return;

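The jitter update above is driven by the inter-frame delay variation: how much longer a frame took to arrive than its RTP timestamp spacing predicts. A back-of-the-envelope sketch of that calculation at 90 kHz (the real calculator returns a TimeDelta and handles more edge cases):

    #include <cstdint>
    #include <optional>

    struct IfdvSketch {
      std::optional<uint32_t> prev_rtp;
      std::optional<int64_t> prev_receive_ms;

      // (receive-time delta) - (RTP-time delta): the extra delay this frame
      // saw relative to the previous one. Positive values feed the jitter
      // estimate; unsigned RTP subtraction stays correct across a wrap.
      std::optional<int64_t> Calculate(uint32_t rtp, int64_t receive_ms) {
        std::optional<int64_t> variation_ms;
        if (prev_rtp.has_value()) {
          int64_t expected_ms = static_cast<uint32_t>(rtp - *prev_rtp) / 90;
          variation_ms = (receive_ms - *prev_receive_ms) - expected_ms;
        }
        prev_rtp = rtp;
        prev_receive_ms = receive_ms;
        return variation_ms;
      }
    };
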
@@ -69,11 +69,11 @@ auto Frame(testing::Matcher<EncodedFrame> m) {
 
 std::unique_ptr<test::FakeEncodedFrame> WithReceiveTimeFromRtpTimestamp(
     std::unique_ptr<test::FakeEncodedFrame> frame) {
-  if (frame->Timestamp() == 0) {
+  if (frame->RtpTimestamp() == 0) {
     frame->SetReceivedTime(kClockStart.ms());
   } else {
     frame->SetReceivedTime(
-        TimeDelta::Seconds(frame->Timestamp() / 90000.0).ms() +
+        TimeDelta::Seconds(frame->RtpTimestamp() / 90000.0).ms() +
         kClockStart.ms());
   }
   return frame;

@@ -2133,7 +2133,7 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage(
     const EncodedImage& encoded_image,
     const CodecSpecificInfo* codec_specific_info) {
   TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded",
-                       "timestamp", encoded_image.Timestamp());
+                       "timestamp", encoded_image.RtpTimestamp());
 
   const size_t simulcast_index = encoded_image.SimulcastIndex().value_or(0);
   const VideoCodecType codec_type = codec_specific_info

@@ -1492,7 +1492,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
       last_encoded_image_ = EncodedImage(encoded_image);
      last_encoded_image_data_ = std::vector<uint8_t>(
          encoded_image.data(), encoded_image.data() + encoded_image.size());
-      uint32_t timestamp = encoded_image.Timestamp();
+      uint32_t timestamp = encoded_image.RtpTimestamp();
       if (last_timestamp_ != timestamp) {
         num_received_layers_ = 1;
         last_width_ = encoded_image._encodedWidth;