Update RtpSenderVideo::SendVideo/SendEncodedImage to take Timestamp/TimeDelta types
Bug: webrtc:13757
Change-Id: I2f21b14ecf003c5cb0c4c92d0c6b9b6f11c35f71
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/311945
Reviewed-by: Philip Eliasson <philipel@webrtc.org>
Commit-Queue: Danil Chapovalov <danilchap@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#40450}
parent 2e48e4b112
commit 630c40d716
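The change replaces raw millisecond integers and optional sentinels with webrtc::Timestamp and webrtc::TimeDelta in the RtpSenderVideo send path. At a call site the migration looks roughly like the sketch below; the sender, clock, payload and header objects are illustrative placeholders, not code taken from a specific file in this change.

  // Old overload (now deprecated): milliseconds plus absl::nullopt sentinels.
  rtp_sender_video.SendVideo(kPayloadType, VideoCodecType::kVideoCodecVP8,
                             rtp_timestamp,
                             /*capture_time_ms=*/clock->TimeInMilliseconds(),
                             payload, payload.size(), video_header,
                             /*expected_retransmission_time_ms=*/absl::nullopt,
                             /*csrcs=*/{});

  // New overload: unit types. Timestamp::MinusInfinity() marks an unknown
  // capture time and TimeDelta::PlusInfinity() marks "retransmission not allowed".
  rtp_sender_video.SendVideo(kPayloadType, VideoCodecType::kVideoCodecVP8,
                             rtp_timestamp, /*capture_time=*/clock->CurrentTime(),
                             payload, payload.size(), video_header,
                             /*expected_retransmission_time=*/TimeDelta::PlusInfinity(),
                             /*csrcs=*/{});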
@@ -156,6 +156,7 @@ rtc_library("encoded_image") {
"../../rtc_base:checks",
"../../rtc_base:refcount",
"../../rtc_base/system:rtc_export",
"../units:timestamp",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}

@@ -75,6 +75,11 @@ void EncodedImage::SetEncodeTime(int64_t encode_start_ms,
timing_.encode_finish_ms = encode_finish_ms;
}

webrtc::Timestamp EncodedImage::CaptureTime() const {
return capture_time_ms_ > 0 ? Timestamp::Millis(capture_time_ms_)
: Timestamp::MinusInfinity();
}

absl::optional<size_t> EncodedImage::SpatialLayerFrameSize(
int spatial_index) const {
RTC_DCHECK_GE(spatial_index, 0);

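The new accessor maps the existing capture_time_ms_ field onto the Timestamp sentinel convention: a non-positive millisecond value becomes Timestamp::MinusInfinity(). A caller that still needs the old millisecond form can invert the mapping with a small helper; this is an illustrative sketch, not part of the change itself.

  // Hypothetical helper: an infinite capture time maps back to 0 ("unknown"),
  // mirroring the conversion in EncodedImage::CaptureTime() shown above.
  int64_t CaptureTimeMsOrZero(const webrtc::EncodedImage& image) {
    webrtc::Timestamp capture_time = image.CaptureTime();
    return capture_time.IsFinite() ? capture_time.ms() : 0;
  }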
@@ -19,6 +19,7 @@
#include "absl/types/optional.h"
#include "api/rtp_packet_infos.h"
#include "api/scoped_refptr.h"
#include "api/units/timestamp.h"
#include "api/video/color_space.h"
#include "api/video/video_codec_constants.h"
#include "api/video/video_content_type.h"
@@ -87,6 +88,8 @@ class RTC_EXPORT EncodedImage {

void SetEncodeTime(int64_t encode_start_ms, int64_t encode_finish_ms);

webrtc::Timestamp CaptureTime() const;

int64_t NtpTimeMs() const { return ntp_time_ms_; }

// Every simulcast layer (= encoding) has its own encoder and RTP stream.

@@ -21,6 +21,7 @@
#include "api/array_view.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/transport/field_trial_based_config.h"
#include "api/units/time_delta.h"
#include "api/video_codecs/video_codec.h"
#include "call/rtp_transport_controller_send_interface.h"
#include "modules/pacing/packet_router.h"
@@ -591,10 +592,10 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage(
return Result(Result::ERROR_SEND_FAILED);
}

absl::optional<int64_t> expected_retransmission_time_ms;
TimeDelta expected_retransmission_time = TimeDelta::PlusInfinity();
if (encoded_image.RetransmissionAllowed()) {
expected_retransmission_time_ms =
rtp_streams_[simulcast_index].rtp_rtcp->ExpectedRetransmissionTimeMs();
expected_retransmission_time =
rtp_streams_[simulcast_index].rtp_rtcp->ExpectedRetransmissionTime();
}

if (IsFirstFrameOfACodedVideoSequence(encoded_image, codec_specific_info)) {
@@ -623,7 +624,7 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage(
rtp_config_.payload_type, codec_type_, rtp_timestamp, encoded_image,
params_[simulcast_index].GetRtpVideoHeader(
encoded_image, codec_specific_info, shared_frame_id_),
expected_retransmission_time_ms);
expected_retransmission_time);
if (frame_count_observer_) {
FrameCounts& counts = frame_counts_[simulcast_index];
if (encoded_image._frameType == VideoFrameType::kVideoFrameKey) {

@@ -120,7 +120,7 @@ class MockRtpRtcpInterface : public RtpRtcpInterface {
MOCK_METHOD(void, SetRTCPStatus, (RtcpMode method), (override));
MOCK_METHOD(int32_t, SetCNAME, (absl::string_view cname), (override));
MOCK_METHOD(absl::optional<TimeDelta>, LastRtt, (), (const, override));
MOCK_METHOD(int64_t, ExpectedRetransmissionTimeMs, (), (const, override));
MOCK_METHOD(TimeDelta, ExpectedRetransmissionTime, (), (const, override));
MOCK_METHOD(int32_t, SendRTCP, (RTCPPacketType packet_type), (override));
MOCK_METHOD(void,
GetSendStreamDataCounters,

@@ -210,8 +210,8 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
video_header.frame_type = VideoFrameType::kVideoFrameDelta;
EXPECT_TRUE(rtp_sender_video_->SendVideo(
kPayloadType, VideoCodecType::kVideoCodecGeneric, timestamp,
timestamp / 90, payload_data, sizeof(payload_data), video_header, 0,
{}));
/*capture_time=*/Timestamp::Millis(timestamp / 90), payload_data,
sizeof(payload_data), video_header, TimeDelta::Zero(), {}));
// Min required delay until retransmit = 5 + RTT ms (RTT = 0).
fake_clock.AdvanceTimeMilliseconds(5);
int length = BuildNackList(nack_list);
@@ -261,8 +261,8 @@ TEST_F(RtpRtcpRtxNackTest, LongNackList) {
video_header.frame_type = VideoFrameType::kVideoFrameDelta;
EXPECT_TRUE(rtp_sender_video_->SendVideo(
kPayloadType, VideoCodecType::kVideoCodecGeneric, timestamp,
timestamp / 90, payload_data, sizeof(payload_data), video_header, 0,
{}));
Timestamp::Millis(timestamp / 90), payload_data, sizeof(payload_data),
video_header, TimeDelta::Zero(), {}));
// Prepare next frame.
timestamp += 3000;
fake_clock.AdvanceTimeMilliseconds(33);

@@ -39,7 +39,7 @@ namespace webrtc {
namespace {
const int64_t kRtpRtcpRttProcessTimeMs = 1000;
const int64_t kRtpRtcpBitrateProcessTimeMs = 10;
const int64_t kDefaultExpectedRetransmissionTimeMs = 125;
constexpr TimeDelta kDefaultExpectedRetransmissionTime = TimeDelta::Millis(125);
}  // namespace

ModuleRtpRtcpImpl::RtpSenderContext::RtpSenderContext(
@@ -475,17 +475,17 @@ absl::optional<TimeDelta> ModuleRtpRtcpImpl::LastRtt() const {
return rtt;
}

int64_t ModuleRtpRtcpImpl::ExpectedRetransmissionTimeMs() const {
TimeDelta ModuleRtpRtcpImpl::ExpectedRetransmissionTime() const {
int64_t expected_retransmission_time_ms = rtt_ms();
if (expected_retransmission_time_ms > 0) {
return expected_retransmission_time_ms;
return TimeDelta::Millis(expected_retransmission_time_ms);
}
// No rtt available (`kRtpRtcpRttProcessTimeMs` not yet passed?), so try to
// poll avg_rtt_ms directly from rtcp receiver.
if (absl::optional<TimeDelta> rtt = rtcp_receiver_.AverageRtt()) {
return rtt->ms();
return *rtt;
}
return kDefaultExpectedRetransmissionTimeMs;
return kDefaultExpectedRetransmissionTime;
}

// Force a send of an RTCP packet.

@@ -172,7 +172,7 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl
// Get RoundTripTime.
absl::optional<TimeDelta> LastRtt() const override;

int64_t ExpectedRetransmissionTimeMs() const override;
TimeDelta ExpectedRetransmissionTime() const override;

// Force a send of an RTCP packet.
// Normal SR and RR are triggered via the process function.

@@ -40,8 +40,7 @@

namespace webrtc {
namespace {
const int64_t kDefaultExpectedRetransmissionTimeMs = 125;

constexpr TimeDelta kDefaultExpectedRetransmissionTime = TimeDelta::Millis(125);
constexpr TimeDelta kRttUpdateInterval = TimeDelta::Millis(1000);

RTCPSender::Configuration AddRtcpSendEvaluationCallback(
@@ -480,17 +479,17 @@ absl::optional<TimeDelta> ModuleRtpRtcpImpl2::LastRtt() const {
return rtt;
}

int64_t ModuleRtpRtcpImpl2::ExpectedRetransmissionTimeMs() const {
TimeDelta ModuleRtpRtcpImpl2::ExpectedRetransmissionTime() const {
int64_t expected_retransmission_time_ms = rtt_ms();
if (expected_retransmission_time_ms > 0) {
return expected_retransmission_time_ms;
return TimeDelta::Millis(expected_retransmission_time_ms);
}
// No rtt available (`kRttUpdateInterval` not yet passed?), so try to
// poll avg_rtt_ms directly from rtcp receiver.
if (absl::optional<TimeDelta> rtt = rtcp_receiver_.AverageRtt()) {
return rtt->ms();
return *rtt;
}
return kDefaultExpectedRetransmissionTimeMs;
return kDefaultExpectedRetransmissionTime;
}

// Force a send of an RTCP packet.

@@ -183,7 +183,7 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface,
// Get RoundTripTime.
absl::optional<TimeDelta> LastRtt() const override;

int64_t ExpectedRetransmissionTimeMs() const override;
TimeDelta ExpectedRetransmissionTime() const override;

// Force a send of an RTCP packet.
// Normal SR and RR are triggered via the task queue that's current when this

@@ -360,9 +360,10 @@ class RtpRtcpImpl2Test : public ::testing::Test {
const uint8_t payload[100] = {0};
bool success = module->impl_->OnSendingRtpFrame(0, 0, kPayloadType, true);

success &= sender->SendVideo(kPayloadType, VideoCodecType::kVideoCodecVP8,
rtp_timestamp, capture_time_ms, payload,
sizeof(payload), rtp_video_header, 0, {});
success &= sender->SendVideo(
kPayloadType, VideoCodecType::kVideoCodecVP8, rtp_timestamp,
Timestamp::Millis(capture_time_ms), payload, sizeof(payload),
rtp_video_header, TimeDelta::Zero(), {});
return success;
}


@@ -232,9 +232,9 @@ class RtpRtcpImplTest : public ::testing::Test {

const uint8_t payload[100] = {0};
EXPECT_TRUE(module->impl_->OnSendingRtpFrame(0, 0, kPayloadType, true));
EXPECT_TRUE(sender->SendVideo(kPayloadType, VideoCodecType::kVideoCodecVP8,
0, 0, payload, sizeof(payload),
rtp_video_header, 0, {}));
EXPECT_TRUE(sender->SendVideo(
kPayloadType, VideoCodecType::kVideoCodecVP8, 0, clock_.CurrentTime(),
payload, sizeof(payload), rtp_video_header, TimeDelta::Zero(), {}));
}

void IncomingRtcpNack(const RtpRtcpModule* module, uint16_t sequence_number) {

@@ -20,6 +20,7 @@
#include "api/field_trials_view.h"
#include "api/frame_transformer_interface.h"
#include "api/scoped_refptr.h"
#include "api/units/time_delta.h"
#include "api/video/video_bitrate_allocation.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
#include "modules/rtp_rtcp/include/report_block_data.h"
@@ -375,7 +376,7 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface {
virtual absl::optional<TimeDelta> LastRtt() const = 0;

// Returns the estimated RTT, with fallback to a default value.
virtual int64_t ExpectedRetransmissionTimeMs() const = 0;
virtual TimeDelta ExpectedRetransmissionTime() const = 0;

// Forces a send of a RTCP packet. Periodic SR and RR are triggered via the
// process function.

@@ -66,8 +66,8 @@ const uint32_t kRtxSsrc = 12345;
const uint32_t kFlexFecSsrc = 45678;
const uint64_t kStartTime = 123456789;
const uint8_t kPayloadData[] = {47, 11, 32, 93, 89};
const int64_t kDefaultExpectedRetransmissionTimeMs = 125;
const uint32_t kTimestampTicksPerMs = 90;  // 90kHz clock.
constexpr TimeDelta kDefaultExpectedRetransmissionTime = TimeDelta::Millis(125);
constexpr uint32_t kTimestampTicksPerMs = 90;  // 90kHz clock.
constexpr absl::string_view kMid = "mid";
constexpr absl::string_view kRid = "f";
constexpr bool kMarkerBit = true;
@@ -1340,12 +1340,12 @@ TEST_F(RtpSenderTest, MarksPacketsWithKeyframeStatus) {
.Times(AtLeast(1));
RTPVideoHeader video_header;
video_header.frame_type = VideoFrameType::kVideoFrameKey;
int64_t capture_time_ms = clock_->TimeInMilliseconds();
Timestamp capture_time = clock_->CurrentTime();
EXPECT_TRUE(rtp_sender_video.SendVideo(
kPayloadType, kCodecType,
capture_time_ms * kCaptureTimeMsToRtpTimestamp, capture_time_ms,
capture_time.ms() * kCaptureTimeMsToRtpTimestamp, capture_time,
kPayloadData, sizeof(kPayloadData), video_header,
kDefaultExpectedRetransmissionTimeMs, {}));
kDefaultExpectedRetransmissionTime, {}));

time_controller_.AdvanceTime(TimeDelta::Millis(33));
}
@@ -1357,12 +1357,12 @@ TEST_F(RtpSenderTest, MarksPacketsWithKeyframeStatus) {
.Times(AtLeast(1));
RTPVideoHeader video_header;
video_header.frame_type = VideoFrameType::kVideoFrameDelta;
int64_t capture_time_ms = clock_->TimeInMilliseconds();
Timestamp capture_time = clock_->CurrentTime();
EXPECT_TRUE(rtp_sender_video.SendVideo(
kPayloadType, kCodecType,
capture_time_ms * kCaptureTimeMsToRtpTimestamp, capture_time_ms,
capture_time.ms() * kCaptureTimeMsToRtpTimestamp, capture_time,
kPayloadData, sizeof(kPayloadData), video_header,
kDefaultExpectedRetransmissionTimeMs, {}));
kDefaultExpectedRetransmissionTime, {}));

time_controller_.AdvanceTime(TimeDelta::Millis(33));
}

@@ -461,24 +461,27 @@ bool RTPSenderVideo::SendVideo(
rtc::ArrayView<const uint8_t> payload,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms) {
return SendVideo(payload_type, codec_type, rtp_timestamp, capture_time_ms,
return SendVideo(payload_type, codec_type, rtp_timestamp,
capture_time_ms > 0 ? Timestamp::Millis(capture_time_ms)
: Timestamp::MinusInfinity(),
payload, payload.size(), video_header,
expected_retransmission_time_ms,
expected_retransmission_time_ms.has_value()
? TimeDelta::Millis(*expected_retransmission_time_ms)
: TimeDelta::PlusInfinity(),
/*csrcs=*/{});
}

bool RTPSenderVideo::SendVideo(
int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
int64_t capture_time_ms,
rtc::ArrayView<const uint8_t> payload,
size_t encoder_output_size,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms,
std::vector<uint32_t> csrcs) {
bool RTPSenderVideo::SendVideo(int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
Timestamp capture_time,
rtc::ArrayView<const uint8_t> payload,
size_t encoder_output_size,
RTPVideoHeader video_header,
TimeDelta expected_retransmission_time,
std::vector<uint32_t> csrcs) {
TRACE_EVENT_ASYNC_STEP1(
"webrtc", "Video", capture_time_ms, "Send", "type",
"webrtc", "Video", capture_time.ms_or(0), "Send", "type",
std::string(VideoFrameTypeToString(video_header.frame_type)));
RTC_CHECK_RUNS_SERIALIZED(&send_checker_);

@@ -499,11 +502,11 @@ bool RTPSenderVideo::SendVideo(
}
const uint8_t temporal_id = GetTemporalId(video_header);
// TODO(bugs.webrtc.org/10714): retransmission_settings_ should generally be
// replaced by expected_retransmission_time_ms.has_value().
// replaced by expected_retransmission_time.IsFinite().
const bool allow_retransmission =
expected_retransmission_time_ms.has_value() &&
expected_retransmission_time.IsFinite() &&
AllowRetransmission(temporal_id, retransmission_settings,
*expected_retransmission_time_ms);
expected_retransmission_time);

MaybeUpdateCurrentPlayoutDelay(video_header);
if (video_header.frame_type == VideoFrameType::kVideoFrameKey) {
@@ -539,11 +542,6 @@ bool RTPSenderVideo::SendVideo(
packet_capacity -= rtp_sender_->RtxPacketOverhead();
}

absl::optional<Timestamp> capture_time;
if (capture_time_ms > 0) {
capture_time = Timestamp::Millis(capture_time_ms);
}

rtp_sender_->SetCsrcs(std::move(csrcs));

std::unique_ptr<RtpPacketToSend> single_packet =
@@ -551,16 +549,16 @@ bool RTPSenderVideo::SendVideo(
RTC_DCHECK_LE(packet_capacity, single_packet->capacity());
single_packet->SetPayloadType(payload_type);
single_packet->SetTimestamp(rtp_timestamp);
if (capture_time)
single_packet->set_capture_time(*capture_time);
if (capture_time.IsFinite())
single_packet->set_capture_time(capture_time);

// Construct the absolute capture time extension if not provided.
if (!video_header.absolute_capture_time.has_value() &&
capture_time.has_value()) {
capture_time.IsFinite()) {
video_header.absolute_capture_time.emplace();
video_header.absolute_capture_time->absolute_capture_timestamp =
Int64MsToUQ32x32(
clock_->ConvertTimestampToNtpTime(*capture_time).ToMs());
clock_->ConvertTimestampToNtpTime(capture_time).ToMs());
video_header.absolute_capture_time->estimated_capture_clock_offset = 0;
}

@@ -763,7 +761,7 @@ bool RTPSenderVideo::SendVideo(
send_allocation_ = SendVideoLayersAllocation::kDontSend;
}

TRACE_EVENT_ASYNC_END1("webrtc", "Video", capture_time_ms, "timestamp",
TRACE_EVENT_ASYNC_END1("webrtc", "Video", capture_time.ms_or(0), "timestamp",
rtp_timestamp);
return true;
}
@@ -775,16 +773,30 @@ bool RTPSenderVideo::SendEncodedImage(
const EncodedImage& encoded_image,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms) {
return SendEncodedImage(
payload_type, codec_type, rtp_timestamp, encoded_image,
std::move(video_header),
expected_retransmission_time_ms.has_value()
? TimeDelta::Millis(*expected_retransmission_time_ms)
: TimeDelta::PlusInfinity());
}

bool RTPSenderVideo::SendEncodedImage(int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
const EncodedImage& encoded_image,
RTPVideoHeader video_header,
TimeDelta expected_retransmission_time) {
if (frame_transformer_delegate_) {
// The frame will be sent async once transformed.
return frame_transformer_delegate_->TransformFrame(
payload_type, codec_type, rtp_timestamp, encoded_image, video_header,
expected_retransmission_time_ms);
expected_retransmission_time);
}
return SendVideo(payload_type, codec_type, rtp_timestamp,
encoded_image.capture_time_ms_, encoded_image,
encoded_image.CaptureTime(), encoded_image,
encoded_image.size(), video_header,
expected_retransmission_time_ms, rtp_sender_->Csrcs());
expected_retransmission_time, rtp_sender_->Csrcs());
}

DataRate RTPSenderVideo::PostEncodeOverhead() const {
@@ -797,7 +809,7 @@ DataRate RTPSenderVideo::PostEncodeOverhead() const {
bool RTPSenderVideo::AllowRetransmission(
uint8_t temporal_id,
int32_t retransmission_settings,
int64_t expected_retransmission_time_ms) {
TimeDelta expected_retransmission_time) {
if (retransmission_settings == kRetransmitOff)
return false;

@@ -805,7 +817,7 @@ bool RTPSenderVideo::AllowRetransmission(
// Media packet storage.
if ((retransmission_settings & kConditionallyRetransmitHigherLayers) &&
UpdateConditionalRetransmit(temporal_id,
expected_retransmission_time_ms)) {
expected_retransmission_time.ms())) {
retransmission_settings |= kRetransmitHigherLayers;
}


@@ -92,26 +92,29 @@ class RTPSenderVideo : public RTPVideoFrameSenderInterface {
// expected_retransmission_time_ms.has_value() -> retransmission allowed.
// `capture_time_ms` and `clock::CurrentTime` should be using the same epoch.
// Calls to this method are assumed to be externally serialized.
bool SendVideo(int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
int64_t capture_time_ms,
rtc::ArrayView<const uint8_t> payload,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms);
[[deprecated("bugs.webrtc.org/13757")]] bool SendVideo(
int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
int64_t capture_time_ms,
rtc::ArrayView<const uint8_t> payload,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms);

// expected_retransmission_time.IsFinite() -> retransmission allowed.
// `encoder_output_size` is the size of the video frame as it came out of the
// video encoder, excluding any additional overhead.
bool SendVideo(int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
int64_t capture_time_ms,
Timestamp capture_time,
rtc::ArrayView<const uint8_t> payload,
size_t encoder_output_size,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms,
TimeDelta expected_retransmission_time,
std::vector<uint32_t> csrcs) override;

bool SendEncodedImage(
[[deprecated("bugs.webrtc.org/13757")]] bool SendEncodedImage(
int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
@@ -119,6 +122,13 @@ class RTPSenderVideo : public RTPVideoFrameSenderInterface {
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms);

bool SendEncodedImage(int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
const EncodedImage& encoded_image,
RTPVideoHeader video_header,
TimeDelta expected_retransmission_time);

// Configures video structures produced by encoder to send using the
// dependency descriptor rtp header extension. Next call to SendVideo should
// have video_header.frame_type == kVideoFrameKey.
@@ -157,7 +167,7 @@ class RTPSenderVideo : public RTPVideoFrameSenderInterface {
static uint8_t GetTemporalId(const RTPVideoHeader& header);
bool AllowRetransmission(uint8_t temporal_id,
int32_t retransmission_settings,
int64_t expected_retransmission_time_ms);
TimeDelta expected_retransmission_time);

private:
struct TemporalLayerStats {

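Per the header comments above, the optional-ness of the retransmission time now lives in the TimeDelta value itself: any finite value allows retransmission, while TimeDelta::PlusInfinity() disallows it. A caller holding an optional RTT can convert in one step; the following is a sketch, with rtp_rtcp assumed to be an RtpRtcpInterface pointer rather than code from this change.

  // Hypothetical conversion from an optional RTT to the new parameter.
  TimeDelta expected_retransmission_time =
      rtp_rtcp->LastRtt().value_or(TimeDelta::PlusInfinity());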
@@ -23,15 +23,14 @@ namespace {

class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
public:
TransformableVideoSenderFrame(
const EncodedImage& encoded_image,
const RTPVideoHeader& video_header,
int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
absl::optional<int64_t> expected_retransmission_time_ms,
uint32_t ssrc,
std::vector<uint32_t> csrcs)
TransformableVideoSenderFrame(const EncodedImage& encoded_image,
const RTPVideoHeader& video_header,
int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
TimeDelta expected_retransmission_time,
uint32_t ssrc,
std::vector<uint32_t> csrcs)
: encoded_data_(encoded_image.GetEncodedData()),
pre_transform_payload_size_(encoded_image.size()),
header_(video_header),
@@ -39,9 +38,9 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
payload_type_(payload_type),
codec_type_(codec_type),
timestamp_(rtp_timestamp),
capture_time_ms_(encoded_image.capture_time_ms_),
capture_time_(encoded_image.CaptureTime()),
capture_time_identifier_(encoded_image.CaptureTimeIdentifier()),
expected_retransmission_time_ms_(expected_retransmission_time_ms),
expected_retransmission_time_(expected_retransmission_time),
ssrc_(ssrc),
csrcs_(csrcs) {
RTC_DCHECK_GE(payload_type_, 0);
@@ -88,13 +87,13 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
const RTPVideoHeader& GetHeader() const { return header_; }
uint8_t GetPayloadType() const override { return payload_type_; }
absl::optional<VideoCodecType> GetCodecType() const { return codec_type_; }
int64_t GetCaptureTimeMs() const { return capture_time_ms_; }
Timestamp GetCaptureTime() const { return capture_time_; }
absl::optional<Timestamp> GetCaptureTimeIdentifier() const override {
return capture_time_identifier_;
}

const absl::optional<int64_t>& GetExpectedRetransmissionTimeMs() const {
return expected_retransmission_time_ms_;
TimeDelta GetExpectedRetransmissionTime() const {
return expected_retransmission_time_;
}

Direction GetDirection() const override { return Direction::kSender; }
@@ -107,9 +106,9 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
const uint8_t payload_type_;
const absl::optional<VideoCodecType> codec_type_ = absl::nullopt;
uint32_t timestamp_;
const int64_t capture_time_ms_;
const Timestamp capture_time_;
const absl::optional<Timestamp> capture_time_identifier_;
const absl::optional<int64_t> expected_retransmission_time_ms_;
const TimeDelta expected_retransmission_time_;

uint32_t ssrc_;
std::vector<uint32_t> csrcs_;
@@ -141,10 +140,10 @@ bool RTPSenderVideoFrameTransformerDelegate::TransformFrame(
uint32_t rtp_timestamp,
const EncodedImage& encoded_image,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms) {
TimeDelta expected_retransmission_time) {
frame_transformer_->Transform(std::make_unique<TransformableVideoSenderFrame>(
encoded_image, video_header, payload_type, codec_type, rtp_timestamp,
expected_retransmission_time_ms, ssrc_, csrcs_));
expected_retransmission_time, ssrc_, csrcs_));
return true;
}

@@ -173,16 +172,15 @@ void RTPSenderVideoFrameTransformerDelegate::SendVideo(
TransformableFrameInterface::Direction::kSender) {
auto* transformed_video_frame =
static_cast<TransformableVideoSenderFrame*>(transformed_frame.get());
sender_->SendVideo(
transformed_video_frame->GetPayloadType(),
transformed_video_frame->GetCodecType(),
transformed_video_frame->GetTimestamp(),
transformed_video_frame->GetCaptureTimeMs(),
transformed_video_frame->GetData(),
transformed_video_frame->GetPreTransformPayloadSize(),
transformed_video_frame->GetHeader(),
transformed_video_frame->GetExpectedRetransmissionTimeMs(),
transformed_video_frame->Metadata().GetCsrcs());
sender_->SendVideo(transformed_video_frame->GetPayloadType(),
transformed_video_frame->GetCodecType(),
transformed_video_frame->GetTimestamp(),
transformed_video_frame->GetCaptureTime(),
transformed_video_frame->GetData(),
transformed_video_frame->GetPreTransformPayloadSize(),
transformed_video_frame->GetHeader(),
transformed_video_frame->GetExpectedRetransmissionTime(),
transformed_video_frame->Metadata().GetCsrcs());
} else {
auto* transformed_video_frame =
static_cast<TransformableVideoFrameInterface*>(transformed_frame.get());
@@ -190,10 +188,11 @@ void RTPSenderVideoFrameTransformerDelegate::SendVideo(
sender_->SendVideo(
transformed_video_frame->GetPayloadType(), metadata.GetCodec(),
transformed_video_frame->GetTimestamp(),
/*capture_time_ms=*/0, transformed_video_frame->GetData(),
/*capture_time=*/Timestamp::MinusInfinity(),
transformed_video_frame->GetData(),
transformed_video_frame->GetData().size(),
RTPVideoHeader::FromMetadata(metadata),
/*expected_retransmission_time_ms_=*/absl::nullopt,
/*expected_retransmission_time=*/TimeDelta::PlusInfinity(),
metadata.GetCsrcs());
}
}
@@ -237,7 +236,7 @@ std::unique_ptr<TransformableVideoFrameInterface> CloneSenderVideoFrame(
return std::make_unique<TransformableVideoSenderFrame>(
encoded_image, new_header, original->GetPayloadType(), new_header.codec,
original->GetTimestamp(),
absl::nullopt,  // expected_retransmission_time_ms
/*expected_retransmission_time=*/TimeDelta::PlusInfinity(),
original->GetSsrc(), metadata.GetCsrcs());
}


@@ -19,6 +19,8 @@
#include "api/sequence_checker.h"
#include "api/task_queue/task_queue_base.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "api/video/video_layers_allocation.h"
#include "rtc_base/synchronization/mutex.h"

@@ -28,16 +30,15 @@ namespace webrtc {
// have been applied.
class RTPVideoFrameSenderInterface {
public:
virtual bool SendVideo(
int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
int64_t capture_time_ms,
rtc::ArrayView<const uint8_t> payload,
size_t encoder_output_size,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms,
std::vector<uint32_t> csrcs) = 0;
virtual bool SendVideo(int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
Timestamp capture_time,
rtc::ArrayView<const uint8_t> payload,
size_t encoder_output_size,
RTPVideoHeader video_header,
TimeDelta expected_retransmission_time,
std::vector<uint32_t> csrcs) = 0;

virtual void SetVideoStructureAfterTransformation(
const FrameDependencyStructure* video_structure) = 0;
@@ -68,7 +69,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback {
uint32_t rtp_timestamp,
const EncodedImage& encoded_image,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms);
TimeDelta expected_retransmission_time);

// Implements TransformedFrameCallback. Can be called on any thread. Posts
// the transformed frame to be sent on the `encoder_queue_`.

@@ -35,11 +35,11 @@ class MockRTPVideoFrameSenderInterface : public RTPVideoFrameSenderInterface {
(int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
int64_t capture_time_ms,
Timestamp capture_time,
rtc::ArrayView<const uint8_t> payload,
size_t encoder_output_size,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms,
TimeDelta expected_retransmission_time,
std::vector<uint32_t> csrcs),
(override));

@@ -77,7 +77,7 @@ class RtpSenderVideoFrameTransformerDelegateTest : public ::testing::Test {
delegate->TransformFrame(
/*payload_type=*/1, VideoCodecType::kVideoCodecVP8, /*rtp_timestamp=*/2,
encoded_image, RTPVideoHeader(),
/*expected_retransmission_time_ms=*/absl::nullopt);
/*expected_retransmission_time=*/TimeDelta::PlusInfinity());
return frame;
}

@@ -120,7 +120,7 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest,
delegate->TransformFrame(
/*payload_type=*/1, VideoCodecType::kVideoCodecVP8, /*rtp_timestamp=*/2,
encoded_image, RTPVideoHeader(),
/*expected_retransmission_time_ms=*/absl::nullopt);
/*expected_retransmission_time=*/TimeDelta::PlusInfinity());
}

TEST_F(RtpSenderVideoFrameTransformerDelegateTest,
@@ -255,11 +255,12 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest,
ASSERT_TRUE(callback);

rtc::Event event;
EXPECT_CALL(test_sender_,
SendVideo(payload_type, absl::make_optional(kVideoCodecVP8),
timestamp, /*capture_time_ms=*/0, buffer, _, _,
/*expected_retransmission_time_ms_=*/
(absl::optional<int64_t>)absl::nullopt, frame_csrcs))
EXPECT_CALL(
test_sender_,
SendVideo(payload_type, absl::make_optional(kVideoCodecVP8), timestamp,
/*capture_time=*/Timestamp::MinusInfinity(), buffer, _, _,
/*expected_retransmission_time=*/TimeDelta::PlusInfinity(),
frame_csrcs))
.WillOnce(WithoutArgs([&] {
event.Set();
return true;

@@ -87,7 +87,7 @@ constexpr uint32_t kSsrc = 725242;
constexpr uint32_t kRtxSsrc = 912364;
constexpr int kMaxPacketLength = 1500;
constexpr Timestamp kStartTime = Timestamp::Millis(123456789);
constexpr int64_t kDefaultExpectedRetransmissionTimeMs = 125;
constexpr TimeDelta kDefaultExpectedRetransmissionTime = TimeDelta::Millis(125);

class LoopbackTransportTest : public webrtc::Transport {
public:
@@ -148,10 +148,10 @@ class TestRtpSenderVideo : public RTPSenderVideo {

bool AllowRetransmission(const RTPVideoHeader& header,
int32_t retransmission_settings,
int64_t expected_retransmission_time_ms) {
TimeDelta expected_retransmission_time) {
return RTPSenderVideo::AllowRetransmission(GetTemporalId(header),
retransmission_settings,
expected_retransmission_time_ms);
expected_retransmission_time);
}
};

@@ -201,9 +201,9 @@ TEST_F(RtpSenderVideoTest, KeyFrameHasCVO) {
RTPVideoHeader hdr;
hdr.rotation = kVideoRotation_0;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});

VideoRotation rotation;
EXPECT_TRUE(
@@ -219,7 +219,7 @@ TEST_F(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) {
rtp_module_->RegisterRtpHeaderExtension(VideoTimingExtension::Uri(),
kVideoTimingExtensionId);

const int64_t kCaptureTimestamp = fake_clock_.TimeInMilliseconds();
const Timestamp kCaptureTimestamp = fake_clock_.CurrentTime();

RTPVideoHeader hdr;
hdr.video_timing.flags = VideoSendTiming::kTriggeredByTimer;
@@ -230,7 +230,7 @@ TEST_F(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) {
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, kCaptureTimestamp,
kFrame, sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
kDefaultExpectedRetransmissionTime, {});
VideoSendTiming timing;
EXPECT_TRUE(transport_.last_sent_packet().GetExtension<VideoTimingExtension>(
&timing));
@@ -248,14 +248,14 @@ TEST_F(RtpSenderVideoTest, DeltaFrameHasCVOWhenChanged) {
hdr.rotation = kVideoRotation_90;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
EXPECT_TRUE(rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, 0, kFrame, sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {}));
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}));

hdr.rotation = kVideoRotation_0;
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
EXPECT_TRUE(rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp + 1, 0, kFrame, sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {}));
kPayload, kType, kTimestamp + 1, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}));

VideoRotation rotation;
EXPECT_TRUE(
@@ -272,13 +272,13 @@ TEST_F(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) {
hdr.rotation = kVideoRotation_90;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
EXPECT_TRUE(rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, 0, kFrame, sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {}));
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}));

hdr.frame_type = VideoFrameType::kVideoFrameDelta;
EXPECT_TRUE(rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp + 1, 0, kFrame, sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {}));
kPayload, kType, kTimestamp + 1, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}));

VideoRotation rotation;
EXPECT_TRUE(
@@ -312,14 +312,14 @@ TEST_F(RtpSenderVideoTest, RetransmissionTypesGeneric) {
header.codec = kVideoCodecGeneric;

EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs));
header, kRetransmitOff, kDefaultExpectedRetransmissionTime));
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs));
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTime));
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs));
header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTime));
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
header, kConditionallyRetransmitHigherLayers,
kDefaultExpectedRetransmissionTimeMs));
kDefaultExpectedRetransmissionTime));
}

TEST_F(RtpSenderVideoTest, RetransmissionTypesH264) {
@@ -329,14 +329,14 @@ TEST_F(RtpSenderVideoTest, RetransmissionTypesH264) {
header.codec = kVideoCodecH264;

EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs));
header, kRetransmitOff, kDefaultExpectedRetransmissionTime));
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs));
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTime));
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs));
header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTime));
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
header, kConditionallyRetransmitHigherLayers,
kDefaultExpectedRetransmissionTimeMs));
kDefaultExpectedRetransmissionTime));
}

TEST_F(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) {
@@ -346,20 +346,20 @@ TEST_F(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) {
vp8_header.temporalIdx = 0;

EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs));
header, kRetransmitOff, kDefaultExpectedRetransmissionTime));
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs));
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTime));
EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs));
header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTime));
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitHigherLayers | kRetransmitBaseLayer,
kDefaultExpectedRetransmissionTimeMs));
kDefaultExpectedRetransmissionTime));
EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
header, kConditionallyRetransmitHigherLayers,
kDefaultExpectedRetransmissionTimeMs));
kDefaultExpectedRetransmissionTime));
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers,
kDefaultExpectedRetransmissionTimeMs));
kDefaultExpectedRetransmissionTime));
}

TEST_F(RtpSenderVideoTest, RetransmissionTypesVP8HigherLayers) {
@@ -371,14 +371,14 @@ TEST_F(RtpSenderVideoTest, RetransmissionTypesVP8HigherLayers) {
vp8_header.temporalIdx = tid;

EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs));
header, kRetransmitOff, kDefaultExpectedRetransmissionTime));
EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs));
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTime));
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs));
header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTime));
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitHigherLayers | kRetransmitBaseLayer,
kDefaultExpectedRetransmissionTimeMs));
kDefaultExpectedRetransmissionTime));
}
}

@@ -391,20 +391,20 @@ TEST_F(RtpSenderVideoTest, RetransmissionTypesVP9) {
vp9_header.temporal_idx = tid;

EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs));
header, kRetransmitOff, kDefaultExpectedRetransmissionTime));
EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs));
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTime));
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs));
header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTime));
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
header, kRetransmitHigherLayers | kRetransmitBaseLayer,
kDefaultExpectedRetransmissionTimeMs));
kDefaultExpectedRetransmissionTime));
}
}

TEST_F(RtpSenderVideoTest, ConditionalRetransmit) {
const int64_t kFrameIntervalMs = 33;
const int64_t kRttMs = (kFrameIntervalMs * 3) / 2;
constexpr TimeDelta kFrameInterval = TimeDelta::Millis(33);
constexpr TimeDelta kRtt = (kFrameInterval * 3) / 2;
const uint8_t kSettings =
kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers;

@@ -414,14 +414,14 @@ TEST_F(RtpSenderVideoTest, ConditionalRetransmit) {

// Fill averaging window to prevent rounding errors.
constexpr int kNumRepetitions =
(RTPSenderVideo::kTLRateWindowSizeMs + (kFrameIntervalMs / 2)) /
kFrameIntervalMs;
(RTPSenderVideo::kTLRateWindowSizeMs + (kFrameInterval.ms() / 2)) /
kFrameInterval.ms();
constexpr int kPattern[] = {0, 2, 1, 2};
auto& vp8_header = header.video_type_header.emplace<RTPVideoHeaderVP8>();
for (size_t i = 0; i < arraysize(kPattern) * kNumRepetitions; ++i) {
vp8_header.temporalIdx = kPattern[i % arraysize(kPattern)];
rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs);
fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt);
fake_clock_.AdvanceTime(kFrameInterval);
}

// Since we're at the start of the pattern, the next expected frame in TL0 is
@ -429,40 +429,34 @@ TEST_F(RtpSenderVideoTest, ConditionalRetransmit) {
|
||||
// acknowledging that it did not arrive, which means this frame and the next
|
||||
// will not be retransmitted.
|
||||
vp8_header.temporalIdx = 1;
|
||||
EXPECT_FALSE(
|
||||
rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs));
|
||||
fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
|
||||
EXPECT_FALSE(
|
||||
rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs));
|
||||
fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
|
||||
EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt));
|
||||
fake_clock_.AdvanceTime(kFrameInterval);
|
||||
EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt));
|
||||
fake_clock_.AdvanceTime(kFrameInterval);
|
||||
|
||||
// The TL0 frame did not arrive. So allow retransmission.
|
||||
EXPECT_TRUE(
|
||||
rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs));
|
||||
fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
|
||||
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt));
|
||||
fake_clock_.AdvanceTime(kFrameInterval);
|
||||
|
||||
// Insert a frame for TL2. We just had frame in TL1, so the next one there is
|
||||
// in three frames away. TL0 is still too far in the past. So, allow
|
||||
// retransmission.
|
||||
vp8_header.temporalIdx = 2;
|
||||
EXPECT_TRUE(
|
||||
rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs));
|
||||
fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
|
||||
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt));
|
||||
fake_clock_.AdvanceTime(kFrameInterval);
|
||||
|
||||
// Another TL2, next in TL1 is two frames away. Allow again.
|
||||
EXPECT_TRUE(
|
||||
rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs));
|
||||
fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
|
||||
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt));
|
||||
fake_clock_.AdvanceTime(kFrameInterval);
|
||||
|
||||
// Yet another TL2, next in TL1 is now only one frame away, so don't store
|
||||
// for retransmission.
|
||||
EXPECT_FALSE(
|
||||
rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs));
|
||||
EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt));
|
||||
}
|
||||
|
||||
TEST_F(RtpSenderVideoTest, ConditionalRetransmitLimit) {
|
||||
const int64_t kFrameIntervalMs = 200;
|
||||
const int64_t kRttMs = (kFrameIntervalMs * 3) / 2;
|
||||
constexpr TimeDelta kFrameInterval = TimeDelta::Millis(200);
|
||||
constexpr TimeDelta kRtt = (kFrameInterval * 3) / 2;
|
||||
const int32_t kSettings =
|
||||
kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers;
|
||||
|
||||
@ -472,15 +466,15 @@ TEST_F(RtpSenderVideoTest, ConditionalRetransmitLimit) {
|
||||
|
||||
// Fill averaging window to prevent rounding errors.
|
||||
constexpr int kNumRepetitions =
|
||||
(RTPSenderVideo::kTLRateWindowSizeMs + (kFrameIntervalMs / 2)) /
|
||||
kFrameIntervalMs;
|
||||
(RTPSenderVideo::kTLRateWindowSizeMs + (kFrameInterval.ms() / 2)) /
|
||||
kFrameInterval.ms();
|
||||
constexpr int kPattern[] = {0, 2, 2, 2};
|
||||
auto& vp8_header = header.video_type_header.emplace<RTPVideoHeaderVP8>();
|
||||
for (size_t i = 0; i < arraysize(kPattern) * kNumRepetitions; ++i) {
|
||||
vp8_header.temporalIdx = kPattern[i % arraysize(kPattern)];
|
||||
|
||||
rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs);
|
||||
fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
|
||||
rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt);
|
||||
fake_clock_.AdvanceTime(kFrameInterval);
|
||||
}
|
||||
|
||||
// Since we're at the start of the pattern, the next expected frame will be
|
||||
@ -489,8 +483,7 @@ TEST_F(RtpSenderVideoTest, ConditionalRetransmitLimit) {
|
||||
// layer, but that last frame in TL1 was a long time ago in absolute terms,
|
||||
// so allow retransmission anyway.
|
||||
vp8_header.temporalIdx = 1;
|
||||
EXPECT_TRUE(
|
||||
rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs));
|
||||
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt));
|
||||
}
|
||||
|
||||
TEST_F(RtpSenderVideoTest,
|
||||
@ -518,8 +511,10 @@ TEST_F(RtpSenderVideoTest,
|
||||
uint8_t kPayload[kMaxPacketSize] = {};
|
||||
EXPECT_TRUE(rtp_sender_video_->SendVideo(
|
||||
kMediaPayloadId, /*codec_type=*/kVideoCodecVP8, /*rtp_timestamp=*/0,
|
||||
/*capture_time_ms=*/1'000, kPayload, sizeof(kPayload), header,
|
||||
/*expected_retransmission_time_ms=*/absl::nullopt, /*csrcs=*/{}));
|
||||
/*capture_time=*/Timestamp::Seconds(1), kPayload, sizeof(kPayload),
|
||||
header,
|
||||
/*expected_retransmission_time=*/TimeDelta::PlusInfinity(),
|
||||
/*csrcs=*/{}));
|
||||
ASSERT_THAT(transport_.sent_packets(), Not(IsEmpty()));
|
||||
// Ack media ssrc, but not rtx ssrc.
|
||||
rtcp::ReceiverReport rr;
|
||||
@ -537,8 +532,8 @@ TEST_F(RtpSenderVideoTest,
|
||||
|
||||
EXPECT_TRUE(rtp_sender_video_->SendVideo(
|
||||
kMediaPayloadId, /*codec_type=*/kVideoCodecVP8, /*rtp_timestamp=*/0,
|
||||
/*capture_time_ms=*/1'000, payload, frame_size, header,
|
||||
/*expected_retransmission_time_ms=*/1'000, /*csrcs=*/{}));
|
||||
/*capture_time=*/Timestamp::Seconds(1), payload, frame_size, header,
|
||||
/*expected_retransmission_time=*/TimeDelta::Seconds(1), /*csrcs=*/{}));
|
||||
const RtpPacketReceived& media_packet = transport_.last_sent_packet();
|
||||
EXPECT_EQ(media_packet.Ssrc(), kSsrc);
|
||||
|
||||
@ -576,9 +571,9 @@ TEST_F(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) {
|
||||
generic.decode_target_indications = {DecodeTargetIndication::kSwitch,
|
||||
DecodeTargetIndication::kSwitch};
|
||||
hdr.frame_type = VideoFrameType::kVideoFrameKey;
|
||||
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
|
||||
sizeof(kFrame), hdr,
|
||||
kDefaultExpectedRetransmissionTimeMs, {});
|
||||
rtp_sender_video_->SendVideo(
|
||||
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
|
||||
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
|
||||
|
||||
ASSERT_EQ(transport_.packets_sent(), 1);
|
||||
DependencyDescriptor descriptor_key;
|
||||
@ -603,9 +598,9 @@ TEST_F(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) {
|
||||
generic.decode_target_indications = {DecodeTargetIndication::kNotPresent,
|
||||
DecodeTargetIndication::kRequired};
|
||||
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
|
||||
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
|
||||
sizeof(kFrame), hdr,
|
||||
kDefaultExpectedRetransmissionTimeMs, {});
|
||||
rtp_sender_video_->SendVideo(
|
||||
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
|
||||
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
|
||||
|
||||
EXPECT_EQ(transport_.packets_sent(), 2);
|
||||
DependencyDescriptor descriptor_delta;
|
||||
@ -653,9 +648,9 @@ TEST_F(RtpSenderVideoTest,
|
||||
generic.decode_target_indications = {DecodeTargetIndication::kSwitch,
|
||||
DecodeTargetIndication::kSwitch};
|
||||
hdr.frame_type = VideoFrameType::kVideoFrameKey;
|
||||
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
|
||||
sizeof(kFrame), hdr,
|
||||
kDefaultExpectedRetransmissionTimeMs, {});
|
||||
rtp_sender_video_->SendVideo(
|
||||
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
|
||||
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
|
||||
|
||||
ASSERT_EQ(transport_.packets_sent(), 1);
|
||||
DependencyDescriptor descriptor_key;
|
||||
@ -670,9 +665,9 @@ TEST_F(RtpSenderVideoTest,
|
||||
generic.decode_target_indications = {DecodeTargetIndication::kNotPresent,
|
||||
DecodeTargetIndication::kRequired};
|
||||
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
|
||||
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
|
||||
sizeof(kFrame), hdr,
|
||||
kDefaultExpectedRetransmissionTimeMs, {});
|
||||
rtp_sender_video_->SendVideo(
|
||||
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
|
||||
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
|
||||
|
||||
EXPECT_EQ(transport_.packets_sent(), 2);
|
||||
EXPECT_FALSE(transport_.last_sent_packet()
|
||||
@ -700,9 +695,9 @@ TEST_F(RtpSenderVideoTest, PropagatesChainDiffsIntoDependencyDescriptor) {
|
||||
DecodeTargetIndication::kSwitch};
|
||||
generic.chain_diffs = {2};
|
||||
hdr.frame_type = VideoFrameType::kVideoFrameKey;
|
||||
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
|
||||
sizeof(kFrame), hdr,
|
||||
kDefaultExpectedRetransmissionTimeMs, {});
|
||||
rtp_sender_video_->SendVideo(
|
||||
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
|
||||
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
|
||||
|
||||
ASSERT_EQ(transport_.packets_sent(), 1);
DependencyDescriptor descriptor_key;
@ -736,9 +731,9 @@ TEST_F(RtpSenderVideoTest,
generic.active_decode_targets = 0b01;
generic.chain_diffs = {1};
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});

ASSERT_EQ(transport_.packets_sent(), 1);
DependencyDescriptor descriptor_key;
@ -775,9 +770,9 @@ TEST_F(RtpSenderVideoTest,
DecodeTargetIndication::kSwitch};
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_->SetVideoStructure(&video_structure1);
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
// Parse 1st extension.
ASSERT_EQ(transport_.packets_sent(), 1);
DependencyDescriptor descriptor_key1;
@ -792,9 +787,9 @@ TEST_F(RtpSenderVideoTest,
generic.decode_target_indications = {DecodeTargetIndication::kDiscardable,
DecodeTargetIndication::kNotPresent};
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});

ASSERT_EQ(transport_.packets_sent(), 2);
RtpPacket delta_packet = transport_.last_sent_packet();
@ -805,9 +800,9 @@ TEST_F(RtpSenderVideoTest,
DecodeTargetIndication::kSwitch};
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_->SetVideoStructure(&video_structure2);
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
// Parse the 2nd key frame.
ASSERT_EQ(transport_.packets_sent(), 3);
DependencyDescriptor descriptor_key2;
@ -860,9 +855,9 @@ TEST_F(RtpSenderVideoTest,

EXPECT_CALL(*encryptor,
Encrypt(_, _, Not(IsEmpty()), ElementsAreArray(kFrame), _, _));
rtp_sender_video.SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video.SendVideo(kPayload, kType, kTimestamp,
fake_clock_.CurrentTime(), kFrame, sizeof(kFrame),
hdr, kDefaultExpectedRetransmissionTime, {});
// Double check packet with the dependency descriptor is sent.
ASSERT_EQ(transport_.packets_sent(), 1);
EXPECT_TRUE(transport_.last_sent_packet()
@ -883,9 +878,9 @@ TEST_F(RtpSenderVideoTest, PopulateGenericFrameDescriptor) {
generic.dependencies.push_back(kFrameId - 1);
generic.dependencies.push_back(kFrameId - 500);
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});

RtpGenericFrameDescriptor descriptor_wire;
EXPECT_EQ(1, transport_.packets_sent());
@ -919,8 +914,9 @@ void RtpSenderVideoTest::
generic.frame_id = kFrameId;
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
rtp_sender_video_->SendVideo(kPayload, VideoCodecType::kVideoCodecVP8,
kTimestamp, 0, kFrame, sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTime, {});

ASSERT_EQ(transport_.packets_sent(), 1);
// Expect only minimal 1-byte vp8 descriptor was generated.
@ -956,9 +952,9 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) {

RTPVideoHeader hdr;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});

VideoLayersAllocation sent_allocation;
EXPECT_TRUE(
@ -967,9 +963,9 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) {
EXPECT_THAT(sent_allocation.active_spatial_layers, ElementsAre(layer));

// Next key frame also has the allocation.
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
EXPECT_TRUE(
transport_.last_sent_packet()
.GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation));
@ -995,25 +991,25 @@ TEST_F(RtpSenderVideoTest,

RTPVideoHeader hdr;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
EXPECT_TRUE(transport_.last_sent_packet()
.HasExtension<RtpVideoLayersAllocationExtension>());

// No allocation sent on delta frame unless it has been updated.
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
EXPECT_FALSE(transport_.last_sent_packet()
.HasExtension<RtpVideoLayersAllocationExtension>());

// Update the allocation.
rtp_sender_video_->SetVideoLayersAllocation(allocation);
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});

VideoLayersAllocation sent_allocation;
EXPECT_TRUE(
@ -1047,9 +1043,9 @@ TEST_F(RtpSenderVideoTest,

RTPVideoHeader hdr;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
ASSERT_TRUE(transport_.last_sent_packet()
.HasExtension<RtpVideoLayersAllocationExtension>());

@ -1062,9 +1058,9 @@ TEST_F(RtpSenderVideoTest,
allocation.active_spatial_layers.push_back(layer);
rtp_sender_video_->SetVideoLayersAllocation(allocation);
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});

VideoLayersAllocation sent_allocation;
EXPECT_TRUE(
@ -1096,9 +1092,9 @@ TEST_F(RtpSenderVideoTest,

RTPVideoHeader hdr;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
ASSERT_TRUE(transport_.last_sent_packet()
.HasExtension<RtpVideoLayersAllocationExtension>());

@ -1106,9 +1102,9 @@ TEST_F(RtpSenderVideoTest,
allocation.active_spatial_layers[0].frame_rate_fps = 20;
rtp_sender_video_->SetVideoLayersAllocation(allocation);
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});

VideoLayersAllocation sent_allocation;
EXPECT_TRUE(
@ -1140,9 +1136,9 @@ TEST_F(RtpSenderVideoTest,

RTPVideoHeader hdr;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
ASSERT_TRUE(transport_.last_sent_packet()
.HasExtension<RtpVideoLayersAllocationExtension>());

@ -1150,9 +1146,9 @@ TEST_F(RtpSenderVideoTest,
allocation.active_spatial_layers[0].frame_rate_fps = 9;
rtp_sender_video_->SetVideoLayersAllocation(allocation);
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});

VideoLayersAllocation sent_allocation;
EXPECT_TRUE(
@ -1179,9 +1175,9 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) {

RTPVideoHeader hdr;
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});

VideoLayersAllocation sent_allocation;
EXPECT_TRUE(
@ -1190,17 +1186,17 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) {
EXPECT_THAT(sent_allocation.active_spatial_layers, SizeIs(1));

// VideoLayersAllocation not sent on the next delta frame.
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
EXPECT_FALSE(transport_.last_sent_packet()
.HasExtension<RtpVideoLayersAllocationExtension>());

// Update allocation. VideoLayersAllocation should be sent on the next frame.
rtp_sender_video_->SetVideoLayersAllocation(allocation);
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
EXPECT_TRUE(
transport_.last_sent_packet()
.GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation));
@ -1229,17 +1225,17 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationNotSentOnHigherTemporalLayers) {
auto& vp8_header = hdr.video_type_header.emplace<RTPVideoHeaderVP8>();
vp8_header.temporalIdx = 1;

rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
EXPECT_FALSE(transport_.last_sent_packet()
.HasExtension<RtpVideoLayersAllocationExtension>());

// Send a delta frame on tl0.
vp8_header.temporalIdx = 0;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
EXPECT_TRUE(transport_.last_sent_packet()
.HasExtension<RtpVideoLayersAllocationExtension>());
}
@ -1253,8 +1249,9 @@ TEST_F(RtpSenderVideoTest,
RTPVideoHeader hdr;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp,
/*capture_time_ms=*/0, kFrame, sizeof(kFrame),
hdr, kDefaultExpectedRetransmissionTimeMs, {});
/*capture_time=*/Timestamp::MinusInfinity(),
kFrame, sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTime, {});
// No absolute capture time should be set as the capture_time_ms was the
// default value.
for (const RtpPacketReceived& packet : transport_.sent_packets()) {
@ -1266,7 +1263,7 @@ TEST_F(RtpSenderVideoTest, AbsoluteCaptureTime) {
rtp_sender_video_ = std::make_unique<TestRtpSenderVideo>(
&fake_clock_, rtp_module_->RtpSender(), field_trials_);

constexpr int64_t kAbsoluteCaptureTimestampMs = 12345678;
constexpr Timestamp kAbsoluteCaptureTimestamp = Timestamp::Millis(12345678);
uint8_t kFrame[kMaxPacketLength];
rtp_module_->RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(),
kAbsoluteCaptureTimeExtensionId);
@ -1274,8 +1271,8 @@ TEST_F(RtpSenderVideoTest, AbsoluteCaptureTime) {
RTPVideoHeader hdr;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, kAbsoluteCaptureTimestampMs, kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTimeMs, {});
kPayload, kType, kTimestamp, kAbsoluteCaptureTimestamp, kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});

absl::optional<AbsoluteCaptureTime> absolute_capture_time;

@ -1292,10 +1289,10 @@ TEST_F(RtpSenderVideoTest, AbsoluteCaptureTime) {

// Verify the capture timestamp and that the clock offset is set to zero.
ASSERT_TRUE(absolute_capture_time.has_value());
EXPECT_EQ(
absolute_capture_time->absolute_capture_timestamp,
Int64MsToUQ32x32(fake_clock_.ConvertTimestampToNtpTimeInMilliseconds(
kAbsoluteCaptureTimestampMs)));
EXPECT_EQ(absolute_capture_time->absolute_capture_timestamp,
Int64MsToUQ32x32(
fake_clock_.ConvertTimestampToNtpTime(kAbsoluteCaptureTimestamp)
.ToMs()));
EXPECT_EQ(absolute_capture_time->estimated_capture_clock_offset, 0);
}

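The hunks above show the capture-time convention after the change: the old default of /*capture_time_ms=*/0 becomes Timestamp::MinusInfinity(), and a concrete millisecond value becomes a Timestamp constant. Below is a minimal sketch of that mapping, assuming a hypothetical helper name (CaptureTimeFromMs is not part of this change):

#include <cstdint>

#include "api/units/timestamp.h"

namespace webrtc {

// Sketch only: maps the old int64 millisecond capture time onto the Timestamp
// argument the tests above now pass. A non-positive value stands in for the
// old "not set" default and becomes Timestamp::MinusInfinity().
inline Timestamp CaptureTimeFromMs(int64_t capture_time_ms) {
  return capture_time_ms > 0 ? Timestamp::Millis(capture_time_ms)
                             : Timestamp::MinusInfinity();
}

}  // namespace webrtc
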
@ -1312,8 +1309,9 @@ TEST_F(RtpSenderVideoTest, AbsoluteCaptureTimeWithExtensionProvided) {
hdr.frame_type = VideoFrameType::kVideoFrameKey;
hdr.absolute_capture_time = kAbsoluteCaptureTime;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp,
/*capture_time_ms=*/789, kFrame, sizeof(kFrame),
hdr, kDefaultExpectedRetransmissionTimeMs, {});
/*capture_time=*/Timestamp::Millis(789), kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTime, {});

absl::optional<AbsoluteCaptureTime> absolute_capture_time;

@ -1347,9 +1345,9 @@ TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) {
auto& vp8_header = hdr.video_type_header.emplace<RTPVideoHeaderVP8>();
vp8_header.temporalIdx = 0;

rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
EXPECT_FALSE(
transport_.last_sent_packet().HasExtension<PlayoutDelayLimits>());

@ -1357,9 +1355,9 @@ TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) {
hdr.playout_delay = kExpectedDelay;
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
vp8_header.temporalIdx = 1;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
VideoPlayoutDelay received_delay = VideoPlayoutDelay();
ASSERT_TRUE(transport_.last_sent_packet().GetExtension<PlayoutDelayLimits>(
&received_delay));
@ -1369,26 +1367,26 @@ TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) {
// be populated since delivery wasn't guaranteed on the last one.
hdr.playout_delay = VideoPlayoutDelay(); // Indicates "no change".
vp8_header.temporalIdx = 0;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
ASSERT_TRUE(transport_.last_sent_packet().GetExtension<PlayoutDelayLimits>(
&received_delay));
EXPECT_EQ(received_delay, kExpectedDelay);

// The next frame does not need the extensions since its delivery has
// already been guaranteed.
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
EXPECT_FALSE(
transport_.last_sent_packet().HasExtension<PlayoutDelayLimits>());

// Insert key-frame, we need to refresh the state here.
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame,
sizeof(kFrame), hdr,
kDefaultExpectedRetransmissionTimeMs, {});
rtp_sender_video_->SendVideo(
kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});
ASSERT_TRUE(transport_.last_sent_packet().GetExtension<PlayoutDelayLimits>(
&received_delay));
EXPECT_EQ(received_delay, kExpectedDelay);
@ -1402,9 +1400,9 @@ TEST_F(RtpSenderVideoTest, SendGenericVideo) {
// Send keyframe.
RTPVideoHeader video_header;
video_header.frame_type = VideoFrameType::kVideoFrameKey;
ASSERT_TRUE(rtp_sender_video_->SendVideo(kPayloadType, kCodecType, 1234, 4321,
kPayload, sizeof(kPayload),
video_header, absl::nullopt, {}));
ASSERT_TRUE(rtp_sender_video_->SendVideo(
kPayloadType, kCodecType, 1234, fake_clock_.CurrentTime(), kPayload,
sizeof(kPayload), video_header, TimeDelta::PlusInfinity(), {}));

rtc::ArrayView<const uint8_t> sent_payload =
transport_.last_sent_packet().payload();
@ -1416,9 +1414,9 @@ TEST_F(RtpSenderVideoTest, SendGenericVideo) {
// Send delta frame.
const uint8_t kDeltaPayload[] = {13, 42, 32, 93, 13};
video_header.frame_type = VideoFrameType::kVideoFrameDelta;
ASSERT_TRUE(rtp_sender_video_->SendVideo(kPayloadType, kCodecType, 1234, 4321,
kDeltaPayload, sizeof(kDeltaPayload),
video_header, absl::nullopt, {}));
ASSERT_TRUE(rtp_sender_video_->SendVideo(
kPayloadType, kCodecType, 1234, fake_clock_.CurrentTime(), kDeltaPayload,
sizeof(kDeltaPayload), video_header, TimeDelta::PlusInfinity(), {}));

sent_payload = transport_.last_sent_packet().payload();
generic_header = sent_payload[0];
@ -1434,9 +1432,9 @@ TEST_F(RtpSenderVideoTest, SendRawVideo) {
// Send a frame.
RTPVideoHeader video_header;
video_header.frame_type = VideoFrameType::kVideoFrameKey;
ASSERT_TRUE(rtp_sender_video_->SendVideo(kPayloadType, absl::nullopt, 1234,
4321, kPayload, sizeof(kPayload),
video_header, absl::nullopt, {}));
ASSERT_TRUE(rtp_sender_video_->SendVideo(
kPayloadType, absl::nullopt, 1234, fake_clock_.CurrentTime(), kPayload,
sizeof(kPayload), video_header, TimeDelta::PlusInfinity(), {}));

rtc::ArrayView<const uint8_t> sent_payload =
transport_.last_sent_packet().payload();
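
The SendGenericVideo and SendRawVideo hunks above replace absl::nullopt ("retransmission not expected") with TimeDelta::PlusInfinity(), while the millisecond constant kDefaultExpectedRetransmissionTimeMs becomes the TimeDelta constant kDefaultExpectedRetransmissionTime elsewhere. Below is a minimal sketch of that mapping, assuming a hypothetical helper name (ExpectedRetransmissionTimeFromMs is not part of this change):

#include <cstdint>

#include "absl/types/optional.h"
#include "api/units/time_delta.h"

namespace webrtc {

// Sketch only: translates the old optional millisecond value into the
// TimeDelta argument used by the updated call sites; absl::nullopt maps to
// TimeDelta::PlusInfinity().
inline TimeDelta ExpectedRetransmissionTimeFromMs(
    absl::optional<int64_t> expected_retransmission_time_ms) {
  return expected_retransmission_time_ms.has_value()
             ? TimeDelta::Millis(*expected_retransmission_time_ms)
             : TimeDelta::PlusInfinity();
}

}  // namespace webrtc
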
@ -1521,7 +1519,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest,
EXPECT_CALL(*mock_frame_transformer, Transform);
rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp,
*encoded_image, video_header,
kDefaultExpectedRetransmissionTimeMs);
kDefaultExpectedRetransmissionTime);
}

#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
@ -1535,17 +1533,17 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, ValidPayloadTypes) {

EXPECT_TRUE(rtp_sender_video->SendEncodedImage(
0, kType, kTimestamp, *encoded_image, video_header,
kDefaultExpectedRetransmissionTimeMs));
kDefaultExpectedRetransmissionTime));
EXPECT_TRUE(rtp_sender_video->SendEncodedImage(
127, kType, kTimestamp, *encoded_image, video_header,
kDefaultExpectedRetransmissionTimeMs));
kDefaultExpectedRetransmissionTime));
EXPECT_DEATH(rtp_sender_video->SendEncodedImage(
-1, kType, kTimestamp, *encoded_image, video_header,
kDefaultExpectedRetransmissionTimeMs),
kDefaultExpectedRetransmissionTime),
"");
EXPECT_DEATH(rtp_sender_video->SendEncodedImage(
128, kType, kTimestamp, *encoded_image, video_header,
kDefaultExpectedRetransmissionTimeMs),
kDefaultExpectedRetransmissionTime),
"");
}
#endif
@ -1573,14 +1571,14 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, OnTransformedFrameSendsVideo) {
encoder_queue->PostTask([&] {
rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp,
*encoded_image, video_header,
kDefaultExpectedRetransmissionTimeMs);
kDefaultExpectedRetransmissionTime);
});
time_controller_.AdvanceTime(TimeDelta::Zero());
EXPECT_EQ(transport_.packets_sent(), 1);
encoder_queue->PostTask([&] {
rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp,
*encoded_image, video_header,
kDefaultExpectedRetransmissionTimeMs);
kDefaultExpectedRetransmissionTime);
});
time_controller_.AdvanceTime(TimeDelta::Zero());
EXPECT_EQ(transport_.packets_sent(), 2);
@ -1615,7 +1613,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest,
encoder_queue->PostTask([&] {
rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp,
*encoded_image, video_header,
kDefaultExpectedRetransmissionTimeMs);
kDefaultExpectedRetransmissionTime);
});
time_controller_.AdvanceTime(TimeDelta::Millis(1000 / kFramesPerSecond));
}
@ -1662,7 +1660,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest,
});
rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp,
*encoded_image, video_header,
kDefaultExpectedRetransmissionTimeMs);
kDefaultExpectedRetransmissionTime);
}

TEST_F(RtpSenderVideoWithFrameTransformerTest,
@ -1686,7 +1684,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest,
});
rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp,
*encoded_image, video_header,
kDefaultExpectedRetransmissionTimeMs);
kDefaultExpectedRetransmissionTime);
}

TEST_F(RtpSenderVideoWithFrameTransformerTest,
@ -1716,14 +1714,14 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest,
encoder_queue->PostTask([&] {
rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp,
*encoded_image, video_header,
kDefaultExpectedRetransmissionTimeMs);
kDefaultExpectedRetransmissionTime);
});
time_controller_.AdvanceTime(TimeDelta::Zero());
EXPECT_EQ(transport_.packets_sent(), 1);
encoder_queue->PostTask([&] {
rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp,
*encoded_image, video_header,
kDefaultExpectedRetransmissionTimeMs);
kDefaultExpectedRetransmissionTime);
});
time_controller_.AdvanceTime(TimeDelta::Zero());
EXPECT_EQ(transport_.packets_sent(), 2);