[InsertableStreams] Fix video sender simulcast.

The transformer was previously moved into the config of the first
stream only, leaving the remaining simulcast streams without a
transformer, which resulted in incorrect behavior. Use the transformer
in all of the streams.

Pass the sender's SSRC when registering the transformed frame callback,
so that a separate transformer sink is associated with each sender.

Bug: chromium:1065838
Change-Id: I5c52dacb241c68268681b85f875257b24987849e
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/173332
Commit-Queue: Marina Ciocea <marinaciocea@webrtc.org>
Reviewed-by: Tommi <tommi@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31050}
Authored by Marina Ciocea on 2020-04-10 20:19:14 +02:00; committed by Commit Bot
parent d37010c9b7
commit dc69fd2b80
6 changed files with 68 additions and 23 deletions
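
The core of this CL is that one frame transformer is shared by all simulcast encodings, and each sender registers its transformed-frame sink keyed by its own SSRC so transformed frames can be routed back to the stream they came from. The sketch below illustrates only that routing idea; Frame, TransformedSink and SsrcRoutingTransformer are hypothetical stand-ins, not WebRTC API.

// Hypothetical, simplified illustration of per-SSRC sink registration.
// None of these types are WebRTC API; they only mirror the shape of
// RegisterTransformedFrameSinkCallback(callback, ssrc).
#include <cstdint>
#include <functional>
#include <iostream>
#include <map>
#include <string>

struct Frame {
  uint32_t ssrc;
  std::string payload;
};

using TransformedSink = std::function<void(const Frame&)>;

class SsrcRoutingTransformer {
 public:
  void RegisterSink(uint32_t ssrc, TransformedSink sink) {
    sinks_[ssrc] = std::move(sink);
  }
  void UnregisterSink(uint32_t ssrc) { sinks_.erase(ssrc); }

  // After transforming, route the frame back to the sink registered
  // for the frame's SSRC, i.e. to the simulcast stream that produced it.
  void Transform(Frame frame) {
    auto it = sinks_.find(frame.ssrc);
    if (it != sinks_.end())
      it->second(frame);
  }

 private:
  std::map<uint32_t, TransformedSink> sinks_;
};

int main() {
  SsrcRoutingTransformer transformer;
  // Two simulcast streams share the same transformer but register
  // separate sinks, keyed by SSRC.
  transformer.RegisterSink(0x1111, [](const Frame& f) {
    std::cout << "stream 1 sends: " << f.payload << "\n";
  });
  transformer.RegisterSink(0x2222, [](const Frame& f) {
    std::cout << "stream 2 sends: " << f.payload << "\n";
  });

  transformer.Transform({0x2222, "encoded frame"});  // routed to stream 2

  transformer.UnregisterSink(0x1111);
  transformer.UnregisterSink(0x2222);
}

Without the SSRC key, every sender would overwrite the single registered callback, which is the behavior this CL replaces.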

View File

@@ -273,7 +273,7 @@ std::vector<RtpStreamSender> CreateRtpStreamSenders(
         rtp_config.ulpfec.red_payload_type != -1) {
       video_config.red_payload_type = rtp_config.ulpfec.red_payload_type;
     }
-    video_config.frame_transformer = std::move(frame_transformer);
+    video_config.frame_transformer = frame_transformer;
     auto sender_video = std::make_unique<RTPSenderVideo>(video_config);
     rtp_streams.emplace_back(std::move(rtp_rtcp), std::move(sender_video),
                              std::move(fec_generator));
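
For context on the one-line change above: the stream senders are built in a loop, one per simulcast SSRC, so moving the shared ref-counted transformer into the first stream's config leaves it null for every later iteration. A minimal stand-alone sketch of that pitfall, using std::shared_ptr in place of rtc::scoped_refptr and made-up names:

// Hypothetical sketch of the simulcast pitfall; the names and loop are
// illustrative, not the actual CreateRtpStreamSenders() code.
#include <iostream>
#include <memory>
#include <vector>

struct FrameTransformer {};

int main() {
  auto frame_transformer = std::make_shared<FrameTransformer>();
  std::vector<std::shared_ptr<FrameTransformer>> per_stream_config;

  for (int i = 0; i < 3; ++i) {
    // Bug pattern: std::move empties frame_transformer on the first
    // iteration, so streams 2 and 3 would get a null transformer.
    // per_stream_config.push_back(std::move(frame_transformer));

    // Fix: copy the ref-counted handle so every stream shares it.
    per_stream_config.push_back(frame_transformer);
  }

  for (size_t i = 0; i < per_stream_config.size(); ++i)
    std::cout << "stream " << i << " has transformer: "
              << (per_stream_config[i] ? "yes" : "no") << "\n";
}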

View File

@@ -117,7 +117,8 @@ class RtpVideoSenderTestFixture {
       const std::vector<uint32_t>& rtx_ssrcs,
       int payload_type,
       const std::map<uint32_t, RtpPayloadState>& suspended_payload_states,
-      FrameCountObserver* frame_count_observer)
+      FrameCountObserver* frame_count_observer,
+      rtc::scoped_refptr<FrameTransformerInterface> frame_transformer)
       : time_controller_(Timestamp::Millis(1000000)),
         config_(CreateVideoSendStreamConfig(&transport_,
                                             ssrcs,
@@ -151,8 +152,22 @@ class RtpVideoSenderTestFixture {
             &send_delay_stats_),
         &transport_controller_, &event_log_, &retransmission_rate_limiter_,
         std::make_unique<FecControllerDefault>(time_controller_.GetClock()),
-        nullptr, CryptoOptions{}, nullptr);
+        nullptr, CryptoOptions{}, frame_transformer);
   }
+
+  RtpVideoSenderTestFixture(
+      const std::vector<uint32_t>& ssrcs,
+      const std::vector<uint32_t>& rtx_ssrcs,
+      int payload_type,
+      const std::map<uint32_t, RtpPayloadState>& suspended_payload_states,
+      FrameCountObserver* frame_count_observer)
+      : RtpVideoSenderTestFixture(ssrcs,
+                                  rtx_ssrcs,
+                                  payload_type,
+                                  suspended_payload_states,
+                                  frame_count_observer,
+                                  /*frame_transformer=*/nullptr) {}
+
   RtpVideoSenderTestFixture(
       const std::vector<uint32_t>& ssrcs,
       const std::vector<uint32_t>& rtx_ssrcs,
@@ -162,7 +177,8 @@ class RtpVideoSenderTestFixture {
                                   rtx_ssrcs,
                                   payload_type,
                                   suspended_payload_states,
-                                  /*frame_count_observer=*/nullptr) {}
+                                  /*frame_count_observer=*/nullptr,
+                                  /*frame_transformer=*/nullptr) {}

   RtpVideoSender* router() { return router_.get(); }
   MockTransport& transport() { return transport_; }
@@ -801,4 +817,28 @@ TEST(RtpVideoSenderTest, CanSetZeroBitrateWithoutOverhead) {
   test.router()->OnBitrateUpdated(update, /*framerate*/ 0);
 }

+TEST(RtpVideoSenderTest, SimulcastSenderRegistersFrameTransformers) {
+  class MockFrameTransformer : public FrameTransformerInterface {
+   public:
+    MOCK_METHOD3(TransformFrame,
+                 void(std::unique_ptr<video_coding::EncodedFrame> frame,
+                      std::vector<uint8_t> additional_data,
+                      uint32_t ssrc));
+    MOCK_METHOD2(RegisterTransformedFrameSinkCallback,
+                 void(rtc::scoped_refptr<TransformedFrameCallback>, uint32_t));
+    MOCK_METHOD1(UnregisterTransformedFrameSinkCallback, void(uint32_t));
+  };
+
+  rtc::scoped_refptr<MockFrameTransformer> transformer =
+      new rtc::RefCountedObject<MockFrameTransformer>();
+  EXPECT_CALL(*transformer, RegisterTransformedFrameSinkCallback(_, kSsrc1));
+  EXPECT_CALL(*transformer, RegisterTransformedFrameSinkCallback(_, kSsrc2));
+  RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2},
+                                 kPayloadType, {}, nullptr, transformer);
+
+  EXPECT_CALL(*transformer, UnregisterTransformedFrameSinkCallback(kSsrc1));
+  EXPECT_CALL(*transformer, UnregisterTransformedFrameSinkCallback(kSsrc2));
+}
+
 } // namespace webrtc

View File

@@ -160,7 +160,9 @@ RTPSenderVideo::RTPSenderVideo(const Config& config)
           config.frame_transformer
               ? new rtc::RefCountedObject<
                     RTPSenderVideoFrameTransformerDelegate>(
-                    this, std::move(config.frame_transformer))
+                    this,
+                    config.frame_transformer,
+                    rtp_sender_->SSRC())
               : nullptr) {
   if (frame_transformer_delegate_)
     frame_transformer_delegate_->Init();
@@ -713,7 +715,7 @@ bool RTPSenderVideo::SendEncodedImage(
     // The frame will be sent async once transformed.
     return frame_transformer_delegate_->TransformFrame(
         payload_type, codec_type, rtp_timestamp, encoded_image, fragmentation,
-        video_header, expected_retransmission_time_ms, rtp_sender_->SSRC());
+        video_header, expected_retransmission_time_ms);
   }
   return SendVideo(payload_type, codec_type, rtp_timestamp,
                    encoded_image.capture_time_ms_, encoded_image, fragmentation,

View File

@@ -105,12 +105,15 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {

 RTPSenderVideoFrameTransformerDelegate::RTPSenderVideoFrameTransformerDelegate(
     RTPSenderVideo* sender,
-    rtc::scoped_refptr<FrameTransformerInterface> frame_transformer)
-    : sender_(sender), frame_transformer_(std::move(frame_transformer)) {}
+    rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
+    uint32_t ssrc)
+    : sender_(sender),
+      frame_transformer_(std::move(frame_transformer)),
+      ssrc_(ssrc) {}

 void RTPSenderVideoFrameTransformerDelegate::Init() {
-  frame_transformer_->RegisterTransformedFrameCallback(
-      rtc::scoped_refptr<TransformedFrameCallback>(this));
+  frame_transformer_->RegisterTransformedFrameSinkCallback(
+      rtc::scoped_refptr<TransformedFrameCallback>(this), ssrc_);
 }

 bool RTPSenderVideoFrameTransformerDelegate::TransformFrame(
@@ -120,8 +123,7 @@ bool RTPSenderVideoFrameTransformerDelegate::TransformFrame(
     const EncodedImage& encoded_image,
     const RTPFragmentationHeader* fragmentation,
     RTPVideoHeader video_header,
-    absl::optional<int64_t> expected_retransmission_time_ms,
-    uint32_t ssrc) {
+    absl::optional<int64_t> expected_retransmission_time_ms) {
   if (!encoder_queue_)
     encoder_queue_ = TaskQueueBase::Current();
   // TODO(bugs.webrtc.org/11380) remove once this version of TransformFrame() is
@@ -131,10 +133,10 @@ bool RTPSenderVideoFrameTransformerDelegate::TransformFrame(
           encoded_image.GetEncodedData(), video_header, payload_type,
           codec_type, rtp_timestamp, encoded_image.capture_time_ms_,
           fragmentation, expected_retransmission_time_ms),
-      RtpDescriptorAuthentication(video_header), ssrc);
+      RtpDescriptorAuthentication(video_header), ssrc_);
   frame_transformer_->Transform(std::make_unique<TransformableVideoSenderFrame>(
       encoded_image, video_header, payload_type, codec_type, rtp_timestamp,
-      fragmentation, expected_retransmission_time_ms, ssrc));
+      fragmentation, expected_retransmission_time_ms, ssrc_));
   return true;
 }
@@ -212,7 +214,7 @@ void RTPSenderVideoFrameTransformerDelegate::SetVideoStructureUnderLock(
 }

 void RTPSenderVideoFrameTransformerDelegate::Reset() {
-  frame_transformer_->UnregisterTransformedFrameCallback();
+  frame_transformer_->UnregisterTransformedFrameSinkCallback(ssrc_);
   frame_transformer_ = nullptr;
   {
     rtc::CritScope lock(&sender_lock_);

View File

@@ -30,7 +30,8 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback {
  public:
   RTPSenderVideoFrameTransformerDelegate(
       RTPSenderVideo* sender,
-      rtc::scoped_refptr<FrameTransformerInterface> frame_transformer);
+      rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
+      uint32_t ssrc);

   void Init();

@@ -41,8 +42,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback {
       const EncodedImage& encoded_image,
       const RTPFragmentationHeader* fragmentation,
       RTPVideoHeader video_header,
-      absl::optional<int64_t> expected_retransmission_time_ms,
-      uint32_t ssrc);
+      absl::optional<int64_t> expected_retransmission_time_ms);

   // Implements TransformedFrameCallback. Can be called on any thread. Posts
   // the transformed frame to be sent on the |encoder_queue_|.
@@ -71,6 +71,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback {
   rtc::CriticalSection sender_lock_;
   RTPSenderVideo* sender_ RTC_GUARDED_BY(sender_lock_);
   rtc::scoped_refptr<FrameTransformerInterface> frame_transformer_;
+  const uint32_t ssrc_;
   TaskQueueBase* encoder_queue_ = nullptr;
 };

View File

@@ -879,9 +879,9 @@ class MockFrameTransformer : public FrameTransformerInterface {
                void(std::unique_ptr<video_coding::EncodedFrame> frame,
                     std::vector<uint8_t> additional_data,
                     uint32_t ssrc));
-  MOCK_METHOD1(RegisterTransformedFrameCallback,
-               void(rtc::scoped_refptr<TransformedFrameCallback>));
-  MOCK_METHOD0(UnregisterTransformedFrameCallback, void());
+  MOCK_METHOD2(RegisterTransformedFrameSinkCallback,
+               void(rtc::scoped_refptr<TransformedFrameCallback>, uint32_t));
+  MOCK_METHOD1(UnregisterTransformedFrameSinkCallback, void(uint32_t));
 };

 TEST_P(RtpSenderVideoTest, SendEncodedImageWithFrameTransformer) {
@@ -893,7 +893,7 @@ TEST_P(RtpSenderVideoTest, SendEncodedImageWithFrameTransformer) {
   config.field_trials = &field_trials_;
   config.frame_transformer = transformer;
-  EXPECT_CALL(*transformer, RegisterTransformedFrameCallback(_));
+  EXPECT_CALL(*transformer, RegisterTransformedFrameSinkCallback);
   std::unique_ptr<RTPSenderVideo> rtp_sender_video =
       std::make_unique<RTPSenderVideo>(config);
@@ -908,7 +908,7 @@ TEST_P(RtpSenderVideoTest, SendEncodedImageWithFrameTransformer) {
                                      nullptr, hdr,
                                      kDefaultExpectedRetransmissionTimeMs);

-  EXPECT_CALL(*transformer, UnregisterTransformedFrameCallback());
+  EXPECT_CALL(*transformer, UnregisterTransformedFrameSinkCallback);
   rtp_sender_video.reset();
 }