Combine video_send_stream_impl.cc and video_send_stream.cc

There is no reason to have two separate classes, as they both represent the same thing.
Done in order to simplify further refactorings.

Bug: webrtc:14928
Change-Id: I33e5fe032c79396fbae970c8732c90eb2252accb
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/335040
Commit-Queue: Per Kjellander <perkj@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#41561}
This commit is contained in:
Per K 2024-01-18 09:42:58 +01:00 committed by WebRTC LUCI CQ
parent 68b580f116
commit 0b6899272c
8 changed files with 515 additions and 610 deletions

View File

@ -70,7 +70,7 @@
#include "video/send_delay_stats.h"
#include "video/stats_counter.h"
#include "video/video_receive_stream2.h"
#include "video/video_send_stream.h"
#include "video/video_send_stream_impl.h"
namespace webrtc {
@ -388,9 +388,10 @@ class Call final : public webrtc::Call,
// should be accessed on the network thread.
std::map<uint32_t, AudioSendStream*> audio_send_ssrcs_
RTC_GUARDED_BY(worker_thread_);
std::map<uint32_t, VideoSendStream*> video_send_ssrcs_
std::map<uint32_t, VideoSendStreamImpl*> video_send_ssrcs_
RTC_GUARDED_BY(worker_thread_);
std::set<VideoSendStreamImpl*> video_send_streams_
RTC_GUARDED_BY(worker_thread_);
std::set<VideoSendStream*> video_send_streams_ RTC_GUARDED_BY(worker_thread_);
// True if `video_send_streams_` is empty, false if not. The atomic variable
// is used to decide UMA send statistics behavior and enables avoiding a
// PostTask().
@ -886,9 +887,9 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream(
// Copy ssrcs from `config` since `config` is moved.
std::vector<uint32_t> ssrcs = config.rtp.ssrcs;
VideoSendStream* send_stream = new VideoSendStream(
VideoSendStreamImpl* send_stream = new VideoSendStreamImpl(
&env_.clock(), num_cpu_cores_, &env_.task_queue_factory(),
network_thread_, call_stats_->AsRtcpRttStats(), transport_send_.get(),
call_stats_->AsRtcpRttStats(), transport_send_.get(),
config_.encode_metronome, bitrate_allocator_.get(),
video_send_delay_stats_.get(), &env_.event_log(), std::move(config),
std::move(encoder_config), suspended_video_send_ssrcs_,
@ -932,12 +933,12 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
RTC_DCHECK(send_stream != nullptr);
RTC_DCHECK_RUN_ON(worker_thread_);
VideoSendStream* send_stream_impl =
static_cast<VideoSendStream*>(send_stream);
VideoSendStreamImpl* send_stream_impl =
static_cast<VideoSendStreamImpl*>(send_stream);
auto it = video_send_ssrcs_.begin();
while (it != video_send_ssrcs_.end()) {
if (it->second == static_cast<VideoSendStream*>(send_stream)) {
if (it->second == static_cast<VideoSendStreamImpl*>(send_stream)) {
send_stream_impl = it->second;
video_send_ssrcs_.erase(it++);
} else {
@ -953,8 +954,8 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
if (video_send_streams_.empty())
video_send_streams_empty_.store(true, std::memory_order_relaxed);
VideoSendStream::RtpStateMap rtp_states;
VideoSendStream::RtpPayloadStateMap rtp_payload_states;
VideoSendStreamImpl::RtpStateMap rtp_states;
VideoSendStreamImpl::RtpPayloadStateMap rtp_payload_states;
send_stream_impl->StopPermanentlyAndGetRtpStates(&rtp_states,
&rtp_payload_states);
for (const auto& kv : rtp_states) {
@ -1334,7 +1335,7 @@ void Call::DeliverRtcpPacket(rtc::CopyOnWriteBuffer packet) {
rtcp_delivered = true;
}
for (VideoSendStream* stream : video_send_streams_) {
for (VideoSendStreamImpl* stream : video_send_streams_) {
stream->DeliverRtcp(packet.cdata(), packet.size());
rtcp_delivered = true;
}

View File

@ -65,8 +65,6 @@ rtc_library("video") {
"video_quality_observer2.h",
"video_receive_stream2.cc",
"video_receive_stream2.h",
"video_send_stream.cc",
"video_send_stream.h",
"video_send_stream_impl.cc",
"video_send_stream_impl.h",
"video_stream_decoder2.cc",
@ -82,13 +80,16 @@ rtc_library("video") {
":video_stream_encoder_impl",
":video_stream_encoder_interface",
"../api:array_view",
"../api:bitrate_allocation",
"../api:fec_controller_api",
"../api:field_trials_view",
"../api:frame_transformer_interface",
"../api:rtp_parameters",
"../api:rtp_sender_interface",
"../api:scoped_refptr",
"../api:sequence_checker",
"../api:transport_api",
"../api/adaptation:resource_adaptation_api",
"../api/crypto:frame_decryptor_interface",
"../api/crypto:options",
"../api/environment",
@ -105,6 +106,8 @@ rtc_library("video") {
"../api/video:video_bitrate_allocator",
"../api/video:video_codec_constants",
"../api/video:video_frame",
"../api/video:video_frame_type",
"../api/video:video_layers_allocation",
"../api/video:video_rtp_headers",
"../api/video:video_stream_encoder",
"../api/video_codecs:video_codecs_api",
@ -818,6 +821,8 @@ if (rtc_include_tests) {
":video_stream_buffer_controller",
":video_stream_encoder_impl",
":video_stream_encoder_interface",
"../api:array_view",
"../api:bitrate_allocation",
"../api:create_frame_generator",
"../api:fake_frame_decryptor",
"../api:fake_frame_encryptor",
@ -837,6 +842,7 @@ if (rtc_include_tests) {
"../api:time_controller",
"../api:transport_api",
"../api/adaptation:resource_adaptation_api",
"../api/crypto:options",
"../api/environment",
"../api/environment:environment_factory",
@ -858,11 +864,13 @@ if (rtc_include_tests) {
"../api/video:video_bitrate_allocation",
"../api/video:video_frame",
"../api/video:video_frame_type",
"../api/video:video_layers_allocation",
"../api/video:video_rtp_headers",
"../api/video/test:video_frame_matchers",
"../api/video_codecs:scalability_mode",
"../api/video_codecs:video_codecs_api",
"../api/video_codecs:vp8_temporal_layers_factory",
"../call:bitrate_allocator",
"../call:call_interfaces",
"../call:fake_network",
"../call:mock_bitrate_allocator",

View File

@ -1,348 +0,0 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video/video_send_stream.h"
#include <utility>
#include "api/array_view.h"
#include "api/task_queue/task_queue_base.h"
#include "api/video/video_stream_encoder_settings.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/source/rtp_header_extension_size.h"
#include "modules/rtp_rtcp/source/rtp_sender.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
#include "system_wrappers/include/clock.h"
#include "video/adaptation/overuse_frame_detector.h"
#include "video/frame_cadence_adapter.h"
#include "video/video_stream_encoder.h"
namespace webrtc {
namespace {
size_t CalculateMaxHeaderSize(const RtpConfig& config) {
size_t header_size = kRtpHeaderSize;
size_t extensions_size = 0;
size_t fec_extensions_size = 0;
if (!config.extensions.empty()) {
RtpHeaderExtensionMap extensions_map(config.extensions);
extensions_size = RtpHeaderExtensionSize(RTPSender::VideoExtensionSizes(),
extensions_map);
fec_extensions_size =
RtpHeaderExtensionSize(RTPSender::FecExtensionSizes(), extensions_map);
}
header_size += extensions_size;
if (config.flexfec.payload_type >= 0) {
// All FEC extensions again plus maximum FlexFec overhead.
header_size += fec_extensions_size + 32;
} else {
if (config.ulpfec.ulpfec_payload_type >= 0) {
// Header with all the FEC extensions will be repeated plus maximum
// UlpFec overhead.
header_size += fec_extensions_size + 18;
}
if (config.ulpfec.red_payload_type >= 0) {
header_size += 1; // RED header.
}
}
// Additional room for Rtx.
if (config.rtx.payload_type >= 0)
header_size += kRtxHeaderSize;
return header_size;
}
// Selects how the encoder reports bitrate allocations: via the
// video-layers-allocation RTP header extension when it is negotiated,
// otherwise via VideoBitrateAllocation (always if the field trial is on,
// else only when screensharing).
VideoStreamEncoder::BitrateAllocationCallbackType
GetBitrateAllocationCallbackType(const VideoSendStream::Config& config,
                                 const FieldTrialsView& field_trials) {
  using CallbackType = VideoStreamEncoder::BitrateAllocationCallbackType;
  const RtpExtension::Filter filter =
      config.crypto_options.srtp.enable_encrypted_rtp_header_extensions
          ? RtpExtension::Filter::kPreferEncryptedExtension
          : RtpExtension::Filter::kDiscardEncryptedExtension;
  if (webrtc::RtpExtension::FindHeaderExtensionByUri(
          config.rtp.extensions,
          webrtc::RtpExtension::kVideoLayersAllocationUri, filter)) {
    return CallbackType::kVideoLayersAllocation;
  }
  if (field_trials.IsEnabled("WebRTC-Target-Bitrate-Rtcp")) {
    return CallbackType::kVideoBitrateAllocation;
  }
  return CallbackType::kVideoBitrateAllocationWhenScreenSharing;
}
// Bundles the stream's frame encryptor and crypto options for the RTP sender.
RtpSenderFrameEncryptionConfig CreateFrameEncryptionConfig(
    const VideoSendStream::Config* config) {
  RtpSenderFrameEncryptionConfig result;
  result.crypto_options = config->crypto_options;
  result.frame_encryptor = config->frame_encryptor.get();
  return result;
}
// Wires the RTT stats, encoder RTCP feedback and stats proxy into the
// observer set consumed by the RTP video sender.
RtpSenderObservers CreateObservers(RtcpRttStats* call_stats,
                                   EncoderRtcpFeedback* encoder_feedback,
                                   SendStatisticsProxy* stats_proxy,
                                   SendPacketObserver* send_packet_observer) {
  RtpSenderObservers o;
  o.rtcp_rtt_stats = call_stats;
  // The encoder feedback object handles both keyframe requests and loss
  // notifications.
  o.intra_frame_callback = encoder_feedback;
  o.rtcp_loss_notification_observer = encoder_feedback;
  // All RTP/RTCP statistics are routed to the same stats proxy.
  o.report_block_data_observer = stats_proxy;
  o.rtp_stats = stats_proxy;
  o.bitrate_observer = stats_proxy;
  o.frame_count_observer = stats_proxy;
  o.rtcp_type_observer = stats_proxy;
  o.send_packet_observer = send_packet_observer;
  return o;
}
// Constructs a VideoStreamEncoder that runs on a dedicated "EncoderQueue"
// task queue created from `task_queue_factory`.
std::unique_ptr<VideoStreamEncoder> CreateVideoStreamEncoder(
    Clock* clock,
    int num_cpu_cores,
    TaskQueueFactory* task_queue_factory,
    SendStatisticsProxy* stats_proxy,
    const VideoStreamEncoderSettings& encoder_settings,
    VideoStreamEncoder::BitrateAllocationCallbackType
        bitrate_allocation_callback_type,
    const FieldTrialsView& field_trials,
    Metronome* metronome,
    webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) {
  auto encoder_queue = task_queue_factory->CreateTaskQueue(
      "EncoderQueue", TaskQueueFactory::Priority::NORMAL);
  // Keep a raw pointer before the queue is moved into the encoder; the frame
  // cadence adapter needs it.
  TaskQueueBase* raw_queue = encoder_queue.get();
  auto cadence_adapter = FrameCadenceAdapterInterface::Create(
      clock, raw_queue, metronome,
      /*worker_queue=*/TaskQueueBase::Current(), field_trials);
  return std::make_unique<VideoStreamEncoder>(
      clock, num_cpu_cores, stats_proxy, encoder_settings,
      std::make_unique<OveruseFrameDetector>(stats_proxy),
      std::move(cadence_adapter), std::move(encoder_queue),
      bitrate_allocation_callback_type, field_trials, encoder_selector);
}
} // namespace
namespace internal {
// Builds the full send pipeline: stats proxy, encoder, RTCP feedback and the
// transport-owned RTP video sender, then hands them to the internal
// VideoSendStreamImpl (`send_stream_`). Member-initializer order matters:
// `config` is consumed into `config_` part-way through the list, so earlier
// members use `config` and later ones must use `config_`.
VideoSendStream::VideoSendStream(
    Clock* clock,
    int num_cpu_cores,
    TaskQueueFactory* task_queue_factory,
    TaskQueueBase* network_queue,
    RtcpRttStats* call_stats,
    RtpTransportControllerSendInterface* transport,
    Metronome* metronome,
    BitrateAllocatorInterface* bitrate_allocator,
    SendDelayStats* send_delay_stats,
    RtcEventLog* event_log,
    VideoSendStream::Config config,
    VideoEncoderConfig encoder_config,
    const std::map<uint32_t, RtpState>& suspended_ssrcs,
    const std::map<uint32_t, RtpPayloadState>& suspended_payload_states,
    std::unique_ptr<FecController> fec_controller,
    const FieldTrialsView& field_trials)
    : transport_(transport),
      // `config` is still valid here; it is moved into `config_` below.
      stats_proxy_(clock, config, encoder_config.content_type, field_trials),
      send_packet_observer_(&stats_proxy_, send_delay_stats),
      config_(std::move(config)),
      content_type_(encoder_config.content_type),
      video_stream_encoder_(CreateVideoStreamEncoder(
          clock,
          num_cpu_cores,
          task_queue_factory,
          &stats_proxy_,
          config_.encoder_settings,
          GetBitrateAllocationCallbackType(config_, field_trials),
          field_trials,
          metronome,
          config_.encoder_selector)),
      encoder_feedback_(
          clock,
          config_.rtp.ssrcs,
          video_stream_encoder_.get(),
          // Lets the feedback object resolve sent-packet info through the
          // RTP sender created just below.
          [this](uint32_t ssrc, const std::vector<uint16_t>& seq_nums) {
            return rtp_video_sender_->GetSentRtpPacketInfos(ssrc, seq_nums);
          }),
      rtp_video_sender_(transport->CreateRtpVideoSender(
          suspended_ssrcs,
          suspended_payload_states,
          config_.rtp,
          config_.rtcp_report_interval_ms,
          config_.send_transport,
          CreateObservers(call_stats,
                          &encoder_feedback_,
                          &stats_proxy_,
                          &send_packet_observer_),
          event_log,
          std::move(fec_controller),
          CreateFrameEncryptionConfig(&config_),
          config_.frame_transformer)),
      send_stream_(clock,
                   &stats_proxy_,
                   transport,
                   bitrate_allocator,
                   video_stream_encoder_.get(),
                   &config_,
                   encoder_config.max_bitrate_bps,
                   encoder_config.bitrate_priority,
                   encoder_config.content_type,
                   rtp_video_sender_,
                   field_trials) {
  RTC_DCHECK(config_.encoder_settings.encoder_factory);
  RTC_DCHECK(config_.encoder_settings.bitrate_allocator_factory);
  video_stream_encoder_->SetFecControllerOverride(rtp_video_sender_);
  // Applies the initial encoder configuration (consumes `encoder_config`).
  ReconfigureVideoEncoder(std::move(encoder_config));
}
// The stream must already be stopped (DCHECKed via `running_`); releases the
// RTP video sender back to the transport controller that created it.
VideoSendStream::~VideoSendStream() {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  RTC_DCHECK(!running_);
  transport_->DestroyRtpVideoSender(rtp_video_sender_);
}
// Starts the stream with every configured RTP layer marked active.
void VideoSendStream::Start() {
  StartPerRtpStream(std::vector<bool>(config_.rtp.ssrcs.size(), true));
}
void VideoSendStream::StartPerRtpStream(const std::vector<bool> active_layers) {
RTC_DCHECK_RUN_ON(&thread_checker_);
// Keep our `running_` flag expected state in sync with active layers since
// the `send_stream_` will be implicitly stopped/started depending on the
// state of the layers.
bool running = false;
rtc::StringBuilder active_layers_string;
active_layers_string << "{";
for (size_t i = 0; i < active_layers.size(); ++i) {
if (active_layers[i]) {
running = true;
active_layers_string << "1";
} else {
active_layers_string << "0";
}
if (i < active_layers.size() - 1) {
active_layers_string << ", ";
}
}
active_layers_string << "}";
RTC_LOG(LS_INFO) << "StartPerRtpStream: " << active_layers_string.str();
send_stream_.StartPerRtpStream(active_layers);
running_ = running;
}
// Stops the stream; no-op if it is not currently running.
void VideoSendStream::Stop() {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  if (running_) {
    RTC_DLOG(LS_INFO) << "VideoSendStream::Stop";
    running_ = false;
    send_stream_.Stop();
  }
}
// True while at least one RTP layer is active (set by StartPerRtpStream,
// cleared by Stop/StopPermanentlyAndGetRtpStates).
bool VideoSendStream::started() {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  return running_;
}
// Registers an adaptation resource; delegates to the encoder.
void VideoSendStream::AddAdaptationResource(
    rtc::scoped_refptr<Resource> resource) {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  video_stream_encoder_->AddAdaptationResource(resource);
}
// Returns the encoder's current set of adaptation resources.
std::vector<rtc::scoped_refptr<Resource>>
VideoSendStream::GetAdaptationResources() {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  return video_stream_encoder_->GetAdaptationResources();
}
// Attaches the video frame source and degradation preference to the encoder.
void VideoSendStream::SetSource(
    rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
    const DegradationPreference& degradation_preference) {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  video_stream_encoder_->SetSource(source, degradation_preference);
}
// Convenience overload without a completion callback.
void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) {
  ReconfigureVideoEncoder(std::move(config), nullptr);
}
// Applies a new encoder configuration. The content type must not change for
// the lifetime of the stream (DCHECKed). The max payload size passed to the
// encoder leaves room for worst-case RTP header overhead.
void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config,
                                              SetParametersCallback callback) {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  RTC_DCHECK_EQ(content_type_, config.content_type);
  // `config` is logged here, before being moved into the encoder below.
  RTC_LOG(LS_VERBOSE) << "Encoder config: " << config.ToString()
                      << " VideoSendStream config: " << config_.ToString();
  video_stream_encoder_->ConfigureEncoder(
      std::move(config),
      config_.rtp.max_packet_size - CalculateMaxHeaderSize(config_.rtp),
      std::move(callback));
}
// Returns a snapshot of send-side statistics collected by `stats_proxy_`.
VideoSendStream::Stats VideoSendStream::GetStats() {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  return stats_proxy_.GetStats();
}
// Exposes the pacing factor configured by the internal send stream, if any.
// Accessed by test::VideoSendStreamPeer (friended in the header).
absl::optional<float> VideoSendStream::GetPacingFactorOverride() const {
  return send_stream_.configured_pacing_factor();
}
// Final shutdown: stops the encoder and internal send stream, then extracts
// the RTP/payload state so the caller can preserve it for a new stream.
void VideoSendStream::StopPermanentlyAndGetRtpStates(
    VideoSendStream::RtpStateMap* rtp_state_map,
    VideoSendStream::RtpPayloadStateMap* payload_state_map) {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  video_stream_encoder_->Stop();
  running_ = false;
  // Always run these cleanup steps regardless of whether running_ was set
  // or not. This will unregister callbacks before destruction.
  // See `VideoSendStreamImpl::StopVideoSendStream` for more.
  send_stream_.Stop();
  *rtp_state_map = send_stream_.GetRtpStates();
  *payload_state_map = send_stream_.GetRtpPayloadStates();
}
// Hands an incoming RTCP packet to the internal send stream.
void VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  send_stream_.DeliverRtcp(packet, length);
}
void VideoSendStream::GenerateKeyFrame(const std::vector<std::string>& rids) {
RTC_DCHECK_RUN_ON(&thread_checker_);
// Map rids to layers. If rids is empty, generate a keyframe for all layers.
std::vector<VideoFrameType> next_frames(config_.rtp.ssrcs.size(),
VideoFrameType::kVideoFrameKey);
if (!config_.rtp.rids.empty() && !rids.empty()) {
std::fill(next_frames.begin(), next_frames.end(),
VideoFrameType::kVideoFrameDelta);
for (const auto& rid : rids) {
for (size_t i = 0; i < config_.rtp.rids.size(); i++) {
if (config_.rtp.rids[i] == rid) {
next_frames[i] = VideoFrameType::kVideoFrameKey;
break;
}
}
}
}
if (video_stream_encoder_) {
video_stream_encoder_->SendKeyFrame(next_frames);
}
}
} // namespace internal
} // namespace webrtc

View File

@ -1,143 +0,0 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef VIDEO_VIDEO_SEND_STREAM_H_
#define VIDEO_VIDEO_SEND_STREAM_H_
#include <map>
#include <memory>
#include <string>
#include <vector>
#include "api/fec_controller.h"
#include "api/field_trials_view.h"
#include "api/metronome/metronome.h"
#include "api/sequence_checker.h"
#include "api/task_queue/pending_task_safety_flag.h"
#include "call/bitrate_allocator.h"
#include "call/video_receive_stream.h"
#include "call/video_send_stream.h"
#include "rtc_base/event.h"
#include "rtc_base/system/no_unique_address.h"
#include "video/encoder_rtcp_feedback.h"
#include "video/send_delay_stats.h"
#include "video/send_statistics_proxy.h"
#include "video/video_send_stream_impl.h"
#include "video/video_stream_encoder_interface.h"
namespace webrtc {
namespace test {
class VideoSendStreamPeer;
} // namespace test
class CallStats;
class IvfFileWriter;
class RateLimiter;
class RtpRtcp;
class RtpTransportControllerSendInterface;
class RtcEventLog;
namespace internal {
class VideoSendStreamImpl;
// VideoSendStream implements webrtc::VideoSendStream.
// Internally, it delegates all public methods to VideoSendStreamImpl and / or
// VideoStreamEncoder.
class VideoSendStream : public webrtc::VideoSendStream {
 public:
  using RtpStateMap = std::map<uint32_t, RtpState>;
  using RtpPayloadStateMap = std::map<uint32_t, RtpPayloadState>;

  VideoSendStream(
      Clock* clock,
      int num_cpu_cores,
      TaskQueueFactory* task_queue_factory,
      TaskQueueBase* network_queue,
      RtcpRttStats* call_stats,
      RtpTransportControllerSendInterface* transport,
      Metronome* metronome,
      BitrateAllocatorInterface* bitrate_allocator,
      SendDelayStats* send_delay_stats,
      RtcEventLog* event_log,
      VideoSendStream::Config config,
      VideoEncoderConfig encoder_config,
      const std::map<uint32_t, RtpState>& suspended_ssrcs,
      const std::map<uint32_t, RtpPayloadState>& suspended_payload_states,
      std::unique_ptr<FecController> fec_controller,
      const FieldTrialsView& field_trials);
  ~VideoSendStream() override;

  // Forwards an incoming RTCP packet to the internal send stream.
  void DeliverRtcp(const uint8_t* packet, size_t length);

  // webrtc::VideoSendStream implementation.
  void Start() override;
  void StartPerRtpStream(std::vector<bool> active_layers) override;
  void Stop() override;
  bool started() override;
  void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) override;
  std::vector<rtc::scoped_refptr<Resource>> GetAdaptationResources() override;
  void SetSource(rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
                 const DegradationPreference& degradation_preference) override;
  void ReconfigureVideoEncoder(VideoEncoderConfig config) override;
  void ReconfigureVideoEncoder(VideoEncoderConfig config,
                               SetParametersCallback callback) override;
  Stats GetStats() override;

  // Permanently stops the stream and returns RTP/payload state so the caller
  // can carry it over to a replacement stream.
  void StopPermanentlyAndGetRtpStates(RtpStateMap* rtp_state_map,
                                      RtpPayloadStateMap* payload_state_map);
  void GenerateKeyFrame(const std::vector<std::string>& rids) override;

 private:
  // Grants tests access to private hooks such as GetPacingFactorOverride().
  friend class test::VideoSendStreamPeer;

  // Forwards send-packet notifications to both the stats proxy and the
  // send-delay tracker.
  class OnSendPacketObserver : public SendPacketObserver {
   public:
    OnSendPacketObserver(SendStatisticsProxy* stats_proxy,
                         SendDelayStats* send_delay_stats)
        : stats_proxy_(*stats_proxy), send_delay_stats_(*send_delay_stats) {}

    void OnSendPacket(absl::optional<uint16_t> packet_id,
                      Timestamp capture_time,
                      uint32_t ssrc) override {
      stats_proxy_.OnSendPacket(ssrc, capture_time);
      if (packet_id.has_value()) {
        send_delay_stats_.OnSendPacket(*packet_id, capture_time, ssrc);
      }
    }

   private:
    SendStatisticsProxy& stats_proxy_;
    SendDelayStats& send_delay_stats_;
  };

  absl::optional<float> GetPacingFactorOverride() const;

  RTC_NO_UNIQUE_ADDRESS SequenceChecker thread_checker_;
  RtpTransportControllerSendInterface* const transport_;
  SendStatisticsProxy stats_proxy_;
  OnSendPacketObserver send_packet_observer_;
  const VideoSendStream::Config config_;
  const VideoEncoderConfig::ContentType content_type_;
  std::unique_ptr<VideoStreamEncoderInterface> video_stream_encoder_;
  EncoderRtcpFeedback encoder_feedback_;
  // Owned by `transport_`; created in the constructor and released via
  // DestroyRtpVideoSender() in the destructor.
  RtpVideoSenderInterface* const rtp_video_sender_;
  VideoSendStreamImpl send_stream_;
  bool running_ RTC_GUARDED_BY(thread_checker_) = false;
};
} // namespace internal
} // namespace webrtc
#endif // VIDEO_VIDEO_SEND_STREAM_H_

View File

@ -13,21 +13,51 @@
#include <algorithm>
#include <cstdint>
#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "absl/algorithm/container.h"
#include "absl/types/optional.h"
#include "api/adaptation/resource.h"
#include "api/call/bitrate_allocation.h"
#include "api/crypto/crypto_options.h"
#include "api/fec_controller.h"
#include "api/field_trials_view.h"
#include "api/metronome/metronome.h"
#include "api/rtp_parameters.h"
#include "api/rtp_sender_interface.h"
#include "api/scoped_refptr.h"
#include "api/sequence_checker.h"
#include "api/task_queue/pending_task_safety_flag.h"
#include "api/task_queue/task_queue_base.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/units/data_rate.h"
#include "api/units/time_delta.h"
#include "api/video/encoded_image.h"
#include "api/video/video_bitrate_allocation.h"
#include "api/video/video_codec_constants.h"
#include "api/video/video_codec_type.h"
#include "api/video/video_frame.h"
#include "api/video/video_frame_type.h"
#include "api/video/video_layers_allocation.h"
#include "api/video/video_source_interface.h"
#include "api/video/video_stream_encoder_settings.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "call/bitrate_allocator.h"
#include "call/rtp_config.h"
#include "call/rtp_transport_controller_send_interface.h"
#include "call/video_send_stream.h"
#include "modules/pacing/pacing_controller.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_header_extension_size.h"
#include "modules/rtp_rtcp/source/rtp_sender.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "rtc_base/checks.h"
#include "rtc_base/experiments/alr_experiment.h"
#include "rtc_base/experiments/field_trial_parser.h"
@ -35,9 +65,18 @@
#include "rtc_base/experiments/rate_control_settings.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "rtc_base/trace_event.h"
#include "system_wrappers/include/clock.h"
#include "system_wrappers/include/field_trial.h"
#include "video/adaptation/overuse_frame_detector.h"
#include "video/config/video_encoder_config.h"
#include "video/encoder_rtcp_feedback.h"
#include "video/frame_cadence_adapter.h"
#include "video/send_delay_stats.h"
#include "video/send_statistics_proxy.h"
#include "video/video_stream_encoder.h"
#include "video/video_stream_encoder_interface.h"
namespace webrtc {
namespace internal {
@ -209,6 +248,107 @@ int GetDefaultMinVideoBitrateBps(VideoCodecType codec_type) {
return kDefaultMinVideoBitrateBps;
}
size_t CalculateMaxHeaderSize(const RtpConfig& config) {
size_t header_size = kRtpHeaderSize;
size_t extensions_size = 0;
size_t fec_extensions_size = 0;
if (!config.extensions.empty()) {
RtpHeaderExtensionMap extensions_map(config.extensions);
extensions_size = RtpHeaderExtensionSize(RTPSender::VideoExtensionSizes(),
extensions_map);
fec_extensions_size =
RtpHeaderExtensionSize(RTPSender::FecExtensionSizes(), extensions_map);
}
header_size += extensions_size;
if (config.flexfec.payload_type >= 0) {
// All FEC extensions again plus maximum FlexFec overhead.
header_size += fec_extensions_size + 32;
} else {
if (config.ulpfec.ulpfec_payload_type >= 0) {
// Header with all the FEC extensions will be repeated plus maximum
// UlpFec overhead.
header_size += fec_extensions_size + 18;
}
if (config.ulpfec.red_payload_type >= 0) {
header_size += 1; // RED header.
}
}
// Additional room for Rtx.
if (config.rtx.payload_type >= 0)
header_size += kRtxHeaderSize;
return header_size;
}
VideoStreamEncoder::BitrateAllocationCallbackType
GetBitrateAllocationCallbackType(const VideoSendStream::Config& config,
const FieldTrialsView& field_trials) {
if (webrtc::RtpExtension::FindHeaderExtensionByUri(
config.rtp.extensions,
webrtc::RtpExtension::kVideoLayersAllocationUri,
config.crypto_options.srtp.enable_encrypted_rtp_header_extensions
? RtpExtension::Filter::kPreferEncryptedExtension
: RtpExtension::Filter::kDiscardEncryptedExtension)) {
return VideoStreamEncoder::BitrateAllocationCallbackType::
kVideoLayersAllocation;
}
if (field_trials.IsEnabled("WebRTC-Target-Bitrate-Rtcp")) {
return VideoStreamEncoder::BitrateAllocationCallbackType::
kVideoBitrateAllocation;
}
return VideoStreamEncoder::BitrateAllocationCallbackType::
kVideoBitrateAllocationWhenScreenSharing;
}
RtpSenderFrameEncryptionConfig CreateFrameEncryptionConfig(
const VideoSendStream::Config* config) {
RtpSenderFrameEncryptionConfig frame_encryption_config;
frame_encryption_config.frame_encryptor = config->frame_encryptor.get();
frame_encryption_config.crypto_options = config->crypto_options;
return frame_encryption_config;
}
RtpSenderObservers CreateObservers(RtcpRttStats* call_stats,
EncoderRtcpFeedback* encoder_feedback,
SendStatisticsProxy* stats_proxy,
SendPacketObserver* send_packet_observer) {
RtpSenderObservers observers;
observers.rtcp_rtt_stats = call_stats;
observers.intra_frame_callback = encoder_feedback;
observers.rtcp_loss_notification_observer = encoder_feedback;
observers.report_block_data_observer = stats_proxy;
observers.rtp_stats = stats_proxy;
observers.bitrate_observer = stats_proxy;
observers.frame_count_observer = stats_proxy;
observers.rtcp_type_observer = stats_proxy;
observers.send_packet_observer = send_packet_observer;
return observers;
}
std::unique_ptr<VideoStreamEncoderInterface> CreateVideoStreamEncoder(
Clock* clock,
int num_cpu_cores,
TaskQueueFactory* task_queue_factory,
SendStatisticsProxy* stats_proxy,
const VideoStreamEncoderSettings& encoder_settings,
VideoStreamEncoder::BitrateAllocationCallbackType
bitrate_allocation_callback_type,
const FieldTrialsView& field_trials,
Metronome* metronome,
webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) {
std::unique_ptr<TaskQueueBase, TaskQueueDeleter> encoder_queue =
task_queue_factory->CreateTaskQueue("EncoderQueue",
TaskQueueFactory::Priority::NORMAL);
TaskQueueBase* encoder_queue_ptr = encoder_queue.get();
return std::make_unique<VideoStreamEncoder>(
clock, num_cpu_cores, stats_proxy, encoder_settings,
std::make_unique<OveruseFrameDetector>(stats_proxy),
FrameCadenceAdapterInterface::Create(
clock, encoder_queue_ptr, metronome,
/*worker_queue=*/TaskQueueBase::Current(), field_trials),
std::move(encoder_queue), bitrate_allocation_callback_type, field_trials,
encoder_selector);
}
} // namespace
PacingConfig::PacingConfig(const FieldTrialsView& field_trials)
@ -222,66 +362,96 @@ PacingConfig::~PacingConfig() = default;
VideoSendStreamImpl::VideoSendStreamImpl(
Clock* clock,
SendStatisticsProxy* stats_proxy,
int num_cpu_cores,
TaskQueueFactory* task_queue_factory,
RtcpRttStats* call_stats,
RtpTransportControllerSendInterface* transport,
Metronome* metronome,
BitrateAllocatorInterface* bitrate_allocator,
VideoStreamEncoderInterface* video_stream_encoder,
const VideoSendStream::Config* config,
int initial_encoder_max_bitrate,
double initial_encoder_bitrate_priority,
VideoEncoderConfig::ContentType content_type,
RtpVideoSenderInterface* rtp_video_sender,
const FieldTrialsView& field_trials)
: clock_(clock),
has_alr_probing_(config->periodic_alr_bandwidth_probing ||
GetAlrSettings(field_trials, content_type)),
SendDelayStats* send_delay_stats,
RtcEventLog* event_log,
VideoSendStream::Config config,
VideoEncoderConfig encoder_config,
const std::map<uint32_t, RtpState>& suspended_ssrcs,
const std::map<uint32_t, RtpPayloadState>& suspended_payload_states,
std::unique_ptr<FecController> fec_controller,
const FieldTrialsView& field_trials,
std::unique_ptr<VideoStreamEncoderInterface> video_stream_encoder_for_test)
: transport_(transport),
stats_proxy_(clock, config, encoder_config.content_type, field_trials),
send_packet_observer_(&stats_proxy_, send_delay_stats),
config_(std::move(config)),
content_type_(encoder_config.content_type),
video_stream_encoder_(
video_stream_encoder_for_test
? std::move(video_stream_encoder_for_test)
: CreateVideoStreamEncoder(
clock,
num_cpu_cores,
task_queue_factory,
&stats_proxy_,
config_.encoder_settings,
GetBitrateAllocationCallbackType(config_, field_trials),
field_trials,
metronome,
config_.encoder_selector)),
encoder_feedback_(
clock,
config_.rtp.ssrcs,
video_stream_encoder_.get(),
[this](uint32_t ssrc, const std::vector<uint16_t>& seq_nums) {
return rtp_video_sender_->GetSentRtpPacketInfos(ssrc, seq_nums);
}),
rtp_video_sender_(transport->CreateRtpVideoSender(
suspended_ssrcs,
suspended_payload_states,
config_.rtp,
config_.rtcp_report_interval_ms,
config_.send_transport,
CreateObservers(call_stats,
&encoder_feedback_,
&stats_proxy_,
&send_packet_observer_),
event_log,
std::move(fec_controller),
CreateFrameEncryptionConfig(&config_),
config_.frame_transformer)),
clock_(clock),
has_alr_probing_(
config_.periodic_alr_bandwidth_probing ||
GetAlrSettings(field_trials, encoder_config.content_type)),
pacing_config_(PacingConfig(field_trials)),
stats_proxy_(stats_proxy),
config_(config),
worker_queue_(TaskQueueBase::Current()),
timed_out_(false),
transport_(transport),
bitrate_allocator_(bitrate_allocator),
disable_padding_(true),
max_padding_bitrate_(0),
encoder_min_bitrate_bps_(0),
encoder_max_bitrate_bps_(
GetInitialEncoderMaxBitrate(initial_encoder_max_bitrate)),
GetInitialEncoderMaxBitrate(encoder_config.max_bitrate_bps)),
encoder_target_rate_bps_(0),
encoder_bitrate_priority_(initial_encoder_bitrate_priority),
video_stream_encoder_(video_stream_encoder),
rtp_video_sender_(rtp_video_sender),
configured_pacing_factor_(GetConfiguredPacingFactor(*config_,
content_type,
encoder_bitrate_priority_(encoder_config.bitrate_priority),
configured_pacing_factor_(GetConfiguredPacingFactor(config_,
content_type_,
pacing_config_,
field_trials)) {
RTC_DCHECK_GE(config_->rtp.payload_type, 0);
RTC_DCHECK_LE(config_->rtp.payload_type, 127);
RTC_DCHECK(!config_->rtp.ssrcs.empty());
RTC_DCHECK_GE(config_.rtp.payload_type, 0);
RTC_DCHECK_LE(config_.rtp.payload_type, 127);
RTC_DCHECK(!config_.rtp.ssrcs.empty());
RTC_DCHECK(transport_);
RTC_DCHECK_NE(initial_encoder_max_bitrate, 0);
RTC_LOG(LS_INFO) << "VideoSendStreamImpl: " << config_->ToString();
RTC_DCHECK_NE(encoder_max_bitrate_bps_, 0);
RTC_LOG(LS_INFO) << "VideoSendStreamImpl: " << config_.ToString();
RTC_CHECK(AlrExperimentSettings::MaxOneFieldTrialEnabled(field_trials));
// Only request rotation at the source when we positively know that the remote
// side doesn't support the rotation extension. This allows us to prepare the
// encoder in the expectation that rotation is supported - which is the common
// case.
bool rotation_applied = absl::c_none_of(
config_->rtp.extensions, [](const RtpExtension& extension) {
return extension.uri == RtpExtension::kVideoRotationUri;
});
video_stream_encoder_->SetSink(this, rotation_applied);
absl::optional<bool> enable_alr_bw_probing;
// If send-side BWE is enabled, check if we should apply updated probing and
// pacing settings.
if (configured_pacing_factor_) {
absl::optional<AlrExperimentSettings> alr_settings =
GetAlrSettings(field_trials, content_type);
GetAlrSettings(field_trials, content_type_);
int queue_time_limit_ms;
if (alr_settings) {
enable_alr_bw_probing = true;
@ -293,11 +463,11 @@ VideoSendStreamImpl::VideoSendStreamImpl(
queue_time_limit_ms = pacing_config_.max_pacing_delay.Get().ms();
}
transport->SetQueueTimeLimit(queue_time_limit_ms);
transport_->SetQueueTimeLimit(queue_time_limit_ms);
}
if (config_->periodic_alr_bandwidth_probing) {
enable_alr_bw_probing = config_->periodic_alr_bandwidth_probing;
if (config_.periodic_alr_bandwidth_probing) {
enable_alr_bw_probing = config_.periodic_alr_bandwidth_probing;
}
if (enable_alr_bw_probing) {
@ -307,13 +477,110 @@ VideoSendStreamImpl::VideoSendStreamImpl(
if (configured_pacing_factor_)
transport_->SetPacingFactor(*configured_pacing_factor_);
// Only request rotation at the source when we positively know that the remote
// side doesn't support the rotation extension. This allows us to prepare the
// encoder in the expectation that rotation is supported - which is the common
// case.
bool rotation_applied = absl::c_none_of(
config_.rtp.extensions, [](const RtpExtension& extension) {
return extension.uri == RtpExtension::kVideoRotationUri;
});
video_stream_encoder_->SetSink(this, rotation_applied);
video_stream_encoder_->SetStartBitrate(
bitrate_allocator_->GetStartBitrate(this));
video_stream_encoder_->SetFecControllerOverride(rtp_video_sender_);
ReconfigureVideoEncoder(std::move(encoder_config));
}
VideoSendStreamImpl::~VideoSendStreamImpl() {
RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_LOG(LS_INFO) << "~VideoSendStreamImpl: " << config_->ToString();
RTC_LOG(LS_INFO) << "~VideoSendStreamImpl: " << config_.ToString();
RTC_DCHECK(!started());
transport_->DestroyRtpVideoSender(rtp_video_sender_);
}
void VideoSendStreamImpl::AddAdaptationResource(
rtc::scoped_refptr<Resource> resource) {
RTC_DCHECK_RUN_ON(&thread_checker_);
video_stream_encoder_->AddAdaptationResource(resource);
}
std::vector<rtc::scoped_refptr<Resource>>
VideoSendStreamImpl::GetAdaptationResources() {
RTC_DCHECK_RUN_ON(&thread_checker_);
return video_stream_encoder_->GetAdaptationResources();
}
void VideoSendStreamImpl::SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
const DegradationPreference& degradation_preference) {
RTC_DCHECK_RUN_ON(&thread_checker_);
video_stream_encoder_->SetSource(source, degradation_preference);
}
void VideoSendStreamImpl::ReconfigureVideoEncoder(VideoEncoderConfig config) {
ReconfigureVideoEncoder(std::move(config), nullptr);
}
void VideoSendStreamImpl::ReconfigureVideoEncoder(
VideoEncoderConfig config,
SetParametersCallback callback) {
RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_DCHECK_EQ(content_type_, config.content_type);
RTC_LOG(LS_VERBOSE) << "Encoder config: " << config.ToString()
<< " VideoSendStream config: " << config_.ToString();
video_stream_encoder_->ConfigureEncoder(
std::move(config),
config_.rtp.max_packet_size - CalculateMaxHeaderSize(config_.rtp),
std::move(callback));
}
VideoSendStream::Stats VideoSendStreamImpl::GetStats() {
RTC_DCHECK_RUN_ON(&thread_checker_);
return stats_proxy_.GetStats();
}
absl::optional<float> VideoSendStreamImpl::GetPacingFactorOverride() const {
return configured_pacing_factor_;
}
void VideoSendStreamImpl::StopPermanentlyAndGetRtpStates(
VideoSendStreamImpl::RtpStateMap* rtp_state_map,
VideoSendStreamImpl::RtpPayloadStateMap* payload_state_map) {
RTC_DCHECK_RUN_ON(&thread_checker_);
video_stream_encoder_->Stop();
running_ = false;
// Always run these cleanup steps regardless of whether running_ was set
// or not. This will unregister callbacks before destruction.
// See `VideoSendStreamImpl::StopVideoSendStream` for more.
Stop();
*rtp_state_map = GetRtpStates();
*payload_state_map = GetRtpPayloadStates();
}
void VideoSendStreamImpl::GenerateKeyFrame(
const std::vector<std::string>& rids) {
RTC_DCHECK_RUN_ON(&thread_checker_);
// Map rids to layers. If rids is empty, generate a keyframe for all layers.
std::vector<VideoFrameType> next_frames(config_.rtp.ssrcs.size(),
VideoFrameType::kVideoFrameKey);
if (!config_.rtp.rids.empty() && !rids.empty()) {
std::fill(next_frames.begin(), next_frames.end(),
VideoFrameType::kVideoFrameDelta);
for (const auto& rid : rids) {
for (size_t i = 0; i < config_.rtp.rids.size(); i++) {
if (config_.rtp.rids[i] == rid) {
next_frames[i] = VideoFrameType::kVideoFrameKey;
break;
}
}
}
}
if (video_stream_encoder_) {
video_stream_encoder_->SendKeyFrame(next_frames);
}
}
void VideoSendStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) {
@ -321,9 +588,35 @@ void VideoSendStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) {
rtp_video_sender_->DeliverRtcp(packet, length);
}
bool VideoSendStreamImpl::started() {
RTC_DCHECK_RUN_ON(&thread_checker_);
return rtp_video_sender_->IsActive();
}
void VideoSendStreamImpl::Start() {
const std::vector<bool> active_layers(config_.rtp.ssrcs.size(), true);
StartPerRtpStream(active_layers);
}
void VideoSendStreamImpl::StartPerRtpStream(
const std::vector<bool> active_layers) {
RTC_DCHECK_RUN_ON(&thread_checker_);
rtc::StringBuilder active_layers_string;
active_layers_string << "{";
for (size_t i = 0; i < active_layers.size(); ++i) {
if (active_layers[i]) {
active_layers_string << "1";
} else {
active_layers_string << "0";
}
if (i < active_layers.size() - 1) {
active_layers_string << ", ";
}
}
active_layers_string << "}";
RTC_LOG(LS_INFO) << "StartPerRtpStream: " << active_layers_string.str();
bool previously_active = rtp_video_sender_->IsActive();
rtp_video_sender_->SetActiveModules(active_layers);
if (!rtp_video_sender_->IsActive() && previously_active) {
@ -381,7 +674,7 @@ void VideoSendStreamImpl::StopVideoSendStream() {
check_encoder_activity_task_.Stop();
video_stream_encoder_->OnBitrateUpdated(DataRate::Zero(), DataRate::Zero(),
DataRate::Zero(), 0, 0, 0);
stats_proxy_->OnSetEncoderTargetRate(0);
stats_proxy_.OnSetEncoderTargetRate(0);
}
void VideoSendStreamImpl::SignalEncoderTimedOut() {
@ -465,7 +758,7 @@ MediaStreamAllocationConfig VideoSendStreamImpl::GetAllocationConfig() const {
encoder_max_bitrate_bps_,
static_cast<uint32_t>(disable_padding_ ? 0 : max_padding_bitrate_),
/* priority_bitrate */ 0,
!config_->suspend_below_min_bitrate,
!config_.suspend_below_min_bitrate,
encoder_bitrate_priority_};
}
@ -478,12 +771,12 @@ void VideoSendStreamImpl::OnEncoderConfigurationChanged(
RTC_DCHECK(!worker_queue_->IsCurrent());
auto closure = [this, streams = std::move(streams), is_svc, content_type,
min_transmit_bitrate_bps]() mutable {
RTC_DCHECK_GE(config_->rtp.ssrcs.size(), streams.size());
RTC_DCHECK_GE(config_.rtp.ssrcs.size(), streams.size());
TRACE_EVENT0("webrtc", "VideoSendStream::OnEncoderConfigurationChanged");
RTC_DCHECK_RUN_ON(&thread_checker_);
const VideoCodecType codec_type =
PayloadStringToCodecType(config_->rtp.payload_name);
PayloadStringToCodecType(config_.rtp.payload_name);
const absl::optional<DataRate> experimental_min_bitrate =
GetExperimentalMinVideoBitrate(codec_type);
@ -512,11 +805,11 @@ void VideoSendStreamImpl::OnEncoderConfigurationChanged(
// TODO(bugs.webrtc.org/10266): Query the VideoBitrateAllocator instead.
max_padding_bitrate_ = CalculateMaxPadBitrateBps(
streams, is_svc, content_type, min_transmit_bitrate_bps,
config_->suspend_below_min_bitrate, has_alr_probing_);
config_.suspend_below_min_bitrate, has_alr_probing_);
// Clear stats for disabled layers.
for (size_t i = streams.size(); i < config_->rtp.ssrcs.size(); ++i) {
stats_proxy_->OnInactiveSsrc(config_->rtp.ssrcs[i]);
for (size_t i = streams.size(); i < config_.rtp.ssrcs.size(); ++i) {
stats_proxy_.OnInactiveSsrc(config_.rtp.ssrcs[i]);
}
const size_t num_temporal_layers =
@ -592,7 +885,7 @@ uint32_t VideoSendStreamImpl::OnBitrateUpdated(BitrateAllocationUpdate update) {
update.stable_target_bitrate = update.target_bitrate;
}
rtp_video_sender_->OnBitrateUpdated(update, stats_proxy_->GetSendFrameRate());
rtp_video_sender_->OnBitrateUpdated(update, stats_proxy_.GetSendFrameRate());
encoder_target_rate_bps_ = rtp_video_sender_->GetPayloadBitrateBps();
const uint32_t protection_bitrate_bps =
rtp_video_sender_->GetProtectionBitrateBps();
@ -623,7 +916,7 @@ uint32_t VideoSendStreamImpl::OnBitrateUpdated(BitrateAllocationUpdate update) {
encoder_target_rate, encoder_stable_target_rate, link_allocation,
rtc::dchecked_cast<uint8_t>(update.packet_loss_ratio * 256),
update.round_trip_time.ms(), update.cwnd_reduce_ratio);
stats_proxy_->OnSetEncoderTargetRate(encoder_target_rate_bps_);
stats_proxy_.OnSetEncoderTargetRate(encoder_target_rate_bps_);
return protection_bitrate_bps;
}

View File

@ -16,21 +16,22 @@
#include <atomic>
#include <map>
#include <memory>
#include <string>
#include <vector>
#include "absl/types/optional.h"
#include "api/field_trials_view.h"
#include "api/metronome/metronome.h"
#include "api/task_queue/pending_task_safety_flag.h"
#include "api/task_queue/task_queue_base.h"
#include "api/video/encoded_image.h"
#include "api/video/video_bitrate_allocation.h"
#include "api/video/video_bitrate_allocator.h"
#include "api/video_codecs/video_encoder.h"
#include "call/bitrate_allocator.h"
#include "call/rtp_config.h"
#include "call/rtp_transport_controller_send_interface.h"
#include "call/rtp_video_sender_interface.h"
#include "modules/include/module_common_types.h"
#include "call/video_send_stream.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "rtc_base/experiments/field_trial_parser.h"
@ -38,10 +39,17 @@
#include "rtc_base/task_utils/repeating_task.h"
#include "rtc_base/thread_annotations.h"
#include "video/config/video_encoder_config.h"
#include "video/encoder_rtcp_feedback.h"
#include "video/send_delay_stats.h"
#include "video/send_statistics_proxy.h"
#include "video/video_stream_encoder_interface.h"
namespace webrtc {
namespace test {
class VideoSendStreamPeer;
} // namespace test
namespace internal {
// Pacing buffer config; overridden by ALR config if provided.
@ -54,32 +62,58 @@ struct PacingConfig {
FieldTrialParameter<TimeDelta> max_pacing_delay;
};
// VideoSendStreamImpl implements internal::VideoSendStream.
// It is created and destroyed on `rtp_transport_queue`. The intent is to
// decrease the need for locking and to ensure methods are called in sequence.
// Public methods except `DeliverRtcp` must be called on `rtp_transport_queue`.
// DeliverRtcp is called on the libjingle worker thread or a network thread.
// VideoSendStreamImpl implements webrtc::VideoSendStream.
// It is created and destroyed on `worker queue`. The intent is to
// An encoder may deliver frames through the EncodedImageCallback on an
// arbitrary thread.
class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver,
class VideoSendStreamImpl : public webrtc::VideoSendStream,
public webrtc::BitrateAllocatorObserver,
public VideoStreamEncoderInterface::EncoderSink {
public:
using RtpStateMap = std::map<uint32_t, RtpState>;
using RtpPayloadStateMap = std::map<uint32_t, RtpPayloadState>;
VideoSendStreamImpl(Clock* clock,
SendStatisticsProxy* stats_proxy,
int num_cpu_cores,
TaskQueueFactory* task_queue_factory,
RtcpRttStats* call_stats,
RtpTransportControllerSendInterface* transport,
Metronome* metronome,
BitrateAllocatorInterface* bitrate_allocator,
VideoStreamEncoderInterface* video_stream_encoder,
const VideoSendStream::Config* config,
int initial_encoder_max_bitrate,
double initial_encoder_bitrate_priority,
VideoEncoderConfig::ContentType content_type,
RtpVideoSenderInterface* rtp_video_sender,
const FieldTrialsView& field_trials);
SendDelayStats* send_delay_stats,
RtcEventLog* event_log,
VideoSendStream::Config config,
VideoEncoderConfig encoder_config,
const RtpStateMap& suspended_ssrcs,
const RtpPayloadStateMap& suspended_payload_states,
std::unique_ptr<FecController> fec_controller,
const FieldTrialsView& field_trials,
std::unique_ptr<VideoStreamEncoderInterface>
video_stream_encoder_for_test = nullptr);
~VideoSendStreamImpl() override;
void DeliverRtcp(const uint8_t* packet, size_t length);
void StartPerRtpStream(std::vector<bool> active_layers);
void Stop();
// webrtc::VideoSendStream implementation.
void Start() override;
void StartPerRtpStream(std::vector<bool> active_layers) override;
void Stop() override;
bool started() override;
void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) override;
std::vector<rtc::scoped_refptr<Resource>> GetAdaptationResources() override;
void SetSource(rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
const DegradationPreference& degradation_preference) override;
void ReconfigureVideoEncoder(VideoEncoderConfig config) override;
void ReconfigureVideoEncoder(VideoEncoderConfig config,
SetParametersCallback callback) override;
Stats GetStats() override;
void StopPermanentlyAndGetRtpStates(RtpStateMap* rtp_state_map,
RtpPayloadStateMap* payload_state_map);
void GenerateKeyFrame(const std::vector<std::string>& rids) override;
// TODO(holmer): Move these to RtpTransportControllerSend.
std::map<uint32_t, RtpState> GetRtpStates() const;
@ -91,6 +125,28 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver,
}
private:
friend class test::VideoSendStreamPeer;
class OnSendPacketObserver : public SendPacketObserver {
public:
OnSendPacketObserver(SendStatisticsProxy* stats_proxy,
SendDelayStats* send_delay_stats)
: stats_proxy_(*stats_proxy), send_delay_stats_(*send_delay_stats) {}
void OnSendPacket(absl::optional<uint16_t> packet_id,
Timestamp capture_time,
uint32_t ssrc) override {
stats_proxy_.OnSendPacket(ssrc, capture_time);
if (packet_id.has_value()) {
send_delay_stats_.OnSendPacket(*packet_id, capture_time, ssrc);
}
}
private:
SendStatisticsProxy& stats_proxy_;
SendDelayStats& send_delay_stats_;
};
absl::optional<float> GetPacingFactorOverride() const;
// Implements BitrateAllocatorObserver.
uint32_t OnBitrateUpdated(BitrateAllocationUpdate update) override;
@ -130,13 +186,22 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver,
RTC_RUN_ON(thread_checker_);
RTC_NO_UNIQUE_ADDRESS SequenceChecker thread_checker_;
RtpTransportControllerSendInterface* const transport_;
SendStatisticsProxy stats_proxy_;
OnSendPacketObserver send_packet_observer_;
const VideoSendStream::Config config_;
const VideoEncoderConfig::ContentType content_type_;
std::unique_ptr<VideoStreamEncoderInterface> video_stream_encoder_;
EncoderRtcpFeedback encoder_feedback_;
RtpVideoSenderInterface* const rtp_video_sender_;
bool running_ RTC_GUARDED_BY(thread_checker_) = false;
Clock* const clock_;
const bool has_alr_probing_;
const PacingConfig pacing_config_;
SendStatisticsProxy* const stats_proxy_;
const VideoSendStream::Config* const config_;
TaskQueueBase* const worker_queue_;
RepeatingTaskHandle check_encoder_activity_task_
@ -145,7 +210,6 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver,
std::atomic_bool activity_;
bool timed_out_ RTC_GUARDED_BY(thread_checker_);
RtpTransportControllerSendInterface* const transport_;
BitrateAllocatorInterface* const bitrate_allocator_;
bool disable_padding_ RTC_GUARDED_BY(thread_checker_);
@ -155,9 +219,6 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver,
uint32_t encoder_target_rate_bps_ RTC_GUARDED_BY(thread_checker_);
double encoder_bitrate_priority_ RTC_GUARDED_BY(thread_checker_);
VideoStreamEncoderInterface* const video_stream_encoder_;
RtpVideoSenderInterface* const rtp_video_sender_;
ScopedTaskSafety worker_queue_safety_;
// Context for the most recent and last sent video bitrate allocation. Used to

View File

@ -11,31 +11,50 @@
#include "video/video_send_stream_impl.h"
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/call/bitrate_allocation.h"
#include "api/rtc_event_log/rtc_event_log.h"
#include "api/sequence_checker.h"
#include "api/rtp_parameters.h"
#include "api/task_queue/task_queue_base.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/units/data_rate.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "call/rtp_video_sender.h"
#include "api/video/encoded_image.h"
#include "api/video/video_bitrate_allocation.h"
#include "api/video/video_layers_allocation.h"
#include "api/video_codecs/video_encoder.h"
#include "call/bitrate_allocator.h"
#include "call/rtp_config.h"
#include "call/rtp_video_sender_interface.h"
#include "call/test/mock_bitrate_allocator.h"
#include "call/test/mock_rtp_transport_controller_send.h"
#include "call/video_send_stream.h"
#include "modules/pacing/packet_router.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h"
#include "modules/video_coding/fec_controller_default.h"
#include "rtc_base/event.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "rtc_base/experiments/alr_experiment.h"
#include "rtc_base/fake_clock.h"
#include "rtc_base/logging.h"
#include "test/field_trial.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/mock_transport.h"
#include "test/scoped_key_value_config.h"
#include "test/time_controller/simulated_time_controller.h"
#include "video/config/video_encoder_config.h"
#include "video/send_delay_stats.h"
#include "video/send_statistics_proxy.h"
#include "video/test/mock_video_stream_encoder.h"
#include "video/video_send_stream.h"
#include "video/video_stream_encoder_interface.h"
namespace webrtc {
@ -114,6 +133,7 @@ BitrateAllocationUpdate CreateAllocation(int bitrate_bps) {
update.round_trip_time = TimeDelta::Zero();
return update;
}
} // namespace
class VideoSendStreamImplTest : public ::testing::Test {
@ -159,13 +179,26 @@ class VideoSendStreamImplTest : public ::testing::Test {
EXPECT_CALL(bitrate_allocator_, GetStartBitrate(_))
.WillOnce(Return(123000));
VideoEncoderConfig encoder_config;
encoder_config.max_bitrate_bps = initial_encoder_max_bitrate;
encoder_config.bitrate_priority = initial_encoder_bitrate_priority;
encoder_config.content_type = content_type;
std::map<uint32_t, RtpState> suspended_ssrcs;
std::map<uint32_t, RtpPayloadState> suspended_payload_states;
std::unique_ptr<NiceMock<MockVideoStreamEncoder>> video_stream_encoder =
std::make_unique<NiceMock<MockVideoStreamEncoder>>();
video_stream_encoder_ = video_stream_encoder.get();
auto ret = std::make_unique<VideoSendStreamImpl>(
time_controller_.GetClock(), &stats_proxy_, &transport_controller_,
&bitrate_allocator_, &video_stream_encoder_, &config_,
initial_encoder_max_bitrate, initial_encoder_bitrate_priority,
content_type, &rtp_video_sender_, field_trials_);
time_controller_.GetClock(),
/*num_cpu_cores=*/1, time_controller_.GetTaskQueueFactory(),
/*call_stats=*/nullptr, &transport_controller_,
/*metronome=*/nullptr, &bitrate_allocator_, &send_delay_stats_,
/*event_log=*/nullptr, config_.Copy(), encoder_config.Copy(),
suspended_ssrcs, suspended_payload_states,
/*fec_controller=*/nullptr, field_trials_,
std::move(video_stream_encoder));
// The call to GetStartBitrate() executes asynchronously on the tq.
// Ensure all tasks get to run.
@ -181,7 +214,7 @@ class VideoSendStreamImplTest : public ::testing::Test {
NiceMock<MockTransport> transport_;
NiceMock<MockRtpTransportControllerSend> transport_controller_;
NiceMock<MockBitrateAllocator> bitrate_allocator_;
NiceMock<MockVideoStreamEncoder> video_stream_encoder_;
NiceMock<MockVideoStreamEncoder>* video_stream_encoder_ = nullptr;
NiceMock<MockRtpVideoSender> rtp_video_sender_;
std::vector<bool> active_modules_;
@ -218,6 +251,9 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChange) {
config_.suspend_below_min_bitrate = kSuspend;
config_.rtp.extensions.emplace_back(RtpExtension::kTransportSequenceNumberUri,
1);
config_.rtp.ssrcs.emplace_back(1);
config_.rtp.ssrcs.emplace_back(2);
auto vss_impl = CreateVideoSendStreamImpl(
kDefaultInitialBitrateBps, kDefaultBitratePriority,
VideoEncoderConfig::ContentType::kRealtimeVideo);
@ -248,9 +284,6 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChange) {
int min_transmit_bitrate_bps = 30000;
config_.rtp.ssrcs.emplace_back(1);
config_.rtp.ssrcs.emplace_back(2);
EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _))
.WillRepeatedly(Invoke(
[&](BitrateAllocatorObserver*, MediaStreamAllocationConfig config) {
@ -284,6 +317,9 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChangeWithAlr) {
config_.rtp.extensions.emplace_back(RtpExtension::kTransportSequenceNumberUri,
1);
config_.periodic_alr_bandwidth_probing = true;
config_.rtp.ssrcs.emplace_back(1);
config_.rtp.ssrcs.emplace_back(2);
auto vss_impl = CreateVideoSendStreamImpl(
kDefaultInitialBitrateBps, kDefaultBitratePriority,
VideoEncoderConfig::ContentType::kScreen);
@ -316,9 +352,6 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChangeWithAlr) {
// low_stream.target_bitrate_bps + high_stream.min_bitrate_bps.
int min_transmit_bitrate_bps = 400000;
config_.rtp.ssrcs.emplace_back(1);
config_.rtp.ssrcs.emplace_back(2);
EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _))
.WillRepeatedly(Invoke(
[&](BitrateAllocatorObserver*, MediaStreamAllocationConfig config) {
@ -347,6 +380,8 @@ TEST_F(VideoSendStreamImplTest,
UpdatesObserverOnConfigurationChangeWithSimulcastVideoHysteresis) {
test::ScopedKeyValueConfig hysteresis_experiment(
field_trials_, "WebRTC-VideoRateControl/video_hysteresis:1.25/");
config_.rtp.ssrcs.emplace_back(1);
config_.rtp.ssrcs.emplace_back(2);
auto vss_impl = CreateVideoSendStreamImpl(
kDefaultInitialBitrateBps, kDefaultBitratePriority,
@ -374,9 +409,6 @@ TEST_F(VideoSendStreamImplTest,
high_stream.max_qp = 56;
high_stream.bitrate_priority = 1;
config_.rtp.ssrcs.emplace_back(1);
config_.rtp.ssrcs.emplace_back(2);
EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _))
.WillRepeatedly(Invoke([&](BitrateAllocatorObserver*,
MediaStreamAllocationConfig config) {
@ -397,7 +429,8 @@ TEST_F(VideoSendStreamImplTest,
->OnEncoderConfigurationChanged(
std::vector<VideoStream>{low_stream, high_stream}, false,
VideoEncoderConfig::ContentType::kRealtimeVideo,
/*min_transmit_bitrate_bps=*/0);
/*min_transmit_bitrate_bps=*/
0);
});
time_controller_.AdvanceTime(TimeDelta::Zero());
vss_impl->Stop();
@ -723,7 +756,7 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) {
EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps())
.WillOnce(Return(network_constrained_rate.bps()));
EXPECT_CALL(
video_stream_encoder_,
*video_stream_encoder_,
OnBitrateUpdated(network_constrained_rate, network_constrained_rate,
network_constrained_rate, 0, _, 0));
static_cast<BitrateAllocatorObserver*>(vss_impl.get())
@ -740,7 +773,7 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) {
EXPECT_CALL(rtp_video_sender_, OnBitrateUpdated(update, _));
EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps())
.WillOnce(Return(rate_with_headroom.bps()));
EXPECT_CALL(video_stream_encoder_,
EXPECT_CALL(*video_stream_encoder_,
OnBitrateUpdated(qvga_max_bitrate, qvga_max_bitrate,
rate_with_headroom, 0, _, 0));
static_cast<BitrateAllocatorObserver*>(vss_impl.get())
@ -757,7 +790,7 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) {
.WillOnce(Return(rate_with_headroom.bps()));
const DataRate headroom_minus_protection =
rate_with_headroom - DataRate::BitsPerSec(protection_bitrate_bps);
EXPECT_CALL(video_stream_encoder_,
EXPECT_CALL(*video_stream_encoder_,
OnBitrateUpdated(qvga_max_bitrate, qvga_max_bitrate,
headroom_minus_protection, 0, _, 0));
static_cast<BitrateAllocatorObserver*>(vss_impl.get())
@ -770,14 +803,14 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) {
EXPECT_CALL(rtp_video_sender_, OnBitrateUpdated(update, _));
EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps())
.WillOnce(Return(rate_with_headroom.bps()));
EXPECT_CALL(video_stream_encoder_,
EXPECT_CALL(*video_stream_encoder_,
OnBitrateUpdated(qvga_max_bitrate, qvga_max_bitrate,
qvga_max_bitrate, 0, _, 0));
static_cast<BitrateAllocatorObserver*>(vss_impl.get())
->OnBitrateUpdated(update);
// Set rates to zero on stop.
EXPECT_CALL(video_stream_encoder_,
EXPECT_CALL(*video_stream_encoder_,
OnBitrateUpdated(DataRate::Zero(), DataRate::Zero(),
DataRate::Zero(), 0, 0, 0));
vss_impl->Stop();

View File

@ -75,7 +75,7 @@
#include "video/config/encoder_stream_factory.h"
#include "video/send_statistics_proxy.h"
#include "video/transport_adapter.h"
#include "video/video_send_stream.h"
#include "video/video_send_stream_impl.h"
namespace webrtc {
namespace test {
@ -83,13 +83,13 @@ class VideoSendStreamPeer {
public:
explicit VideoSendStreamPeer(webrtc::VideoSendStream* base_class_stream)
: internal_stream_(
static_cast<internal::VideoSendStream*>(base_class_stream)) {}
static_cast<internal::VideoSendStreamImpl*>(base_class_stream)) {}
absl::optional<float> GetPacingFactorOverride() const {
return internal_stream_->GetPacingFactorOverride();
}
private:
internal::VideoSendStream const* const internal_stream_;
internal::VideoSendStreamImpl const* const internal_stream_;
};
} // namespace test