Remove unused VideoReceiveStream.

This class is superseded by VideoReceiveStream2.

Bug: webrtc:11489
Change-Id: I02b844868bafe67ce3e924fc23029ec300e934a7
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/240063
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Tomas Gunnarsson <tommi@google.com>
Reviewed-by: Tomas Gunnarsson <tommi@webrtc.org>
Commit-Queue: Markus Handell <handellm@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#35491}
This commit is contained in:
Markus Handell 2021-12-07 15:20:18 +01:00 committed by WebRTC LUCI CQ
parent 3d29efd279
commit 5c198e100d
4 changed files with 0 additions and 769 deletions

View File

@ -284,16 +284,6 @@ class VideoReceiveStream : public MediaReceiveStream {
virtual ~VideoReceiveStream() {}
};
// Deprecated variant of VideoReceiveStream that additionally exposes
// secondary-sink registration. Only the legacy internal::VideoReceiveStream
// implements this; the replacement (VideoReceiveStream2) does not need it.
class DEPRECATED_VideoReceiveStream : public VideoReceiveStream {
 public:
  // RtpDemuxer only forwards a given RTP packet to one sink. However, some
  // sinks, such as FlexFEC, might wish to be informed of all of the packets
  // a given sink receives (or any set of sinks). They may do so by registering
  // themselves as secondary sinks.
  virtual void AddSecondarySink(RtpPacketSinkInterface* sink) = 0;
  virtual void RemoveSecondarySink(const RtpPacketSinkInterface* sink) = 0;
};
} // namespace webrtc
#endif // CALL_VIDEO_RECEIVE_STREAM_H_

View File

@ -153,8 +153,6 @@ rtc_source_set("video_legacy") {
"rtp_video_stream_receiver.h",
"video_quality_observer.cc",
"video_quality_observer.h",
"video_receive_stream.cc",
"video_receive_stream.h",
"video_stream_decoder.cc",
"video_stream_decoder.h",
]
@ -655,7 +653,6 @@ if (rtc_include_tests) {
"stats_counter_unittest.cc",
"stream_synchronization_unittest.cc",
"video_receive_stream2_unittest.cc",
"video_receive_stream_unittest.cc",
"video_send_stream_impl_unittest.cc",
"video_send_stream_tests.cc",
"video_source_sink_controller_unittest.cc",

View File

@ -1,241 +0,0 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef VIDEO_VIDEO_RECEIVE_STREAM_H_
#define VIDEO_VIDEO_RECEIVE_STREAM_H_
#include <memory>
#include <vector>
#include "api/sequence_checker.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/video/recordable_encoded_frame.h"
#include "call/rtp_packet_sink_interface.h"
#include "call/syncable.h"
#include "call/video_receive_stream.h"
#include "modules/rtp_rtcp/include/flexfec_receiver.h"
#include "modules/rtp_rtcp/source/source_tracker.h"
#include "modules/video_coding/frame_buffer2.h"
#include "modules/video_coding/video_receiver2.h"
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/system/no_unique_address.h"
#include "rtc_base/task_queue.h"
#include "system_wrappers/include/clock.h"
#include "video/receive_statistics_proxy.h"
#include "video/rtp_streams_synchronizer.h"
#include "video/rtp_video_stream_receiver.h"
#include "video/transport_adapter.h"
#include "video/video_stream_decoder.h"
namespace webrtc {
class CallStats;
class ProcessThread;
class RtpStreamReceiverInterface;
class RtpStreamReceiverControllerInterface;
class RtxReceiveStream;
class VCMTiming;
namespace internal {
// Legacy video receive stream implementation (superseded by
// VideoReceiveStream2). Combines RTP reception, frame buffering, decoding,
// rendering and A/V sync for one received video stream. It participates in
// keyframe requesting (NackSender), receives completed frames from the RTP
// receiver (OnCompleteFrameCallback), and reports RTT updates
// (CallStatsObserver).
//
// Threading, as encoded by the guards below: decode-related state lives on
// `decode_queue_`, other state on the worker/module-process sequences, and
// `playout_delay_lock_` protects the playout-delay inputs that several
// callers may update concurrently.
class VideoReceiveStream
    : public webrtc::DEPRECATED_VideoReceiveStream,
      public rtc::VideoSinkInterface<VideoFrame>,
      public NackSender,
      public RtpVideoStreamReceiver::OnCompleteFrameCallback,
      public Syncable,
      public CallStatsObserver {
 public:
  // The default number of milliseconds to pass before re-requesting a key frame
  // to be sent.
  static constexpr int kMaxWaitForKeyFrameMs = 200;

  VideoReceiveStream(TaskQueueFactory* task_queue_factory,
                     RtpStreamReceiverControllerInterface* receiver_controller,
                     int num_cpu_cores,
                     PacketRouter* packet_router,
                     VideoReceiveStream::Config config,
                     ProcessThread* process_thread,
                     CallStats* call_stats,
                     Clock* clock,
                     VCMTiming* timing);
  // Overload without an explicit VCMTiming instance.
  VideoReceiveStream(TaskQueueFactory* task_queue_factory,
                     RtpStreamReceiverControllerInterface* receiver_controller,
                     int num_cpu_cores,
                     PacketRouter* packet_router,
                     VideoReceiveStream::Config config,
                     ProcessThread* process_thread,
                     CallStats* call_stats,
                     Clock* clock);
  ~VideoReceiveStream() override;

  const Config& config() const { return config_; }

  void SignalNetworkState(NetworkState state);
  // Feeds an incoming RTCP packet to the stream.
  bool DeliverRtcp(const uint8_t* packet, size_t length);

  // Registers the audio stream to synchronize this video stream with.
  void SetSync(Syncable* audio_syncable);

  // Implements webrtc::VideoReceiveStream.
  void Start() override;
  void Stop() override;

  const RtpConfig& rtp_config() const override { return config_.rtp; }

  webrtc::VideoReceiveStream::Stats GetStats() const override;

  void AddSecondarySink(RtpPacketSinkInterface* sink) override;
  void RemoveSecondarySink(const RtpPacketSinkInterface* sink) override;

  void SetRtpExtensions(std::vector<RtpExtension> extensions) override;

  // SetBaseMinimumPlayoutDelayMs and GetBaseMinimumPlayoutDelayMs are called
  // from webrtc/api level and requested by user code. For e.g. blink/js layer
  // in Chromium.
  bool SetBaseMinimumPlayoutDelayMs(int delay_ms) override;
  int GetBaseMinimumPlayoutDelayMs() const override;

  void SetFrameDecryptor(
      rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor) override;
  void SetDepacketizerToDecoderFrameTransformer(
      rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) override;

  // Implements rtc::VideoSinkInterface<VideoFrame>.
  void OnFrame(const VideoFrame& video_frame) override;

  // Implements NackSender.
  // For this particular override of the interface,
  // only (buffering_allowed == true) is acceptable.
  void SendNack(const std::vector<uint16_t>& sequence_numbers,
                bool buffering_allowed) override;

  // Implements RtpVideoStreamReceiver::OnCompleteFrameCallback.
  void OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) override;

  // Implements CallStatsObserver::OnRttUpdate
  void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override;

  // Implements Syncable.
  uint32_t id() const override;
  absl::optional<Syncable::Info> GetInfo() const override;
  bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
                              int64_t* time_ms) const override;
  void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
                                         int64_t time_ms) override;
  // SetMinimumPlayoutDelay is only called by A/V sync.
  bool SetMinimumPlayoutDelay(int delay_ms) override;

  std::vector<webrtc::RtpSource> GetSources() const override;

  RecordingState SetAndGetRecordingState(RecordingState state,
                                         bool generate_key_frame) override;
  void GenerateKeyFrame() override;

 private:
  int64_t GetWaitMs() const;
  void StartNextDecode() RTC_RUN_ON(decode_queue_);
  void HandleEncodedFrame(std::unique_ptr<EncodedFrame> frame)
      RTC_RUN_ON(decode_queue_);
  void HandleFrameBufferTimeout() RTC_RUN_ON(decode_queue_);
  // Pushes the largest of the accumulated minimum delays (and the frame
  // maximum delay) into `timing_`; see the delay members below.
  void UpdatePlayoutDelays() const
      RTC_EXCLUSIVE_LOCKS_REQUIRED(playout_delay_lock_);
  void RequestKeyFrame(int64_t timestamp_ms) RTC_RUN_ON(decode_queue_);
  void HandleKeyFrameGeneration(bool received_frame_is_keyframe, int64_t now_ms)
      RTC_RUN_ON(decode_queue_);
  bool IsReceivingKeyFrame(int64_t timestamp_ms) const
      RTC_RUN_ON(decode_queue_);
  void UpdateHistograms();

  RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_sequence_checker_;
  RTC_NO_UNIQUE_ADDRESS SequenceChecker module_process_sequence_checker_;
  RTC_NO_UNIQUE_ADDRESS SequenceChecker network_sequence_checker_;

  TaskQueueFactory* const task_queue_factory_;

  TransportAdapter transport_adapter_;
  const VideoReceiveStream::Config config_;
  const int num_cpu_cores_;
  ProcessThread* const process_thread_;
  Clock* const clock_;

  CallStats* const call_stats_;

  bool decoder_running_ RTC_GUARDED_BY(worker_sequence_checker_) = false;
  bool decoder_stopped_ RTC_GUARDED_BY(decode_queue_) = true;

  SourceTracker source_tracker_;
  ReceiveStatisticsProxy stats_proxy_;
  // Shared by media and rtx stream receivers, since the latter has no RtpRtcp
  // module of its own.
  const std::unique_ptr<ReceiveStatistics> rtp_receive_statistics_;

  std::unique_ptr<VCMTiming> timing_;  // Jitter buffer experiment.
  VideoReceiver2 video_receiver_;
  std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> incoming_video_stream_;
  RtpVideoStreamReceiver rtp_video_stream_receiver_;
  std::unique_ptr<VideoStreamDecoder> video_stream_decoder_;
  RtpStreamsSynchronizer rtp_stream_sync_;

  // TODO(nisse, philipel): Creation and ownership of video encoders should be
  // moved to the new VideoStreamDecoder.
  std::vector<std::unique_ptr<VideoDecoder>> video_decoders_;

  // Members for the new jitter buffer experiment.
  std::unique_ptr<video_coding::FrameBuffer> frame_buffer_;

  std::unique_ptr<RtpStreamReceiverInterface> media_receiver_;
  std::unique_ptr<RtxReceiveStream> rtx_receive_stream_;
  std::unique_ptr<RtpStreamReceiverInterface> rtx_receiver_;

  // Whenever we are in an undecodable state (stream has just started or due to
  // a decoding error) we require a keyframe to restart the stream.
  bool keyframe_required_ = true;

  // If we have successfully decoded any frame.
  bool frame_decoded_ = false;

  int64_t last_keyframe_request_ms_ = 0;
  int64_t last_complete_frame_time_ms_ = 0;

  // Keyframe request intervals are configurable through field trials.
  const int max_wait_for_keyframe_ms_;
  const int max_wait_for_frame_ms_;

  mutable Mutex playout_delay_lock_;

  // All of them tries to change current min_playout_delay on `timing_` but
  // source of the change request is different in each case. Among them the
  // biggest delay is used. -1 means use default value from the `timing_`.
  //
  // Minimum delay as decided by the RTP playout delay extension.
  int frame_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;
  // Minimum delay as decided by the setLatency function in "webrtc/api".
  int base_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;
  // Minimum delay as decided by the A/V synchronization feature.
  int syncable_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) =
      -1;

  // Maximum delay as decided by the RTP playout delay extension.
  int frame_maximum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;

  // Function that is triggered with encoded frames, if not empty.
  std::function<void(const RecordableEncodedFrame&)>
      encoded_frame_buffer_function_ RTC_GUARDED_BY(decode_queue_);
  // Set to true while we're requesting keyframes but not yet received one.
  bool keyframe_generation_requested_ RTC_GUARDED_BY(decode_queue_) = false;

  // Defined last so they are destroyed before all other members.
  rtc::TaskQueue decode_queue_;
};
} // namespace internal
} // namespace webrtc
#endif // VIDEO_VIDEO_RECEIVE_STREAM_H_

View File

@ -1,515 +0,0 @@
/*
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video/video_receive_stream.h"
#include <algorithm>
#include <memory>
#include <utility>
#include <vector>
#include "api/task_queue/default_task_queue_factory.h"
#include "api/test/mock_video_decoder.h"
#include "api/test/video/function_video_decoder_factory.h"
#include "api/video_codecs/video_decoder.h"
#include "call/rtp_stream_receiver_controller.h"
#include "common_video/test/utilities.h"
#include "media/base/fake_video_renderer.h"
#include "modules/pacing/packet_router.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
#include "modules/utility/include/process_thread.h"
#include "modules/video_coding/encoded_frame.h"
#include "rtc_base/event.h"
#include "system_wrappers/include/clock.h"
#include "test/fake_decoder.h"
#include "test/field_trial.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/mock_transport.h"
#include "test/time_controller/simulated_time_controller.h"
#include "test/video_decoder_proxy_factory.h"
#include "video/call_stats.h"
namespace webrtc {
namespace {
using ::testing::_;
using ::testing::ElementsAreArray;
using ::testing::Invoke;
using ::testing::IsEmpty;
using ::testing::SizeIs;
using ::testing::WithoutArgs;
// Maximum time (ms) the tests wait for asynchronous decode/render events.
constexpr int kDefaultTimeOutMs = 50;
// Minimal EncodedFrame stub for tests: trivial time accessors plus setters
// that expose the protected base-class fields the tests need to populate.
class FrameObjectFake : public EncodedFrame {
 public:
  // Timing overrides: a fixed receive time and the stored render time.
  int64_t ReceivedTime() const override { return 0; }
  int64_t RenderTime() const override { return _renderTimeMs; }

  // Test-only mutators for protected EncodedFrame state.
  void SetPayloadType(uint8_t payload_type) { _payloadType = payload_type; }
  void SetNtpTime(int64_t ntp_time_ms) { ntp_time_ms_ = ntp_time_ms; }
  void SetRotation(const VideoRotation& rotation) { rotation_ = rotation; }
};
} // namespace
// Fixture that builds a real internal::VideoReceiveStream around a mocked
// H264 decoder and a mocked transport, so tests can drive the stream through
// its public API and observe decoder/transport interactions.
class VideoReceiveStreamTest : public ::testing::Test {
 public:
  VideoReceiveStreamTest()
      : process_thread_(ProcessThread::Create("TestThread")),
        task_queue_factory_(CreateDefaultTaskQueueFactory()),
        h264_decoder_factory_(&mock_h264_video_decoder_),
        config_(&mock_transport_, &h264_decoder_factory_),
        call_stats_(Clock::GetRealTimeClock(), process_thread_.get()) {}

  void SetUp() {
    constexpr int kDefaultNumCpuCores = 2;
    config_.rtp.remote_ssrc = 1111;
    config_.rtp.local_ssrc = 2222;
    config_.renderer = &fake_renderer_;
    // Configure one H264 decoder (payload type 99) with out-of-band
    // parameter sets so a keyframe can be decoded without in-band SPS/PPS.
    VideoReceiveStream::Decoder h264_decoder;
    h264_decoder.payload_type = 99;
    h264_decoder.video_format = SdpVideoFormat("H264");
    h264_decoder.video_format.parameters.insert(
        {"sprop-parameter-sets", "Z0IACpZTBYmI,aMljiA=="});
    config_.decoders.push_back(h264_decoder);
    clock_ = Clock::GetRealTimeClock();
    // Raw pointer kept for inspection in tests; ownership presumably passes
    // to the stream via the VCMTiming* constructor argument — see header.
    timing_ = new VCMTiming(clock_);

    video_receive_stream_ =
        std::make_unique<webrtc::internal::VideoReceiveStream>(
            task_queue_factory_.get(), &rtp_stream_receiver_controller_,
            kDefaultNumCpuCores, &packet_router_, config_.Copy(),
            process_thread_.get(), &call_stats_, clock_, timing_);
  }

 protected:
  std::unique_ptr<ProcessThread> process_thread_;
  const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
  test::VideoDecoderProxyFactory h264_decoder_factory_;
  VideoReceiveStream::Config config_;
  CallStats call_stats_;
  MockVideoDecoder mock_h264_video_decoder_;
  cricket::FakeVideoRenderer fake_renderer_;
  MockTransport mock_transport_;
  PacketRouter packet_router_;
  RtpStreamReceiverController rtp_stream_receiver_controller_;
  std::unique_ptr<webrtc::internal::VideoReceiveStream> video_receive_stream_;
  Clock* clock_;
  VCMTiming* timing_;  // Non-owning view of the stream's timing object.
};
// Injects a single-packet H264 IDR NAL unit and verifies the stream
// configures the decoder (using the sprop parameter sets from the config)
// and forwards the frame to Decode().
TEST_F(VideoReceiveStreamTest, CreateFrameFromH264FmtpSpropAndIdr) {
  constexpr uint8_t idr_nalu[] = {0x05, 0xFF, 0xFF, 0xFF};
  // Build an RTP packet carrying the IDR, addressed to the configured
  // remote SSRC (1111) and payload type (99).
  RtpPacketToSend rtppacket(nullptr);
  uint8_t* payload = rtppacket.AllocatePayload(sizeof(idr_nalu));
  memcpy(payload, idr_nalu, sizeof(idr_nalu));
  rtppacket.SetMarker(true);
  rtppacket.SetSsrc(1111);
  rtppacket.SetPayloadType(99);
  rtppacket.SetSequenceNumber(1);
  rtppacket.SetTimestamp(0);
  rtc::Event init_decode_event;
  // Decoder configuration happens asynchronously on the decode queue; the
  // event lets the test wait for it.
  EXPECT_CALL(mock_h264_video_decoder_, Configure).WillOnce(WithoutArgs([&] {
    init_decode_event.Set();
    return true;
  }));
  EXPECT_CALL(mock_h264_video_decoder_, RegisterDecodeCompleteCallback(_));
  video_receive_stream_->Start();
  EXPECT_CALL(mock_h264_video_decoder_, Decode(_, false, _));
  RtpPacketReceived parsed_packet;
  ASSERT_TRUE(parsed_packet.Parse(rtppacket.data(), rtppacket.size()));
  rtp_stream_receiver_controller_.OnRtpPacket(parsed_packet);
  EXPECT_CALL(mock_h264_video_decoder_, Release());
  // Make sure the decoder thread had a chance to run.
  init_decode_event.Wait(kDefaultTimeOutMs);
}
// Verifies how the three min-playout-delay sources (RTP extension, base
// minimum from the API, and A/V sync) combine: the largest of the remembered
// values wins, and lowering one source re-exposes the others.
TEST_F(VideoReceiveStreamTest, PlayoutDelay) {
  const VideoPlayoutDelay kPlayoutDelayMs = {123, 321};
  std::unique_ptr<FrameObjectFake> test_frame(new FrameObjectFake());
  test_frame->SetId(0);
  test_frame->SetPlayoutDelay(kPlayoutDelayMs);

  video_receive_stream_->OnCompleteFrame(std::move(test_frame));
  EXPECT_EQ(kPlayoutDelayMs.min_ms, timing_->min_playout_delay());
  EXPECT_EQ(kPlayoutDelayMs.max_ms, timing_->max_playout_delay());

  // Check that the biggest minimum delay is chosen.
  video_receive_stream_->SetMinimumPlayoutDelay(400);
  EXPECT_EQ(400, timing_->min_playout_delay());

  // Check base minimum delay validation.
  EXPECT_FALSE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(12345));
  EXPECT_FALSE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(-1));
  EXPECT_TRUE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(500));
  EXPECT_EQ(500, timing_->min_playout_delay());

  // Check that intermediate values are remembered and the biggest remembered
  // is chosen.
  video_receive_stream_->SetBaseMinimumPlayoutDelayMs(0);
  EXPECT_EQ(400, timing_->min_playout_delay());

  video_receive_stream_->SetMinimumPlayoutDelay(0);
  EXPECT_EQ(123, timing_->min_playout_delay());
}
// A maximum of -1 in the RTP playout-delay extension must leave the default
// maximum from `timing_` untouched while the minimum is still applied.
TEST_F(VideoReceiveStreamTest, PlayoutDelayPreservesDefaultMaxValue) {
  const int initial_max_delay = timing_->max_playout_delay();
  const VideoPlayoutDelay kDelay = {123, -1};
  auto frame = std::make_unique<FrameObjectFake>();
  frame->SetId(0);
  frame->SetPlayoutDelay(kDelay);

  video_receive_stream_->OnCompleteFrame(std::move(frame));

  // -1 means "keep the default maximum"; only the minimum changes.
  EXPECT_EQ(kDelay.min_ms, timing_->min_playout_delay());
  EXPECT_NE(kDelay.max_ms, timing_->max_playout_delay());
  EXPECT_EQ(initial_max_delay, timing_->max_playout_delay());
}
// A minimum of -1 in the RTP playout-delay extension must leave the default
// minimum from `timing_` untouched while the maximum is still applied.
TEST_F(VideoReceiveStreamTest, PlayoutDelayPreservesDefaultMinValue) {
  const int initial_min_delay = timing_->min_playout_delay();
  const VideoPlayoutDelay kDelay = {-1, 321};
  auto frame = std::make_unique<FrameObjectFake>();
  frame->SetId(0);
  frame->SetPlayoutDelay(kDelay);

  video_receive_stream_->OnCompleteFrame(std::move(frame));

  // -1 means "keep the default minimum"; only the maximum changes.
  EXPECT_NE(kDelay.min_ms, timing_->min_playout_delay());
  EXPECT_EQ(kDelay.max_ms, timing_->max_playout_delay());
  EXPECT_EQ(initial_min_delay, timing_->min_playout_delay());
}
// Fixture like VideoReceiveStreamTest, but with a FakeDecoder (VP8, payload
// type 99) that actually produces output frames, so tests can assert on what
// reaches the fake renderer. Also supports re-creating the stream to test
// recording-state handover.
class VideoReceiveStreamTestWithFakeDecoder : public ::testing::Test {
 public:
  VideoReceiveStreamTestWithFakeDecoder()
      : fake_decoder_factory_(
            []() { return std::make_unique<test::FakeDecoder>(); }),
        process_thread_(ProcessThread::Create("TestThread")),
        task_queue_factory_(CreateDefaultTaskQueueFactory()),
        config_(&mock_transport_, &fake_decoder_factory_),
        call_stats_(Clock::GetRealTimeClock(), process_thread_.get()) {}

  void SetUp() {
    config_.rtp.remote_ssrc = 1111;
    config_.rtp.local_ssrc = 2222;
    config_.renderer = &fake_renderer_;
    VideoReceiveStream::Decoder fake_decoder;
    fake_decoder.payload_type = 99;
    fake_decoder.video_format = SdpVideoFormat("VP8");
    config_.decoders.push_back(fake_decoder);
    clock_ = Clock::GetRealTimeClock();
    ReCreateReceiveStream(VideoReceiveStream::RecordingState());
  }

  // Destroys the current stream (if any) and builds a fresh one, seeding it
  // with `state` so recording-state handover can be exercised.
  void ReCreateReceiveStream(VideoReceiveStream::RecordingState state) {
    constexpr int kDefaultNumCpuCores = 2;
    video_receive_stream_ = nullptr;
    // Raw pointer kept for inspection; ownership presumably passes to the
    // stream via the VCMTiming* constructor argument — see header.
    timing_ = new VCMTiming(clock_);
    video_receive_stream_.reset(new webrtc::internal::VideoReceiveStream(
        task_queue_factory_.get(), &rtp_stream_receiver_controller_,
        kDefaultNumCpuCores, &packet_router_, config_.Copy(),
        process_thread_.get(), &call_stats_, clock_, timing_));
    video_receive_stream_->SetAndGetRecordingState(std::move(state), false);
  }

 protected:
  test::FunctionVideoDecoderFactory fake_decoder_factory_;
  std::unique_ptr<ProcessThread> process_thread_;
  const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
  VideoReceiveStream::Config config_;
  CallStats call_stats_;
  cricket::FakeVideoRenderer fake_renderer_;
  MockTransport mock_transport_;
  PacketRouter packet_router_;
  RtpStreamReceiverController rtp_stream_receiver_controller_;
  std::unique_ptr<webrtc::internal::VideoReceiveStream> video_receive_stream_;
  Clock* clock_;
  VCMTiming* timing_;  // Non-owning view of the stream's timing object.
};
// The NTP timestamp set on the encoded frame must survive decoding and reach
// the renderer unchanged.
TEST_F(VideoReceiveStreamTestWithFakeDecoder, PassesNtpTime) {
  constexpr int64_t kNtpTimestamp = 12345;
  auto frame = std::make_unique<FrameObjectFake>();
  frame->SetId(0);
  frame->SetPayloadType(99);
  frame->SetNtpTime(kNtpTimestamp);

  video_receive_stream_->Start();
  video_receive_stream_->OnCompleteFrame(std::move(frame));

  EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
  EXPECT_EQ(kNtpTimestamp, fake_renderer_.ntp_time_ms());
}
// The video rotation set on the encoded frame must survive decoding and
// reach the renderer unchanged.
TEST_F(VideoReceiveStreamTestWithFakeDecoder, PassesRotation) {
  constexpr webrtc::VideoRotation kRotation = webrtc::kVideoRotation_180;
  auto frame = std::make_unique<FrameObjectFake>();
  frame->SetId(0);
  frame->SetPayloadType(99);
  frame->SetRotation(kRotation);

  video_receive_stream_->Start();
  video_receive_stream_->OnCompleteFrame(std::move(frame));

  EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
  EXPECT_EQ(kRotation, fake_renderer_.rotation());
}
// The per-packet RtpPacketInfos attached to the encoded frame must be
// forwarded to the renderer with the decoded frame.
TEST_F(VideoReceiveStreamTestWithFakeDecoder, PassesPacketInfos) {
  auto frame = std::make_unique<FrameObjectFake>();
  frame->SetId(0);
  frame->SetPayloadType(99);
  RtpPacketInfos infos = CreatePacketInfos(3);
  frame->SetPacketInfos(infos);

  video_receive_stream_->Start();
  video_receive_stream_->OnCompleteFrame(std::move(frame));

  EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
  EXPECT_THAT(fake_renderer_.packet_infos(), ElementsAreArray(infos));
}
// Rendering a frame whose packets carry SSRC/CSRC information must surface
// exactly one SSRC source and one CSRC source via GetSources(), each stamped
// within the render time window.
TEST_F(VideoReceiveStreamTestWithFakeDecoder, RenderedFrameUpdatesGetSources) {
  constexpr uint32_t kSsrc = 1111;
  constexpr uint32_t kCsrc = 9001;
  constexpr uint32_t kRtpTimestamp = 12345;

  // Prepare one video frame with per-packet information.
  auto test_frame = std::make_unique<FrameObjectFake>();
  test_frame->SetPayloadType(99);
  test_frame->SetId(0);
  RtpPacketInfos packet_infos;
  {
    RtpPacketInfos::vector_type infos;

    RtpPacketInfo info;
    info.set_ssrc(kSsrc);
    info.set_csrcs({kCsrc});
    info.set_rtp_timestamp(kRtpTimestamp);

    // Four packets with out-of-order receive times in the past.
    info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(5000));
    infos.push_back(info);

    info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(3000));
    infos.push_back(info);

    info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(2000));
    infos.push_back(info);

    info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(4000));
    infos.push_back(info);

    packet_infos = RtpPacketInfos(std::move(infos));
  }
  test_frame->SetPacketInfos(packet_infos);

  // Start receive stream.
  video_receive_stream_->Start();
  EXPECT_THAT(video_receive_stream_->GetSources(), IsEmpty());

  // Render one video frame. Bracket the render with clock reads so the
  // source timestamps can be bounds-checked below.
  int64_t timestamp_ms_min = clock_->TimeInMilliseconds();
  video_receive_stream_->OnCompleteFrame(std::move(test_frame));
  EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
  int64_t timestamp_ms_max = clock_->TimeInMilliseconds();

  // Verify that the per-packet information is passed to the renderer.
  EXPECT_THAT(fake_renderer_.packet_infos(), ElementsAreArray(packet_infos));

  // Verify that the per-packet information also updates `GetSources()`.
  std::vector<RtpSource> sources = video_receive_stream_->GetSources();
  ASSERT_THAT(sources, SizeIs(2));
  {
    auto it = std::find_if(sources.begin(), sources.end(),
                           [](const RtpSource& source) {
                             return source.source_type() == RtpSourceType::SSRC;
                           });
    ASSERT_NE(it, sources.end());

    EXPECT_EQ(it->source_id(), kSsrc);
    EXPECT_EQ(it->source_type(), RtpSourceType::SSRC);
    EXPECT_EQ(it->rtp_timestamp(), kRtpTimestamp);
    EXPECT_GE(it->timestamp_ms(), timestamp_ms_min);
    EXPECT_LE(it->timestamp_ms(), timestamp_ms_max);
  }
  {
    auto it = std::find_if(sources.begin(), sources.end(),
                           [](const RtpSource& source) {
                             return source.source_type() == RtpSourceType::CSRC;
                           });
    ASSERT_NE(it, sources.end());

    EXPECT_EQ(it->source_id(), kCsrc);
    EXPECT_EQ(it->source_type(), RtpSourceType::CSRC);
    EXPECT_EQ(it->rtp_timestamp(), kRtpTimestamp);
    EXPECT_GE(it->timestamp_ms(), timestamp_ms_min);
    EXPECT_LE(it->timestamp_ms(), timestamp_ms_max);
  }
}
// Builds a fake encoded frame of `frame_type` with picture id `picture_id`,
// tagged with the payload type (99) the fixtures configure their decoder for.
std::unique_ptr<FrameObjectFake> MakeFrame(VideoFrameType frame_type,
                                           int picture_id) {
  std::unique_ptr<FrameObjectFake> frame(new FrameObjectFake());
  frame->SetFrameType(frame_type);
  frame->SetId(picture_id);
  frame->SetPayloadType(99);
  return frame;
}
// Installing an encoded-frame callback via SetAndGetRecordingState (with
// generate_key_frame == true) must trigger a keyframe request and deliver
// the next keyframe to the callback as well as to the renderer.
TEST_F(VideoReceiveStreamTestWithFakeDecoder,
       PassesFrameWhenEncodedFramesCallbackSet) {
  testing::MockFunction<void(const RecordableEncodedFrame&)> callback;
  video_receive_stream_->Start();
  // Expect a keyframe request to be generated
  EXPECT_CALL(mock_transport_, SendRtcp);
  EXPECT_CALL(callback, Call);
  video_receive_stream_->SetAndGetRecordingState(
      VideoReceiveStream::RecordingState(callback.AsStdFunction()), true);
  video_receive_stream_->OnCompleteFrame(
      MakeFrame(VideoFrameType::kVideoFrameKey, 0));
  EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
  video_receive_stream_->Stop();
}
// The recording state extracted from one stream instance must be movable
// into a newly created instance without side effects (no extra keyframe
// requests beyond the initial one).
TEST_F(VideoReceiveStreamTestWithFakeDecoder,
       MovesEncodedFrameDispatchStateWhenReCreating) {
  testing::MockFunction<void(const RecordableEncodedFrame&)> callback;
  video_receive_stream_->Start();
  // Expect a key frame request over RTCP.
  EXPECT_CALL(mock_transport_, SendRtcp).Times(1);
  video_receive_stream_->SetAndGetRecordingState(
      VideoReceiveStream::RecordingState(callback.AsStdFunction()), true);
  video_receive_stream_->Stop();
  // Extract the state, then hand it to a fresh stream instance.
  VideoReceiveStream::RecordingState old_state =
      video_receive_stream_->SetAndGetRecordingState(
          VideoReceiveStream::RecordingState(), false);
  ReCreateReceiveStream(std::move(old_state));
  video_receive_stream_->Stop();
}
// Fixture driving the stream under a simulated clock, so keyframe-request
// timing can be tested deterministically. A decode-notifying fake decoder
// lets tests wait until a submitted frame has actually been decoded.
class VideoReceiveStreamTestWithSimulatedClock : public ::testing::Test {
 public:
  // FakeDecoder that invokes a callback after every Decode() call, used to
  // signal the test that a frame has made it through the decoder.
  class FakeDecoder2 : public test::FakeDecoder {
   public:
    explicit FakeDecoder2(std::function<void()> decode_callback)
        : callback_(decode_callback) {}

    int32_t Decode(const EncodedImage& input,
                   bool missing_frames,
                   int64_t render_time_ms) override {
      int32_t result =
          FakeDecoder::Decode(input, missing_frames, render_time_ms);
      callback_();
      return result;
    }

   private:
    std::function<void()> callback_;
  };

  // Builds a config with one VP8 decoder (payload type 99) and the given
  // transport/renderer; static so it can run before member initialization.
  static VideoReceiveStream::Config GetConfig(
      Transport* transport,
      VideoDecoderFactory* decoder_factory,
      rtc::VideoSinkInterface<webrtc::VideoFrame>* renderer) {
    VideoReceiveStream::Config config(transport);
    config.rtp.remote_ssrc = 1111;
    config.rtp.local_ssrc = 2222;
    config.renderer = renderer;
    config.decoder_factory = decoder_factory;
    VideoReceiveStream::Decoder fake_decoder;
    fake_decoder.payload_type = 99;
    fake_decoder.video_format = SdpVideoFormat("VP8");
    config.decoders.push_back(fake_decoder);
    return config;
  }

  VideoReceiveStreamTestWithSimulatedClock()
      : time_controller_(Timestamp::Millis(4711)),
        fake_decoder_factory_([this] {
          return std::make_unique<FakeDecoder2>([this] { OnFrameDecoded(); });
        }),
        process_thread_(time_controller_.CreateProcessThread("ProcessThread")),
        config_(GetConfig(&mock_transport_,
                          &fake_decoder_factory_,
                          &fake_renderer_)),
        call_stats_(time_controller_.GetClock(), process_thread_.get()),
        video_receive_stream_(time_controller_.GetTaskQueueFactory(),
                              &rtp_stream_receiver_controller_,
                              /*num_cores=*/2,
                              &packet_router_,
                              config_.Copy(),
                              process_thread_.get(),
                              &call_stats_,
                              time_controller_.GetClock(),
                              new VCMTiming(time_controller_.GetClock())) {
    video_receive_stream_.Start();
  }

  void OnFrameDecoded() { event_->Set(); }

  // Submits `frame` and blocks until FakeDecoder2 reports it was decoded.
  void PassEncodedFrameAndWait(std::unique_ptr<EncodedFrame> frame) {
    event_ = std::make_unique<rtc::Event>();
    // This call will eventually end up in the Decoded method where the
    // event is set.
    video_receive_stream_.OnCompleteFrame(std::move(frame));
    event_->Wait(rtc::Event::kForever);
  }

 protected:
  GlobalSimulatedTimeController time_controller_;
  test::FunctionVideoDecoderFactory fake_decoder_factory_;
  std::unique_ptr<ProcessThread> process_thread_;
  MockTransport mock_transport_;
  cricket::FakeVideoRenderer fake_renderer_;
  VideoReceiveStream::Config config_;
  CallStats call_stats_;
  PacketRouter packet_router_;
  RtpStreamReceiverController rtp_stream_receiver_controller_;
  webrtc::internal::VideoReceiveStream video_receive_stream_;
  std::unique_ptr<rtc::Event> event_;  // Re-created per PassEncodedFrameAndWait.
};
// After GenerateKeyFrame(), the stream must keep re-requesting a keyframe
// (one RTCP request per kMaxWaitForKeyFrameMs window) while only delta
// frames arrive, and stop requesting once a keyframe is received.
TEST_F(VideoReceiveStreamTestWithSimulatedClock,
       RequestsKeyFramesUntilKeyFrameReceived) {
  // Half the re-request interval, so two ticks cross one request window.
  auto tick = TimeDelta::Millis(
      internal::VideoReceiveStream::kMaxWaitForKeyFrameMs / 2);
  EXPECT_CALL(mock_transport_, SendRtcp).Times(1);
  video_receive_stream_.GenerateKeyFrame();
  PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 0));
  time_controller_.AdvanceTime(tick);
  PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 1));
  testing::Mock::VerifyAndClearExpectations(&mock_transport_);

  // T+200ms: still no key frame received, expect key frame request sent again.
  EXPECT_CALL(mock_transport_, SendRtcp).Times(1);
  time_controller_.AdvanceTime(tick);
  PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 2));
  testing::Mock::VerifyAndClearExpectations(&mock_transport_);

  // T+200ms: now send a key frame - we should not observe new key frame
  // requests after this.
  EXPECT_CALL(mock_transport_, SendRtcp).Times(0);
  PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameKey, 3));
  time_controller_.AdvanceTime(2 * tick);
  PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 4));
}
} // namespace webrtc