diff --git a/webrtc/modules/video_coding/BUILD.gn b/webrtc/modules/video_coding/BUILD.gn
index cab7ad207e..7362a038db 100644
--- a/webrtc/modules/video_coding/BUILD.gn
+++ b/webrtc/modules/video_coding/BUILD.gn
@@ -382,6 +382,8 @@ if (rtc_include_tests) {
       "../../api/video_codecs:video_codecs_api",
       "../../common_video:common_video",
       "../../rtc_base:rtc_base_approved",
+      "../../rtc_base:rtc_task_queue",
+      "../../rtc_base:sequenced_task_checker",
       "../../system_wrappers:system_wrappers",
       "../../test:test_support",
       "../../test:video_test_common",
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
index e0e3ef8105..ac5efd23f2 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
@@ -121,7 +121,8 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder,
                                Stats* stats,
                                IvfFileWriter* encoded_frame_writer,
                                FrameWriter* decoded_frame_writer)
-    : config_(config),
+    : initialized_(false),
+      config_(config),
       encoder_(encoder),
       decoder_(decoder),
       bitrate_allocator_(CreateBitrateAllocator(&config_)),
@@ -132,7 +133,6 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder,
       analysis_frame_writer_(analysis_frame_writer),
       encoded_frame_writer_(encoded_frame_writer),
       decoded_frame_writer_(decoded_frame_writer),
-      initialized_(false),
       last_encoded_frame_num_(-1),
       last_decoded_frame_num_(-1),
       first_key_frame_has_been_excluded_(false),
@@ -152,6 +152,7 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder,
 VideoProcessor::~VideoProcessor() = default;
 
 void VideoProcessor::Init() {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
   RTC_DCHECK(!initialized_) << "VideoProcessor already initialized.";
   initialized_ = true;
 
@@ -198,6 +199,8 @@ void VideoProcessor::Init() {
 }
 
 void VideoProcessor::Release() {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
+
   RTC_CHECK_EQ(encoder_->Release(), WEBRTC_VIDEO_CODEC_OK);
   RTC_CHECK_EQ(decoder_->Release(), WEBRTC_VIDEO_CODEC_OK);
 
@@ -208,6 +211,7 @@ void VideoProcessor::Release() {
 }
 
 void VideoProcessor::ProcessFrame(int frame_number) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
   RTC_DCHECK_EQ(frame_number, frame_infos_.size())
       << "Must process frames in sequence.";
   RTC_DCHECK(initialized_) << "VideoProcessor not initialized.";
@@ -248,6 +252,8 @@
 }
 
 void VideoProcessor::SetRates(int bitrate_kbps, int framerate_fps) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
+
   config_.codec_settings.maxFramerate = framerate_fps;
   int set_rates_result = encoder_->SetRateAllocation(
       bitrate_allocator_->GetAllocation(bitrate_kbps * 1000, framerate_fps),
@@ -259,20 +265,24 @@
 }
 
 int VideoProcessor::GetQpFromEncoder(int frame_number) const {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
   RTC_CHECK_LT(frame_number, frame_infos_.size());
   return frame_infos_[frame_number].qp_encoder;
 }
 
 int VideoProcessor::GetQpFromBitstream(int frame_number) const {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
   RTC_CHECK_LT(frame_number, frame_infos_.size());
   return frame_infos_[frame_number].qp_bitstream;
 }
 
 int VideoProcessor::NumberDroppedFrames() {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
   return num_dropped_frames_;
 }
 
 int VideoProcessor::NumberSpatialResizes() {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
   return num_spatial_resizes_;
 }
 
@@ -280,6 +290,8 @@ void VideoProcessor::FrameEncoded(
     webrtc::VideoCodecType codec,
     const EncodedImage& encoded_image,
     const webrtc::RTPFragmentationHeader* fragmentation) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
+
   // For the highest measurement accuracy of the encode time, the start/stop
   // time recordings should wrap the Encode call as tightly as possible.
   int64_t encode_stop_ns = rtc::TimeNanos();
@@ -411,6 +423,8 @@ void VideoProcessor::FrameEncoded(
 }
 
 void VideoProcessor::FrameDecoded(const VideoFrame& image) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
+
   // For the highest measurement accuracy of the decode time, the start/stop
   // time recordings should wrap the Decode call as tightly as possible.
   int64_t decode_stop_ns = rtc::TimeNanos();
@@ -479,6 +493,8 @@ void VideoProcessor::FrameDecoded(const VideoFrame& image) {
 }
 
 uint32_t VideoProcessor::FrameNumberToTimestamp(int frame_number) const {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
+
   RTC_DCHECK_GE(frame_number, 0);
   const int ticks_per_frame =
       kRtpClockRateHz / config_.codec_settings.maxFramerate;
@@ -486,6 +502,8 @@ uint32_t VideoProcessor::FrameNumberToTimestamp(int frame_number) const {
 }
 
 int VideoProcessor::TimestampToFrameNumber(uint32_t timestamp) const {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
+
   RTC_DCHECK_GT(timestamp, 0);
   const int ticks_per_frame =
       kRtpClockRateHz / config_.codec_settings.maxFramerate;
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor.h b/webrtc/modules/video_coding/codecs/test/videoprocessor.h
index 776a218433..bae877e468 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor.h
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor.h
@@ -26,6 +26,8 @@
 #include "webrtc/rtc_base/buffer.h"
 #include "webrtc/rtc_base/checks.h"
 #include "webrtc/rtc_base/constructormagic.h"
+#include "webrtc/rtc_base/sequenced_task_checker.h"
+#include "webrtc/rtc_base/task_queue.h"
 #include "webrtc/test/testsupport/frame_reader.h"
 #include "webrtc/test/testsupport/frame_writer.h"
 
@@ -196,20 +198,59 @@ class VideoProcessor {
    public:
     explicit VideoProcessorEncodeCompleteCallback(
         VideoProcessor* video_processor)
-        : video_processor_(video_processor) {}
+        : video_processor_(video_processor),
+          task_queue_(rtc::TaskQueue::Current()) {}
+
     Result OnEncodedImage(
         const webrtc::EncodedImage& encoded_image,
         const webrtc::CodecSpecificInfo* codec_specific_info,
         const webrtc::RTPFragmentationHeader* fragmentation) override {
-      // Forward to parent class.
       RTC_CHECK(codec_specific_info);
+
+      if (task_queue_ && !task_queue_->IsCurrent()) {
+        task_queue_->PostTask(std::unique_ptr<rtc::QueuedTask>(
+            new EncodeCallbackTask(video_processor_, encoded_image,
+                                   codec_specific_info, fragmentation)));
+        return Result(Result::OK, 0);
+      }
+
       video_processor_->FrameEncoded(codec_specific_info->codecType,
                                      encoded_image, fragmentation);
       return Result(Result::OK, 0);
     }
 
    private:
+    class EncodeCallbackTask : public rtc::QueuedTask {
+     public:
+      EncodeCallbackTask(VideoProcessor* video_processor,
+                         const webrtc::EncodedImage& encoded_image,
+                         const webrtc::CodecSpecificInfo* codec_specific_info,
+                         const webrtc::RTPFragmentationHeader* fragmentation)
+          : video_processor_(video_processor),
+            buffer_(encoded_image._buffer, encoded_image._length),
+            encoded_image_(encoded_image),
+            codec_specific_info_(*codec_specific_info) {
+        encoded_image_._buffer = buffer_.data();
+        RTC_CHECK(fragmentation);
+        fragmentation_.CopyFrom(*fragmentation);
+      }
+
+      bool Run() override {
+        video_processor_->FrameEncoded(codec_specific_info_.codecType,
+                                       encoded_image_, &fragmentation_);
+        return true;
+      }
+
+     private:
+      VideoProcessor* const video_processor_;
+      rtc::Buffer buffer_;
+      webrtc::EncodedImage encoded_image_;
+      const webrtc::CodecSpecificInfo codec_specific_info_;
+      webrtc::RTPFragmentationHeader fragmentation_;
+    };
+
     VideoProcessor* const video_processor_;
+    rtc::TaskQueue* const task_queue_;
   };
 
   class VideoProcessorDecodeCompleteCallback
@@ -217,16 +258,25 @@ class VideoProcessor {
    public:
     explicit VideoProcessorDecodeCompleteCallback(
         VideoProcessor* video_processor)
-        : video_processor_(video_processor) {}
+        : video_processor_(video_processor),
+          task_queue_(rtc::TaskQueue::Current()) {}
+
     int32_t Decoded(webrtc::VideoFrame& image) override {
-      // Forward to parent class.
+      if (task_queue_ && !task_queue_->IsCurrent()) {
+        task_queue_->PostTask(
+            [this, image]() { video_processor_->FrameDecoded(image); });
+        return 0;
+      }
+
       video_processor_->FrameDecoded(image);
       return 0;
     }
+
     int32_t Decoded(webrtc::VideoFrame& image,
                     int64_t decode_time_ms) override {
       return Decoded(image);
     }
+
     void Decoded(webrtc::VideoFrame& image,
                  rtc::Optional<int32_t> decode_time_ms,
                  rtc::Optional<uint8_t> qp) override {
@@ -235,6 +285,7 @@ class VideoProcessor {
 
    private:
     VideoProcessor* const video_processor_;
+    rtc::TaskQueue* const task_queue_;
   };
 
   // Invoked by the callback adapter when a frame has completed encoding.
@@ -251,7 +302,9 @@ class VideoProcessor {
   uint32_t FrameNumberToTimestamp(int frame_number) const;
   int TimestampToFrameNumber(uint32_t timestamp) const;
 
-  TestConfig config_;
+  bool initialized_ GUARDED_BY(sequence_checker_);
+
+  TestConfig config_ GUARDED_BY(sequence_checker_);
 
   webrtc::VideoEncoder* const encoder_;
   webrtc::VideoDecoder* const decoder_;
@@ -276,26 +329,26 @@ class VideoProcessor {
   IvfFileWriter* const encoded_frame_writer_;
   FrameWriter* const decoded_frame_writer_;
 
-  bool initialized_;
-
   // Frame metadata for all frames that have been added through a call to
   // ProcessFrames(). We need to store this metadata over the course of the
   // test run, to support pipelining HW codecs.
-  std::vector<FrameInfo> frame_infos_;
-  int last_encoded_frame_num_;
-  int last_decoded_frame_num_;
+  std::vector<FrameInfo> frame_infos_ GUARDED_BY(sequence_checker_);
+  int last_encoded_frame_num_ GUARDED_BY(sequence_checker_);
+  int last_decoded_frame_num_ GUARDED_BY(sequence_checker_);
 
   // Keep track of if we have excluded the first key frame from packet loss.
-  bool first_key_frame_has_been_excluded_;
+  bool first_key_frame_has_been_excluded_ GUARDED_BY(sequence_checker_);
 
   // Keep track of the last successfully decoded frame, since we write that
   // frame to disk when decoding fails.
-  rtc::Buffer last_decoded_frame_buffer_;
+  rtc::Buffer last_decoded_frame_buffer_ GUARDED_BY(sequence_checker_);
 
   // Statistics.
   Stats* stats_;
-  int num_dropped_frames_;
-  int num_spatial_resizes_;
+  int num_dropped_frames_ GUARDED_BY(sequence_checker_);
+  int num_spatial_resizes_ GUARDED_BY(sequence_checker_);
+
+  rtc::SequencedTaskChecker sequence_checker_;
 
   RTC_DISALLOW_COPY_AND_ASSIGN(VideoProcessor);
 };
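
Note (editor's sketch, not part of the patch): the change above applies a common rtc_base threading pattern. Each callback adapter remembers the rtc::TaskQueue it was created on, reposts callbacks that arrive on a codec-internal thread back to that queue, and all mutable VideoProcessor state is then touched only on that queue, enforced by rtc::SequencedTaskChecker and GUARDED_BY annotations. Below is a minimal standalone sketch of the same pattern; the class names (StatsCollector, EncodeCompleteForwarder) are hypothetical, and the thread_annotations.h include path is assumed.

// Hypothetical example illustrating the pattern used in the patch above.
#include <cstddef>
#include <memory>

#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/sequenced_task_checker.h"
#include "webrtc/rtc_base/task_queue.h"
#include "webrtc/rtc_base/thread_annotations.h"  // Assumed path for GUARDED_BY.

// State owned by the test task queue; every accessor asserts that it is
// called on the sequence the checker was created on.
class StatsCollector {
 public:
  void OnFrameEncoded(size_t num_bytes) {
    RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
    total_bytes_ += num_bytes;
  }

 private:
  rtc::SequencedTaskChecker sequence_checker_;
  size_t total_bytes_ GUARDED_BY(sequence_checker_) = 0;
};

// Callback adapter: captures the queue it was constructed on and, if the
// encoder fires the callback on some other thread, posts the work (plus a
// copy of everything it needs) back to that queue.
class EncodeCompleteForwarder {
 public:
  explicit EncodeCompleteForwarder(StatsCollector* stats)
      : stats_(stats), task_queue_(rtc::TaskQueue::Current()) {}

  void OnEncoded(size_t num_bytes) {
    if (task_queue_ && !task_queue_->IsCurrent()) {
      // Copy what the deferred work needs into the closure before returning.
      task_queue_->PostTask(
          [this, num_bytes]() { stats_->OnFrameEncoded(num_bytes); });
      return;
    }
    stats_->OnFrameEncoded(num_bytes);
  }

 private:
  StatsCollector* const stats_;
  rtc::TaskQueue* const task_queue_;
};

In the patch itself, EncodeCallbackTask plays the role of the posted closure on the encode path, because the EncodedImage buffer and the RTPFragmentationHeader must be deep-copied before the encoder reuses them.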