diff --git a/webrtc/api/video/video_content_type.h b/webrtc/api/video/video_content_type.h
deleted file mode 100644
index 5c468c079d..0000000000
--- a/webrtc/api/video/video_content_type.h
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_API_VIDEO_VIDEO_CONTENT_TYPE_H_
-#define WEBRTC_API_VIDEO_VIDEO_CONTENT_TYPE_H_
-
-#include <stdint.h>
-
-namespace webrtc {
-
-enum class VideoContentType : uint8_t {
-  UNSPECIFIED = 0,
-  SCREENSHARE = 1,
-  TOTAL_CONTENT_TYPES  // Must be the last value in the enum.
-};
-
-}  // namespace webrtc
-
-#endif  // WEBRTC_API_VIDEO_VIDEO_CONTENT_TYPE_H_
diff --git a/webrtc/common_types.cc b/webrtc/common_types.cc
index 17bb265674..f5b487f6be 100644
--- a/webrtc/common_types.cc
+++ b/webrtc/common_types.cc
@@ -31,9 +31,7 @@ RTPHeaderExtension::RTPHeaderExtension()
       voiceActivity(false),
       audioLevel(0),
       hasVideoRotation(false),
-      videoRotation(kVideoRotation_0),
-      hasVideoContentType(false),
-      videoContentType(VideoContentType::UNSPECIFIED) {}
+      videoRotation(kVideoRotation_0) {}
 
 RTPHeader::RTPHeader()
     : markerBit(false),
diff --git a/webrtc/common_types.h b/webrtc/common_types.h
index 750420196d..e1a4c77d37 100644
--- a/webrtc/common_types.h
+++ b/webrtc/common_types.h
@@ -18,7 +18,6 @@
 #include <string>
 #include <vector>
 
-#include "webrtc/api/video/video_content_type.h"
 #include "webrtc/api/video/video_rotation.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/optional.h"
@@ -717,11 +716,6 @@ struct RTPHeaderExtension {
   bool hasVideoRotation;
   VideoRotation videoRotation;
 
-  // TODO(ilnik): Refactor this and one above to be rtc::Optional() and remove
-  // a corresponding bool flag.
-  bool hasVideoContentType;
-  VideoContentType videoContentType;
-
   PlayoutDelay playout_delay = {-1, -1};
 };
 
diff --git a/webrtc/config.cc b/webrtc/config.cc
index ab2f394fbf..e0c490d1ec 100644
--- a/webrtc/config.cc
+++ b/webrtc/config.cc
@@ -64,10 +64,6 @@ const char* RtpExtension::kTransportSequenceNumberUri =
     "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01";
 const int RtpExtension::kTransportSequenceNumberDefaultId = 5;
 
-const char* RtpExtension::kVideoContentTypeUri =
-    "http://www.webrtc.org/experiments/rtp-hdrext/video-content-type";
-const int RtpExtension::kVideoContentTypeDefaultId = 6;
-
 // This extension allows applications to adaptively limit the playout delay
 // on frames as per the current needs. For example, a gaming application
 // has very different needs on end-to-end delay compared to a video-conference
@@ -89,8 +85,7 @@ bool RtpExtension::IsSupportedForVideo(const std::string& uri) {
          uri == webrtc::RtpExtension::kAbsSendTimeUri ||
          uri == webrtc::RtpExtension::kVideoRotationUri ||
          uri == webrtc::RtpExtension::kTransportSequenceNumberUri ||
-         uri == webrtc::RtpExtension::kPlayoutDelayUri ||
-         uri == webrtc::RtpExtension::kVideoContentTypeUri;
+         uri == webrtc::RtpExtension::kPlayoutDelayUri;
 }
 
 VideoStream::VideoStream()
diff --git a/webrtc/config.h b/webrtc/config.h
index f0039b3a72..f8c9e8b797 100644
--- a/webrtc/config.h
+++ b/webrtc/config.h
@@ -88,10 +88,6 @@ struct RtpExtension {
   static const char* kVideoRotationUri;
   static const int kVideoRotationDefaultId;
 
-  // Header extension for video content type. E.g. default or screenshare.
-  static const char* kVideoContentTypeUri;
-  static const int kVideoContentTypeDefaultId;
-
   // Header extension for transport sequence number, see url for details:
   // http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions
   static const char* kTransportSequenceNumberUri;
diff --git a/webrtc/media/engine/webrtcvideoengine2_unittest.cc b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
index 37e227f931..f422ffcb6d 100644
--- a/webrtc/media/engine/webrtcvideoengine2_unittest.cc
+++ b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
@@ -3852,7 +3852,7 @@ TEST_F(WebRtcVideoChannel2Test, ReceiveDifferentUnsignaledSsrc) {
   EXPECT_EQ(rtpHeader.ssrc, recv_stream->GetConfig().rtp.remote_ssrc);
   // Verify that the receive stream sinks to a renderer.
   webrtc::VideoFrame video_frame2(CreateBlackFrameBuffer(4, 4), 200, 0,
-      webrtc::kVideoRotation_0);
+                                  webrtc::kVideoRotation_0);
   recv_stream->InjectFrame(video_frame2);
   EXPECT_EQ(2, renderer.num_rendered_frames());
 
@@ -3869,7 +3869,7 @@ TEST_F(WebRtcVideoChannel2Test, ReceiveDifferentUnsignaledSsrc) {
   EXPECT_EQ(rtpHeader.ssrc, recv_stream->GetConfig().rtp.remote_ssrc);
   // Verify that the receive stream sinks to a renderer.
   webrtc::VideoFrame video_frame3(CreateBlackFrameBuffer(4, 4), 300, 0,
-      webrtc::kVideoRotation_0);
+                                  webrtc::kVideoRotation_0);
   recv_stream->InjectFrame(video_frame3);
   EXPECT_EQ(3, renderer.num_rendered_frames());
 #endif
diff --git a/webrtc/modules/include/module_common_types.h b/webrtc/modules/include/module_common_types.h
index ffa07980a0..a16c9392a2 100644
--- a/webrtc/modules/include/module_common_types.h
+++ b/webrtc/modules/include/module_common_types.h
@@ -58,8 +58,6 @@ struct RTPVideoHeader {
 
   PlayoutDelay playout_delay;
 
-  VideoContentType content_type;
-
   union {
     bool is_first_packet_in_frame;
     RTC_DEPRECATED bool isFirstPacket;  // first packet in frame
@@ -89,7 +87,7 @@ class RTPFragmentationHeader {
         fragmentationOffset(NULL),
         fragmentationLength(NULL),
         fragmentationTimeDiff(NULL),
-        fragmentationPlType(NULL) {}
+        fragmentationPlType(NULL) {};
 
   ~RTPFragmentationHeader() {
     delete[] fragmentationOffset;
diff --git a/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
index 56aa9bd777..ddfec4d7cd 100644
--- a/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
+++ b/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
@@ -76,8 +76,7 @@ enum RTPExtensionType {
   kRtpExtensionVideoRotation,
   kRtpExtensionTransportSequenceNumber,
   kRtpExtensionPlayoutDelay,
-  kRtpExtensionVideoContentType,
-  kRtpExtensionNumberOfExtensions  // Must be the last entity in the enum.
+  kRtpExtensionNumberOfExtensions,
 };
 
 enum RTCPAppSubTypes { kAppSubtypeBwe = 0x00 };
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc b/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
index 1d3925931a..bbbb1438dc 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
@@ -39,7 +39,6 @@ constexpr ExtensionInfo kExtensions[] = {
     CreateExtensionInfo<VideoOrientation>(),
    CreateExtensionInfo<TransportSequenceNumber>(),
     CreateExtensionInfo<PlayoutDelayLimits>(),
-    CreateExtensionInfo<VideoContentTypeExtension>(),
 };
 
 // Because of kRtpExtensionNone, NumberOfExtension is 1 bigger than the actual
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
index 8141f02290..1b311e6419 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
@@ -215,33 +215,4 @@ bool PlayoutDelayLimits::Write(uint8_t* data,
   return true;
 }
 
-// Video Content Type.
-//
-// E.g. default video or screenshare.
-//
-//    0                   1
-//    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
-//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-//   |  ID   | len=0 | Content type  |
-//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-constexpr RTPExtensionType VideoContentTypeExtension::kId;
-constexpr uint8_t VideoContentTypeExtension::kValueSizeBytes;
-constexpr const char* VideoContentTypeExtension::kUri;
-
-bool VideoContentTypeExtension::Parse(rtc::ArrayView<const uint8_t> data,
-                                      VideoContentType* content_type) {
-  if (data.size() == 1 &&
-      data[0] < static_cast<uint8_t>(VideoContentType::TOTAL_CONTENT_TYPES)) {
-    *content_type = static_cast<VideoContentType>(data[0]);
-    return true;
-  }
-  return false;
-}
-
-bool VideoContentTypeExtension::Write(uint8_t* data,
-                                      VideoContentType content_type) {
-  data[0] = static_cast<uint8_t>(content_type);
-  return true;
-}
-
 }  // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
index 0d30848f96..543688c75e 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
@@ -12,7 +12,6 @@
 
 #include <stdint.h>
 
-#include "webrtc/api/video/video_content_type.h"
 #include "webrtc/api/video/video_rotation.h"
 #include "webrtc/base/array_view.h"
 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@@ -99,17 +98,5 @@ class PlayoutDelayLimits {
   static bool Write(uint8_t* data, const PlayoutDelay& playout_delay);
 };
 
-class VideoContentTypeExtension {
- public:
-  static constexpr RTPExtensionType kId = kRtpExtensionVideoContentType;
-  static constexpr uint8_t kValueSizeBytes = 1;
-  static constexpr const char* kUri =
-      "http://www.webrtc.org/experiments/rtp-hdrext/video-content-type";
-
-  static bool Parse(rtc::ArrayView<const uint8_t> data,
-                    VideoContentType* content_type);
-  static bool Write(uint8_t* data, VideoContentType content_type);
-};
-
 }  // namespace webrtc
 #endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_HEADER_EXTENSIONS_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_packet.cc b/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
index 2e875289de..7a7c45d383 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
@@ -169,9 +169,6 @@ void Packet::GetHeader(RTPHeader* header) const {
       &header->extension.voiceActivity, &header->extension.audioLevel);
   header->extension.hasVideoRotation =
       GetExtension<VideoOrientation>(&header->extension.videoRotation);
-  header->extension.hasVideoContentType =
-      GetExtension<VideoContentTypeExtension>(
-          &header->extension.videoContentType);
 }
 
 size_t Packet::headers_size() const {
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc b/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
index debe836d8d..d6c5e5c29b 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
@@ -90,7 +90,6 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
   rtp_header->frameType = parsed_payload.frame_type;
   rtp_header->type = parsed_payload.type;
   rtp_header->type.Video.rotation = kVideoRotation_0;
-  rtp_header->type.Video.content_type = VideoContentType::UNSPECIFIED;
 
   // Retrieve the video rotation information.
   if (rtp_header->header.extension.hasVideoRotation) {
@@ -98,11 +97,6 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
         rtp_header->header.extension.videoRotation;
   }
 
-  if (rtp_header->header.extension.hasVideoContentType) {
-    rtp_header->type.Video.content_type =
-        rtp_header->header.extension.videoContentType;
-  }
-
   rtp_header->type.Video.playout_delay =
       rtp_header->header.extension.playout_delay;
 
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index d6c54d033c..f77e59c16e 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -40,8 +40,6 @@ RTPExtensionType StringToRtpExtensionType(const std::string& extension) {
     return kRtpExtensionTransportSequenceNumber;
   if (extension == RtpExtension::kPlayoutDelayUri)
     return kRtpExtensionPlayoutDelay;
-  if (extension == RtpExtension::kVideoContentTypeUri)
-    return kRtpExtensionVideoContentType;
   RTC_NOTREACHED() << "Looking up unsupported RTP extension.";
   return kRtpExtensionNone;
 }
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
index 75e2dc17d1..66ee51f7cf 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
@@ -203,7 +203,6 @@ class RtpRtcpImplTest : public ::testing::Test {
     rtp_video_header.width = codec_.width;
     rtp_video_header.height = codec_.height;
     rtp_video_header.rotation = kVideoRotation_0;
-    rtp_video_header.content_type = VideoContentType::UNSPECIFIED;
     rtp_video_header.playout_delay = {-1, -1};
     rtp_video_header.is_first_packet_in_frame = true;
     rtp_video_header.simulcastIdx = 0;
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
index b89aefef53..849ed78ead 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -324,12 +324,6 @@ bool RTPSenderVideo::SendVideo(RtpVideoCodecTypes video_type,
         current_rotation != kVideoRotation_0)
       rtp_header->SetExtension<VideoOrientation>(current_rotation);
     last_rotation_ = current_rotation;
-    // Report content type only for key frames.
-    if (frame_type == kVideoFrameKey &&
-        video_header->content_type != VideoContentType::UNSPECIFIED) {
-      rtp_header->SetExtension<VideoContentTypeExtension>(
-          video_header->content_type);
-    }
   }
 
   // FEC settings.
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_utility.cc b/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
index 1c12c89c92..def431f170 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
@@ -254,10 +254,6 @@ bool RtpHeaderParser::Parse(RTPHeader* header,
   header->extension.playout_delay.min_ms = -1;
   header->extension.playout_delay.max_ms = -1;
 
-  // May not be present in packet.
-  header->extension.hasVideoContentType = false;
-  header->extension.videoContentType = VideoContentType::UNSPECIFIED;
-
   if (X) {
     /* RTP header extension, RFC 3550.
      0                   1                   2                   3
@@ -450,25 +446,6 @@ void RtpHeaderParser::ParseOneByteExtensionHeader(
             max_playout_delay * PlayoutDelayLimits::kGranularityMs;
         break;
       }
-      case kRtpExtensionVideoContentType: {
-        if (len != 0) {
-          LOG(LS_WARNING) << "Incorrect video content type len: " << len;
-          return;
-        }
-        //    0                   1
-        //    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
-        //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-        //   |  ID   | len=0 | Content type  |
-        //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-        if (ptr[0] <
-            static_cast<uint8_t>(VideoContentType::TOTAL_CONTENT_TYPES)) {
-          header->extension.hasVideoContentType = true;
-          header->extension.videoContentType =
-              static_cast<VideoContentType>(ptr[0]);
-        }
-        break;
-      }
       case kRtpExtensionNone:
       case kRtpExtensionNumberOfExtensions: {
         RTC_NOTREACHED() << "Invalid extension type: " << type;
diff --git a/webrtc/modules/video_capture/video_capture_impl.cc b/webrtc/modules/video_capture/video_capture_impl.cc
index 0afa9aabee..a0908f027b 100644
--- a/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/webrtc/modules/video_capture/video_capture_impl.cc
@@ -32,8 +32,9 @@ rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
   return implementation;
 }
 
-const char* VideoCaptureImpl::CurrentDeviceName() const {
-  return _deviceUniqueId;
+const char* VideoCaptureImpl::CurrentDeviceName() const
+{
+    return _deviceUniqueId;
 }
 
 // static
@@ -135,13 +136,14 @@ int32_t VideoCaptureImpl::IncomingFrame(
 
     // Not encoded, convert to I420.
     const VideoType commonVideoType =
-        RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
+              RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
 
     if (frameInfo.rawType != kVideoMJPEG &&
-        CalcBufferSize(commonVideoType, width, abs(height)) !=
-            videoFrameLength) {
-      LOG(LS_ERROR) << "Wrong incoming frame length.";
-      return -1;
+        CalcBufferSize(commonVideoType, width,
+                       abs(height)) != videoFrameLength)
+    {
+        LOG(LS_ERROR) << "Wrong incoming frame length.";
+        return -1;
     }
 
     int stride_y = width;
@@ -172,14 +174,16 @@ int32_t VideoCaptureImpl::IncomingFrame(
         commonVideoType, videoFrame, 0, 0,  // No cropping
         width, height, videoFrameLength,
         apply_rotation ? _rotateFrame : kVideoRotation_0, buffer.get());
-    if (conversionResult < 0) {
+    if (conversionResult < 0)
+    {
       LOG(LS_ERROR) << "Failed to convert capture frame from type "
                     << frameInfo.rawType << "to I420.";
-      return -1;
+        return -1;
     }
 
-    VideoFrame captureFrame(buffer, 0, rtc::TimeMillis(),
-                            !apply_rotation ? _rotateFrame : kVideoRotation_0);
+    VideoFrame captureFrame(
+        buffer, 0, rtc::TimeMillis(),
+        !apply_rotation ? _rotateFrame : kVideoRotation_0);
     captureFrame.set_ntp_time_ms(captureTime);
 
     DeliverCapturedFrame(captureFrame);
@@ -201,40 +205,52 @@ bool VideoCaptureImpl::SetApplyRotation(bool enable) {
   return true;
 }
 
-void VideoCaptureImpl::UpdateFrameCount() {
-  if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0) {
-    // first no shift
-  } else {
-    // shift
-    for (int i = (kFrameRateCountHistorySize - 2); i >= 0; --i) {
-      _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i];
+void VideoCaptureImpl::UpdateFrameCount()
+{
+    if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0)
+    {
+        // first no shift
     }
-  }
-  _incomingFrameTimesNanos[0] = rtc::TimeNanos();
+    else
+    {
+        // shift
+        for (int i = (kFrameRateCountHistorySize - 2); i >= 0; i--)
+        {
+            _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i];
+        }
+    }
+    _incomingFrameTimesNanos[0] = rtc::TimeNanos();
 }
 
-uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns) {
-  int32_t num = 0;
-  int32_t nrOfFrames = 0;
-  for (num = 1; num < (kFrameRateCountHistorySize - 1); ++num) {
-    if (_incomingFrameTimesNanos[num] <= 0 ||
-        (now_ns - _incomingFrameTimesNanos[num]) /
-                rtc::kNumNanosecsPerMillisec >
-            kFrameRateHistoryWindowMs) {  // don't use data older than 2sec
-      break;
-    } else {
-      nrOfFrames++;
+uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns)
+{
+    int32_t num = 0;
+    int32_t nrOfFrames = 0;
+    for (num = 1; num < (kFrameRateCountHistorySize - 1); num++)
+    {
+        if (_incomingFrameTimesNanos[num] <= 0 ||
+            (now_ns - _incomingFrameTimesNanos[num]) /
+                rtc::kNumNanosecsPerMillisec >
+            kFrameRateHistoryWindowMs) // don't use data older than 2sec
+        {
+            break;
+        }
+        else
+        {
+            nrOfFrames++;
+        }
     }
-  }
-  if (num > 1) {
-    int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) /
-                   rtc::kNumNanosecsPerMillisec;
-    if (diff > 0) {
-      return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f);
+    if (num > 1)
+    {
+        int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) /
+                       rtc::kNumNanosecsPerMillisec;
+        if (diff > 0)
+        {
+            return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f);
+        }
     }
-  }
-  return nrOfFrames;
+    return nrOfFrames;
 }
 
 }  // namespace videocapturemodule
 }  // namespace webrtc
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
index 315d347c69..84bfafb8b0 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
@@ -367,9 +367,6 @@ int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame,
   encoded_image_.ntp_time_ms_ = input_frame.ntp_time_ms();
   encoded_image_.capture_time_ms_ = input_frame.render_time_ms();
   encoded_image_.rotation_ = input_frame.rotation();
-  encoded_image_.content_type_ = (mode_ == kScreensharing)
-                                     ? VideoContentType::SCREENSHARE
-                                     : VideoContentType::UNSPECIFIED;
   encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType);
 
   // Split encoded image up into fragments. This also updates |encoded_image_|.
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index 66db72c292..41fd7ff384 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -878,9 +878,6 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image) {
       encoded_images_[encoder_idx].capture_time_ms_ =
          input_image.render_time_ms();
       encoded_images_[encoder_idx].rotation_ = input_image.rotation();
-      encoded_images_[encoder_idx].content_type_ =
-          (codec_.mode == kScreensharing) ? VideoContentType::SCREENSHARE
-                                          : VideoContentType::UNSPECIFIED;
       int qp = -1;
       vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64,
                         &qp);
diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
index 4b0f99e324..4d7df867a2 100644
--- a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -706,9 +706,6 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
   encoded_image_._timeStamp = input_image_->timestamp();
   encoded_image_.capture_time_ms_ = input_image_->render_time_ms();
   encoded_image_.rotation_ = input_image_->rotation();
-  encoded_image_.content_type_ = (codec_.mode == kScreensharing)
-                                     ? VideoContentType::SCREENSHARE
-                                     : VideoContentType::UNSPECIFIED;
   encoded_image_._encodedHeight = raw_->d_h;
   encoded_image_._encodedWidth = raw_->d_w;
   int qp = -1;
diff --git a/webrtc/modules/video_coding/encoded_frame.cc b/webrtc/modules/video_coding/encoded_frame.cc
index 1807fa5c4c..fb12c5bc68 100644
--- a/webrtc/modules/video_coding/encoded_frame.cc
+++ b/webrtc/modules/video_coding/encoded_frame.cc
@@ -87,7 +87,6 @@ void VCMEncodedFrame::Reset() {
   _codecSpecificInfo.codecType = kVideoCodecUnknown;
   _codec = kVideoCodecUnknown;
   rotation_ = kVideoRotation_0;
-  content_type_ = VideoContentType::UNSPECIFIED;
   _rotation_set = false;
 }
 
diff --git a/webrtc/modules/video_coding/encoded_frame.h b/webrtc/modules/video_coding/encoded_frame.h
index 96f9d00d06..840cd2056d 100644
--- a/webrtc/modules/video_coding/encoded_frame.h
+++ b/webrtc/modules/video_coding/encoded_frame.h
@@ -77,12 +77,8 @@ class VCMEncodedFrame : protected EncodedImage {
    */
   VideoRotation rotation() const { return rotation_; }
   /**
-   * Get video content type
-   */
-  VideoContentType contentType() const { return content_type_; }
-  /**
-   * True if this frame is complete, false otherwise
-   */
+  *   True if this frame is complete, false otherwise
+  */
   bool Complete() const { return _completeFrame; }
   /**
   * True if there's a frame missing before this frame
diff --git a/webrtc/modules/video_coding/frame_buffer.cc b/webrtc/modules/video_coding/frame_buffer.cc
index 5ea12dc0e6..1439a17ed5 100644
--- a/webrtc/modules/video_coding/frame_buffer.cc
+++ b/webrtc/modules/video_coding/frame_buffer.cc
@@ -163,7 +163,6 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(
     RTC_DCHECK(!_rotation_set);
     rotation_ = packet.video_header.rotation;
     _rotation_set = true;
-    content_type_ = packet.video_header.content_type;
   }
 
   if (packet.is_first_packet_in_frame) {
diff --git a/webrtc/modules/video_coding/frame_object.cc b/webrtc/modules/video_coding/frame_object.cc
index 9e5ce09770..70b0a02868 100644
--- a/webrtc/modules/video_coding/frame_object.cc
+++ b/webrtc/modules/video_coding/frame_object.cc
@@ -79,7 +79,6 @@ RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer,
   // (HEVC)).
   rotation_ = last_packet->video_header.rotation;
   _rotation_set = true;
-  content_type_ = last_packet->video_header.content_type;
 }
 
 RtpFrameObject::~RtpFrameObject() {
diff --git a/webrtc/modules/video_coding/generic_decoder.cc b/webrtc/modules/video_coding/generic_decoder.cc
index f5d9cfe073..2121ab6306 100644
--- a/webrtc/modules/video_coding/generic_decoder.cc
+++ b/webrtc/modules/video_coding/generic_decoder.cc
@@ -87,7 +87,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage,
   decodedImage.set_timestamp_us(
       frameInfo->renderTimeMs * rtc::kNumMicrosecsPerMillisec);
   decodedImage.set_rotation(frameInfo->rotation);
-  _receiveCallback->FrameToRender(decodedImage, qp, frameInfo->content_type);
+  _receiveCallback->FrameToRender(decodedImage, qp);
 }
 
 int32_t VCMDecodedFrameCallback::ReceivedDecodedReferenceFrame(
@@ -131,8 +131,7 @@ VCMGenericDecoder::VCMGenericDecoder(VideoDecoder* decoder, bool isExternal)
       _decoder(decoder),
       _codecType(kVideoCodecUnknown),
       _isExternal(isExternal),
-      _keyFrameDecoded(false),
-      _last_keyframe_content_type(VideoContentType::UNSPECIFIED) {}
+      _keyFrameDecoded(false) {}
 
 VCMGenericDecoder::~VCMGenericDecoder() {}
 
@@ -150,15 +149,6 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, int64_t nowMs) {
   _frameInfos[_nextFrameInfoIdx].decodeStartTimeMs = nowMs;
   _frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs();
   _frameInfos[_nextFrameInfoIdx].rotation = frame.rotation();
-  // Set correctly only for key frames. Thus, use latest key frame
-  // content type. If the corresponding key frame was lost, decode will fail
-  // and content type will be ignored.
-  if (frame.FrameType() == kVideoFrameKey) {
-    _frameInfos[_nextFrameInfoIdx].content_type = frame.contentType();
-    _last_keyframe_content_type = frame.contentType();
-  } else {
-    _frameInfos[_nextFrameInfoIdx].content_type = _last_keyframe_content_type;
-  }
   _callback->Map(frame.TimeStamp(), &_frameInfos[_nextFrameInfoIdx]);
 
   _nextFrameInfoIdx = (_nextFrameInfoIdx + 1) % kDecoderFrameMemoryLength;
diff --git a/webrtc/modules/video_coding/generic_decoder.h b/webrtc/modules/video_coding/generic_decoder.h
index 71b8d81c74..891ec893ff 100644
--- a/webrtc/modules/video_coding/generic_decoder.h
+++ b/webrtc/modules/video_coding/generic_decoder.h
@@ -30,7 +30,6 @@ struct VCMFrameInformation {
   int64_t decodeStartTimeMs;
   void* userData;
   VideoRotation rotation;
-  VideoContentType content_type;
 };
 
 class VCMDecodedFrameCallback : public DecodedImageCallback {
@@ -110,7 +109,6 @@ class VCMGenericDecoder {
   VideoCodecType _codecType;
   bool _isExternal;
   bool _keyFrameDecoded;
-  VideoContentType _last_keyframe_content_type;
 };
 
 }  // namespace webrtc
diff --git a/webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h b/webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h
index 21b154f8d1..8a53c1d31a 100644
--- a/webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h
+++ b/webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h
@@ -33,8 +33,7 @@ class MockVCMReceiveCallback : public VCMReceiveCallback {
   MockVCMReceiveCallback() {}
   virtual ~MockVCMReceiveCallback() {}
 
-  MOCK_METHOD3(FrameToRender,
-               int32_t(VideoFrame&, rtc::Optional<uint8_t>, VideoContentType));
+  MOCK_METHOD2(FrameToRender, int32_t(VideoFrame&, rtc::Optional<uint8_t>));
   MOCK_METHOD1(ReceivedDecodedReferenceFrame, int32_t(const uint64_t));
   MOCK_METHOD1(OnIncomingPayloadType, void(int));
   MOCK_METHOD1(OnDecoderImplementationName, void(const char*));
diff --git a/webrtc/modules/video_coding/include/video_coding_defines.h b/webrtc/modules/video_coding/include/video_coding_defines.h
index 3ae9981d9c..4ed80a6f81 100644
--- a/webrtc/modules/video_coding/include/video_coding_defines.h
+++ b/webrtc/modules/video_coding/include/video_coding_defines.h
@@ -62,8 +62,7 @@ struct VCMFrameCount {
 class VCMReceiveCallback {
  public:
   virtual int32_t FrameToRender(VideoFrame& videoFrame,  // NOLINT
-                                rtc::Optional<uint8_t> qp,
-                                VideoContentType content_type) = 0;
+                                rtc::Optional<uint8_t> qp) = 0;
   virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId) {
     return -1;
   }
diff --git a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
index 6425015efe..44acccf630 100644
--- a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
+++ b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
@@ -1042,10 +1042,6 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
       image->_timeStamp = output_timestamp_;
      image->capture_time_ms_ = output_render_time_ms_;
       image->rotation_ = output_rotation_;
-      image->content_type_ =
-          (codec_mode_ == webrtc::VideoCodecMode::kScreensharing)
-              ? webrtc::VideoContentType::SCREENSHARE
-              : webrtc::VideoContentType::UNSPECIFIED;
       image->_frameType =
           (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
       image->_completeFrame = true;
diff --git a/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.h b/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.h
index 09aa7db9ae..5de9a242df 100644
--- a/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.h
+++ b/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.h
@@ -85,7 +85,6 @@ class H264VideoToolboxEncoder : public H264Encoder {
   uint32_t encoder_bitrate_bps_;
   int32_t width_;
   int32_t height_;
-  VideoCodecMode mode_;
   const CFStringRef profile_;
   H264BitstreamParser h264_bitstream_parser_;
 
diff --git a/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.mm b/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.mm
index bc46b35364..e50b225f2d 100644
--- a/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.mm
+++ b/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.mm
@@ -364,7 +364,6 @@ int H264VideoToolboxEncoder::InitEncode(const VideoCodec* codec_settings,
 
   width_ = codec_settings->width;
   height_ = codec_settings->height;
-  mode_ = codec_settings->mode;
   // We can only set average bitrate on the HW encoder.
   target_bitrate_bps_ = codec_settings->startBitrate;
   bitrate_adjuster_.SetTargetBitrateBps(target_bitrate_bps_);
@@ -723,9 +722,6 @@ void H264VideoToolboxEncoder::OnEncodedFrame(
   frame._timeStamp = timestamp;
   frame.rotation_ = rotation;
 
-  frame.content_type_ =
-      (mode_ == kScreensharing) ? VideoContentType::SCREENSHARE : VideoContentType::UNSPECIFIED;
-
   h264_bitstream_parser_.ParseBitstream(buffer->data(), buffer->size());
   h264_bitstream_parser_.GetLastSliceQp(&frame.qp_);
 
diff --git a/webrtc/test/call_test.cc b/webrtc/test/call_test.cc
index 6ec3fda86d..5c0b42ca46 100644
--- a/webrtc/test/call_test.cc
+++ b/webrtc/test/call_test.cc
@@ -208,8 +208,6 @@ void CallTest::CreateSendConfig(size_t num_video_streams,
     video_send_config_.rtp.extensions.push_back(
         RtpExtension(RtpExtension::kTransportSequenceNumberUri,
                      kTransportSequenceNumberExtensionId));
-    video_send_config_.rtp.extensions.push_back(RtpExtension(
-        RtpExtension::kVideoContentTypeUri, kVideoContentTypeExtensionId));
     FillEncoderConfiguration(num_video_streams, &video_encoder_config_);
 
     for (size_t i = 0; i < num_video_streams; ++i)
diff --git a/webrtc/test/constants.cc b/webrtc/test/constants.cc
index a789cc0df6..43f9adc334 100644
--- a/webrtc/test/constants.cc
+++ b/webrtc/test/constants.cc
@@ -17,7 +17,5 @@ const int kTOffsetExtensionId = 6;
 const int kAbsSendTimeExtensionId = 7;
 const int kTransportSequenceNumberExtensionId = 8;
 const int kVideoRotationExtensionId = 9;
-const int kVideoContentTypeExtensionId = 10;
-
 }  // namespace test
 }  // namespace webrtc
diff --git a/webrtc/test/constants.h b/webrtc/test/constants.h
index d0f73d0fa9..1b5b0cb105 100644
--- a/webrtc/test/constants.h
+++ b/webrtc/test/constants.h
@@ -15,6 +15,5 @@ extern const int kTOffsetExtensionId;
 extern const int kAbsSendTimeExtensionId;
 extern const int kTransportSequenceNumberExtensionId;
 extern const int kVideoRotationExtensionId;
-extern const int kVideoContentTypeExtensionId;
 }  // namespace test
 }  // namespace webrtc
diff --git a/webrtc/test/fake_encoder.cc b/webrtc/test/fake_encoder.cc
index fce12c61a8..1db93eb40e 100644
--- a/webrtc/test/fake_encoder.cc
+++ b/webrtc/test/fake_encoder.cc
@@ -61,7 +61,6 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image,
   int max_target_bitrate_kbps;
   int64_t last_encode_time_ms;
   size_t num_encoded_bytes;
-  VideoCodecMode mode;
   {
     rtc::CritScope cs(&crit_sect_);
     max_framerate = config_.maxFramerate;
@@ -74,7 +73,6 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image,
     max_target_bitrate_kbps = max_target_bitrate_kbps_;
     last_encode_time_ms = last_encode_time_ms_;
     num_encoded_bytes = sizeof(encoded_buffer_);
-    mode = config_.mode;
   }
 
   int64_t time_now_ms = clock_->TimeInMilliseconds();
@@ -144,9 +142,6 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image,
       encoded._encodedWidth = simulcast_streams[i].width;
       encoded._encodedHeight = simulcast_streams[i].height;
       encoded.rotation_ = input_image.rotation();
-      encoded.content_type_ = (mode == kScreensharing)
-                                  ? VideoContentType::SCREENSHARE
-                                  : VideoContentType::UNSPECIFIED;
       specifics.codec_name = ImplementationName();
       RTC_DCHECK(callback);
       if (callback->OnEncodedImage(encoded, &specifics, nullptr).error !=
diff --git a/webrtc/test/fuzzers/rtp_packet_fuzzer.cc b/webrtc/test/fuzzers/rtp_packet_fuzzer.cc
index 7cf65cf655..613f125e33 100644
--- a/webrtc/test/fuzzers/rtp_packet_fuzzer.cc
+++ b/webrtc/test/fuzzers/rtp_packet_fuzzer.cc
@@ -85,10 +85,6 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
         PlayoutDelay playout;
         packet.GetExtension<PlayoutDelayLimits>(&playout);
         break;
-      case kRtpExtensionVideoContentType:
-        VideoContentType content_type;
-        packet.GetExtension<VideoContentTypeExtension>(&content_type);
-        break;
     }
   }
 }
diff --git a/webrtc/video/end_to_end_tests.cc b/webrtc/video/end_to_end_tests.cc
index f171c5b388..f31a68e17a 100644
--- a/webrtc/video/end_to_end_tests.cc
+++ b/webrtc/video/end_to_end_tests.cc
@@ -2652,8 +2652,7 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx,
 
   EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.CurrentDelayInMs"));
   EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs"));
-  EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayInMs"));
-  EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayMaxInMs"));
+  EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayInMs"));
   EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.RenderSqrtPixelsPerSecond"));
 
   EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EncodeTimeInMs"));
@@ -2693,118 +2692,6 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx,
             metrics::NumSamples("WebRTC.Video.ReceivedFecPacketsInPercent"));
 }
 
-TEST_F(EndToEndTest, ContentTypeSwitches) {
-  class StatsObserver : public test::BaseTest,
-                        public rtc::VideoSinkInterface<VideoFrame> {
-   public:
-    StatsObserver() : BaseTest(kLongTimeoutMs), num_frames_received_(0) {}
-
-    bool ShouldCreateReceivers() const override { return true; }
-
-    void OnFrame(const VideoFrame& video_frame) override {
-      // The RTT is needed to estimate |ntp_time_ms| which is used by
-      // end-to-end delay stats. Therefore, start counting received frames once
-      // |ntp_time_ms| is valid.
-      if (video_frame.ntp_time_ms() > 0 &&
-          Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() >=
-              video_frame.ntp_time_ms()) {
-        rtc::CritScope lock(&crit_);
-        ++num_frames_received_;
-      }
-    }
-
-    Action OnSendRtp(const uint8_t* packet, size_t length) override {
-      if (MinNumberOfFramesReceived())
-        observation_complete_.Set();
-      return SEND_PACKET;
-    }
-
-    bool MinNumberOfFramesReceived() const {
-      const int kMinRequiredHistogramSamples = 200;
-      rtc::CritScope lock(&crit_);
-      return num_frames_received_ > kMinRequiredHistogramSamples;
-    }
-
-    // May be called several times.
-    void PerformTest() override {
-      EXPECT_TRUE(Wait()) << "Timed out waiting for enough packets.";
-      // Reset frame counter so next PerformTest() call will do something.
-      {
-        rtc::CritScope lock(&crit_);
-        num_frames_received_ = 0;
-      }
-    }
-
-    rtc::CriticalSection crit_;
-    int num_frames_received_ GUARDED_BY(&crit_);
-  } test;
-
-  metrics::Reset();
-
-  Call::Config send_config(test.GetSenderCallConfig());
-  CreateSenderCall(send_config);
-  Call::Config recv_config(test.GetReceiverCallConfig());
-  CreateReceiverCall(recv_config);
-  receive_transport_.reset(test.CreateReceiveTransport());
-  send_transport_.reset(test.CreateSendTransport(sender_call_.get()));
-  send_transport_->SetReceiver(receiver_call_->Receiver());
-  receive_transport_->SetReceiver(sender_call_->Receiver());
-  receiver_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
-  CreateSendConfig(1, 0, 0, send_transport_.get());
-  CreateMatchingReceiveConfigs(receive_transport_.get());
-
-  // Modify send and receive configs.
-  video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
-  video_receive_configs_[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
-  video_receive_configs_[0].renderer = &test;
-  // RTT needed for RemoteNtpTimeEstimator for the receive stream.
-  video_receive_configs_[0].rtp.rtcp_xr.receiver_reference_time_report = true;
-  // Start with realtime video.
-  video_encoder_config_.content_type =
-      VideoEncoderConfig::ContentType::kRealtimeVideo;
-  // Second encoder config for the second part of the test uses screenshare
-  VideoEncoderConfig encoder_config_with_screenshare_ =
-      video_encoder_config_.Copy();
-  encoder_config_with_screenshare_.content_type =
-      VideoEncoderConfig::ContentType::kScreen;
-
-  CreateVideoStreams();
-  CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
-                               kDefaultHeight);
-  Start();
-
-  test.PerformTest();
-
-  // Replace old send stream.
-  sender_call_->DestroyVideoSendStream(video_send_stream_);
-  video_send_stream_ = sender_call_->CreateVideoSendStream(
-      video_send_config_.Copy(), encoder_config_with_screenshare_.Copy());
-  video_send_stream_->SetSource(
-      frame_generator_capturer_.get(),
-      VideoSendStream::DegradationPreference::kBalanced);
-  video_send_stream_->Start();
-
-  // Continue to run test but now with screenshare.
-  test.PerformTest();
-
-  send_transport_->StopSending();
-  receive_transport_->StopSending();
-  Stop();
-  DestroyStreams();
-  DestroyCalls();
-  // Delete the call for Call stats to be reported.
-  sender_call_.reset();
-  receiver_call_.reset();
-
-  // Verify that stats have been updated for both screenshare and video.
-  EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayInMs"));
-  EXPECT_EQ(1,
-            metrics::NumSamples("WebRTC.Video.Screenshare.EndToEndDelayInMs"));
-  EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayMaxInMs"));
-  EXPECT_EQ(
-      1, metrics::NumSamples("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs"));
-}
-
 TEST_F(EndToEndTest, VerifyHistogramStatsWithRtx) {
   const bool kEnabledRtx = true;
   const bool kEnabledRed = false;
diff --git a/webrtc/video/payload_router.cc b/webrtc/video/payload_router.cc
index 52e9d46ebb..f2f430904e 100644
--- a/webrtc/video/payload_router.cc
+++ b/webrtc/video/payload_router.cc
@@ -129,7 +129,6 @@ EncodedImageCallback::Result PayloadRouter::OnEncodedImage(
   if (codec_specific_info)
     CopyCodecSpecific(codec_specific_info, &rtp_video_header);
   rtp_video_header.rotation = encoded_image.rotation_;
-  rtp_video_header.content_type = encoded_image.content_type_;
   rtp_video_header.playout_delay = encoded_image.playout_delay_;
 
   int stream_index = rtp_video_header.simulcastIdx;
diff --git a/webrtc/video/receive_statistics_proxy.cc b/webrtc/video/receive_statistics_proxy.cc
index e40a7efd16..2ed2faeb37 100644
--- a/webrtc/video/receive_statistics_proxy.cc
+++ b/webrtc/video/receive_statistics_proxy.cc
@@ -74,12 +74,9 @@ ReceiveStatisticsProxy::ReceiveStatisticsProxy(
       render_fps_tracker_(100, 10u),
      render_pixel_tracker_(100, 10u),
       total_byte_tracker_(100, 10u),  // bucket_interval_ms, bucket_count
-      e2e_delay_max_ms_video_(-1),
-      e2e_delay_max_ms_screenshare_(-1),
       freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs),
       first_report_block_time_ms_(-1),
-      avg_rtt_ms_(0),
-      last_content_type_(VideoContentType::UNSPECIFIED) {
+      avg_rtt_ms_(0) {
   stats_.ssrc = config_.rtp.remote_ssrc;
   // TODO(brandtr): Replace |rtx_stats_| with a single instance of
   // StreamDataCounters.
@@ -172,30 +169,9 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
   if (delay_ms != -1)
     RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms);
 
-  int e2e_delay_ms_video = e2e_delay_counter_video_.Avg(kMinRequiredSamples);
-  if (e2e_delay_ms_video != -1) {
-    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs",
-                               e2e_delay_ms_video);
-  }
-
-  int e2e_delay_ms_screenshare =
-      e2e_delay_counter_screenshare_.Avg(kMinRequiredSamples);
-  if (e2e_delay_ms_screenshare != -1) {
-    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.EndToEndDelayInMs",
-                               e2e_delay_ms_screenshare);
-  }
-
-  int e2e_delay_max_ms_video = e2e_delay_max_ms_video_;
-  if (e2e_delay_max_ms_video != -1) {
-    RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.EndToEndDelayMaxInMs",
-                                e2e_delay_max_ms_video);
-  }
-
-  int e2e_delay_max_ms_screenshare = e2e_delay_max_ms_screenshare_;
-  if (e2e_delay_max_ms_screenshare != -1) {
-    RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs",
-                                e2e_delay_max_ms_screenshare);
-  }
+  int e2e_delay_ms = e2e_delay_counter_.Avg(kMinRequiredSamples);
+  if (e2e_delay_ms != -1)
+    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs", e2e_delay_ms);
 
   StreamDataCounters rtp = stats_.rtp_stats;
   StreamDataCounters rtx;
@@ -455,8 +431,7 @@ void ReceiveStatisticsProxy::DataCountersUpdated(
     total_byte_tracker_.AddSamples(total_bytes - last_total_bytes);
 }
 
-void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp,
-                                            VideoContentType content_type) {
+void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp) {
   uint64_t now = clock_->TimeInMilliseconds();
 
   rtc::CritScope lock(&crit_);
@@ -476,7 +451,6 @@ void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp,
         << "QP sum was already set and no QP was given for a frame.";
     stats_.qp_sum = rtc::Optional<uint64_t>();
   }
-  last_content_type_ = content_type;
   decode_fps_estimator_.Update(1, now);
   stats_.decode_frame_rate = decode_fps_estimator_.Rate(now).value_or(0);
 }
@@ -501,16 +475,8 @@ void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) {
   if (frame.ntp_time_ms() > 0) {
     int64_t delay_ms =
        clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms();
-    if (delay_ms >= 0) {
-      if (last_content_type_ == VideoContentType::SCREENSHARE) {
-        e2e_delay_max_ms_screenshare_ =
-            std::max(delay_ms, e2e_delay_max_ms_screenshare_);
-        e2e_delay_counter_screenshare_.Add(delay_ms);
-      } else {
-        e2e_delay_max_ms_video_ = std::max(delay_ms, e2e_delay_max_ms_video_);
-        e2e_delay_counter_video_.Add(delay_ms);
-      }
-    }
+    if (delay_ms >= 0)
+      e2e_delay_counter_.Add(delay_ms);
   }
 }
 
diff --git a/webrtc/video/receive_statistics_proxy.h b/webrtc/video/receive_statistics_proxy.h
index e1d097152e..07e59b4d74 100644
--- a/webrtc/video/receive_statistics_proxy.h
+++ b/webrtc/video/receive_statistics_proxy.h
@@ -46,7 +46,7 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
 
   VideoReceiveStream::Stats GetStats() const;
 
-  void OnDecodedFrame(rtc::Optional<uint8_t> qp, VideoContentType content_type);
+  void OnDecodedFrame(rtc::Optional<uint8_t> qp);
   void OnSyncOffsetUpdated(int64_t sync_offset_ms, double estimated_freq_khz);
   void OnRenderedFrame(const VideoFrame& frame);
   void OnIncomingPayloadType(int payload_type);
@@ -140,10 +140,7 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
   SampleCounter target_delay_counter_ GUARDED_BY(crit_);
   SampleCounter current_delay_counter_ GUARDED_BY(crit_);
   SampleCounter delay_counter_ GUARDED_BY(crit_);
-  SampleCounter e2e_delay_counter_video_ GUARDED_BY(crit_);
-  SampleCounter e2e_delay_counter_screenshare_ GUARDED_BY(crit_);
-  int64_t e2e_delay_max_ms_video_ GUARDED_BY(crit_);
-  int64_t e2e_delay_max_ms_screenshare_ GUARDED_BY(crit_);
+  SampleCounter e2e_delay_counter_ GUARDED_BY(crit_);
   MaxCounter freq_offset_counter_ GUARDED_BY(crit_);
   int64_t first_report_block_time_ms_ GUARDED_BY(crit_);
   ReportBlockStats report_block_stats_ GUARDED_BY(crit_);
@@ -151,7 +148,6 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
   std::map<uint32_t, StreamDataCounters> rtx_stats_ GUARDED_BY(crit_);
   int64_t avg_rtt_ms_ GUARDED_BY(crit_);
   mutable std::map<int64_t, size_t> frame_window_ GUARDED_BY(&crit_);
-  VideoContentType last_content_type_ GUARDED_BY(&crit_);
 };
 
 }  // namespace webrtc
diff --git a/webrtc/video/receive_statistics_proxy_unittest.cc b/webrtc/video/receive_statistics_proxy_unittest.cc
index 84943e2488..af7ae684d5 100644
--- a/webrtc/video/receive_statistics_proxy_unittest.cc
+++ b/webrtc/video/receive_statistics_proxy_unittest.cc
@@ -54,8 +54,7 @@ class ReceiveStatisticsProxyTest : public ::testing::Test {
 TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesFramesDecoded) {
   EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_decoded);
   for (uint32_t i = 1; i <= 3; ++i) {
-    statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
-                                      VideoContentType::UNSPECIFIED);
+    statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
     EXPECT_EQ(i, statistics_proxy_->GetStats().frames_decoded);
   }
 }
@@ -63,47 +62,40 @@ TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesFramesDecoded) {
 
 TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithQpResetsFramesDecoded) {
   EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_decoded);
   for (uint32_t i = 1; i <= 3; ++i) {
-    statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
-                                      VideoContentType::UNSPECIFIED);
+    statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
     EXPECT_EQ(i, statistics_proxy_->GetStats().frames_decoded);
   }
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(1u),
-                                    VideoContentType::UNSPECIFIED);
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(1u));
   EXPECT_EQ(1u, statistics_proxy_->GetStats().frames_decoded);
 }
 
 TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesQpSum) {
   EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u),
-                                    VideoContentType::UNSPECIFIED);
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u));
   EXPECT_EQ(rtc::Optional<uint64_t>(3u), statistics_proxy_->GetStats().qp_sum);
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(127u),
-                                    VideoContentType::UNSPECIFIED);
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(127u));
   EXPECT_EQ(rtc::Optional<uint64_t>(130u), statistics_proxy_->GetStats().qp_sum);
 }
 
 TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithoutQpQpSumWontExist) {
   EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
-                                    VideoContentType::UNSPECIFIED);
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
  EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
 }
 
 TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithoutQpResetsQpSum) {
   EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u),
-                                    VideoContentType::UNSPECIFIED);
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u));
   EXPECT_EQ(rtc::Optional<uint64_t>(3u), statistics_proxy_->GetStats().qp_sum);
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
-                                    VideoContentType::UNSPECIFIED);
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
   EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
 }
 
 TEST_F(ReceiveStatisticsProxyTest, OnRenderedFrameIncreasesFramesRendered) {
   EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_rendered);
-  webrtc::VideoFrame frame(webrtc::I420Buffer::Create(1, 1), 0, 0,
-                           webrtc::kVideoRotation_0);
+  webrtc::VideoFrame frame(
+      webrtc::I420Buffer::Create(1, 1), 0, 0, webrtc::kVideoRotation_0);
   for (uint32_t i = 1; i <= 3; ++i) {
     statistics_proxy_->OnRenderedFrame(frame);
     EXPECT_EQ(i, statistics_proxy_->GetStats().frames_rendered);
diff --git a/webrtc/video/rtp_stream_receiver.cc b/webrtc/video/rtp_stream_receiver.cc
index 00f1c46efa..90dd0da0e1 100644
--- a/webrtc/video/rtp_stream_receiver.cc
+++ b/webrtc/video/rtp_stream_receiver.cc
@@ -502,10 +502,6 @@ void RtpStreamReceiver::NotifyReceiverOfFecPacket(const RTPHeader& header) {
   if (header.extension.hasVideoRotation) {
     rtp_header.type.Video.rotation = header.extension.videoRotation;
   }
-  rtp_header.type.Video.content_type = VideoContentType::UNSPECIFIED;
-  if (header.extension.hasVideoContentType) {
-    rtp_header.type.Video.content_type = header.extension.videoContentType;
-  }
   rtp_header.type.Video.playout_delay = header.extension.playout_delay;
 
   OnReceivedPayloadData(nullptr, 0, &rtp_header);
diff --git a/webrtc/video/video_quality_test.cc b/webrtc/video/video_quality_test.cc
index cea8af18fe..0baf42c188 100644
--- a/webrtc/video/video_quality_test.cc
+++ b/webrtc/video/video_quality_test.cc
@@ -1301,8 +1301,6 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
     video_send_config_.rtp.extensions.push_back(RtpExtension(
         RtpExtension::kAbsSendTimeUri, test::kAbsSendTimeExtensionId));
   }
-  video_send_config_.rtp.extensions.push_back(RtpExtension(
-      RtpExtension::kVideoContentTypeUri, test::kVideoContentTypeExtensionId));
 
   video_encoder_config_.min_transmit_bitrate_bps =
       params_.video.min_transmit_bps;
@@ -1330,8 +1328,6 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
         kSendRtxPayloadType;
     video_receive_configs_[i].rtp.transport_cc = params_.call.send_side_bwe;
     video_receive_configs_[i].rtp.remb = !params_.call.send_side_bwe;
-    // Enable RTT calculation so NTP time estimator will work.
-    video_receive_configs_[i].rtp.rtcp_xr.receiver_reference_time_report = true;
     // Force fake decoders on non-selected simulcast streams.
     if (i != params_.ss.selected_stream) {
       VideoReceiveStream::Decoder decoder;
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index e24cb41aac..894b84047c 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -291,43 +291,6 @@ TEST_F(VideoSendStreamTest, SupportsVideoRotation) {
   RunBaseTest(&test);
 }
 
-TEST_F(VideoSendStreamTest, SupportsVideoContentType) {
-  class VideoRotationObserver : public test::SendTest {
-   public:
-    VideoRotationObserver() : SendTest(kDefaultTimeoutMs) {
-      EXPECT_TRUE(parser_->RegisterRtpHeaderExtension(
-          kRtpExtensionVideoContentType, test::kVideoContentTypeExtensionId));
-    }
-
-    Action OnSendRtp(const uint8_t* packet, size_t length) override {
-      RTPHeader header;
-      EXPECT_TRUE(parser_->Parse(packet, length, &header));
-      EXPECT_TRUE(header.extension.hasVideoContentType);
-      EXPECT_EQ(VideoContentType::SCREENSHARE,
-                header.extension.videoContentType);
-      observation_complete_.Set();
-      return SEND_PACKET;
-    }
-
-    void ModifyVideoConfigs(
-        VideoSendStream::Config* send_config,
-        std::vector<VideoReceiveStream::Config>* receive_configs,
-        VideoEncoderConfig* encoder_config) override {
-      send_config->rtp.extensions.clear();
-      send_config->rtp.extensions.push_back(
-          RtpExtension(RtpExtension::kVideoContentTypeUri,
-                       test::kVideoContentTypeExtensionId));
-      encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
-    }
-
-    void PerformTest() override {
-      EXPECT_TRUE(Wait()) << "Timed out while waiting for single RTP packet.";
-    }
-  } test;
-
-  RunBaseTest(&test);
-}
-
 class FakeReceiveStatistics : public NullReceiveStatistics {
  public:
   FakeReceiveStatistics(uint32_t send_ssrc,
diff --git a/webrtc/video/video_stream_decoder.cc b/webrtc/video/video_stream_decoder.cc
index a7688ce13c..6eea13f4d4 100644
--- a/webrtc/video/video_stream_decoder.cc
+++ b/webrtc/video/video_stream_decoder.cc
@@ -76,10 +76,10 @@ VideoStreamDecoder::~VideoStreamDecoder() {
 // thread may have held the lock when calling VideoDecoder::Decode, Reset, or
 // Release. Acquiring the same lock in the path of decode callback can deadlock.
 int32_t VideoStreamDecoder::FrameToRender(VideoFrame& video_frame,
-                                          rtc::Optional<uint8_t> qp,
-                                          VideoContentType content_type) {
-  receive_stats_callback_->OnDecodedFrame(qp, content_type);
+                                          rtc::Optional<uint8_t> qp) {
+  receive_stats_callback_->OnDecodedFrame(qp);
   incoming_video_stream_->OnFrame(video_frame);
+
   return 0;
 }
 
diff --git a/webrtc/video/video_stream_decoder.h b/webrtc/video/video_stream_decoder.h
index b670b12659..4bca3edfc3 100644
--- a/webrtc/video/video_stream_decoder.h
+++ b/webrtc/video/video_stream_decoder.h
@@ -59,8 +59,7 @@ class VideoStreamDecoder : public VCMReceiveCallback,
 
   // Implements VCMReceiveCallback.
   int32_t FrameToRender(VideoFrame& video_frame,
-                        rtc::Optional<uint8_t> qp,
-                        VideoContentType content_type) override;
+                        rtc::Optional<uint8_t> qp) override;
   int32_t ReceivedDecodedReferenceFrame(const uint64_t picture_id) override;
   void OnIncomingPayloadType(int payload_type) override;
   void OnDecoderImplementationName(const char* implementation_name) override;
diff --git a/webrtc/video_frame.h b/webrtc/video_frame.h
index 47e58a1b62..3b0c16c12e 100644
--- a/webrtc/video_frame.h
+++ b/webrtc/video_frame.h
@@ -57,7 +57,6 @@ class EncodedImage {
   size_t _length;
   size_t _size;
   VideoRotation rotation_ = kVideoRotation_0;
-  VideoContentType content_type_ = VideoContentType::UNSPECIFIED;
   bool _completeFrame = false;
   AdaptReason adapt_reason_;
   int qp_ = -1;  // Quantizer value.