Add content type information to Encoded Images and add corresponding RTP extension header.

Use it to separate the UMA e2e delay metric between screenshare and video.
The content type extension is set based on encoder settings and processed at decoders.

Also,
Fix full-stack-tests to calculate RTT correctly, so new metric could be tested.

BUG=webrtc:7420

Review-Url: https://codereview.webrtc.org/2772033002
Cr-Commit-Position: refs/heads/master@{#17640}
This commit is contained in:
ilnik 2017-04-11 01:46:04 -07:00 committed by Commit bot
parent 93cda2ebde
commit 64e739aeae
48 changed files with 465 additions and 91 deletions

View File

@ -0,0 +1,26 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_API_VIDEO_VIDEO_CONTENT_TYPE_H_
#define WEBRTC_API_VIDEO_VIDEO_CONTENT_TYPE_H_
#include <stdint.h>
namespace webrtc {
// Type of video content carried by a stream/frame. Backed by uint8_t so the
// value fits in the one-byte video-content-type RTP header extension.
enum class VideoContentType : uint8_t {
UNSPECIFIED = 0,  // Default (e.g. realtime camera video).
SCREENSHARE = 1,  // Screen-capture content.
TOTAL_CONTENT_TYPES // Must be the last value in the enum.
};
} // namespace webrtc
#endif // WEBRTC_API_VIDEO_VIDEO_CONTENT_TYPE_H_

View File

@ -31,7 +31,9 @@ RTPHeaderExtension::RTPHeaderExtension()
voiceActivity(false),
audioLevel(0),
hasVideoRotation(false),
videoRotation(kVideoRotation_0) {}
videoRotation(kVideoRotation_0),
hasVideoContentType(false),
videoContentType(VideoContentType::UNSPECIFIED) {}
RTPHeader::RTPHeader()
: markerBit(false),

View File

@ -18,6 +18,7 @@
#include <string>
#include <vector>
#include "webrtc/api/video/video_content_type.h"
#include "webrtc/api/video/video_rotation.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/optional.h"
@ -716,6 +717,11 @@ struct RTPHeaderExtension {
bool hasVideoRotation;
VideoRotation videoRotation;
// TODO(ilnik): Refactor this and one above to be rtc::Optional() and remove
// a corresponding bool flag.
bool hasVideoContentType;
VideoContentType videoContentType;
PlayoutDelay playout_delay = {-1, -1};
};

View File

@ -64,6 +64,10 @@ const char* RtpExtension::kTransportSequenceNumberUri =
"http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01";
const int RtpExtension::kTransportSequenceNumberDefaultId = 5;
const char* RtpExtension::kVideoContentTypeUri =
"http://www.webrtc.org/experiments/rtp-hdrext/video-content-type";
const int RtpExtension::kVideoContentTypeDefaultId = 6;
// This extension allows applications to adaptively limit the playout delay
// on frames as per the current needs. For example, a gaming application
// has very different needs on end-to-end delay compared to a video-conference
@ -85,7 +89,8 @@ bool RtpExtension::IsSupportedForVideo(const std::string& uri) {
uri == webrtc::RtpExtension::kAbsSendTimeUri ||
uri == webrtc::RtpExtension::kVideoRotationUri ||
uri == webrtc::RtpExtension::kTransportSequenceNumberUri ||
uri == webrtc::RtpExtension::kPlayoutDelayUri;
uri == webrtc::RtpExtension::kPlayoutDelayUri ||
uri == webrtc::RtpExtension::kVideoContentTypeUri;
}
VideoStream::VideoStream()

View File

@ -88,6 +88,10 @@ struct RtpExtension {
static const char* kVideoRotationUri;
static const int kVideoRotationDefaultId;
// Header extension for video content type. E.g. default or screenshare.
static const char* kVideoContentTypeUri;
static const int kVideoContentTypeDefaultId;
// Header extension for transport sequence number, see url for details:
// http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions
static const char* kTransportSequenceNumberUri;

View File

@ -3852,7 +3852,7 @@ TEST_F(WebRtcVideoChannel2Test, ReceiveDifferentUnsignaledSsrc) {
EXPECT_EQ(rtpHeader.ssrc, recv_stream->GetConfig().rtp.remote_ssrc);
// Verify that the receive stream sinks to a renderer.
webrtc::VideoFrame video_frame2(CreateBlackFrameBuffer(4, 4), 200, 0,
webrtc::kVideoRotation_0);
webrtc::kVideoRotation_0);
recv_stream->InjectFrame(video_frame2);
EXPECT_EQ(2, renderer.num_rendered_frames());
@ -3869,7 +3869,7 @@ TEST_F(WebRtcVideoChannel2Test, ReceiveDifferentUnsignaledSsrc) {
EXPECT_EQ(rtpHeader.ssrc, recv_stream->GetConfig().rtp.remote_ssrc);
// Verify that the receive stream sinks to a renderer.
webrtc::VideoFrame video_frame3(CreateBlackFrameBuffer(4, 4), 300, 0,
webrtc::kVideoRotation_0);
webrtc::kVideoRotation_0);
recv_stream->InjectFrame(video_frame3);
EXPECT_EQ(3, renderer.num_rendered_frames());
#endif

View File

@ -58,6 +58,8 @@ struct RTPVideoHeader {
PlayoutDelay playout_delay;
VideoContentType content_type;
union {
bool is_first_packet_in_frame;
RTC_DEPRECATED bool isFirstPacket; // first packet in frame
@ -87,7 +89,7 @@ class RTPFragmentationHeader {
fragmentationOffset(NULL),
fragmentationLength(NULL),
fragmentationTimeDiff(NULL),
fragmentationPlType(NULL) {};
fragmentationPlType(NULL) {}
~RTPFragmentationHeader() {
delete[] fragmentationOffset;

View File

@ -76,7 +76,8 @@ enum RTPExtensionType {
kRtpExtensionVideoRotation,
kRtpExtensionTransportSequenceNumber,
kRtpExtensionPlayoutDelay,
kRtpExtensionNumberOfExtensions,
kRtpExtensionVideoContentType,
kRtpExtensionNumberOfExtensions // Must be the last entity in the enum.
};
enum RTCPAppSubTypes { kAppSubtypeBwe = 0x00 };

View File

@ -39,6 +39,7 @@ constexpr ExtensionInfo kExtensions[] = {
CreateExtensionInfo<VideoOrientation>(),
CreateExtensionInfo<TransportSequenceNumber>(),
CreateExtensionInfo<PlayoutDelayLimits>(),
CreateExtensionInfo<VideoContentTypeExtension>(),
};
// Because of kRtpExtensionNone, NumberOfExtension is 1 bigger than the actual

View File

@ -215,4 +215,33 @@ bool PlayoutDelayLimits::Write(uint8_t* data,
return true;
}
// Video Content Type.
//
// E.g. default video or screenshare.
//
// 0 1
// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | ID | len=0 | Content type |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
constexpr RTPExtensionType VideoContentTypeExtension::kId;
constexpr uint8_t VideoContentTypeExtension::kValueSizeBytes;
constexpr const char* VideoContentTypeExtension::kUri;
// Parses the one-byte video-content-type extension payload.
// Returns true and fills |content_type| only when the payload is exactly one
// byte long and carries a value inside the known VideoContentType range;
// otherwise leaves |content_type| untouched and returns false.
bool VideoContentTypeExtension::Parse(rtc::ArrayView<const uint8_t> data,
                                      VideoContentType* content_type) {
  // Guard: the wire format is a single byte.
  if (data.size() != 1)
    return false;
  const uint8_t raw_value = data[0];
  // Guard: reject values outside the enum's defined range.
  if (raw_value >= static_cast<uint8_t>(VideoContentType::TOTAL_CONTENT_TYPES))
    return false;
  *content_type = static_cast<VideoContentType>(raw_value);
  return true;
}
// Serializes |content_type| into the extension payload. The caller must
// provide a buffer of at least kValueSizeBytes (1) bytes; no bounds check is
// performed here. Always succeeds.
bool VideoContentTypeExtension::Write(uint8_t* data,
VideoContentType content_type) {
data[0] = static_cast<uint8_t>(content_type);
return true;
}
} // namespace webrtc

View File

@ -12,6 +12,7 @@
#include <stdint.h>
#include "webrtc/api/video/video_content_type.h"
#include "webrtc/api/video/video_rotation.h"
#include "webrtc/base/array_view.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@ -98,5 +99,17 @@ class PlayoutDelayLimits {
static bool Write(uint8_t* data, const PlayoutDelay& playout_delay);
};
// One-byte RTP header extension carrying the video content type (e.g.
// screenshare vs. default video). Payload is a single byte holding a
// VideoContentType enum value; see Parse()/Write() for the wire handling.
class VideoContentTypeExtension {
public:
static constexpr RTPExtensionType kId = kRtpExtensionVideoContentType;
// Payload length on the wire: exactly one byte.
static constexpr uint8_t kValueSizeBytes = 1;
static constexpr const char* kUri =
"http://www.webrtc.org/experiments/rtp-hdrext/video-content-type";
// Returns true and sets |content_type| iff |data| is a valid one-byte
// payload with an in-range value.
static bool Parse(rtc::ArrayView<const uint8_t> data,
VideoContentType* content_type);
// Writes |content_type| into |data|; |data| must hold kValueSizeBytes bytes.
static bool Write(uint8_t* data, VideoContentType content_type);
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_HEADER_EXTENSIONS_H_

View File

@ -169,6 +169,9 @@ void Packet::GetHeader(RTPHeader* header) const {
&header->extension.voiceActivity, &header->extension.audioLevel);
header->extension.hasVideoRotation =
GetExtension<VideoOrientation>(&header->extension.videoRotation);
header->extension.hasVideoContentType =
GetExtension<VideoContentTypeExtension>(
&header->extension.videoContentType);
}
size_t Packet::headers_size() const {

View File

@ -90,6 +90,7 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
rtp_header->frameType = parsed_payload.frame_type;
rtp_header->type = parsed_payload.type;
rtp_header->type.Video.rotation = kVideoRotation_0;
rtp_header->type.Video.content_type = VideoContentType::UNSPECIFIED;
// Retrieve the video rotation information.
if (rtp_header->header.extension.hasVideoRotation) {
@ -97,6 +98,11 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
rtp_header->header.extension.videoRotation;
}
if (rtp_header->header.extension.hasVideoContentType) {
rtp_header->type.Video.content_type =
rtp_header->header.extension.videoContentType;
}
rtp_header->type.Video.playout_delay =
rtp_header->header.extension.playout_delay;

View File

@ -40,6 +40,8 @@ RTPExtensionType StringToRtpExtensionType(const std::string& extension) {
return kRtpExtensionTransportSequenceNumber;
if (extension == RtpExtension::kPlayoutDelayUri)
return kRtpExtensionPlayoutDelay;
if (extension == RtpExtension::kVideoContentTypeUri)
return kRtpExtensionVideoContentType;
RTC_NOTREACHED() << "Looking up unsupported RTP extension.";
return kRtpExtensionNone;
}

View File

@ -203,6 +203,7 @@ class RtpRtcpImplTest : public ::testing::Test {
rtp_video_header.width = codec_.width;
rtp_video_header.height = codec_.height;
rtp_video_header.rotation = kVideoRotation_0;
rtp_video_header.content_type = VideoContentType::UNSPECIFIED;
rtp_video_header.playout_delay = {-1, -1};
rtp_video_header.is_first_packet_in_frame = true;
rtp_video_header.simulcastIdx = 0;

View File

@ -324,6 +324,12 @@ bool RTPSenderVideo::SendVideo(RtpVideoCodecTypes video_type,
current_rotation != kVideoRotation_0)
rtp_header->SetExtension<VideoOrientation>(current_rotation);
last_rotation_ = current_rotation;
// Report content type only for key frames.
if (frame_type == kVideoFrameKey &&
video_header->content_type != VideoContentType::UNSPECIFIED) {
rtp_header->SetExtension<VideoContentTypeExtension>(
video_header->content_type);
}
}
// FEC settings.

View File

@ -254,6 +254,10 @@ bool RtpHeaderParser::Parse(RTPHeader* header,
header->extension.playout_delay.min_ms = -1;
header->extension.playout_delay.max_ms = -1;
// May not be present in packet.
header->extension.hasVideoContentType = false;
header->extension.videoContentType = VideoContentType::UNSPECIFIED;
if (X) {
/* RTP header extension, RFC 3550.
0 1 2 3
@ -446,6 +450,25 @@ void RtpHeaderParser::ParseOneByteExtensionHeader(
max_playout_delay * PlayoutDelayLimits::kGranularityMs;
break;
}
case kRtpExtensionVideoContentType: {
if (len != 0) {
LOG(LS_WARNING) << "Incorrect video content type len: " << len;
return;
}
// 0 1
// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | ID | len=0 | Content type |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
if (ptr[0] <
static_cast<uint8_t>(VideoContentType::TOTAL_CONTENT_TYPES)) {
header->extension.hasVideoContentType = true;
header->extension.videoContentType =
static_cast<VideoContentType>(ptr[0]);
}
break;
}
case kRtpExtensionNone:
case kRtpExtensionNumberOfExtensions: {
RTC_NOTREACHED() << "Invalid extension type: " << type;

View File

@ -32,9 +32,8 @@ rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
return implementation;
}
const char* VideoCaptureImpl::CurrentDeviceName() const
{
return _deviceUniqueId;
const char* VideoCaptureImpl::CurrentDeviceName() const {
return _deviceUniqueId;
}
// static
@ -136,14 +135,13 @@ int32_t VideoCaptureImpl::IncomingFrame(
// Not encoded, convert to I420.
const VideoType commonVideoType =
RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
if (frameInfo.rawType != kVideoMJPEG &&
CalcBufferSize(commonVideoType, width,
abs(height)) != videoFrameLength)
{
LOG(LS_ERROR) << "Wrong incoming frame length.";
return -1;
CalcBufferSize(commonVideoType, width, abs(height)) !=
videoFrameLength) {
LOG(LS_ERROR) << "Wrong incoming frame length.";
return -1;
}
int stride_y = width;
@ -174,16 +172,14 @@ int32_t VideoCaptureImpl::IncomingFrame(
commonVideoType, videoFrame, 0, 0, // No cropping
width, height, videoFrameLength,
apply_rotation ? _rotateFrame : kVideoRotation_0, buffer.get());
if (conversionResult < 0)
{
if (conversionResult < 0) {
LOG(LS_ERROR) << "Failed to convert capture frame from type "
<< frameInfo.rawType << "to I420.";
return -1;
return -1;
}
VideoFrame captureFrame(
buffer, 0, rtc::TimeMillis(),
!apply_rotation ? _rotateFrame : kVideoRotation_0);
VideoFrame captureFrame(buffer, 0, rtc::TimeMillis(),
!apply_rotation ? _rotateFrame : kVideoRotation_0);
captureFrame.set_ntp_time_ms(captureTime);
DeliverCapturedFrame(captureFrame);
@ -205,52 +201,40 @@ bool VideoCaptureImpl::SetApplyRotation(bool enable) {
return true;
}
void VideoCaptureImpl::UpdateFrameCount()
{
if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0)
{
// first no shift
void VideoCaptureImpl::UpdateFrameCount() {
if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0) {
// first no shift
} else {
// shift
for (int i = (kFrameRateCountHistorySize - 2); i >= 0; --i) {
_incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i];
}
else
{
// shift
for (int i = (kFrameRateCountHistorySize - 2); i >= 0; i--)
{
_incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i];
}
}
_incomingFrameTimesNanos[0] = rtc::TimeNanos();
}
_incomingFrameTimesNanos[0] = rtc::TimeNanos();
}
uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns)
{
int32_t num = 0;
int32_t nrOfFrames = 0;
for (num = 1; num < (kFrameRateCountHistorySize - 1); num++)
{
if (_incomingFrameTimesNanos[num] <= 0 ||
(now_ns - _incomingFrameTimesNanos[num]) /
rtc::kNumNanosecsPerMillisec >
kFrameRateHistoryWindowMs) // don't use data older than 2sec
{
break;
}
else
{
nrOfFrames++;
}
uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns) {
int32_t num = 0;
int32_t nrOfFrames = 0;
for (num = 1; num < (kFrameRateCountHistorySize - 1); ++num) {
if (_incomingFrameTimesNanos[num] <= 0 ||
(now_ns - _incomingFrameTimesNanos[num]) /
rtc::kNumNanosecsPerMillisec >
kFrameRateHistoryWindowMs) { // don't use data older than 2sec
break;
} else {
nrOfFrames++;
}
if (num > 1)
{
int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) /
rtc::kNumNanosecsPerMillisec;
if (diff > 0)
{
return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f);
}
}
if (num > 1) {
int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) /
rtc::kNumNanosecsPerMillisec;
if (diff > 0) {
return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f);
}
}
return nrOfFrames;
return nrOfFrames;
}
} // namespace videocapturemodule
} // namespace webrtc

View File

@ -367,6 +367,9 @@ int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame,
encoded_image_.ntp_time_ms_ = input_frame.ntp_time_ms();
encoded_image_.capture_time_ms_ = input_frame.render_time_ms();
encoded_image_.rotation_ = input_frame.rotation();
encoded_image_.content_type_ = (mode_ == kScreensharing)
? VideoContentType::SCREENSHARE
: VideoContentType::UNSPECIFIED;
encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType);
// Split encoded image up into fragments. This also updates |encoded_image_|.

View File

@ -878,6 +878,9 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image) {
encoded_images_[encoder_idx].capture_time_ms_ =
input_image.render_time_ms();
encoded_images_[encoder_idx].rotation_ = input_image.rotation();
encoded_images_[encoder_idx].content_type_ =
(codec_.mode == kScreensharing) ? VideoContentType::SCREENSHARE
: VideoContentType::UNSPECIFIED;
int qp = -1;
vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp);

View File

@ -706,6 +706,9 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
encoded_image_._timeStamp = input_image_->timestamp();
encoded_image_.capture_time_ms_ = input_image_->render_time_ms();
encoded_image_.rotation_ = input_image_->rotation();
encoded_image_.content_type_ = (codec_.mode == kScreensharing)
? VideoContentType::SCREENSHARE
: VideoContentType::UNSPECIFIED;
encoded_image_._encodedHeight = raw_->d_h;
encoded_image_._encodedWidth = raw_->d_w;
int qp = -1;

View File

@ -87,6 +87,7 @@ void VCMEncodedFrame::Reset() {
_codecSpecificInfo.codecType = kVideoCodecUnknown;
_codec = kVideoCodecUnknown;
rotation_ = kVideoRotation_0;
content_type_ = VideoContentType::UNSPECIFIED;
_rotation_set = false;
}

View File

@ -77,8 +77,12 @@ class VCMEncodedFrame : protected EncodedImage {
*/
VideoRotation rotation() const { return rotation_; }
/**
* True if this frame is complete, false otherwise
*/
* Get video content type
*/
VideoContentType contentType() const { return content_type_; }
/**
* True if this frame is complete, false otherwise
*/
bool Complete() const { return _completeFrame; }
/**
* True if there's a frame missing before this frame

View File

@ -163,6 +163,7 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(
RTC_DCHECK(!_rotation_set);
rotation_ = packet.video_header.rotation;
_rotation_set = true;
content_type_ = packet.video_header.content_type;
}
if (packet.is_first_packet_in_frame) {

View File

@ -79,6 +79,7 @@ RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer,
// (HEVC)).
rotation_ = last_packet->video_header.rotation;
_rotation_set = true;
content_type_ = last_packet->video_header.content_type;
}
RtpFrameObject::~RtpFrameObject() {

View File

@ -87,7 +87,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage,
decodedImage.set_timestamp_us(
frameInfo->renderTimeMs * rtc::kNumMicrosecsPerMillisec);
decodedImage.set_rotation(frameInfo->rotation);
_receiveCallback->FrameToRender(decodedImage, qp);
_receiveCallback->FrameToRender(decodedImage, qp, frameInfo->content_type);
}
int32_t VCMDecodedFrameCallback::ReceivedDecodedReferenceFrame(
@ -131,7 +131,8 @@ VCMGenericDecoder::VCMGenericDecoder(VideoDecoder* decoder, bool isExternal)
_decoder(decoder),
_codecType(kVideoCodecUnknown),
_isExternal(isExternal),
_keyFrameDecoded(false) {}
_keyFrameDecoded(false),
_last_keyframe_content_type(VideoContentType::UNSPECIFIED) {}
VCMGenericDecoder::~VCMGenericDecoder() {}
@ -149,6 +150,15 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, int64_t nowMs) {
_frameInfos[_nextFrameInfoIdx].decodeStartTimeMs = nowMs;
_frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs();
_frameInfos[_nextFrameInfoIdx].rotation = frame.rotation();
// Set correctly only for key frames. Thus, use latest key frame
// content type. If the corresponding key frame was lost, decode will fail
// and content type will be ignored.
if (frame.FrameType() == kVideoFrameKey) {
_frameInfos[_nextFrameInfoIdx].content_type = frame.contentType();
_last_keyframe_content_type = frame.contentType();
} else {
_frameInfos[_nextFrameInfoIdx].content_type = _last_keyframe_content_type;
}
_callback->Map(frame.TimeStamp(), &_frameInfos[_nextFrameInfoIdx]);
_nextFrameInfoIdx = (_nextFrameInfoIdx + 1) % kDecoderFrameMemoryLength;

View File

@ -30,6 +30,7 @@ struct VCMFrameInformation {
int64_t decodeStartTimeMs;
void* userData;
VideoRotation rotation;
VideoContentType content_type;
};
class VCMDecodedFrameCallback : public DecodedImageCallback {
@ -109,6 +110,7 @@ class VCMGenericDecoder {
VideoCodecType _codecType;
bool _isExternal;
bool _keyFrameDecoded;
VideoContentType _last_keyframe_content_type;
};
} // namespace webrtc

View File

@ -33,7 +33,8 @@ class MockVCMReceiveCallback : public VCMReceiveCallback {
MockVCMReceiveCallback() {}
virtual ~MockVCMReceiveCallback() {}
MOCK_METHOD2(FrameToRender, int32_t(VideoFrame&, rtc::Optional<uint8_t>));
MOCK_METHOD3(FrameToRender,
int32_t(VideoFrame&, rtc::Optional<uint8_t>, VideoContentType));
MOCK_METHOD1(ReceivedDecodedReferenceFrame, int32_t(const uint64_t));
MOCK_METHOD1(OnIncomingPayloadType, void(int));
MOCK_METHOD1(OnDecoderImplementationName, void(const char*));

View File

@ -62,7 +62,8 @@ struct VCMFrameCount {
class VCMReceiveCallback {
public:
virtual int32_t FrameToRender(VideoFrame& videoFrame, // NOLINT
rtc::Optional<uint8_t> qp) = 0;
rtc::Optional<uint8_t> qp,
VideoContentType content_type) = 0;
virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId) {
return -1;
}

View File

@ -1042,6 +1042,10 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
image->_timeStamp = output_timestamp_;
image->capture_time_ms_ = output_render_time_ms_;
image->rotation_ = output_rotation_;
image->content_type_ =
(codec_mode_ == webrtc::VideoCodecMode::kScreensharing)
? webrtc::VideoContentType::SCREENSHARE
: webrtc::VideoContentType::UNSPECIFIED;
image->_frameType =
(key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
image->_completeFrame = true;

View File

@ -85,6 +85,7 @@ class H264VideoToolboxEncoder : public H264Encoder {
uint32_t encoder_bitrate_bps_;
int32_t width_;
int32_t height_;
VideoCodecMode mode_;
const CFStringRef profile_;
H264BitstreamParser h264_bitstream_parser_;

View File

@ -364,6 +364,7 @@ int H264VideoToolboxEncoder::InitEncode(const VideoCodec* codec_settings,
width_ = codec_settings->width;
height_ = codec_settings->height;
mode_ = codec_settings->mode;
// We can only set average bitrate on the HW encoder.
target_bitrate_bps_ = codec_settings->startBitrate;
bitrate_adjuster_.SetTargetBitrateBps(target_bitrate_bps_);
@ -722,6 +723,9 @@ void H264VideoToolboxEncoder::OnEncodedFrame(
frame._timeStamp = timestamp;
frame.rotation_ = rotation;
frame.content_type_ =
(mode_ == kScreensharing) ? VideoContentType::SCREENSHARE : VideoContentType::UNSPECIFIED;
h264_bitstream_parser_.ParseBitstream(buffer->data(), buffer->size());
h264_bitstream_parser_.GetLastSliceQp(&frame.qp_);

View File

@ -208,6 +208,8 @@ void CallTest::CreateSendConfig(size_t num_video_streams,
video_send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTransportSequenceNumberUri,
kTransportSequenceNumberExtensionId));
video_send_config_.rtp.extensions.push_back(RtpExtension(
RtpExtension::kVideoContentTypeUri, kVideoContentTypeExtensionId));
FillEncoderConfiguration(num_video_streams, &video_encoder_config_);
for (size_t i = 0; i < num_video_streams; ++i)

View File

@ -17,5 +17,7 @@ const int kTOffsetExtensionId = 6;
const int kAbsSendTimeExtensionId = 7;
const int kTransportSequenceNumberExtensionId = 8;
const int kVideoRotationExtensionId = 9;
const int kVideoContentTypeExtensionId = 10;
} // namespace test
} // namespace webrtc

View File

@ -15,5 +15,6 @@ extern const int kTOffsetExtensionId;
extern const int kAbsSendTimeExtensionId;
extern const int kTransportSequenceNumberExtensionId;
extern const int kVideoRotationExtensionId;
extern const int kVideoContentTypeExtensionId;
} // namespace test
} // namespace webrtc

View File

@ -61,6 +61,7 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image,
int max_target_bitrate_kbps;
int64_t last_encode_time_ms;
size_t num_encoded_bytes;
VideoCodecMode mode;
{
rtc::CritScope cs(&crit_sect_);
max_framerate = config_.maxFramerate;
@ -73,6 +74,7 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image,
max_target_bitrate_kbps = max_target_bitrate_kbps_;
last_encode_time_ms = last_encode_time_ms_;
num_encoded_bytes = sizeof(encoded_buffer_);
mode = config_.mode;
}
int64_t time_now_ms = clock_->TimeInMilliseconds();
@ -142,6 +144,9 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image,
encoded._encodedWidth = simulcast_streams[i].width;
encoded._encodedHeight = simulcast_streams[i].height;
encoded.rotation_ = input_image.rotation();
encoded.content_type_ = (mode == kScreensharing)
? VideoContentType::SCREENSHARE
: VideoContentType::UNSPECIFIED;
specifics.codec_name = ImplementationName();
RTC_DCHECK(callback);
if (callback->OnEncodedImage(encoded, &specifics, nullptr).error !=

View File

@ -85,6 +85,10 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
PlayoutDelay playout;
packet.GetExtension<PlayoutDelayLimits>(&playout);
break;
case kRtpExtensionVideoContentType:
VideoContentType content_type;
packet.GetExtension<VideoContentTypeExtension>(&content_type);
break;
}
}
}

View File

@ -2652,7 +2652,8 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx,
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.CurrentDelayInMs"));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs"));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayInMs"));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayInMs"));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayMaxInMs"));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.RenderSqrtPixelsPerSecond"));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EncodeTimeInMs"));
@ -2692,6 +2693,118 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx,
metrics::NumSamples("WebRTC.Video.ReceivedFecPacketsInPercent"));
}
// End-to-end test: runs one send/receive session first with realtime video,
// then recreates the send stream with screenshare content, and verifies that
// the e2e-delay UMA histograms are reported separately per content type
// (WebRTC.Video.* vs. WebRTC.Video.Screenshare.*).
TEST_F(EndToEndTest, ContentTypeSwitches) {
// Observer that counts rendered frames once their NTP timestamps become
// valid, and completes the observation after enough frames arrived.
class StatsObserver : public test::BaseTest,
public rtc::VideoSinkInterface<VideoFrame> {
public:
StatsObserver() : BaseTest(kLongTimeoutMs), num_frames_received_(0) {}
bool ShouldCreateReceivers() const override { return true; }
void OnFrame(const VideoFrame& video_frame) override {
// The RTT is needed to estimate |ntp_time_ms| which is used by
// end-to-end delay stats. Therefore, start counting received frames once
// |ntp_time_ms| is valid.
if (video_frame.ntp_time_ms() > 0 &&
Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() >=
video_frame.ntp_time_ms()) {
rtc::CritScope lock(&crit_);
++num_frames_received_;
}
}
Action OnSendRtp(const uint8_t* packet, size_t length) override {
if (MinNumberOfFramesReceived())
observation_complete_.Set();
return SEND_PACKET;
}
bool MinNumberOfFramesReceived() const {
// Enough samples for the histograms to be reported.
const int kMinRequiredHistogramSamples = 200;
rtc::CritScope lock(&crit_);
return num_frames_received_ > kMinRequiredHistogramSamples;
}
// May be called several times.
void PerformTest() override {
EXPECT_TRUE(Wait()) << "Timed out waiting for enough packets.";
// Reset frame counter so next PerformTest() call will do something.
{
rtc::CritScope lock(&crit_);
num_frames_received_ = 0;
}
}
rtc::CriticalSection crit_;
int num_frames_received_ GUARDED_BY(&crit_);
} test;
metrics::Reset();
// Build sender and receiver calls wired back-to-back through test transports.
Call::Config send_config(test.GetSenderCallConfig());
CreateSenderCall(send_config);
Call::Config recv_config(test.GetReceiverCallConfig());
CreateReceiverCall(recv_config);
receive_transport_.reset(test.CreateReceiveTransport());
send_transport_.reset(test.CreateSendTransport(sender_call_.get()));
send_transport_->SetReceiver(receiver_call_->Receiver());
receive_transport_->SetReceiver(sender_call_->Receiver());
receiver_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
CreateSendConfig(1, 0, 0, send_transport_.get());
CreateMatchingReceiveConfigs(receive_transport_.get());
// Modify send and receive configs.
video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
video_receive_configs_[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
video_receive_configs_[0].renderer = &test;
// RTT needed for RemoteNtpTimeEstimator for the receive stream.
video_receive_configs_[0].rtp.rtcp_xr.receiver_reference_time_report = true;
// Start with realtime video.
video_encoder_config_.content_type =
VideoEncoderConfig::ContentType::kRealtimeVideo;
// Second encoder config for the second part of the test uses screenshare
VideoEncoderConfig encoder_config_with_screenshare_ =
video_encoder_config_.Copy();
encoder_config_with_screenshare_.content_type =
VideoEncoderConfig::ContentType::kScreen;
CreateVideoStreams();
CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
kDefaultHeight);
// Phase 1: realtime video.
Start();
test.PerformTest();
// Replace old send stream.
sender_call_->DestroyVideoSendStream(video_send_stream_);
video_send_stream_ = sender_call_->CreateVideoSendStream(
video_send_config_.Copy(), encoder_config_with_screenshare_.Copy());
video_send_stream_->SetSource(
frame_generator_capturer_.get(),
VideoSendStream::DegradationPreference::kBalanced);
video_send_stream_->Start();
// Continue to run test but now with screenshare.
test.PerformTest();
send_transport_->StopSending();
receive_transport_->StopSending();
Stop();
DestroyStreams();
DestroyCalls();
// Delete the call for Call stats to be reported.
sender_call_.reset();
receiver_call_.reset();
// Verify that stats have been updated for both screenshare and video.
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayInMs"));
EXPECT_EQ(1,
metrics::NumSamples("WebRTC.Video.Screenshare.EndToEndDelayInMs"));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayMaxInMs"));
EXPECT_EQ(
1, metrics::NumSamples("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs"));
}
TEST_F(EndToEndTest, VerifyHistogramStatsWithRtx) {
const bool kEnabledRtx = true;
const bool kEnabledRed = false;

View File

@ -129,6 +129,7 @@ EncodedImageCallback::Result PayloadRouter::OnEncodedImage(
if (codec_specific_info)
CopyCodecSpecific(codec_specific_info, &rtp_video_header);
rtp_video_header.rotation = encoded_image.rotation_;
rtp_video_header.content_type = encoded_image.content_type_;
rtp_video_header.playout_delay = encoded_image.playout_delay_;
int stream_index = rtp_video_header.simulcastIdx;

View File

@ -74,9 +74,12 @@ ReceiveStatisticsProxy::ReceiveStatisticsProxy(
render_fps_tracker_(100, 10u),
render_pixel_tracker_(100, 10u),
total_byte_tracker_(100, 10u), // bucket_interval_ms, bucket_count
e2e_delay_max_ms_video_(-1),
e2e_delay_max_ms_screenshare_(-1),
freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs),
first_report_block_time_ms_(-1),
avg_rtt_ms_(0) {
avg_rtt_ms_(0),
last_content_type_(VideoContentType::UNSPECIFIED) {
stats_.ssrc = config_.rtp.remote_ssrc;
// TODO(brandtr): Replace |rtx_stats_| with a single instance of
// StreamDataCounters.
@ -169,9 +172,30 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
if (delay_ms != -1)
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms);
int e2e_delay_ms = e2e_delay_counter_.Avg(kMinRequiredSamples);
if (e2e_delay_ms != -1)
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs", e2e_delay_ms);
int e2e_delay_ms_video = e2e_delay_counter_video_.Avg(kMinRequiredSamples);
if (e2e_delay_ms_video != -1) {
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs",
e2e_delay_ms_video);
}
int e2e_delay_ms_screenshare =
e2e_delay_counter_screenshare_.Avg(kMinRequiredSamples);
if (e2e_delay_ms_screenshare != -1) {
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.EndToEndDelayInMs",
e2e_delay_ms_screenshare);
}
int e2e_delay_max_ms_video = e2e_delay_max_ms_video_;
if (e2e_delay_max_ms_video != -1) {
RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.EndToEndDelayMaxInMs",
e2e_delay_max_ms_video);
}
int e2e_delay_max_ms_screenshare = e2e_delay_max_ms_screenshare_;
if (e2e_delay_max_ms_screenshare != -1) {
RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs",
e2e_delay_max_ms_screenshare);
}
StreamDataCounters rtp = stats_.rtp_stats;
StreamDataCounters rtx;
@ -431,7 +455,8 @@ void ReceiveStatisticsProxy::DataCountersUpdated(
total_byte_tracker_.AddSamples(total_bytes - last_total_bytes);
}
void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp) {
void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp,
VideoContentType content_type) {
uint64_t now = clock_->TimeInMilliseconds();
rtc::CritScope lock(&crit_);
@ -451,6 +476,7 @@ void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp) {
<< "QP sum was already set and no QP was given for a frame.";
stats_.qp_sum = rtc::Optional<uint64_t>();
}
last_content_type_ = content_type;
decode_fps_estimator_.Update(1, now);
stats_.decode_frame_rate = decode_fps_estimator_.Rate(now).value_or(0);
}
@ -475,8 +501,16 @@ void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) {
if (frame.ntp_time_ms() > 0) {
int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms();
if (delay_ms >= 0)
e2e_delay_counter_.Add(delay_ms);
if (delay_ms >= 0) {
if (last_content_type_ == VideoContentType::SCREENSHARE) {
e2e_delay_max_ms_screenshare_ =
std::max(delay_ms, e2e_delay_max_ms_screenshare_);
e2e_delay_counter_screenshare_.Add(delay_ms);
} else {
e2e_delay_max_ms_video_ = std::max(delay_ms, e2e_delay_max_ms_video_);
e2e_delay_counter_video_.Add(delay_ms);
}
}
}
}

View File

@ -46,7 +46,7 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
VideoReceiveStream::Stats GetStats() const;
void OnDecodedFrame(rtc::Optional<uint8_t> qp);
void OnDecodedFrame(rtc::Optional<uint8_t> qp, VideoContentType content_type);
void OnSyncOffsetUpdated(int64_t sync_offset_ms, double estimated_freq_khz);
void OnRenderedFrame(const VideoFrame& frame);
void OnIncomingPayloadType(int payload_type);
@ -140,7 +140,10 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
SampleCounter target_delay_counter_ GUARDED_BY(crit_);
SampleCounter current_delay_counter_ GUARDED_BY(crit_);
SampleCounter delay_counter_ GUARDED_BY(crit_);
SampleCounter e2e_delay_counter_ GUARDED_BY(crit_);
SampleCounter e2e_delay_counter_video_ GUARDED_BY(crit_);
SampleCounter e2e_delay_counter_screenshare_ GUARDED_BY(crit_);
int64_t e2e_delay_max_ms_video_ GUARDED_BY(crit_);
int64_t e2e_delay_max_ms_screenshare_ GUARDED_BY(crit_);
MaxCounter freq_offset_counter_ GUARDED_BY(crit_);
int64_t first_report_block_time_ms_ GUARDED_BY(crit_);
ReportBlockStats report_block_stats_ GUARDED_BY(crit_);
@ -148,6 +151,7 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
std::map<uint32_t, StreamDataCounters> rtx_stats_ GUARDED_BY(crit_);
int64_t avg_rtt_ms_ GUARDED_BY(crit_);
mutable std::map<int64_t, size_t> frame_window_ GUARDED_BY(&crit_);
VideoContentType last_content_type_ GUARDED_BY(&crit_);
};
} // namespace webrtc

View File

@ -54,7 +54,8 @@ class ReceiveStatisticsProxyTest : public ::testing::Test {
TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesFramesDecoded) {
EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_decoded);
for (uint32_t i = 1; i <= 3; ++i) {
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
VideoContentType::UNSPECIFIED);
EXPECT_EQ(i, statistics_proxy_->GetStats().frames_decoded);
}
}
@ -62,40 +63,47 @@ TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesFramesDecoded) {
TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithQpResetsFramesDecoded) {
EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_decoded);
for (uint32_t i = 1; i <= 3; ++i) {
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
VideoContentType::UNSPECIFIED);
EXPECT_EQ(i, statistics_proxy_->GetStats().frames_decoded);
}
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(1u));
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(1u),
VideoContentType::UNSPECIFIED);
EXPECT_EQ(1u, statistics_proxy_->GetStats().frames_decoded);
}
TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesQpSum) {
EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u));
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u),
VideoContentType::UNSPECIFIED);
EXPECT_EQ(rtc::Optional<uint64_t>(3u), statistics_proxy_->GetStats().qp_sum);
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(127u));
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(127u),
VideoContentType::UNSPECIFIED);
EXPECT_EQ(rtc::Optional<uint64_t>(130u),
statistics_proxy_->GetStats().qp_sum);
}
TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithoutQpQpSumWontExist) {
EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
VideoContentType::UNSPECIFIED);
EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
}
TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithoutQpResetsQpSum) {
EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u));
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u),
VideoContentType::UNSPECIFIED);
EXPECT_EQ(rtc::Optional<uint64_t>(3u), statistics_proxy_->GetStats().qp_sum);
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
VideoContentType::UNSPECIFIED);
EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
}
TEST_F(ReceiveStatisticsProxyTest, OnRenderedFrameIncreasesFramesRendered) {
EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_rendered);
webrtc::VideoFrame frame(
webrtc::I420Buffer::Create(1, 1), 0, 0, webrtc::kVideoRotation_0);
webrtc::VideoFrame frame(webrtc::I420Buffer::Create(1, 1), 0, 0,
webrtc::kVideoRotation_0);
for (uint32_t i = 1; i <= 3; ++i) {
statistics_proxy_->OnRenderedFrame(frame);
EXPECT_EQ(i, statistics_proxy_->GetStats().frames_rendered);

View File

@ -502,6 +502,10 @@ void RtpStreamReceiver::NotifyReceiverOfFecPacket(const RTPHeader& header) {
if (header.extension.hasVideoRotation) {
rtp_header.type.Video.rotation = header.extension.videoRotation;
}
rtp_header.type.Video.content_type = VideoContentType::UNSPECIFIED;
if (header.extension.hasVideoContentType) {
rtp_header.type.Video.content_type = header.extension.videoContentType;
}
rtp_header.type.Video.playout_delay = header.extension.playout_delay;
OnReceivedPayloadData(nullptr, 0, &rtp_header);

View File

@ -1301,6 +1301,8 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
video_send_config_.rtp.extensions.push_back(RtpExtension(
RtpExtension::kAbsSendTimeUri, test::kAbsSendTimeExtensionId));
}
video_send_config_.rtp.extensions.push_back(RtpExtension(
RtpExtension::kVideoContentTypeUri, test::kVideoContentTypeExtensionId));
video_encoder_config_.min_transmit_bitrate_bps =
params_.video.min_transmit_bps;
@ -1328,6 +1330,8 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
kSendRtxPayloadType;
video_receive_configs_[i].rtp.transport_cc = params_.call.send_side_bwe;
video_receive_configs_[i].rtp.remb = !params_.call.send_side_bwe;
// Enable RTT calculation so NTP time estimator will work.
video_receive_configs_[i].rtp.rtcp_xr.receiver_reference_time_report = true;
// Force fake decoders on non-selected simulcast streams.
if (i != params_.ss.selected_stream) {
VideoReceiveStream::Decoder decoder;

View File

@ -291,6 +291,43 @@ TEST_F(VideoSendStreamTest, SupportsVideoRotation) {
RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, SupportsVideoContentType) {
  // Verifies that when the encoder is configured for screenshare content, the
  // video-content-type RTP header extension is present on sent packets and
  // carries VideoContentType::SCREENSHARE.
  // NOTE(review): the observer was previously named VideoRotationObserver —
  // a copy-paste from the SupportsVideoRotation test; renamed to match what
  // it actually observes.
  class VideoContentTypeObserver : public test::SendTest {
   public:
    VideoContentTypeObserver() : SendTest(kDefaultTimeoutMs) {
      // Register the extension with the parser so it is decoded from the
      // outgoing packets captured in OnSendRtp.
      EXPECT_TRUE(parser_->RegisterRtpHeaderExtension(
          kRtpExtensionVideoContentType, test::kVideoContentTypeExtensionId));
    }

    // Inspects every sent RTP packet; completes the test as soon as one
    // packet carries the expected content-type extension value.
    Action OnSendRtp(const uint8_t* packet, size_t length) override {
      RTPHeader header;
      EXPECT_TRUE(parser_->Parse(packet, length, &header));
      EXPECT_TRUE(header.extension.hasVideoContentType);
      EXPECT_EQ(VideoContentType::SCREENSHARE,
                header.extension.videoContentType);
      observation_complete_.Set();
      return SEND_PACKET;
    }

    // Configures the send stream: only the content-type extension is
    // negotiated, and the encoder is put in screenshare mode so the
    // extension should be populated with SCREENSHARE.
    void ModifyVideoConfigs(
        VideoSendStream::Config* send_config,
        std::vector<VideoReceiveStream::Config>* receive_configs,
        VideoEncoderConfig* encoder_config) override {
      send_config->rtp.extensions.clear();
      send_config->rtp.extensions.push_back(
          RtpExtension(RtpExtension::kVideoContentTypeUri,
                       test::kVideoContentTypeExtensionId));
      encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
    }

    void PerformTest() override {
      EXPECT_TRUE(Wait()) << "Timed out while waiting for single RTP packet.";
    }
  } test;

  RunBaseTest(&test);
}
class FakeReceiveStatistics : public NullReceiveStatistics {
public:
FakeReceiveStatistics(uint32_t send_ssrc,

View File

@ -76,10 +76,10 @@ VideoStreamDecoder::~VideoStreamDecoder() {
// thread may have held the lock when calling VideoDecoder::Decode, Reset, or
// Release. Acquiring the same lock in the path of decode callback can deadlock.
int32_t VideoStreamDecoder::FrameToRender(VideoFrame& video_frame,
rtc::Optional<uint8_t> qp) {
receive_stats_callback_->OnDecodedFrame(qp);
rtc::Optional<uint8_t> qp,
VideoContentType content_type) {
receive_stats_callback_->OnDecodedFrame(qp, content_type);
incoming_video_stream_->OnFrame(video_frame);
return 0;
}

View File

@ -59,7 +59,8 @@ class VideoStreamDecoder : public VCMReceiveCallback,
// Implements VCMReceiveCallback.
int32_t FrameToRender(VideoFrame& video_frame,
rtc::Optional<uint8_t> qp) override;
rtc::Optional<uint8_t> qp,
VideoContentType content_type) override;
int32_t ReceivedDecodedReferenceFrame(const uint64_t picture_id) override;
void OnIncomingPayloadType(int payload_type) override;
void OnDecoderImplementationName(const char* implementation_name) override;

View File

@ -57,6 +57,7 @@ class EncodedImage {
size_t _length;
size_t _size;
VideoRotation rotation_ = kVideoRotation_0;
VideoContentType content_type_ = VideoContentType::UNSPECIFIED;
bool _completeFrame = false;
AdaptReason adapt_reason_;
int qp_ = -1; // Quantizer value.