Revert "Add spatial index to EncodedImage."

This reverts commit da0898dfae3b0a013ca8ad3828e9adfdc749748d.

Reason for revert: Broke downstream tests.

Original change's description:
> Add spatial index to EncodedImage.
> 
> Replaces the VP8 simulcast index and VP9 spatial index formerly part of
> CodecSpecificInfo.
> 
> Bug: webrtc:9378
> Change-Id: I80eafd63fbdee0a25864338196a690628b4bd3d2
> Reviewed-on: https://webrtc-review.googlesource.com/83161
> Commit-Queue: Niels Moller <nisse@webrtc.org>
> Reviewed-by: Erik Språng <sprang@webrtc.org>
> Reviewed-by: Sebastian Jansson <srte@webrtc.org>
> Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
> Reviewed-by: Philip Eliasson <philipel@webrtc.org>
> Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#24485}

TBR=brandtr@webrtc.org,magjed@webrtc.org,nisse@webrtc.org,sprang@webrtc.org,philipel@webrtc.org,srte@webrtc.org

Change-Id: Idb4fb9d72e5574d7353c631cb404a1311f3fd148
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: webrtc:9378
Reviewed-on: https://webrtc-review.googlesource.com/96664
Reviewed-by: Niels Moller <nisse@webrtc.org>
Commit-Queue: Niels Moller <nisse@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#24486}
This commit is contained in:
Niels Moller 2018-08-29 14:35:58 +00:00 committed by Commit Bot
parent da0898dfae
commit 5a998d7246
30 changed files with 215 additions and 160 deletions

View File

@ -22,7 +22,6 @@ namespace webrtc {
namespace {
void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info,
absl::optional<int> spatial_index,
RTPVideoHeader* rtp) {
rtp->codec = info.codecType;
switch (info.codecType) {
@ -32,7 +31,7 @@ void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info,
rtp->vp8().temporalIdx = info.codecSpecific.VP8.temporalIdx;
rtp->vp8().layerSync = info.codecSpecific.VP8.layerSync;
rtp->vp8().keyIdx = info.codecSpecific.VP8.keyIdx;
rtp->simulcastIdx = spatial_index.value_or(0);
rtp->simulcastIdx = info.codecSpecific.VP8.simulcastIdx;
return;
}
case kVideoCodecVP9: {
@ -45,16 +44,13 @@ void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info,
vp9_header.non_ref_for_inter_layer_pred =
info.codecSpecific.VP9.non_ref_for_inter_layer_pred;
vp9_header.temporal_idx = info.codecSpecific.VP9.temporal_idx;
vp9_header.spatial_idx = info.codecSpecific.VP9.spatial_idx;
vp9_header.temporal_up_switch = info.codecSpecific.VP9.temporal_up_switch;
vp9_header.inter_layer_predicted =
info.codecSpecific.VP9.inter_layer_predicted;
vp9_header.gof_idx = info.codecSpecific.VP9.gof_idx;
vp9_header.num_spatial_layers = info.codecSpecific.VP9.num_spatial_layers;
if (vp9_header.num_spatial_layers > 1) {
vp9_header.spatial_idx = spatial_index.value_or(kNoSpatialIdx);
} else {
vp9_header.spatial_idx = kNoSpatialIdx;
}
if (info.codecSpecific.VP9.ss_data_available) {
vp9_header.spatial_layer_resolution_present =
info.codecSpecific.VP9.spatial_layer_resolution_present;
@ -79,13 +75,13 @@ void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info,
auto& h264_header = rtp->video_type_header.emplace<RTPVideoHeaderH264>();
h264_header.packetization_mode =
info.codecSpecific.H264.packetization_mode;
rtp->simulcastIdx = spatial_index.value_or(0);
rtp->simulcastIdx = info.codecSpecific.H264.simulcast_idx;
return;
}
case kVideoCodecMultiplex:
case kVideoCodecGeneric:
rtp->codec = kVideoCodecGeneric;
rtp->simulcastIdx = spatial_index.value_or(0);
rtp->simulcastIdx = info.codecSpecific.generic.simulcast_idx;
return;
default:
return;
@ -135,8 +131,7 @@ RTPVideoHeader RtpPayloadParams::GetRtpVideoHeader(
int64_t shared_frame_id) {
RTPVideoHeader rtp_video_header;
if (codec_specific_info) {
PopulateRtpWithCodecSpecifics(*codec_specific_info, image.SpatialIndex(),
&rtp_video_header);
PopulateRtpWithCodecSpecifics(*codec_specific_info, &rtp_video_header);
}
rtp_video_header.rotation = image.rotation_;
rtp_video_header.content_type = image.content_type_;

View File

@ -38,11 +38,11 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp8) {
EncodedImage encoded_image;
encoded_image.rotation_ = kVideoRotation_90;
encoded_image.content_type_ = VideoContentType::SCREENSHARE;
encoded_image.SetSpatialIndex(1);
CodecSpecificInfo codec_info;
memset(&codec_info, 0, sizeof(CodecSpecificInfo));
codec_info.codecType = kVideoCodecVP8;
codec_info.codecSpecific.VP8.simulcastIdx = 1;
codec_info.codecSpecific.VP8.temporalIdx = 0;
codec_info.codecSpecific.VP8.keyIdx = kNoKeyIdx;
codec_info.codecSpecific.VP8.layerSync = false;
@ -52,6 +52,7 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp8) {
params.GetRtpVideoHeader(encoded_image, &codec_info, kDontCare);
codec_info.codecType = kVideoCodecVP8;
codec_info.codecSpecific.VP8.simulcastIdx = 1;
codec_info.codecSpecific.VP8.temporalIdx = 1;
codec_info.codecSpecific.VP8.layerSync = true;
@ -78,12 +79,13 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp9) {
EncodedImage encoded_image;
encoded_image.rotation_ = kVideoRotation_90;
encoded_image.content_type_ = VideoContentType::SCREENSHARE;
encoded_image.SetSpatialIndex(0);
CodecSpecificInfo codec_info;
memset(&codec_info, 0, sizeof(CodecSpecificInfo));
codec_info.codecType = kVideoCodecVP9;
codec_info.codecSpecific.VP9.num_spatial_layers = 3;
codec_info.codecSpecific.VP9.first_frame_in_picture = true;
codec_info.codecSpecific.VP9.spatial_idx = 0;
codec_info.codecSpecific.VP9.temporal_idx = 2;
codec_info.codecSpecific.VP9.end_of_picture = false;
@ -98,7 +100,7 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp9) {
EXPECT_EQ(kPictureId + 1, vp9_header.picture_id);
EXPECT_EQ(kTl0PicIdx, vp9_header.tl0_pic_idx);
EXPECT_EQ(vp9_header.temporal_idx, codec_info.codecSpecific.VP9.temporal_idx);
EXPECT_EQ(vp9_header.spatial_idx, encoded_image.SpatialIndex());
EXPECT_EQ(vp9_header.spatial_idx, codec_info.codecSpecific.VP9.spatial_idx);
EXPECT_EQ(vp9_header.num_spatial_layers,
codec_info.codecSpecific.VP9.num_spatial_layers);
EXPECT_EQ(vp9_header.end_of_picture,
@ -106,9 +108,9 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp9) {
// Next spatial layer.
codec_info.codecSpecific.VP9.first_frame_in_picture = false;
codec_info.codecSpecific.VP9.spatial_idx += 1;
codec_info.codecSpecific.VP9.end_of_picture = true;
encoded_image.SetSpatialIndex(1);
header = params.GetRtpVideoHeader(encoded_image, &codec_info, kDontCare);
EXPECT_EQ(kVideoRotation_90, header.rotation);
@ -117,7 +119,7 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp9) {
EXPECT_EQ(kPictureId + 1, vp9_header.picture_id);
EXPECT_EQ(kTl0PicIdx, vp9_header.tl0_pic_idx);
EXPECT_EQ(vp9_header.temporal_idx, codec_info.codecSpecific.VP9.temporal_idx);
EXPECT_EQ(vp9_header.spatial_idx, encoded_image.SpatialIndex());
EXPECT_EQ(vp9_header.spatial_idx, codec_info.codecSpecific.VP9.spatial_idx);
EXPECT_EQ(vp9_header.num_spatial_layers,
codec_info.codecSpecific.VP9.num_spatial_layers);
EXPECT_EQ(vp9_header.end_of_picture,
@ -152,6 +154,7 @@ TEST(RtpPayloadParamsTest, PictureIdIsSetForVp8) {
CodecSpecificInfo codec_info;
memset(&codec_info, 0, sizeof(CodecSpecificInfo));
codec_info.codecType = kVideoCodecVP8;
codec_info.codecSpecific.VP8.simulcastIdx = 0;
RtpPayloadParams params(kSsrc1, &state);
RTPVideoHeader header =

View File

@ -95,6 +95,21 @@ std::vector<std::unique_ptr<RtpRtcp>> CreateRtpRtcpModules(
return modules;
}
absl::optional<size_t> GetSimulcastIdx(const CodecSpecificInfo* info) {
if (!info)
return absl::nullopt;
switch (info->codecType) {
case kVideoCodecVP8:
return absl::optional<size_t>(info->codecSpecific.VP8.simulcastIdx);
case kVideoCodecH264:
return absl::optional<size_t>(info->codecSpecific.H264.simulcast_idx);
case kVideoCodecMultiplex:
case kVideoCodecGeneric:
return absl::optional<size_t>(info->codecSpecific.generic.simulcast_idx);
default:
return absl::nullopt;
}
}
bool PayloadTypeSupportsSkippingFecPackets(const std::string& payload_name) {
const VideoCodecType codecType = PayloadStringToCodecType(payload_name);
if (codecType == kVideoCodecVP8 || codecType == kVideoCodecVP9) {
@ -305,14 +320,7 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage(
return Result(Result::ERROR_SEND_FAILED);
shared_frame_id_++;
size_t stream_index = 0;
if (codec_specific_info &&
(codec_specific_info->codecType == kVideoCodecVP8 ||
codec_specific_info->codecType == kVideoCodecH264 ||
codec_specific_info->codecType == kVideoCodecGeneric)) {
// Map spatial index to simulcast.
stream_index = encoded_image.SpatialIndex().value_or(0);
}
size_t stream_index = GetSimulcastIdx(codec_specific_info).value_or(0);
RTC_DCHECK_LT(stream_index, rtp_modules_.size());
RTPVideoHeader rtp_video_header = params_[stream_index].GetRtpVideoHeader(
encoded_image, codec_specific_info, shared_frame_id_);

View File

@ -169,41 +169,44 @@ TEST(RtpVideoSenderTest, SendOnOneModule) {
TEST(RtpVideoSenderTest, SendSimulcastSetActive) {
uint8_t payload = 'a';
EncodedImage encoded_image_1;
encoded_image_1.SetTimestamp(1);
encoded_image_1.capture_time_ms_ = 2;
encoded_image_1._frameType = kVideoFrameKey;
encoded_image_1._buffer = &payload;
encoded_image_1._length = 1;
EncodedImage encoded_image;
encoded_image.SetTimestamp(1);
encoded_image.capture_time_ms_ = 2;
encoded_image._frameType = kVideoFrameKey;
encoded_image._buffer = &payload;
encoded_image._length = 1;
RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, kPayloadType, {});
CodecSpecificInfo codec_info;
memset(&codec_info, 0, sizeof(CodecSpecificInfo));
codec_info.codecType = kVideoCodecVP8;
CodecSpecificInfo codec_info_1;
memset(&codec_info_1, 0, sizeof(CodecSpecificInfo));
codec_info_1.codecType = kVideoCodecVP8;
codec_info_1.codecSpecific.VP8.simulcastIdx = 0;
test.router()->SetActive(true);
EXPECT_EQ(EncodedImageCallback::Result::OK,
test.router()
->OnEncodedImage(encoded_image_1, &codec_info, nullptr)
->OnEncodedImage(encoded_image, &codec_info_1, nullptr)
.error);
EncodedImage encoded_image_2(encoded_image_1);
encoded_image_2.SetSpatialIndex(1);
CodecSpecificInfo codec_info_2;
memset(&codec_info_2, 0, sizeof(CodecSpecificInfo));
codec_info_2.codecType = kVideoCodecVP8;
codec_info_2.codecSpecific.VP8.simulcastIdx = 1;
EXPECT_EQ(EncodedImageCallback::Result::OK,
test.router()
->OnEncodedImage(encoded_image_2, &codec_info, nullptr)
->OnEncodedImage(encoded_image, &codec_info_2, nullptr)
.error);
// Inactive.
test.router()->SetActive(false);
EXPECT_NE(EncodedImageCallback::Result::OK,
test.router()
->OnEncodedImage(encoded_image_1, &codec_info, nullptr)
->OnEncodedImage(encoded_image, &codec_info_1, nullptr)
.error);
EXPECT_NE(EncodedImageCallback::Result::OK,
test.router()
->OnEncodedImage(encoded_image_2, &codec_info, nullptr)
->OnEncodedImage(encoded_image, &codec_info_2, nullptr)
.error);
}
@ -213,19 +216,22 @@ TEST(RtpVideoSenderTest, SendSimulcastSetActive) {
// be sent if both modules are inactive.
TEST(RtpVideoSenderTest, SendSimulcastSetActiveModules) {
uint8_t payload = 'a';
EncodedImage encoded_image_1;
encoded_image_1.SetTimestamp(1);
encoded_image_1.capture_time_ms_ = 2;
encoded_image_1._frameType = kVideoFrameKey;
encoded_image_1._buffer = &payload;
encoded_image_1._length = 1;
EncodedImage encoded_image_2(encoded_image_1);
encoded_image_2.SetSpatialIndex(1);
EncodedImage encoded_image;
encoded_image.SetTimestamp(1);
encoded_image.capture_time_ms_ = 2;
encoded_image._frameType = kVideoFrameKey;
encoded_image._buffer = &payload;
encoded_image._length = 1;
RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, kPayloadType, {});
CodecSpecificInfo codec_info;
memset(&codec_info, 0, sizeof(CodecSpecificInfo));
codec_info.codecType = kVideoCodecVP8;
CodecSpecificInfo codec_info_1;
memset(&codec_info_1, 0, sizeof(CodecSpecificInfo));
codec_info_1.codecType = kVideoCodecVP8;
codec_info_1.codecSpecific.VP8.simulcastIdx = 0;
CodecSpecificInfo codec_info_2;
memset(&codec_info_2, 0, sizeof(CodecSpecificInfo));
codec_info_2.codecType = kVideoCodecVP8;
codec_info_2.codecSpecific.VP8.simulcastIdx = 1;
// Only setting one stream to active will still set the payload router to
// active and allow sending data on the active stream.
@ -233,7 +239,7 @@ TEST(RtpVideoSenderTest, SendSimulcastSetActiveModules) {
test.router()->SetActiveModules(active_modules);
EXPECT_EQ(EncodedImageCallback::Result::OK,
test.router()
->OnEncodedImage(encoded_image_1, &codec_info, nullptr)
->OnEncodedImage(encoded_image, &codec_info_1, nullptr)
.error);
// Setting both streams to inactive will turn the payload router to
@ -244,11 +250,11 @@ TEST(RtpVideoSenderTest, SendSimulcastSetActiveModules) {
// because the payload router is inactive.
EXPECT_NE(EncodedImageCallback::Result::OK,
test.router()
->OnEncodedImage(encoded_image_1, &codec_info, nullptr)
->OnEncodedImage(encoded_image, &codec_info_1, nullptr)
.error);
EXPECT_NE(EncodedImageCallback::Result::OK,
test.router()
->OnEncodedImage(encoded_image_1, &codec_info, nullptr)
->OnEncodedImage(encoded_image, &codec_info_2, nullptr)
.error);
}

View File

@ -449,14 +449,16 @@ EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage(
const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader* fragmentation) {
EncodedImage stream_image(encodedImage);
CodecSpecificInfo stream_codec_specific = *codecSpecificInfo;
stream_codec_specific.codec_name = implementation_name_.c_str();
stream_image.SetSpatialIndex(stream_idx);
if (stream_codec_specific.codecType == webrtc::kVideoCodecVP8) {
stream_codec_specific.codecSpecific.VP8.simulcastIdx = stream_idx;
} else if (stream_codec_specific.codecType == webrtc::kVideoCodecH264) {
stream_codec_specific.codecSpecific.H264.simulcast_idx = stream_idx;
}
return encoded_complete_callback_->OnEncodedImage(
stream_image, &stream_codec_specific, fragmentation);
encodedImage, &stream_codec_specific, fragmentation);
}
void SimulcastEncoderAdapter::PopulateStreamCodec(

View File

@ -339,10 +339,11 @@ class TestSimulcastEncoderAdapterFake : public ::testing::Test,
const RTPFragmentationHeader* fragmentation) override {
last_encoded_image_width_ = encoded_image._encodedWidth;
last_encoded_image_height_ = encoded_image._encodedHeight;
if (codec_specific_info) {
last_encoded_image_simulcast_index_ =
encoded_image.SpatialIndex().value_or(-1);
return Result(Result::OK, encoded_image._timeStamp);
codec_specific_info->codecSpecific.VP8.simulcastIdx;
}
return Result(Result::OK, encoded_image.Timestamp());
}
bool GetLastEncodedImageInfo(int* out_width,

View File

@ -506,7 +506,6 @@ int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame,
: VideoContentType::UNSPECIFIED;
encoded_images_[i].timing_.flags = VideoSendTiming::kInvalid;
encoded_images_[i]._frameType = ConvertToVideoFrameType(info.eFrameType);
encoded_images_[i].SetSpatialIndex(configurations_[i].simulcast_idx);
// Split encoded image up into fragments. This also updates
// |encoded_image_|.
@ -527,6 +526,8 @@ int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame,
codec_specific.codecType = kVideoCodecH264;
codec_specific.codecSpecific.H264.packetization_mode =
packetization_mode_;
codec_specific.codecSpecific.H264.simulcast_idx =
configurations_[i].simulcast_idx;
encoded_image_callback_->OnEncodedImage(encoded_images_[i],
&codec_specific, &frag_header);
}

View File

@ -285,6 +285,7 @@ EncodedImageCallback::Result MultiplexEncoderAdapter::OnEncodedImage(
CodecSpecificInfo codec_info = *codecSpecificInfo;
codec_info.codecType = kVideoCodecMultiplex;
codec_info.codecSpecific.generic.simulcast_idx = 0;
encoded_complete_callback_->OnEncodedImage(combined_image_, &codec_info,
fragmentation);
}

View File

@ -232,7 +232,7 @@ TEST_P(TestMultiplexAdapter, CheckSingleFrameEncodedBitstream) {
CodecSpecificInfo codec_specific_info;
ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType);
EXPECT_FALSE(encoded_frame.SpatialIndex());
EXPECT_EQ(0, codec_specific_info.codecSpecific.generic.simulcast_idx);
const MultiplexImage& unpacked_frame =
MultiplexEncodedImagePacker::Unpack(encoded_frame);
@ -252,7 +252,7 @@ TEST_P(TestMultiplexAdapter, CheckDoubleFramesEncodedBitstream) {
CodecSpecificInfo codec_specific_info;
ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType);
EXPECT_FALSE(encoded_frame.SpatialIndex());
EXPECT_EQ(0, codec_specific_info.codecSpecific.generic.simulcast_idx);
const MultiplexImage& unpacked_frame =
MultiplexEncodedImagePacker::Unpack(encoded_frame);

View File

@ -56,17 +56,22 @@ size_t GetMaxNaluSizeBytes(const EncodedImage& encoded_frame,
return max_size;
}
size_t GetTemporalLayerIndex(const CodecSpecificInfo& codec_specific) {
size_t temporal_idx = 0;
void GetLayerIndices(const CodecSpecificInfo& codec_specific,
size_t* spatial_idx,
size_t* temporal_idx) {
if (codec_specific.codecType == kVideoCodecVP8) {
temporal_idx = codec_specific.codecSpecific.VP8.temporalIdx;
*spatial_idx = codec_specific.codecSpecific.VP8.simulcastIdx;
*temporal_idx = codec_specific.codecSpecific.VP8.temporalIdx;
} else if (codec_specific.codecType == kVideoCodecVP9) {
temporal_idx = codec_specific.codecSpecific.VP9.temporal_idx;
*spatial_idx = codec_specific.codecSpecific.VP9.spatial_idx;
*temporal_idx = codec_specific.codecSpecific.VP9.temporal_idx;
}
if (temporal_idx == kNoTemporalIdx) {
temporal_idx = 0;
if (*spatial_idx == kNoSpatialIdx) {
*spatial_idx = 0;
}
if (*temporal_idx == kNoTemporalIdx) {
*temporal_idx = 0;
}
return temporal_idx;
}
int GetElapsedTimeMicroseconds(int64_t start_ns, int64_t stop_ns) {
@ -342,8 +347,9 @@ void VideoProcessor::FrameEncoded(
}
// Layer metadata.
size_t spatial_idx = encoded_image.SpatialIndex().value_or(0);
size_t temporal_idx = GetTemporalLayerIndex(codec_specific);
size_t spatial_idx = 0;
size_t temporal_idx = 0;
GetLayerIndices(codec_specific, &spatial_idx, &temporal_idx);
FrameStatistics* frame_stat =
stats_->GetFrameWithTimestamp(encoded_image.Timestamp(), spatial_idx);

View File

@ -817,6 +817,7 @@ void LibvpxVp8Encoder::PopulateCodecSpecific(
codec_specific->codecType = kVideoCodecVP8;
codec_specific->codec_name = ImplementationName();
CodecSpecificInfoVP8* vp8Info = &(codec_specific->codecSpecific.VP8);
vp8Info->simulcastIdx = stream_idx;
vp8Info->keyIdx = kNoKeyIdx; // TODO(hlundin) populate this
vp8Info->nonReference = (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) != 0;
temporal_layers_[stream_idx]->PopulateCodecSpecific(
@ -875,7 +876,6 @@ int LibvpxVp8Encoder::GetEncodedPartitions(
encoded_images_[encoder_idx]._frameType = kVideoFrameKey;
is_keyframe = true;
}
encoded_images_[encoder_idx].SetSpatialIndex(stream_idx);
PopulateCodecSpecific(&codec_specific, tl_configs[stream_idx], *pkt,
stream_idx, input_image.timestamp());
break;

View File

@ -70,7 +70,7 @@ class TestVp8Impl : public VideoCodecUnitTest {
VerifyQpParser(*encoded_frame);
EXPECT_STREQ("libvpx", codec_specific_info->codec_name);
EXPECT_EQ(kVideoCodecVP8, codec_specific_info->codecType);
EXPECT_EQ(0, encoded_frame->SpatialIndex());
EXPECT_EQ(0u, codec_specific_info->codecSpecific.VP8.simulcastIdx);
}
void EncodeAndExpectFrameWith(const VideoFrame& input_frame,

View File

@ -356,7 +356,7 @@ TEST_F(TestVp9Impl, EndOfPicture) {
encoder_->Encode(*NextInputFrame(), nullptr, nullptr));
ASSERT_TRUE(WaitForEncodedFrames(&frames, &codec_specific));
EXPECT_FALSE(frames[0].SpatialIndex());
EXPECT_EQ(codec_specific[0].codecSpecific.VP9.spatial_idx, kNoSpatialIdx);
EXPECT_TRUE(codec_specific[0].codecSpecific.VP9.end_of_picture);
}
@ -395,7 +395,7 @@ TEST_F(TestVp9Impl, InterLayerPred) {
// Key frame.
EXPECT_FALSE(codec_specific[0].codecSpecific.VP9.inter_pic_predicted);
EXPECT_EQ(frames[0].SpatialIndex(), 0);
EXPECT_EQ(codec_specific[0].codecSpecific.VP9.spatial_idx, 0);
EXPECT_EQ(codec_specific[0].codecSpecific.VP9.non_ref_for_inter_layer_pred,
inter_layer_pred == InterLayerPredMode::kOff);
EXPECT_TRUE(
@ -408,7 +408,7 @@ TEST_F(TestVp9Impl, InterLayerPred) {
// Delta frame.
EXPECT_TRUE(codec_specific[0].codecSpecific.VP9.inter_pic_predicted);
EXPECT_EQ(frames[0].SpatialIndex(), 0);
EXPECT_EQ(codec_specific[0].codecSpecific.VP9.spatial_idx, 0);
EXPECT_EQ(codec_specific[0].codecSpecific.VP9.non_ref_for_inter_layer_pred,
inter_layer_pred == InterLayerPredMode::kOff ||
inter_layer_pred == InterLayerPredMode::kOnKeyPic);

View File

@ -754,7 +754,6 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
}
void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
absl::optional<int>* spatial_idx,
const vpx_codec_cx_pkt& pkt,
uint32_t timestamp,
bool first_frame_in_picture) {
@ -781,9 +780,9 @@ void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
}
if (num_active_spatial_layers_ == 1) {
RTC_CHECK_EQ(layer_id.spatial_layer_id, 0);
*spatial_idx = absl::nullopt;
vp9_info->spatial_idx = kNoSpatialIdx;
} else {
*spatial_idx = layer_id.spatial_layer_id;
vp9_info->spatial_idx = layer_id.spatial_layer_id;
}
if (layer_id.spatial_layer_id != 0) {
vp9_info->ss_data_available = false;
@ -1022,10 +1021,8 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
RTC_DCHECK_LE(encoded_image_._length, encoded_image_._size);
memset(&codec_specific_, 0, sizeof(codec_specific_));
absl::optional<int> spatial_index;
PopulateCodecSpecific(&codec_specific_, &spatial_index, *pkt,
input_image_->timestamp(), first_frame_in_picture);
encoded_image_.SetSpatialIndex(spatial_index);
PopulateCodecSpecific(&codec_specific_, *pkt, input_image_->timestamp(),
first_frame_in_picture);
if (is_flexible_mode_) {
UpdateReferenceBuffers(*pkt, pics_since_key_);

View File

@ -61,7 +61,6 @@ class VP9EncoderImpl : public VP9Encoder {
int InitAndSetControlSettings(const VideoCodec* inst);
void PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
absl::optional<int>* spatial_idx,
const vpx_codec_cx_pkt& pkt,
uint32_t timestamp,
bool first_frame_in_picture);

View File

@ -82,6 +82,7 @@ void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header) {
if (_codecSpecificInfo.codecType != kVideoCodecVP9) {
// This is the first packet for this frame.
_codecSpecificInfo.codecSpecific.VP9.temporal_idx = 0;
_codecSpecificInfo.codecSpecific.VP9.spatial_idx = 0;
_codecSpecificInfo.codecSpecific.VP9.gof_idx = 0;
_codecSpecificInfo.codecSpecific.VP9.inter_layer_predicted = false;
_codecSpecificInfo.codecType = kVideoCodecVP9;
@ -105,6 +106,8 @@ void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header) {
vp9_header.temporal_up_switch;
}
if (vp9_header.spatial_idx != kNoSpatialIdx) {
_codecSpecificInfo.codecSpecific.VP9.spatial_idx =
vp9_header.spatial_idx;
_codecSpecificInfo.codecSpecific.VP9.inter_layer_predicted =
vp9_header.inter_layer_predicted;
}

View File

@ -391,10 +391,21 @@ EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage(
const RTPFragmentationHeader* fragmentation_header) {
TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded",
"timestamp", encoded_image.Timestamp());
const size_t spatial_idx = encoded_image.SpatialIndex().value_or(0);
size_t simulcast_svc_idx = 0;
if (codec_specific->codecType == kVideoCodecVP9) {
if (codec_specific->codecSpecific.VP9.num_spatial_layers > 1)
simulcast_svc_idx = codec_specific->codecSpecific.VP9.spatial_idx;
} else if (codec_specific->codecType == kVideoCodecVP8) {
simulcast_svc_idx = codec_specific->codecSpecific.VP8.simulcastIdx;
} else if (codec_specific->codecType == kVideoCodecGeneric) {
simulcast_svc_idx = codec_specific->codecSpecific.generic.simulcast_idx;
} else if (codec_specific->codecType == kVideoCodecH264) {
// TODO(ilnik): When h264 simulcast is landed, extract simulcast idx here.
}
EncodedImage image_copy(encoded_image);
FillTimingInfo(spatial_idx, &image_copy);
FillTimingInfo(simulcast_svc_idx, &image_copy);
// Piggyback ALR experiment group id and simulcast id into the content type.
uint8_t experiment_id =
@ -410,7 +421,7 @@ EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage(
// id in content type to +1 of that is actual simulcast index. This is because
// value 0 on the wire is reserved for 'no simulcast stream specified'.
RTC_CHECK(videocontenttypehelpers::SetSimulcastId(
&image_copy.content_type_, static_cast<uint8_t>(spatial_idx + 1)));
&image_copy.content_type_, static_cast<uint8_t>(simulcast_svc_idx + 1)));
Result result = post_encode_callback_->OnEncodedImage(
image_copy, codec_specific, fragmentation_header);

View File

@ -95,8 +95,8 @@ std::vector<std::vector<FrameType>> GetTimingFrames(
image._length = FrameSize(min_frame_size, max_frame_size, s, i);
image.capture_time_ms_ = current_timestamp;
image.SetTimestamp(static_cast<uint32_t>(current_timestamp * 90));
image.SetSpatialIndex(s);
codec_specific.codecType = kVideoCodecGeneric;
codec_specific.codecSpecific.generic.simulcast_idx = s;
callback.OnEncodeStarted(static_cast<uint32_t>(current_timestamp * 90),
current_timestamp, s);
if (dropped) {
@ -189,6 +189,7 @@ TEST(TestVCMEncodedFrameCallback, NoTimingFrameIfNoEncodeStartTime) {
image.capture_time_ms_ = timestamp;
image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
codec_specific.codecType = kVideoCodecGeneric;
codec_specific.codecSpecific.generic.simulcast_idx = 0;
FakeEncodedImageCallback sink;
VCMEncodedFrameCallback callback(&sink, nullptr);
VideoCodec::TimingFrameTriggerThresholds thresholds;
@ -220,6 +221,7 @@ TEST(TestVCMEncodedFrameCallback, AdjustsCaptureTimeForInternalSourceEncoder) {
image.capture_time_ms_ = timestamp;
image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
codec_specific.codecType = kVideoCodecGeneric;
codec_specific.codecSpecific.generic.simulcast_idx = 0;
FakeEncodedImageCallback sink;
VCMEncodedFrameCallback callback(&sink, nullptr);
callback.SetInternalSource(true);
@ -255,6 +257,7 @@ TEST(TestVCMEncodedFrameCallback, NotifiesAboutDroppedFrames) {
const int64_t kTimestampMs3 = 47721860;
const int64_t kTimestampMs4 = 47721870;
codec_specific.codecType = kVideoCodecGeneric;
codec_specific.codecSpecific.generic.simulcast_idx = 0;
FakeEncodedImageCallback sink;
VCMEncodedFrameCallback callback(&sink, nullptr);
// Any non-zero bitrate needed to be set before the first frame.
@ -290,6 +293,7 @@ TEST(TestVCMEncodedFrameCallback, RestoresCaptureTimestamps) {
CodecSpecificInfo codec_specific;
const int64_t kTimestampMs = 123456;
codec_specific.codecType = kVideoCodecGeneric;
codec_specific.codecSpecific.generic.simulcast_idx = 0;
FakeEncodedImageCallback sink;
VCMEncodedFrameCallback callback(&sink, nullptr);
// Any non-zero bitrate needed to be set before the first frame.

View File

@ -28,8 +28,6 @@ class RTPFragmentationHeader; // forward declaration
// with a copy-constructor. See below.
struct CodecSpecificInfoVP8 {
bool nonReference;
// TODO(bugs.webrtc.org/9378): Delete simulcastIdx, replaced by spatial index
// member in EncodedImage. Unused, but assigned in downstream code.
uint8_t simulcastIdx;
uint8_t temporalIdx;
bool layerSync;
@ -45,8 +43,6 @@ struct CodecSpecificInfoVP9 {
bool non_ref_for_inter_layer_pred;
uint8_t temporal_idx;
// TODO(bugs.webrtc.org/9378): Delete spatial_idx, replaced by spatial index
// member in EncodedImage. Unused, but assigned in downstream code.
uint8_t spatial_idx;
bool temporal_up_switch;
bool inter_layer_predicted; // Frame is dependent on directly lower spatial
@ -67,14 +63,13 @@ struct CodecSpecificInfoVP9 {
bool end_of_picture;
};
// TODO(bugs.webrtc.org/9378): Delete this struct. Unused, except that
// simulcast_idx is assigned in downstream code.
struct CodecSpecificInfoGeneric {
uint8_t simulcast_idx;
};
struct CodecSpecificInfoH264 {
H264PacketizationMode packetization_mode;
uint8_t simulcast_idx;
};
union CodecSpecificInfoUnion {

View File

@ -76,9 +76,15 @@ class SimulcastTestFixtureImpl::TestEncodedImageCallback
virtual Result OnEncodedImage(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) {
uint16_t simulcast_idx = 0;
bool is_vp8 = (codec_specific_info->codecType == kVideoCodecVP8);
if (is_vp8) {
simulcast_idx = codec_specific_info->codecSpecific.VP8.simulcastIdx;
} else {
simulcast_idx = codec_specific_info->codecSpecific.H264.simulcast_idx;
}
// Only store the base layer.
if (encoded_image.SpatialIndex().value_or(0) == 0) {
if (simulcast_idx) {
if (encoded_image._frameType == kVideoFrameKey) {
delete[] encoded_key_frame_._buffer;
encoded_key_frame_._buffer = new uint8_t[encoded_image._size];
@ -98,9 +104,9 @@ class SimulcastTestFixtureImpl::TestEncodedImageCallback
}
}
if (is_vp8) {
layer_sync_[encoded_image.SpatialIndex().value_or(0)] =
layer_sync_[codec_specific_info->codecSpecific.VP8.simulcastIdx] =
codec_specific_info->codecSpecific.VP8.layerSync;
temporal_layer_[encoded_image.SpatialIndex().value_or(0)] =
temporal_layer_[codec_specific_info->codecSpecific.VP8.simulcastIdx] =
codec_specific_info->codecSpecific.VP8.temporalIdx;
}
return Result(Result::OK, encoded_image.Timestamp());

View File

@ -1008,6 +1008,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
info.codecType = codec_type;
if (codec_type == kVideoCodecVP8) {
info.codecSpecific.VP8.nonReference = false;
info.codecSpecific.VP8.simulcastIdx = 0;
info.codecSpecific.VP8.temporalIdx = kNoTemporalIdx;
info.codecSpecific.VP8.layerSync = false;
info.codecSpecific.VP8.keyIdx = kNoKeyIdx;
@ -1019,6 +1020,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
info.codecSpecific.VP9.flexible_mode = false;
info.codecSpecific.VP9.ss_data_available = key_frame ? true : false;
info.codecSpecific.VP9.temporal_idx = kNoTemporalIdx;
info.codecSpecific.VP9.spatial_idx = kNoSpatialIdx;
info.codecSpecific.VP9.temporal_up_switch = true;
info.codecSpecific.VP9.inter_layer_predicted = false;
info.codecSpecific.VP9.gof_idx =

View File

@ -390,6 +390,7 @@ CodecSpecificInfo VideoEncoderWrapper::ParseCodecSpecificInfo(
switch (codec_settings_.codecType) {
case kVideoCodecVP8:
info.codecSpecific.VP8.nonReference = false;
info.codecSpecific.VP8.simulcastIdx = 0;
info.codecSpecific.VP8.temporalIdx = kNoTemporalIdx;
info.codecSpecific.VP8.layerSync = false;
info.codecSpecific.VP8.keyIdx = kNoKeyIdx;
@ -402,6 +403,7 @@ CodecSpecificInfo VideoEncoderWrapper::ParseCodecSpecificInfo(
info.codecSpecific.VP9.flexible_mode = false;
info.codecSpecific.VP9.ss_data_available = key_frame ? true : false;
info.codecSpecific.VP9.temporal_idx = kNoTemporalIdx;
info.codecSpecific.VP9.spatial_idx = kNoSpatialIdx;
info.codecSpecific.VP9.temporal_up_switch = true;
info.codecSpecific.VP9.inter_layer_predicted = false;
info.codecSpecific.VP9.gof_idx =

View File

@ -102,6 +102,7 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image,
CodecSpecificInfo specifics;
memset(&specifics, 0, sizeof(specifics));
specifics.codecType = kVideoCodecGeneric;
specifics.codecSpecific.generic.simulcast_idx = i;
std::unique_ptr<uint8_t[]> encoded_buffer(
new uint8_t[frame_info.layers[i].size]);
memcpy(encoded_buffer.get(), encoded_buffer_, frame_info.layers[i].size);
@ -117,7 +118,6 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image,
encoded.content_type_ = (mode == VideoCodecMode::kScreensharing)
? VideoContentType::SCREENSHARE
: VideoContentType::UNSPECIFIED;
encoded.SetSpatialIndex(i);
specifics.codec_name = ImplementationName();
if (callback->OnEncodedImage(encoded, &specifics, nullptr).error !=
EncodedImageCallback::Result::OK) {

View File

@ -88,6 +88,7 @@ void FakeVP8Encoder::PopulateCodecSpecific(
codec_specific->codecType = kVideoCodecVP8;
codec_specific->codec_name = ImplementationName();
CodecSpecificInfoVP8* vp8Info = &(codec_specific->codecSpecific.VP8);
vp8Info->simulcastIdx = stream_idx;
vp8Info->keyIdx = kNoKeyIdx;
vp8Info->nonReference = false;
temporal_layers_[stream_idx]->PopulateCodecSpecific(
@ -99,7 +100,7 @@ EncodedImageCallback::Result FakeVP8Encoder::OnEncodedImage(
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragments) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
uint8_t stream_idx = encoded_image.SpatialIndex().value_or(0);
uint8_t stream_idx = codec_specific_info->codecSpecific.generic.simulcast_idx;
CodecSpecificInfo overrided_specific_info;
TemporalLayers::FrameConfig tl_config =
temporal_layers_[stream_idx]->UpdateLayerConfig(encoded_image._timeStamp);

View File

@ -81,9 +81,9 @@ void UpdateCodecTypeHistogram(const std::string& payload_name) {
kVideoMax);
}
bool IsForcedFallbackPossible(const CodecSpecificInfo* codec_info,
int simulcast_index) {
return codec_info->codecType == kVideoCodecVP8 && simulcast_index == 0 &&
bool IsForcedFallbackPossible(const CodecSpecificInfo* codec_info) {
return codec_info->codecType == kVideoCodecVP8 &&
codec_info->codecSpecific.VP8.simulcastIdx == 0 &&
(codec_info->codecSpecific.VP8.temporalIdx == 0 ||
codec_info->codecSpecific.VP8.temporalIdx == kNoTemporalIdx);
}
@ -219,7 +219,7 @@ void SendStatisticsProxy::UmaSamplesContainer::RemoveOld(
sent_height_counter_.Add(it->second.max_height);
// Check number of encoded streams per timestamp.
if (num_streams_ > static_cast<size_t>(it->second.max_simulcast_idx)) {
if (num_streams_ > it->second.max_simulcast_idx) {
*is_limited_in_resolution = false;
if (num_streams_ > 1) {
int disabled_streams =
@ -241,7 +241,7 @@ void SendStatisticsProxy::UmaSamplesContainer::RemoveOld(
bool SendStatisticsProxy::UmaSamplesContainer::InsertEncodedFrame(
const EncodedImage& encoded_frame,
int simulcast_idx,
size_t simulcast_idx,
bool* is_limited_in_resolution) {
int64_t now_ms = clock_->TimeInMilliseconds();
RemoveOld(now_ms, is_limited_in_resolution);
@ -806,15 +806,14 @@ void SendStatisticsProxy::OnSetEncoderTargetRate(uint32_t bitrate_bps) {
void SendStatisticsProxy::UpdateEncoderFallbackStats(
const CodecSpecificInfo* codec_info,
int pixels,
int simulcast_index) {
UpdateFallbackDisabledStats(codec_info, pixels, simulcast_index);
int pixels) {
UpdateFallbackDisabledStats(codec_info, pixels);
if (!fallback_max_pixels_ || !uma_container_->fallback_info_.is_possible) {
return;
}
if (!IsForcedFallbackPossible(codec_info, simulcast_index)) {
if (!IsForcedFallbackPossible(codec_info)) {
uma_container_->fallback_info_.is_possible = false;
return;
}
@ -856,15 +855,14 @@ void SendStatisticsProxy::UpdateEncoderFallbackStats(
void SendStatisticsProxy::UpdateFallbackDisabledStats(
const CodecSpecificInfo* codec_info,
int pixels,
int simulcast_index) {
int pixels) {
if (!fallback_max_pixels_disabled_ ||
!uma_container_->fallback_info_disabled_.is_possible ||
stats_.has_entered_low_resolution) {
return;
}
if (!IsForcedFallbackPossible(codec_info, simulcast_index) ||
if (!IsForcedFallbackPossible(codec_info) ||
strcmp(codec_info->codec_name, kVp8SwCodecName) == 0) {
uma_container_->fallback_info_disabled_.is_possible = false;
return;
@ -884,27 +882,26 @@ void SendStatisticsProxy::OnMinPixelLimitReached() {
void SendStatisticsProxy::OnSendEncodedImage(
const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_info) {
// Simulcast is used for VP8, H264 and Generic.
int simulcast_idx =
(codec_info && (codec_info->codecType == kVideoCodecVP8 ||
codec_info->codecType == kVideoCodecH264 ||
codec_info->codecType == kVideoCodecGeneric))
? encoded_image.SpatialIndex().value_or(0)
: 0;
size_t simulcast_idx = 0;
rtc::CritScope lock(&crit_);
++stats_.frames_encoded;
if (codec_info) {
if (codec_info->codecType == kVideoCodecVP8) {
simulcast_idx = codec_info->codecSpecific.VP8.simulcastIdx;
} else if (codec_info->codecType == kVideoCodecH264) {
simulcast_idx = codec_info->codecSpecific.H264.simulcast_idx;
} else if (codec_info->codecType == kVideoCodecGeneric) {
simulcast_idx = codec_info->codecSpecific.generic.simulcast_idx;
}
if (codec_info->codec_name) {
UpdateEncoderFallbackStats(
codec_info,
encoded_image._encodedWidth * encoded_image._encodedHeight,
simulcast_idx);
UpdateEncoderFallbackStats(codec_info, encoded_image._encodedWidth *
encoded_image._encodedHeight);
stats_.encoder_implementation_name = codec_info->codec_name;
}
}
if (static_cast<size_t>(simulcast_idx) >= rtp_config_.ssrcs.size()) {
if (simulcast_idx >= rtp_config_.ssrcs.size()) {
RTC_LOG(LS_ERROR) << "Encoded image outside simulcast range ("
<< simulcast_idx << " >= " << rtp_config_.ssrcs.size()
<< ").";
@ -931,13 +928,20 @@ void SendStatisticsProxy::OnSendEncodedImage(
if (codec_info) {
if (codec_info->codecType == kVideoCodecVP8) {
int spatial_idx = (rtp_config_.ssrcs.size() == 1) ? -1 : simulcast_idx;
int spatial_idx = (rtp_config_.ssrcs.size() == 1)
? -1
: static_cast<int>(simulcast_idx);
uma_container_->qp_counters_[spatial_idx].vp8.Add(encoded_image.qp_);
} else if (codec_info->codecType == kVideoCodecVP9) {
int spatial_idx = encoded_image.SpatialIndex().value_or(-1);
int spatial_idx =
(codec_info->codecSpecific.VP9.num_spatial_layers == 1)
? -1
: codec_info->codecSpecific.VP9.spatial_idx;
uma_container_->qp_counters_[spatial_idx].vp9.Add(encoded_image.qp_);
} else if (codec_info->codecType == kVideoCodecH264) {
int spatial_idx = (rtp_config_.ssrcs.size() == 1) ? -1 : simulcast_idx;
int spatial_idx = (rtp_config_.ssrcs.size() == 1)
? -1
: static_cast<int>(simulcast_idx);
uma_container_->qp_counters_[spatial_idx].h264.Add(encoded_image.qp_);
}
}

View File

@ -188,7 +188,10 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver,
}
};
struct Frame {
Frame(int64_t send_ms, uint32_t width, uint32_t height, int simulcast_idx)
Frame(int64_t send_ms,
uint32_t width,
uint32_t height,
size_t simulcast_idx)
: send_ms(send_ms),
max_width(width),
max_height(height),
@ -197,7 +200,7 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver,
send_ms; // Time when first frame with this timestamp is sent.
uint32_t max_width; // Max width with this timestamp.
uint32_t max_height; // Max height with this timestamp.
int max_simulcast_idx; // Max simulcast index with this timestamp.
size_t max_simulcast_idx; // Max simulcast index with this timestamp.
};
typedef std::map<uint32_t, Frame, TimestampOlderThan> EncodedFrameMap;
@ -215,12 +218,10 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver,
RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
void UpdateEncoderFallbackStats(const CodecSpecificInfo* codec_info,
int pixels,
int simulcast_index)
int pixels)
RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
void UpdateFallbackDisabledStats(const CodecSpecificInfo* codec_info,
int pixels,
int simulcast_index)
int pixels)
RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
Clock* const clock_;
@ -256,7 +257,7 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver,
void InitializeBitrateCounters(const VideoSendStream::Stats& stats);
bool InsertEncodedFrame(const EncodedImage& encoded_frame,
int simulcast_idx,
size_t simulcast_idx,
bool* is_limited_in_resolution);
void RemoveOld(int64_t now_ms, bool* is_limited_in_resolution);

View File

@ -36,6 +36,7 @@ const int kRtpClockRateHz = 90000;
const CodecSpecificInfo kDefaultCodecInfo = []() {
CodecSpecificInfo codec_info;
codec_info.codecType = kVideoCodecVP8;
codec_info.codecSpecific.VP8.simulcastIdx = 0;
return codec_info;
}();
} // namespace
@ -1312,10 +1313,10 @@ TEST_F(SendStatisticsProxyTest, VerifyQpHistogramStats_Vp8) {
codec_info.codecType = kVideoCodecVP8;
for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
encoded_image.SetSpatialIndex(0);
codec_info.codecSpecific.VP8.simulcastIdx = 0;
encoded_image.qp_ = kQpIdx0;
statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
encoded_image.SetSpatialIndex(1);
codec_info.codecSpecific.VP8.simulcastIdx = 1;
encoded_image.qp_ = kQpIdx1;
statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
}
@ -1337,7 +1338,7 @@ TEST_F(SendStatisticsProxyTest, VerifyQpHistogramStats_Vp8OneSsrc) {
codec_info.codecType = kVideoCodecVP8;
for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
encoded_image.SetSpatialIndex(0);
codec_info.codecSpecific.VP8.simulcastIdx = 0;
encoded_image.qp_ = kQpIdx0;
statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
}
@ -1354,10 +1355,10 @@ TEST_F(SendStatisticsProxyTest, VerifyQpHistogramStats_Vp9) {
for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
encoded_image.qp_ = kQpIdx0;
encoded_image.SetSpatialIndex(0);
codec_info.codecSpecific.VP9.spatial_idx = 0;
statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
encoded_image.qp_ = kQpIdx1;
encoded_image.SetSpatialIndex(1);
codec_info.codecSpecific.VP9.spatial_idx = 1;
statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
}
statistics_proxy_.reset();
@ -1380,6 +1381,7 @@ TEST_F(SendStatisticsProxyTest, VerifyQpHistogramStats_Vp9OneSpatialLayer) {
for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
encoded_image.qp_ = kQpIdx0;
codec_info.codecSpecific.VP9.spatial_idx = 0;
statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
}
statistics_proxy_.reset();
@ -1393,10 +1395,10 @@ TEST_F(SendStatisticsProxyTest, VerifyQpHistogramStats_H264) {
codec_info.codecType = kVideoCodecH264;
for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
encoded_image.SetSpatialIndex(0);
codec_info.codecSpecific.H264.simulcast_idx = 0;
encoded_image.qp_ = kQpIdx0;
statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
encoded_image.SetSpatialIndex(1);
codec_info.codecSpecific.H264.simulcast_idx = 1;
encoded_image.qp_ = kQpIdx1;
statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
}
@ -1538,7 +1540,6 @@ TEST_F(SendStatisticsProxyTest,
VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts,
quality_counts);
EncodedImage encoded_image;
encoded_image.SetSpatialIndex(0);
for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i)
statistics_proxy_->OnSendEncodedImage(encoded_image, &kDefaultCodecInfo);
@ -1559,7 +1560,6 @@ TEST_F(SendStatisticsProxyTest,
VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts,
quality_counts);
EncodedImage encoded_image;
encoded_image.SetSpatialIndex(0);
for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i)
statistics_proxy_->OnSendEncodedImage(encoded_image, &kDefaultCodecInfo);
@ -1584,7 +1584,6 @@ TEST_F(SendStatisticsProxyTest,
VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts,
quality_counts);
EncodedImage encoded_image;
encoded_image.SetSpatialIndex(0);
for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i)
statistics_proxy_->OnSendEncodedImage(encoded_image, &kDefaultCodecInfo);
// Histograms are updated when the statistics_proxy_ is deleted.
@ -1718,13 +1717,13 @@ TEST_F(SendStatisticsProxyTest, EncodedResolutionTimesOut) {
EncodedImage encoded_image;
encoded_image._encodedWidth = kEncodedWidth;
encoded_image._encodedHeight = kEncodedHeight;
encoded_image.SetSpatialIndex(0);
CodecSpecificInfo codec_info;
codec_info.codecType = kVideoCodecVP8;
codec_info.codecSpecific.VP8.simulcastIdx = 0;
statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
encoded_image.SetSpatialIndex(1);
codec_info.codecSpecific.VP8.simulcastIdx = 1;
statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
VideoSendStream::Stats stats = statistics_proxy_->GetStats();
@ -1747,6 +1746,7 @@ TEST_F(SendStatisticsProxyTest, EncodedResolutionTimesOut) {
// Report stats for second SSRC to make sure it's not outdated along with the
// first SSRC.
codec_info.codecSpecific.VP8.simulcastIdx = 1;
statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
// Forward 1 ms, reach timeout, substream 0 should have no resolution
@ -1765,13 +1765,13 @@ TEST_F(SendStatisticsProxyTest, ClearsResolutionFromInactiveSsrcs) {
EncodedImage encoded_image;
encoded_image._encodedWidth = kEncodedWidth;
encoded_image._encodedHeight = kEncodedHeight;
encoded_image.SetSpatialIndex(0);
CodecSpecificInfo codec_info;
codec_info.codecType = kVideoCodecVP8;
codec_info.codecSpecific.VP8.simulcastIdx = 0;
statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
encoded_image.SetSpatialIndex(1);
codec_info.codecSpecific.VP8.simulcastIdx = 1;
statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
statistics_proxy_->OnInactiveSsrc(config_.rtp.ssrcs[1]);
@ -2182,11 +2182,11 @@ class ForcedFallbackTest : public SendStatisticsProxyTest {
explicit ForcedFallbackTest(const std::string& field_trials)
: SendStatisticsProxyTest(field_trials) {
codec_info_.codecType = kVideoCodecVP8;
codec_info_.codecSpecific.VP8.simulcastIdx = 0;
codec_info_.codecSpecific.VP8.temporalIdx = 0;
codec_info_.codec_name = "fake_codec";
encoded_image_._encodedWidth = kWidth;
encoded_image_._encodedHeight = kHeight;
encoded_image_.SetSpatialIndex(0);
}
~ForcedFallbackTest() override {}
@ -2260,7 +2260,7 @@ TEST_F(ForcedFallbackEnabled, StatsNotUpdatedForTemporalLayers) {
}
TEST_F(ForcedFallbackEnabled, StatsNotUpdatedForSimulcast) {
encoded_image_.SetSpatialIndex(1);
codec_info_.codecSpecific.VP8.simulcastIdx = 1;
InsertEncodedFrames(kMinFrames, kFrameIntervalMs);
statistics_proxy_.reset();
EXPECT_EQ(0, metrics::NumSamples(kPrefix + "FallbackTimeInPercent.Vp8"));

View File

@ -318,6 +318,10 @@ EncodedImageCallback::Result VideoReceiveStream::OnEncodedImage(
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) {
stats_proxy_.OnPreDecode(encoded_image, codec_specific_info);
size_t simulcast_idx = 0;
if (codec_specific_info->codecType == kVideoCodecVP8) {
simulcast_idx = codec_specific_info->codecSpecific.VP8.simulcastIdx;
}
{
rtc::CritScope lock(&ivf_writer_lock_);
if (ivf_writer_.get()) {

View File

@ -522,13 +522,11 @@ EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage(
// Encoded is called on whatever thread the real encoder implementation run
// on. In the case of hardware encoders, there might be several encoders
// running in parallel on different threads.
const size_t simulcast_idx =
(codec_specific_info->codecType != kVideoCodecVP9)
? encoded_image.SpatialIndex().value_or(0)
: 0;
size_t simulcast_idx = 0;
if (codec_specific_info->codecType == kVideoCodecVP8) {
simulcast_idx = codec_specific_info->codecSpecific.VP8.simulcastIdx;
}
if (config_->post_encode_callback) {
// TODO(nisse): Delete webrtc::EncodedFrame class, pass EncodedImage
// instead.
config_->post_encode_callback->EncodedFrameCallback(EncodedFrame(
encoded_image._buffer, encoded_image._length, encoded_image._frameType,
simulcast_idx, encoded_image.Timestamp()));
@ -544,10 +542,15 @@ EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage(
EncodedImageCallback::Result result = rtp_video_sender_->OnEncodedImage(
encoded_image, codec_specific_info, fragmentation);
RTC_DCHECK(codec_specific_info);
int layer = codec_specific_info->codecType == kVideoCodecVP8
? codec_specific_info->codecSpecific.VP8.simulcastIdx
: 0;
{
rtc::CritScope lock(&ivf_writers_crit_);
if (file_writers_[simulcast_idx].get()) {
bool ok = file_writers_[simulcast_idx]->WriteFrame(
if (file_writers_[layer].get()) {
bool ok = file_writers_[layer]->WriteFrame(
encoded_image, codec_specific_info->codecType);
RTC_DCHECK(ok);
}

View File

@ -3041,10 +3041,10 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) {
encoded.capture_time_ms_ = input_image.render_time_ms();
for (size_t i = 0; i < kNumStreams; ++i) {
specifics.codecSpecific.generic.simulcast_idx = static_cast<uint8_t>(i);
encoded._frameType = (*frame_types)[i];
encoded._encodedWidth = kEncodedResolution[i].width;
encoded._encodedHeight = kEncodedResolution[i].height;
encoded.SetSpatialIndex(i);
EncodedImageCallback* callback;
{
rtc::CritScope cs(&crit_sect_);