Log received frames' QP value.

spatial_layers_qp holds the QP values for all the spatial layers. However, the receiver only sees one of the spatial layers at any given time, so it is of interest to know the QP of the spatial layer that is actually shown. This could also be used to indicate whether the upper layers are frequently dropped or not.

Bug: None
Change-Id: I462ea11e3447f8ffd11f4a6f2ccbf361102c762f
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/358863
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Commit-Queue: Emil Vardar (xWF) <vardar@google.com>
Cr-Commit-Position: refs/heads/main@{#42759}
This commit is contained in:
Emil Vardar 2024-08-12 07:57:43 +00:00 committed by WebRTC LUCI CQ
parent 53523520ff
commit 59c7b2277b
11 changed files with 100 additions and 6 deletions

View File

@ -72,6 +72,8 @@ class VideoQualityAnalyzerInterface
// Decode time provided by decoder itself. If decoder doesnt produce such
// information can be omitted.
absl::optional<int32_t> decode_time_ms = absl::nullopt;
// Decoder quantizer value.
absl::optional<uint8_t> qp = absl::nullopt;
};
~VideoQualityAnalyzerInterface() override = default;

View File

@ -476,7 +476,7 @@ void DefaultVideoQualityAnalyzer::OnFrameDecoded(
used_decoder.switched_on_at = now;
used_decoder.switched_from_at = now;
it->second.OnFrameDecoded(peer_index, now, frame.width(), frame.height(),
used_decoder);
used_decoder, stats.qp);
if (options_.report_infra_metrics) {
analyzer_stats_.on_frame_decoded_processing_time_ms.AddSample(
@ -1258,6 +1258,9 @@ void DefaultVideoQualityAnalyzer::ReportResults(
ImprovementDirection::kSmallerIsBetter,
std::move(qp_metadata));
}
metrics_logger_->LogMetric(
"rendered_frame_qp", test_case_name, stats.rendered_frame_qp,
Unit::kUnitless, ImprovementDirection::kSmallerIsBetter, metric_metadata);
metrics_logger_->LogSingleValueMetric(
"actual_encode_bitrate", test_case_name,
static_cast<double>(stats.total_encoded_images_payload) /

View File

@ -135,11 +135,13 @@ void FrameInFlight::OnFrameDecoded(size_t peer,
webrtc::Timestamp time,
int width,
int height,
const StreamCodecInfo& used_decoder) {
const StreamCodecInfo& used_decoder,
const absl::optional<uint8_t> qp) {
receiver_stats_[peer].decode_end_time = time;
receiver_stats_[peer].used_decoder = used_decoder;
receiver_stats_[peer].decoded_frame_width = width;
receiver_stats_[peer].decoded_frame_height = height;
receiver_stats_[peer].decoded_frame_qp = qp;
}
void FrameInFlight::OnDecoderError(size_t peer,
@ -204,6 +206,7 @@ FrameStats FrameInFlight::GetStatsForPeer(size_t peer) const {
receiver_stats->time_between_rendered_frames;
stats.decoded_frame_width = receiver_stats->decoded_frame_width;
stats.decoded_frame_height = receiver_stats->decoded_frame_height;
stats.decoded_frame_qp = receiver_stats->decoded_frame_qp;
stats.used_decoder = receiver_stats->used_decoder;
stats.pre_decoded_frame_type = receiver_stats->frame_type;
stats.pre_decoded_image_size = receiver_stats->encoded_image_size;

View File

@ -46,6 +46,8 @@ struct ReceiverFrameStats {
absl::optional<int> decoded_frame_width = absl::nullopt;
absl::optional<int> decoded_frame_height = absl::nullopt;
absl::optional<uint8_t> decoded_frame_qp = absl::nullopt;
// Can be not set if frame was dropped in the network.
absl::optional<StreamCodecInfo> used_decoder = absl::nullopt;
@ -113,7 +115,9 @@ class FrameInFlight {
Timestamp time,
int width,
int height,
const StreamCodecInfo& used_decoder);
const StreamCodecInfo& used_decoder,
const absl::optional<uint8_t> qp);
void OnDecoderError(size_t peer, const StreamCodecInfo& used_decoder);
bool HasDecodeEndTime(size_t peer) const;

View File

@ -552,6 +552,14 @@ void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison(
StatsSample(*comparison.frame_stats.decoded_frame_width *
*comparison.frame_stats.decoded_frame_height,
frame_stats.decode_end_time, metadata));
// TODO(webrtc:357636606): Add a check that the rendered QP is among the
// encoded spatial layer's QP. Can only do that if there are 1 and only 1
// QP value per spatial layer.
if (frame_stats.decoded_frame_qp.has_value()) {
stats->rendered_frame_qp.AddSample(
StatsSample(static_cast<double>(*frame_stats.decoded_frame_qp),
frame_stats.decode_end_time, metadata));
}
}
if (frame_stats.prev_frame_rendered_time.has_value() &&

View File

@ -401,6 +401,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
ExpectEmpty(stats.resolution_of_decoded_frame);
ExpectEmpty(stats.target_encode_bitrate);
EXPECT_THAT(stats.spatial_layers_qp, IsEmpty());
ExpectEmpty(stats.rendered_frame_qp);
ExpectEmpty(stats.recv_key_frame_size_bytes);
ExpectEmpty(stats.recv_delta_frame_size_bytes);
EXPECT_EQ(stats.total_encoded_images_payload, 0);
@ -461,6 +462,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
ExpectEmpty(stats.resolution_of_decoded_frame);
ExpectEmpty(stats.target_encode_bitrate);
EXPECT_THAT(stats.spatial_layers_qp, IsEmpty());
ExpectEmpty(stats.rendered_frame_qp);
ExpectEmpty(stats.recv_key_frame_size_bytes);
ExpectEmpty(stats.recv_delta_frame_size_bytes);
EXPECT_EQ(stats.total_encoded_images_payload, 0);
@ -536,6 +538,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
/*value=*/5.0);
ExpectEmpty(stats.rendered_frame_qp);
ExpectEmpty(stats.recv_key_frame_size_bytes);
ExpectEmpty(stats.recv_delta_frame_size_bytes);
EXPECT_EQ(stats.total_encoded_images_payload, 1000);
@ -612,6 +615,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
/*value=*/5.0);
ExpectEmpty(stats.rendered_frame_qp);
ExpectEmpty(stats.recv_key_frame_size_bytes);
ExpectEmpty(stats.recv_delta_frame_size_bytes);
EXPECT_EQ(stats.total_encoded_images_payload, 1000);
@ -694,6 +698,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
/*value=*/5.0);
ExpectEmpty(stats.rendered_frame_qp);
ExpectSizeAndAllElementsAre(stats.recv_key_frame_size_bytes, /*size=*/1,
/*value=*/500.0);
ExpectEmpty(stats.recv_delta_frame_size_bytes);
@ -749,6 +754,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
frame_stats.decode_end_time = captured_time + TimeDelta::Millis(50);
frame_stats.decoded_frame_width = 200;
frame_stats.decoded_frame_height = 100;
frame_stats.decoded_frame_qp = 10;
frame_stats.used_decoder =
Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time);
@ -784,6 +790,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
/*value=*/5.0);
ExpectSizeAndAllElementsAre(stats.rendered_frame_qp, /*size=*/1,
/*value=*/10.0);
ExpectSizeAndAllElementsAre(stats.recv_key_frame_size_bytes, /*size=*/1,
/*value=*/500.0);
ExpectEmpty(stats.recv_delta_frame_size_bytes);
@ -840,6 +848,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
frame_stats.decoder_failed = true;
frame_stats.used_decoder =
Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time);
frame_stats.decoded_frame_qp = 10;
comparator.Start(/*max_threads_count=*/1);
comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
@ -872,6 +881,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
/*value=*/5.0);
ExpectEmpty(stats.rendered_frame_qp);
ExpectSizeAndAllElementsAre(stats.recv_key_frame_size_bytes, /*size=*/1,
/*value=*/500.0);
ExpectEmpty(stats.recv_delta_frame_size_bytes);
@ -936,6 +946,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
ExpectEmpty(stats.resolution_of_decoded_frame);
ExpectEmpty(stats.target_encode_bitrate);
EXPECT_THAT(stats.spatial_layers_qp, IsEmpty());
ExpectEmpty(stats.rendered_frame_qp);
ExpectEmpty(stats.recv_key_frame_size_bytes);
ExpectEmpty(stats.recv_delta_frame_size_bytes);
EXPECT_EQ(stats.total_encoded_images_payload, 0);
@ -996,6 +1007,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
ExpectEmpty(stats.resolution_of_decoded_frame);
ExpectEmpty(stats.target_encode_bitrate);
EXPECT_THAT(stats.spatial_layers_qp, IsEmpty());
ExpectEmpty(stats.rendered_frame_qp);
ExpectEmpty(stats.recv_key_frame_size_bytes);
ExpectEmpty(stats.recv_delta_frame_size_bytes);
EXPECT_EQ(stats.total_encoded_images_payload, 0);
@ -1071,6 +1083,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
/*value=*/5.0);
ExpectEmpty(stats.rendered_frame_qp);
ExpectEmpty(stats.recv_key_frame_size_bytes);
ExpectEmpty(stats.recv_delta_frame_size_bytes);
EXPECT_EQ(stats.total_encoded_images_payload, 1000);
@ -1147,6 +1160,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
/*value=*/5.0);
ExpectEmpty(stats.rendered_frame_qp);
ExpectEmpty(stats.recv_key_frame_size_bytes);
ExpectEmpty(stats.recv_delta_frame_size_bytes);
EXPECT_EQ(stats.total_encoded_images_payload, 1000);
@ -1235,6 +1249,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
EXPECT_EQ(stats.encoders,
std::vector<StreamCodecInfo>{*frame_stats.used_encoder});
EXPECT_THAT(stats.decoders, IsEmpty());
ExpectEmpty(stats.rendered_frame_qp);
}
TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
@ -1280,6 +1295,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time);
frame_stats.decoded_frame_width = 200;
frame_stats.decoded_frame_height = 100;
frame_stats.decoded_frame_qp = 10;
comparator.Start(/*max_threads_count=*/1);
comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
@ -1311,6 +1327,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
/*value=*/5.0);
ExpectEmpty(stats.rendered_frame_qp);
ExpectEmpty(stats.recv_key_frame_size_bytes);
ExpectEmpty(stats.recv_delta_frame_size_bytes);
EXPECT_EQ(stats.total_encoded_images_payload, 1000);
@ -1366,6 +1383,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
frame_stats.decoder_failed = true;
frame_stats.used_decoder =
Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time);
frame_stats.decoded_frame_qp = 10;
comparator.Start(/*max_threads_count=*/1);
comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
@ -1398,6 +1416,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
/*value=*/5.0);
ExpectEmpty(stats.rendered_frame_qp);
ExpectSizeAndAllElementsAre(stats.recv_key_frame_size_bytes, /*size=*/1,
/*value=*/500.0);
ExpectEmpty(stats.recv_delta_frame_size_bytes);
@ -1460,6 +1479,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time);
frame_stats.decoded_frame_width = 200;
frame_stats.decoded_frame_height = 100;
frame_stats.decoded_frame_qp = 10;
// Frame rendered
frame_stats.rendered_time = captured_time + TimeDelta::Millis(60);
@ -1494,6 +1514,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
/*value=*/5.0);
ExpectSizeAndAllElementsAre(stats.rendered_frame_qp, /*size=*/1,
/*value=*/10.0);
ExpectSizeAndAllElementsAre(stats.recv_key_frame_size_bytes, /*size=*/1,
/*value=*/500.0);
ExpectEmpty(stats.recv_delta_frame_size_bytes);
@ -1555,6 +1577,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, AllStatsHaveMetadataSet) {
frame_stats.rendered_time = captured_time + TimeDelta::Millis(60);
frame_stats.decoded_frame_width = 200;
frame_stats.decoded_frame_height = 100;
frame_stats.decoded_frame_qp = 10;
comparator.Start(/*max_threads_count=*/1);
comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
@ -1580,6 +1603,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, AllStatsHaveMetadataSet) {
AssertFirstMetadataHasField(stats.target_encode_bitrate, "frame_id", "1");
AssertFirstMetadataHasField(stats.spatial_layers_qp[0], "frame_id", "1");
AssertFirstMetadataHasField(stats.recv_key_frame_size_bytes, "frame_id", "1");
AssertFirstMetadataHasField(stats.rendered_frame_qp, "frame_id", "1");
ExpectEmpty(stats.recv_delta_frame_size_bytes);
}

View File

@ -72,6 +72,11 @@ struct FrameStats {
// Sender side qp values per spatial layer. In case when spatial layer is not
// set for `webrtc::EncodedImage`, 0 is used as default.
std::map<int, SamplesStatsCounter> spatial_layers_qp;
// Receive side qp value. Receiver only renders one spatial layer for a given
// time index. The QP value here corresponds to one of the encoded spatial
// layer's QP given in `spatial_layers_qp`, i.e. to the one that corresponds
// to the rendered frame.
absl::optional<uint8_t> decoded_frame_qp = absl::nullopt;
absl::optional<int> decoded_frame_width = absl::nullopt;
absl::optional<int> decoded_frame_height = absl::nullopt;

View File

@ -280,6 +280,11 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest, MetricNamesForP2PAreCorrect) {
.name = "qp_sl0",
.unit = Unit::kUnitless,
.improvement_direction = ImprovementDirection::kSmallerIsBetter},
MetricValidationInfo{
.test_case = "test_case/alice_video",
.name = "rendered_frame_qp",
.unit = Unit::kUnitless,
.improvement_direction = ImprovementDirection::kSmallerIsBetter},
MetricValidationInfo{
.test_case = "test_case/alice_video",
.name = "actual_encode_bitrate",
@ -454,6 +459,11 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest,
.name = "qp_sl0",
.unit = Unit::kUnitless,
.improvement_direction = ImprovementDirection::kSmallerIsBetter},
MetricValidationInfo{
.test_case = "test_case/alice_video_alice_bob",
.name = "rendered_frame_qp",
.unit = Unit::kUnitless,
.improvement_direction = ImprovementDirection::kSmallerIsBetter},
MetricValidationInfo{
.test_case = "test_case/alice_video_alice_bob",
.name = "actual_encode_bitrate",
@ -596,6 +606,11 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest,
.name = "qp_sl0",
.unit = Unit::kUnitless,
.improvement_direction = ImprovementDirection::kSmallerIsBetter},
MetricValidationInfo{
.test_case = "test_case/alice_video_alice_charlie",
.name = "rendered_frame_qp",
.unit = Unit::kUnitless,
.improvement_direction = ImprovementDirection::kSmallerIsBetter},
MetricValidationInfo{
.test_case = "test_case/alice_video_alice_charlie",
.name = "actual_encode_bitrate",
@ -669,10 +684,10 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest,
std::vector<std::string> metrics =
ToTestCases(metrics_logger.GetCollectedMetrics());
EXPECT_THAT(metrics, SizeIs(57));
EXPECT_THAT(metrics, Contains("test_case/alice_video_alice_bob").Times(28));
EXPECT_THAT(metrics, SizeIs(59));
EXPECT_THAT(metrics, Contains("test_case/alice_video_alice_bob").Times(29));
EXPECT_THAT(metrics,
Contains("test_case/alice_video_alice_charlie").Times(28));
Contains("test_case/alice_video_alice_charlie").Times(29));
EXPECT_THAT(metrics, Contains("test_case").Times(1));
}

View File

@ -151,6 +151,11 @@ struct StreamStats {
// Sender side qp values per spatial layer. In case when spatial layer is not
// set for `webrtc::EncodedImage`, 0 is used as default.
std::map<int, SamplesStatsCounter> spatial_layers_qp;
// QP values of the rendered frames. In SVC or simulcast coding scenarios, the
// receiver will only render one of the spatial layers at a time. Hence, this
// value corresponds to the rendered frames' QP values, which should ideally
// correspond to one of the QP values in `spatial_layers_qp`.
SamplesStatsCounter rendered_frame_qp;
int64_t total_encoded_images_payload = 0;
// Counters on which phase how many frames were dropped.

View File

@ -239,6 +239,7 @@ void QualityAnalyzingVideoDecoder::OnFrameDecoded(
VideoQualityAnalyzerInterface::DecoderStats stats;
stats.decoder_name = codec_name;
stats.decode_time_ms = decode_time_ms;
stats.qp = qp;
analyzer_->OnFrameDecoded(peer_name_, *frame, stats);
}

View File

@ -547,6 +547,18 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest,
{MetricMetadataKey::kSpatialLayerMetadataKey, "0"},
{MetricMetadataKey::kExperimentalTestNameMetadataKey,
"test_case"}}},
MetricValidationInfo{
.test_case = "test_case/alice_video",
.name = "rendered_frame_qp",
.unit = Unit::kUnitless,
.improvement_direction = ImprovementDirection::kSmallerIsBetter,
.metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
{MetricMetadataKey::kVideoStreamMetadataKey,
"alice_video"},
{MetricMetadataKey::kSenderMetadataKey, "alice"},
{MetricMetadataKey::kReceiverMetadataKey, "bob"},
{MetricMetadataKey::kExperimentalTestNameMetadataKey,
"test_case"}}},
MetricValidationInfo{
.test_case = "test_case/alice_video",
.name = "actual_encode_bitrate",
@ -812,6 +824,18 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest,
{MetricMetadataKey::kSpatialLayerMetadataKey, "0"},
{MetricMetadataKey::kExperimentalTestNameMetadataKey,
"test_case"}}},
MetricValidationInfo{
.test_case = "test_case/bob_video",
.name = "rendered_frame_qp",
.unit = Unit::kUnitless,
.improvement_direction = ImprovementDirection::kSmallerIsBetter,
.metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
{MetricMetadataKey::kVideoStreamMetadataKey,
"bob_video"},
{MetricMetadataKey::kSenderMetadataKey, "bob"},
{MetricMetadataKey::kReceiverMetadataKey, "alice"},
{MetricMetadataKey::kExperimentalTestNameMetadataKey,
"test_case"}}},
MetricValidationInfo{
.test_case = "test_case",
.name = "cpu_usage_%",