diff --git a/modules/video_coding/codecs/multiplex/include/multiplex_encoded_image_packer.h b/modules/video_coding/codecs/multiplex/include/multiplex_encoded_image_packer.h index e7f6677211..a84c52d8f1 100644 --- a/modules/video_coding/codecs/multiplex/include/multiplex_encoded_image_packer.h +++ b/modules/video_coding/codecs/multiplex/include/multiplex_encoded_image_packer.h @@ -71,7 +71,7 @@ struct MultiplexImageComponent { // Identifies which component this frame represent, i.e. YUV frame vs Alpha // frame. - uint8_t component_index; + int component_index; // Stores the actual frame data of the encoded image. EncodedImage encoded_image; @@ -79,11 +79,11 @@ struct MultiplexImageComponent { // Struct holding the whole frame bundle of components of an image. struct MultiplexImage { - uint16_t image_index; - uint8_t component_count; + int image_index; + int component_count; std::vector<MultiplexImageComponent> image_components; - MultiplexImage(uint16_t picture_index, uint8_t component_count); + MultiplexImage(int picture_index, int frame_count); }; // A utility class providing conversion between two representations of a diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc index 83be3a7dd3..6e02a6534b 100644 --- a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc +++ b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc @@ -113,7 +113,7 @@ void PackBitstream(uint8_t* buffer, MultiplexImageComponent image) { memcpy(buffer, image.encoded_image._buffer, image.encoded_image._length); } -MultiplexImage::MultiplexImage(uint16_t picture_index, uint8_t frame_count) +MultiplexImage::MultiplexImage(int picture_index, int frame_count) : image_index(picture_index), component_count(frame_count) {} EncodedImage MultiplexEncodedImagePacker::PackAndRelease( @@ -195,7 +195,9 @@ MultiplexImage MultiplexEncodedImagePacker::Unpack( const MultiplexImageHeader&
header = UnpackHeader(combined_image._buffer); MultiplexImage multiplex_image(header.image_index, header.component_count); + std::vector<MultiplexImageComponentHeader> frame_headers; + int header_offset = header.first_component_header_offset; while (header_offset > 0) { @@ -211,7 +213,6 @@ MultiplexImage MultiplexEncodedImagePacker::Unpack( image_component.codec_type = frame_headers[i].codec_type; EncodedImage encoded_image = combined_image; - encoded_image._timeStamp = combined_image._timeStamp; encoded_image._frameType = frame_headers[i].frame_type; encoded_image._length = encoded_image._size = static_cast<size_t>(frame_headers[i].bitstream_length); diff --git a/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc b/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc index 6726b9c2e8..d5e1fb69a1 100644 --- a/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc +++ b/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc @@ -15,7 +15,6 @@ #include "common_video/libyuv/include/webrtc_libyuv.h" #include "media/base/mediaconstants.h" #include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h" -#include "modules/video_coding/codecs/multiplex/include/multiplex_encoded_image_packer.h" #include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h" #include "modules/video_coding/codecs/test/video_codec_test.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" @@ -66,21 +65,7 @@ class TestMultiplexAdapter : public VideoCodecTest { yuv_buffer->DataV(), yuv_buffer->StrideV(), yuv_buffer->DataY(), yuv_buffer->StrideY(), rtc::KeepRefUntilDone(yuv_buffer)); return rtc::WrapUnique( - new VideoFrame(yuva_buffer, 123 /* timestamp_us */, - 345 /* render_time_ms */, kVideoRotation_0)); - } - - std::unique_ptr<VideoFrame> ExtractAXXFrame(const VideoFrame& yuva_frame) { - const I420ABufferInterface* yuva_buffer = - yuva_frame.video_frame_buffer()->GetI420A(); - rtc::scoped_refptr<I420BufferInterface> axx_buffer =
WrapI420Buffer( - yuva_buffer->width(), yuva_buffer->height(), yuva_buffer->DataA(), - yuva_buffer->StrideA(), yuva_buffer->DataU(), yuva_buffer->StrideU(), - yuva_buffer->DataV(), yuva_buffer->StrideV(), - rtc::KeepRefUntilDone(yuva_frame.video_frame_buffer())); - return rtc::WrapUnique( - new VideoFrame(axx_buffer, 123 /* timestamp_us */, - 345 /* render_time_ms */, kVideoRotation_0)); + new VideoFrame(yuva_buffer, kVideoRotation_0, 0)); } private: @@ -125,6 +110,7 @@ TEST_F(TestMultiplexAdapter, EncodeDecodeI420Frame) { EncodedImage encoded_frame; CodecSpecificInfo codec_specific_info; ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); + EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType); EXPECT_EQ( @@ -144,6 +130,7 @@ TEST_F(TestMultiplexAdapter, EncodeDecodeI420AFrame) { EncodedImage encoded_frame; CodecSpecificInfo codec_specific_info; ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); + EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, @@ -153,72 +140,6 @@ TEST_F(TestMultiplexAdapter, EncodeDecodeI420AFrame) { ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp)); ASSERT_TRUE(decoded_frame); EXPECT_GT(I420PSNR(yuva_frame.get(), decoded_frame.get()), 36); - - // Find PSNR for AXX bits. 
- std::unique_ptr<VideoFrame> input_axx_frame = ExtractAXXFrame(*yuva_frame); - std::unique_ptr<VideoFrame> output_axx_frame = - ExtractAXXFrame(*decoded_frame); - EXPECT_GT(I420PSNR(input_axx_frame.get(), output_axx_frame.get()), 47); -} - -TEST_F(TestMultiplexAdapter, CheckSingleFrameEncodedBitstream) { - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, - encoder_->Encode(*input_frame_, nullptr, nullptr)); - EncodedImage encoded_frame; - CodecSpecificInfo codec_specific_info; - ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); - EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType); - EXPECT_EQ(0, codec_specific_info.codecSpecific.generic.simulcast_idx); - - const MultiplexImage& unpacked_frame = - MultiplexEncodedImagePacker::Unpack(encoded_frame); - EXPECT_EQ(0, unpacked_frame.image_index); - EXPECT_EQ(1, unpacked_frame.component_count); - const MultiplexImageComponent& component = unpacked_frame.image_components[0]; - EXPECT_EQ(0, component.component_index); - EXPECT_NE(nullptr, component.encoded_image._buffer); - EXPECT_EQ(kVideoFrameKey, component.encoded_image._frameType); -} - -TEST_F(TestMultiplexAdapter, CheckDoubleFramesEncodedBitstream) { - std::unique_ptr<VideoFrame> yuva_frame = CreateI420AInputFrame(); - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, - encoder_->Encode(*yuva_frame, nullptr, nullptr)); - EncodedImage encoded_frame; - CodecSpecificInfo codec_specific_info; - ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); - EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType); - EXPECT_EQ(0, codec_specific_info.codecSpecific.generic.simulcast_idx); - - const MultiplexImage& unpacked_frame = - MultiplexEncodedImagePacker::Unpack(encoded_frame); - EXPECT_EQ(0, unpacked_frame.image_index); - EXPECT_EQ(2, unpacked_frame.component_count); - EXPECT_EQ(unpacked_frame.image_components.size(), - unpacked_frame.component_count); - for (int i = 0; i < unpacked_frame.component_count; ++i) { - const MultiplexImageComponent& component = -
unpacked_frame.image_components[i]; - EXPECT_EQ(i, component.component_index); - EXPECT_NE(nullptr, component.encoded_image._buffer); - EXPECT_EQ(kVideoFrameKey, component.encoded_image._frameType); - } -} - -TEST_F(TestMultiplexAdapter, ImageIndexIncreases) { - std::unique_ptr<VideoFrame> yuva_frame = CreateI420AInputFrame(); - const size_t expected_num_encoded_frames = 3; - for (size_t i = 0; i < expected_num_encoded_frames; ++i) { - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, - encoder_->Encode(*yuva_frame, nullptr, nullptr)); - EncodedImage encoded_frame; - CodecSpecificInfo codec_specific_info; - ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); - const MultiplexImage& unpacked_frame = - MultiplexEncodedImagePacker::Unpack(encoded_frame); - EXPECT_EQ(i, unpacked_frame.image_index); - EXPECT_EQ(i ? kVideoFrameDelta : kVideoFrameKey, encoded_frame._frameType); - } } } // namespace webrtc