Remove deprecated functions from EncodedImageCallback and RtpRtcp

Removed the deprecated EncodedImageCallback::Encoded() and the
deprecated RtpRtcp::SendOutgoingData() overload. Neither is used
anywhere anymore, so it is safe to remove them.

BUG=chromium:621691

Review-Url: https://codereview.webrtc.org/2405173006
Cr-Commit-Position: refs/heads/master@{#14902}
Author:    sergeyu
Date:      2016-11-02 13:14:16 -07:00
Committer: Commit bot
Parent:    d2fce1744f
Commit:    fa56584271

15 changed files with 64 additions and 74 deletions
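
The same pattern is applied throughout the CL: callers stop interpreting the old
int32_t return value (0 = ok, -1 = error, >0 = drop next frame) and instead
inspect the EncodedImageCallback::Result returned by OnEncodedImage(). A rough
caller-side sketch, not taken from any single file in the diff (callback_,
image, codec_specific and frag_header are illustrative names):

  // Before: a status code had to be decoded by convention.
  //   int32_t status = callback_->Encoded(image, &codec_specific, &frag_header);
  //   if (status < 0) { /* error */ } else if (status > 0) { /* drop next frame */ }

  // After: the result is an explicit struct.
  EncodedImageCallback::Result result =
      callback_->OnEncodedImage(image, &codec_specific, &frag_header);
  if (result.error != EncodedImageCallback::Result::OK)
    return WEBRTC_VIDEO_CODEC_ERROR;  // illustrative error handling
  if (result.drop_next_frame)
    drop_next_frame_ = true;          // hypothetical member flag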

@@ -263,8 +263,8 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
   // |input_frame_infos_|.
   // Frame size in bytes fed to MediaCodec.
   int yuv_size_;
-  // True only when between a callback_->Encoded() call return a positive value
-  // and the next Encode() call being ignored.
+  // True only when between a callback_->OnEncodedImage() call return a positive
+  // value and the next Encode() call being ignored.
   bool drop_next_input_frame_;
   // Global references; must be deleted in Release().
   std::vector<jobject> input_buffers_;
@@ -1063,7 +1063,8 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
     }
     // Callback - return encoded frame.
-    int32_t callback_status = 0;
+    webrtc::EncodedImageCallback::Result callback_result(
+        webrtc::EncodedImageCallback::Result::OK);
     if (callback_) {
       std::unique_ptr<webrtc::EncodedImage> image(
           new webrtc::EncodedImage(payload, payload_size, payload_size));
@@ -1174,7 +1175,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
         }
       }
-      callback_status = callback_->Encoded(*image, &info, &header);
+      callback_result = callback_->OnEncodedImage(*image, &info, &header);
     }
     // Return output buffer back to the encoder.
@@ -1208,11 +1209,9 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
     current_encoding_time_ms_ += frame_encoding_time_ms;
     LogStatistics(false);
-    if (callback_status > 0) {
+    // Errors in callback_result are currently ignored.
+    if (callback_result.drop_next_frame)
       drop_next_input_frame_ = true;
-      // Theoretically could handle callback_status<0 here, but unclear what
-      // that would mean for us.
-    }
   }
   return true;
 }

@@ -227,7 +227,6 @@ class RtpRtcp : public Module {
   //                   as layers or RED
   // |transport_frame_id_out| - set to RTP timestamp.
   // Returns true on success.
   virtual bool SendOutgoingData(FrameType frame_type,
                                 int8_t payload_type,
                                 uint32_t timestamp,
@@ -238,24 +237,6 @@ class RtpRtcp : public Module {
                                 const RTPVideoHeader* rtp_video_header,
                                 uint32_t* transport_frame_id_out) = 0;
-  // Deprecated version of the method above.
-  int32_t SendOutgoingData(
-      FrameType frame_type,
-      int8_t payload_type,
-      uint32_t timestamp,
-      int64_t capture_time_ms,
-      const uint8_t* payload_data,
-      size_t payload_size,
-      const RTPFragmentationHeader* fragmentation = nullptr,
-      const RTPVideoHeader* rtp_video_header = nullptr) {
-    return SendOutgoingData(frame_type, payload_type, timestamp,
-                            capture_time_ms, payload_data, payload_size,
-                            fragmentation, rtp_video_header,
-                            /*frame_id_out=*/nullptr)
-               ? 0
-               : -1;
-  }
   virtual bool TimeToSendPacket(uint32_t ssrc,
                                 uint16_t sequence_number,
                                 int64_t capture_time_ms,
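
With the int32_t-returning convenience overload gone, callers use the remaining
bool-returning SendOutgoingData() directly and branch on its return value. A
minimal sketch (rtp_rtcp_ and the local variable names are illustrative, not
from the diff):

  uint32_t frame_id = 0;
  if (!rtp_rtcp_->SendOutgoingData(frame_type, payload_type, timestamp,
                                   capture_time_ms, payload_data, payload_size,
                                   &fragmentation, &video_header, &frame_id)) {
    LOG(LS_WARNING) << "SendOutgoingData failed.";  // illustrative handling
  }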

@@ -377,8 +377,8 @@ int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame,
   // Deliver encoded image.
   CodecSpecificInfo codec_specific;
   codec_specific.codecType = kVideoCodecH264;
-  encoded_image_callback_->Encoded(encoded_image_, &codec_specific,
-                                   &frag_header);
+  encoded_image_callback_->OnEncodedImage(encoded_image_, &codec_specific,
+                                          &frag_header);
   // Parse and report QP.
   h264_bitstream_parser_.ParseBitstream(encoded_image_._buffer,

@@ -650,9 +650,10 @@ void H264VideoToolboxEncoder::OnEncodedFrame(
     quality_scaler_.ReportQP(qp);
   }
-  int result = callback_->Encoded(frame, &codec_specific_info, header.get());
-  if (result != 0) {
-    LOG(LS_ERROR) << "Encode callback failed: " << result;
+  EncodedImageCallback::Result result =
+      callback_->OnEncodedImage(frame, &codec_specific_info, header.get());
+  if (result.error != EncodedImageCallback::Result::OK) {
+    LOG(LS_ERROR) << "Encode callback failed: " << result.error;
     return;
   }
   bitrate_adjuster_.Update(frame._size);

@@ -116,7 +116,8 @@ int I420Encoder::Encode(const VideoFrame& inputImage,
     return WEBRTC_VIDEO_CODEC_MEMORY;
   _encodedImage._length = ret_length + kI420HeaderSize;
-  _encodedCompleteCallback->Encoded(_encodedImage, NULL, NULL);
+  _encodedCompleteCallback->OnEncodedImage(_encodedImage, nullptr, nullptr);
   return WEBRTC_VIDEO_CODEC_OK;
 }

@@ -152,9 +152,9 @@ class MockVideoEncoder : public VideoEncoder {
     EncodedImage image;
     image._encodedWidth = width;
     image._encodedHeight = height;
-    CodecSpecificInfo codecSpecificInfo;
-    memset(&codecSpecificInfo, 0, sizeof(codecSpecificInfo));
-    callback_->Encoded(image, &codecSpecificInfo, NULL);
+    CodecSpecificInfo codec_specific_info;
+    memset(&codec_specific_info, 0, sizeof(codec_specific_info));
+    callback_->OnEncodedImage(image, &codec_specific_info, NULL);
   }
   void set_supports_native_handle(bool enabled) {

@@ -1024,8 +1024,8 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
       vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER,
                         &qp_128);
       encoded_images_[encoder_idx].qp_ = qp_128;
-      encoded_complete_callback_->Encoded(encoded_images_[encoder_idx],
-                                          &codec_specific, &frag_info);
+      encoded_complete_callback_->OnEncodedImage(encoded_images_[encoder_idx],
+                                                 &codec_specific, &frag_info);
     } else if (codec_.mode == kScreensharing) {
       result = WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT;
     }

@@ -705,8 +705,8 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
     int qp = -1;
     vpx_codec_control(encoder_, VP8E_GET_LAST_QUANTIZER, &qp);
     encoded_image_.qp_ = qp;
-    encoded_complete_callback_->Encoded(encoded_image_, &codec_specific,
-                                        &frag_info);
+    encoded_complete_callback_->OnEncodedImage(encoded_image_, &codec_specific,
+                                               &frag_info);
   }
   return WEBRTC_VIDEO_CODEC_OK;
 }

@@ -270,8 +270,8 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
     if (qp_parser_.GetQp(*frame, &qp)) {
       encoded_image.qp_ = qp;
     }
-    pre_decode_image_callback_->Encoded(encoded_image, frame->CodecSpecific(),
-                                        nullptr);
+    pre_decode_image_callback_->OnEncodedImage(encoded_image,
+                                               frame->CodecSpecific(), nullptr);
   }
   rtc::CritScope cs(&receive_crit_);

@@ -52,7 +52,7 @@ int32_t ConfigurableFrameSizeEncoder::Encode(
   RTPFragmentationHeader* fragmentation = NULL;
   CodecSpecificInfo specific;
   memset(&specific, 0, sizeof(specific));
-  callback_->Encoded(encodedImage, &specific, fragmentation);
+  callback_->OnEncodedImage(encodedImage, &specific, fragmentation);
   return WEBRTC_VIDEO_CODEC_OK;
 }

@@ -112,8 +112,10 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image,
     encoded.rotation_ = input_image.rotation();
     RTC_DCHECK(callback_ != NULL);
     specifics.codec_name = ImplementationName();
-    if (callback_->Encoded(encoded, &specifics, NULL) != 0)
+    if (callback_->OnEncodedImage(encoded, &specifics, NULL).error !=
+        EncodedImageCallback::Result::OK) {
       return -1;
+    }
     bits_available -= std::min(encoded._length * 8, bits_available);
   }
   return 0;

@@ -45,7 +45,9 @@ TEST(PayloadRouterTest, SendOnOneModule) {
                                    encoded_image.capture_time_ms_, &payload,
                                    encoded_image._length, nullptr, _, _))
       .Times(0);
-  EXPECT_EQ(-1, payload_router.Encoded(encoded_image, nullptr, nullptr));
+  EXPECT_NE(
+      EncodedImageCallback::Result::OK,
+      payload_router.OnEncodedImage(encoded_image, nullptr, nullptr).error);
   payload_router.set_active(true);
   EXPECT_CALL(rtp, SendOutgoingData(encoded_image._frameType, payload_type,
@@ -53,7 +55,9 @@ TEST(PayloadRouterTest, SendOnOneModule) {
                                    encoded_image.capture_time_ms_, &payload,
                                    encoded_image._length, nullptr, _, _))
       .Times(1);
-  EXPECT_EQ(0, payload_router.Encoded(encoded_image, nullptr, nullptr));
+  EXPECT_EQ(
+      EncodedImageCallback::Result::OK,
+      payload_router.OnEncodedImage(encoded_image, nullptr, nullptr).error);
   payload_router.set_active(false);
   EXPECT_CALL(rtp, SendOutgoingData(encoded_image._frameType, payload_type,
@@ -61,7 +65,9 @@ TEST(PayloadRouterTest, SendOnOneModule) {
                                    encoded_image.capture_time_ms_, &payload,
                                    encoded_image._length, nullptr, _, _))
       .Times(0);
-  EXPECT_EQ(-1, payload_router.Encoded(encoded_image, nullptr, nullptr));
+  EXPECT_NE(
+      EncodedImageCallback::Result::OK,
+      payload_router.OnEncodedImage(encoded_image, nullptr, nullptr).error);
   payload_router.set_active(true);
   EXPECT_CALL(rtp, SendOutgoingData(encoded_image._frameType, payload_type,
@@ -69,7 +75,9 @@ TEST(PayloadRouterTest, SendOnOneModule) {
                                    encoded_image.capture_time_ms_, &payload,
                                    encoded_image._length, nullptr, _, _))
       .Times(1);
-  EXPECT_EQ(0, payload_router.Encoded(encoded_image, nullptr, nullptr));
+  EXPECT_EQ(
+      EncodedImageCallback::Result::OK,
+      payload_router.OnEncodedImage(encoded_image, nullptr, nullptr).error);
 }
 TEST(PayloadRouterTest, SendSimulcast) {
@@ -103,7 +111,9 @@ TEST(PayloadRouterTest, SendSimulcast) {
                                      encoded_image._length, nullptr, _, _))
       .Times(1);
   EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _, _)).Times(0);
-  EXPECT_EQ(0, payload_router.Encoded(encoded_image, &codec_info_1, nullptr));
+  EXPECT_EQ(EncodedImageCallback::Result::OK,
+            payload_router.OnEncodedImage(encoded_image, &codec_info_1, nullptr)
+                .error);
   CodecSpecificInfo codec_info_2;
   memset(&codec_info_2, 0, sizeof(CodecSpecificInfo));
@@ -117,7 +127,9 @@ TEST(PayloadRouterTest, SendSimulcast) {
       .Times(1);
   EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _, _))
       .Times(0);
-  EXPECT_EQ(0, payload_router.Encoded(encoded_image, &codec_info_2, nullptr));
+  EXPECT_EQ(EncodedImageCallback::Result::OK,
+            payload_router.OnEncodedImage(encoded_image, &codec_info_2, nullptr)
+                .error);
   // Inactive.
   payload_router.set_active(false);
@@ -125,8 +137,12 @@ TEST(PayloadRouterTest, SendSimulcast) {
       .Times(0);
   EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _, _))
       .Times(0);
-  EXPECT_EQ(-1, payload_router.Encoded(encoded_image, &codec_info_1, nullptr));
-  EXPECT_EQ(-1, payload_router.Encoded(encoded_image, &codec_info_2, nullptr));
+  EXPECT_NE(EncodedImageCallback::Result::OK,
+            payload_router.OnEncodedImage(encoded_image, &codec_info_1, nullptr)
+                .error);
+  EXPECT_NE(EncodedImageCallback::Result::OK,
+            payload_router.OnEncodedImage(encoded_image, &codec_info_2, nullptr)
+                .error);
 }
 TEST(PayloadRouterTest, MaxPayloadLength) {

@@ -2419,8 +2419,10 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) {
       encoded._encodedWidth = kEncodedResolution[i].width;
       encoded._encodedHeight = kEncodedResolution[i].height;
       RTC_DCHECK(callback_);
-      if (callback_->Encoded(encoded, &specifics, nullptr) != 0)
+      if (callback_->OnEncodedImage(encoded, &specifics, nullptr).error !=
+          EncodedImageCallback::Result::OK) {
         return -1;
+      }
     }
     observation_complete_.Set();

@@ -208,14 +208,15 @@ class ViEEncoderTest : public ::testing::Test {
   }
  private:
-  int32_t Encoded(const EncodedImage& encoded_image,
-                  const CodecSpecificInfo* codec_specific_info,
-                  const RTPFragmentationHeader* fragmentation) override {
+  Result OnEncodedImage(
+      const EncodedImage& encoded_image,
+      const CodecSpecificInfo* codec_specific_info,
+      const RTPFragmentationHeader* fragmentation) override {
     rtc::CritScope lock(&crit_);
     EXPECT_TRUE(expect_frames_);
     timestamp_ = encoded_image._timeStamp;
     encoded_frame_event_.Set();
-    return 0;
+    return Result(Result::OK, timestamp_);
   }
   void OnEncoderConfigurationChanged(std::vector<VideoStream> streams,

@@ -54,23 +54,10 @@ class EncodedImageCallback {
   };
   // Callback function which is called when an image has been encoded.
-  virtual Result OnEncodedImage(const EncodedImage& encoded_image,
-                                const CodecSpecificInfo* codec_specific_info,
-                                const RTPFragmentationHeader* fragmentation) {
-    return (Encoded(encoded_image, codec_specific_info, fragmentation) == 0)
-               ? Result(Result::OK, 0)
-               : Result(Result::ERROR_SEND_FAILED);
-  }
-  // DEPRECATED.
-  // TODO(sergeyu): Remove this method.
-  virtual int32_t Encoded(const EncodedImage& encoded_image,
-                          const CodecSpecificInfo* codec_specific_info,
-                          const RTPFragmentationHeader* fragmentation) {
-    Result result =
-        OnEncodedImage(encoded_image, codec_specific_info, fragmentation);
-    return (result.error != Result::OK) ? -1 : (result.drop_next_frame ? 1 : 0);
-  }
+  virtual Result OnEncodedImage(
+      const EncodedImage& encoded_image,
+      const CodecSpecificInfo* codec_specific_info,
+      const RTPFragmentationHeader* fragmentation) = 0;
 };
 class VideoEncoder {
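
With the default implementation and the deprecated Encoded() adapter both gone,
OnEncodedImage() is now pure virtual, so every EncodedImageCallback subclass
must override it. A minimal sketch of such an override (NullEncodedImageCallback
is a made-up example class that simply discards frames):

  class NullEncodedImageCallback : public webrtc::EncodedImageCallback {
   public:
    Result OnEncodedImage(
        const webrtc::EncodedImage& encoded_image,
        const webrtc::CodecSpecificInfo* codec_specific_info,
        const webrtc::RTPFragmentationHeader* fragmentation) override {
      // Accept and drop the frame; echo the RTP timestamp back as the frame id.
      return Result(Result::OK, encoded_image._timeStamp);
    }
  };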