Move video send/receive stream headers to webrtc/call.

Moved the headers video_receive_stream.h and video_send_stream.h from
webrtc/ into webrtc/call/ as part of the Slim and Modular work.

The GN target webrtc:video_stream_api has moved to
webrtc/call:video_stream_api.

Headers with the same names remain in webrtc/; they simply include the
moved headers in webrtc/call/ so that external projects depending on
WebRTC are not broken.

At the same time, some minor cleanup is done: non-pure-virtual functions declared in the two affected headers now have definitions in the same target. After making this change, our 'chromium-style' plugin detected some style violations, which have now been fixed: non-inlined constructors and destructors were added to a number of classes, both inside the GN target of the two affected headers and in other targets.

BUG=webrtc:8107

Review-Url: https://codereview.webrtc.org/3000253002
Cr-Commit-Position: refs/heads/master@{#19448}
This commit is contained in:
aleloi 2017-08-22 05:43:23 -07:00 committed by Commit Bot
parent aaaf2db253
commit 440b6d9a0f
29 changed files with 905 additions and 739 deletions

View File

@ -26,6 +26,7 @@ config("common_inherited_config") {
if (build_with_mozilla) {
defines += [ "WEBRTC_MOZILLA_BUILD" ]
}
# Some tests need to declare their own trace event handlers. If this define is
# not set, the first time TRACE_EVENT_* is called it will store the return
# value for the current handler in a static variable, so that subsequent
@ -235,19 +236,6 @@ config("common_objc") {
libs = [ "Foundation.framework" ]
}
rtc_source_set("video_stream_api") {
sources = [
"video_receive_stream.h",
"video_send_stream.h",
]
deps = [
":webrtc_common",
"api:transport_api",
"common_video:common_video",
"rtc_base:rtc_base_approved",
]
}
if (!build_with_chromium) {
# Target to build all the WebRTC production code.
rtc_static_library("webrtc") {

View File

@ -29,11 +29,9 @@ include_rules = [
# The below rules will be removed when webrtc:4243 is fixed.
specific_include_rules = {
"video_receive_stream\.h": [
"+webrtc/common_video/include",
"+webrtc/media/base",
"+webrtc/call/video_receive_stream.h",
],
"video_send_stream\.h": [
"+webrtc/common_video/include",
"+webrtc/media/base",
"+webrtc/call/video_send_stream.h",
],
}

View File

@ -22,7 +22,7 @@ rtc_source_set("call_interfaces") {
]
deps = [
":rtp_interfaces",
"..:video_stream_api",
":video_stream_api",
"..:webrtc_common",
"../api:audio_mixer_api",
"../api:libjingle_peerconnection_api",
@ -124,6 +124,21 @@ rtc_static_library("call") {
]
}
# Public API for configuring video send/receive streams. Moved here from
# the webrtc/ root target "video_stream_api" as part of the Slim and
# Modular work (BUG=webrtc:8107).
rtc_source_set("video_stream_api") {
  sources = [
    "video_receive_stream.cc",
    "video_receive_stream.h",
    "video_send_stream.cc",
    "video_send_stream.h",
  ]
  deps = [
    "../:webrtc_common",
    "../api:transport_api",
    "../common_video:common_video",
    "../rtc_base:rtc_base_approved",
  ]
}
if (rtc_include_tests) {
rtc_source_set("call_tests") {
testonly = true

View File

@ -15,3 +15,14 @@ include_rules = [
"+webrtc/voice_engine",
"+webrtc/video",
]
specific_include_rules = {
"video_receive_stream\.h": [
"+webrtc/common_video/include",
"+webrtc/media/base",
],
"video_send_stream\.h": [
"+webrtc/common_video/include",
"+webrtc/media/base",
],
}

View File

@ -21,12 +21,12 @@
#include "webrtc/call/audio_state.h"
#include "webrtc/call/flexfec_receive_stream.h"
#include "webrtc/call/rtp_transport_controller_send_interface.h"
#include "webrtc/call/video_receive_stream.h"
#include "webrtc/call/video_send_stream.h"
#include "webrtc/common_types.h"
#include "webrtc/rtc_base/networkroute.h"
#include "webrtc/rtc_base/platform_file.h"
#include "webrtc/rtc_base/socket.h"
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {

View File

@ -0,0 +1,131 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/call/video_receive_stream.h"
namespace webrtc {
VideoReceiveStream::Decoder::Decoder() = default;
VideoReceiveStream::Decoder::Decoder(const Decoder&) = default;
VideoReceiveStream::Decoder::~Decoder() = default;

// Renders the decoder settings as a single-line string for logging.
std::string VideoReceiveStream::Decoder::ToString() const {
  std::stringstream out;
  out << "{decoder: " << (decoder != nullptr ? "(VideoDecoder)" : "nullptr")
      << ", payload_type: " << payload_type
      << ", payload_name: " << payload_name << ", codec_params: {";
  // NOTE(review): consecutive codec_params entries are printed back-to-back
  // with no separator; preserved to keep the output format unchanged.
  for (const auto& param : codec_params)
    out << param.first << ": " << param.second;
  out << "}}";
  return out.str();
}
VideoReceiveStream::Stats::Stats() = default;
VideoReceiveStream::Stats::~Stats() = default;
std::string VideoReceiveStream::Stats::ToString(int64_t time_ms) const {
std::stringstream ss;
ss << "VideoReceiveStream stats: " << time_ms << ", {ssrc: " << ssrc << ", ";
ss << "total_bps: " << total_bitrate_bps << ", ";
ss << "width: " << width << ", ";
ss << "height: " << height << ", ";
ss << "key: " << frame_counts.key_frames << ", ";
ss << "delta: " << frame_counts.delta_frames << ", ";
ss << "network_fps: " << network_frame_rate << ", ";
ss << "decode_fps: " << decode_frame_rate << ", ";
ss << "render_fps: " << render_frame_rate << ", ";
ss << "decode_ms: " << decode_ms << ", ";
ss << "max_decode_ms: " << max_decode_ms << ", ";
ss << "cur_delay_ms: " << current_delay_ms << ", ";
ss << "targ_delay_ms: " << target_delay_ms << ", ";
ss << "jb_delay_ms: " << jitter_buffer_ms << ", ";
ss << "min_playout_delay_ms: " << min_playout_delay_ms << ", ";
ss << "discarded: " << discarded_packets << ", ";
ss << "sync_offset_ms: " << sync_offset_ms << ", ";
ss << "cum_loss: " << rtcp_stats.packets_lost << ", ";
ss << "max_ext_seq: " << rtcp_stats.extended_highest_sequence_number << ", ";
ss << "nack: " << rtcp_packet_type_counts.nack_packets << ", ";
ss << "fir: " << rtcp_packet_type_counts.fir_packets << ", ";
ss << "pli: " << rtcp_packet_type_counts.pli_packets;
ss << '}';
return ss.str();
}
VideoReceiveStream::Config::Config(const Config&) = default;
VideoReceiveStream::Config::Config(Config&&) = default;
VideoReceiveStream::Config::Config(Transport* rtcp_send_transport)
    : rtcp_send_transport(rtcp_send_transport) {}
VideoReceiveStream::Config& VideoReceiveStream::Config::operator=(Config&&) =
    default;
// Fixed: was 'VideoReceiveStream::Config::Config::~Config()'. The doubled
// 'Config::' (via the injected-class-name) compiles but is misleading.
VideoReceiveStream::Config::~Config() = default;

// Human-readable dump of the receive-stream configuration, for logging.
std::string VideoReceiveStream::Config::ToString() const {
  std::stringstream ss;
  ss << "{decoders: [";
  for (size_t i = 0; i < decoders.size(); ++i) {
    ss << decoders[i].ToString();
    if (i != decoders.size() - 1)
      ss << ", ";
  }
  ss << ']';
  ss << ", rtp: " << rtp.ToString();
  ss << ", renderer: " << (renderer ? "(renderer)" : "nullptr");
  ss << ", render_delay_ms: " << render_delay_ms;
  // sync_group is omitted entirely when empty (i.e. A/V sync disabled).
  if (!sync_group.empty())
    ss << ", sync_group: " << sync_group;
  ss << ", pre_decode_callback: "
     << (pre_decode_callback ? "(EncodedFrameObserver)" : "nullptr");
  ss << ", target_delay_ms: " << target_delay_ms;
  ss << '}';
  return ss.str();
}
VideoReceiveStream::Config::Rtp::Rtp() = default;
VideoReceiveStream::Config::Rtp::Rtp(const Rtp&) = default;
VideoReceiveStream::Config::Rtp::~Rtp() = default;
std::string VideoReceiveStream::Config::Rtp::ToString() const {
std::stringstream ss;
ss << "{remote_ssrc: " << remote_ssrc;
ss << ", local_ssrc: " << local_ssrc;
ss << ", rtcp_mode: "
<< (rtcp_mode == RtcpMode::kCompound ? "RtcpMode::kCompound"
: "RtcpMode::kReducedSize");
ss << ", rtcp_xr: ";
ss << "{receiver_reference_time_report: "
<< (rtcp_xr.receiver_reference_time_report ? "on" : "off");
ss << '}';
ss << ", remb: " << (remb ? "on" : "off");
ss << ", transport_cc: " << (transport_cc ? "on" : "off");
ss << ", nack: {rtp_history_ms: " << nack.rtp_history_ms << '}';
ss << ", ulpfec: " << ulpfec.ToString();
ss << ", rtx_ssrc: " << rtx_ssrc;
ss << ", rtx_payload_types: {";
for (auto& kv : rtx_payload_types) {
ss << kv.first << " (apt) -> " << kv.second << " (pt), ";
}
ss << '}';
ss << ", extensions: [";
for (size_t i = 0; i < extensions.size(); ++i) {
ss << extensions[i].ToString();
if (i != extensions.size() - 1)
ss << ", ";
}
ss << ']';
ss << '}';
return ss.str();
}
} // namespace webrtc

View File

@ -0,0 +1,245 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_CALL_VIDEO_RECEIVE_STREAM_H_
#define WEBRTC_CALL_VIDEO_RECEIVE_STREAM_H_
#include <limits>
#include <map>
#include <string>
#include <vector>
#include "webrtc/api/call/transport.h"
#include "webrtc/common_types.h"
#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/config.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/rtc_base/platform_file.h"
namespace webrtc {
class RtpPacketSinkInterface;
class VideoDecoder;
// Abstract interface for a single received video stream. All control methods
// are pure virtual; concrete implementations live elsewhere. The nested
// structs define the stream's configuration and the statistics it reports.
class VideoReceiveStream {
 public:
  // TODO(mflodman) Move all these settings to VideoDecoder and move the
  // declaration to common_types.h.
  struct Decoder {
    Decoder();
    Decoder(const Decoder&);
    ~Decoder();
    std::string ToString() const;

    // The actual decoder instance.
    VideoDecoder* decoder = nullptr;

    // Received RTP packets with this payload type will be sent to this decoder
    // instance.
    int payload_type = 0;

    // Name of the decoded payload (such as VP8). Maps back to the depacketizer
    // used to unpack incoming packets.
    std::string payload_name;

    // This map contains the codec specific parameters from SDP, i.e. the "fmtp"
    // parameters. It is the same as cricket::CodecParameterMap used in
    // cricket::VideoCodec.
    std::map<std::string, std::string> codec_params;
  };

  // Snapshot of receive-side statistics, returned by GetStats().
  struct Stats {
    Stats();
    ~Stats();
    std::string ToString(int64_t time_ms) const;

    int network_frame_rate = 0;
    int decode_frame_rate = 0;
    int render_frame_rate = 0;
    uint32_t frames_rendered = 0;

    // Decoder stats.
    std::string decoder_implementation_name = "unknown";
    FrameCounts frame_counts;
    int decode_ms = 0;
    int max_decode_ms = 0;
    int current_delay_ms = 0;
    int target_delay_ms = 0;
    int jitter_buffer_ms = 0;
    int min_playout_delay_ms = 0;
    int render_delay_ms = 10;
    uint64_t interframe_delay_sum_ms = 0;
    uint32_t frames_decoded = 0;
    rtc::Optional<uint64_t> qp_sum;

    int current_payload_type = -1;

    int total_bitrate_bps = 0;
    int discarded_packets = 0;

    int width = 0;
    int height = 0;

    // Defaults to INT_MAX, i.e. "unknown", until a sync offset is measured.
    int sync_offset_ms = std::numeric_limits<int>::max();

    uint32_t ssrc = 0;
    std::string c_name;
    StreamDataCounters rtp_stats;
    RtcpPacketTypeCounter rtcp_packet_type_counts;
    RtcpStatistics rtcp_stats;
  };

  // Configuration for a receive stream. Movable but deliberately not
  // copyable through the public interface; use Copy() when a copy is needed.
  struct Config {
   private:
    // Access to the copy constructor is private to force use of the Copy()
    // method for those exceptional cases where we do use it.
    Config(const Config&);

   public:
    Config() = delete;
    Config(Config&&);
    explicit Config(Transport* rtcp_send_transport);
    Config& operator=(Config&&);
    Config& operator=(const Config&) = delete;
    ~Config();

    // Mostly used by tests. Avoid creating copies if you can.
    Config Copy() const { return Config(*this); }

    std::string ToString() const;

    // Decoders for every payload that we can receive.
    std::vector<Decoder> decoders;

    // Receive-stream specific RTP settings.
    struct Rtp {
      Rtp();
      Rtp(const Rtp&);
      ~Rtp();
      std::string ToString() const;

      // Synchronization source (stream identifier) to be received.
      uint32_t remote_ssrc = 0;

      // Sender SSRC used for sending RTCP (such as receiver reports).
      uint32_t local_ssrc = 0;

      // See RtcpMode for description.
      RtcpMode rtcp_mode = RtcpMode::kCompound;

      // Extended RTCP settings.
      struct RtcpXr {
        // True if RTCP Receiver Reference Time Report Block extension
        // (RFC 3611) should be enabled.
        bool receiver_reference_time_report = false;
      } rtcp_xr;

      // TODO(nisse): This remb setting is currently set but never
      // applied. REMB logic is now the responsibility of
      // PacketRouter, and it will generate REMB feedback if
      // OnReceiveBitrateChanged is used, which depends on how the
      // estimators belonging to the ReceiveSideCongestionController
      // are configured. Decide if this setting should be deleted, and
      // if it needs to be replaced by a setting in PacketRouter to
      // disable REMB feedback.
      // See draft-alvestrand-rmcat-remb for information.
      bool remb = false;

      // See draft-holmer-rmcat-transport-wide-cc-extensions for details.
      bool transport_cc = false;

      // See NackConfig for description.
      NackConfig nack;

      // See UlpfecConfig for description.
      UlpfecConfig ulpfec;

      // SSRC for retransmissions.
      uint32_t rtx_ssrc = 0;

      // Set if the stream is protected using FlexFEC.
      bool protected_by_flexfec = false;

      // Map from video payload type (apt) -> RTX payload type (pt).
      // For RTX to be enabled, both an SSRC and this mapping are needed.
      std::map<int, int> rtx_payload_types;

      // RTP header extensions used for the received stream.
      std::vector<RtpExtension> extensions;
    } rtp;

    // Transport for outgoing packets (RTCP).
    Transport* rtcp_send_transport = nullptr;

    // Must not be 'nullptr' when the stream is started.
    rtc::VideoSinkInterface<VideoFrame>* renderer = nullptr;

    // Expected delay needed by the renderer, i.e. the frame will be delivered
    // this many milliseconds, if possible, earlier than the ideal render time.
    // Only valid if 'renderer' is set.
    int render_delay_ms = 10;

    // If set, pass frames on to the renderer as soon as they are
    // available.
    bool disable_prerenderer_smoothing = false;

    // Identifier for an A/V synchronization group. Empty string to disable.
    // TODO(pbos): Synchronize streams in a sync group, not just video streams
    // to one of the audio streams.
    std::string sync_group;

    // Called for each incoming video frame, i.e. in encoded state. E.g. used
    // when
    // saving the stream to a file. 'nullptr' disables the callback.
    EncodedFrameObserver* pre_decode_callback = nullptr;

    // Target delay in milliseconds. A positive value indicates this stream is
    // used for streaming instead of a real-time call.
    int target_delay_ms = 0;
  };

  // Starts stream activity.
  // When a stream is active, it can receive, process and deliver packets.
  virtual void Start() = 0;
  // Stops stream activity.
  // When a stream is stopped, it can't receive, process or deliver packets.
  virtual void Stop() = 0;

  // TODO(pbos): Add info on currently-received codec to Stats.
  virtual Stats GetStats() const = 0;

  virtual rtc::Optional<TimingFrameInfo> GetAndResetTimingFrameInfo() = 0;

  // Takes ownership of the file, is responsible for closing it later.
  // Calling this method will close and finalize any current log.
  // Giving rtc::kInvalidPlatformFileValue disables logging.
  // If a frame to be written would make the log too large the write fails and
  // the log is closed and finalized. A |byte_limit| of 0 means no limit.
  virtual void EnableEncodedFrameRecording(rtc::PlatformFile file,
                                           size_t byte_limit) = 0;
  // Convenience wrapper: stops recording by passing the invalid file value.
  inline void DisableEncodedFrameRecording() {
    EnableEncodedFrameRecording(rtc::kInvalidPlatformFileValue, 0);
  }

  // RtpDemuxer only forwards a given RTP packet to one sink. However, some
  // sinks, such as FlexFEC, might wish to be informed of all of the packets
  // a given sink receives (or any set of sinks). They may do so by registering
  // themselves as secondary sinks.
  virtual void AddSecondarySink(RtpPacketSinkInterface* sink) = 0;
  virtual void RemoveSecondarySink(const RtpPacketSinkInterface* sink) = 0;

 protected:
  // Protected non-public destructor: callers are not expected to delete
  // through this interface pointer.
  virtual ~VideoReceiveStream() {}
};
} // namespace webrtc
#endif // WEBRTC_CALL_VIDEO_RECEIVE_STREAM_H_

View File

@ -0,0 +1,162 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/call/video_send_stream.h"
namespace webrtc {
VideoSendStream::StreamStats::StreamStats() = default;
VideoSendStream::StreamStats::~StreamStats() = default;

// Renders the per-substream statistics as a single-line string (no enclosing
// braces; the caller adds them).
std::string VideoSendStream::StreamStats::ToString() const {
  std::stringstream out;
  out << "width: " << width << ", "
      << "height: " << height << ", "
      << "key: " << frame_counts.key_frames << ", "
      << "delta: " << frame_counts.delta_frames << ", "
      << "total_bps: " << total_bitrate_bps << ", "
      << "retransmit_bps: " << retransmit_bitrate_bps << ", "
      << "avg_delay_ms: " << avg_delay_ms << ", "
      << "max_delay_ms: " << max_delay_ms << ", "
      << "cum_loss: " << rtcp_stats.packets_lost << ", "
      << "max_ext_seq: " << rtcp_stats.extended_highest_sequence_number
      << ", "
      << "nack: " << rtcp_packet_type_counts.nack_packets << ", "
      << "fir: " << rtcp_packet_type_counts.fir_packets << ", "
      << "pli: " << rtcp_packet_type_counts.pli_packets;
  return out.str();
}
VideoSendStream::Stats::Stats() = default;
VideoSendStream::Stats::~Stats() = default;
std::string VideoSendStream::Stats::ToString(int64_t time_ms) const {
std::stringstream ss;
ss << "VideoSendStream stats: " << time_ms << ", {";
ss << "input_fps: " << input_frame_rate << ", ";
ss << "encode_fps: " << encode_frame_rate << ", ";
ss << "encode_ms: " << avg_encode_time_ms << ", ";
ss << "encode_usage_perc: " << encode_usage_percent << ", ";
ss << "target_bps: " << target_media_bitrate_bps << ", ";
ss << "media_bps: " << media_bitrate_bps << ", ";
ss << "preferred_media_bitrate_bps: " << preferred_media_bitrate_bps << ", ";
ss << "suspended: " << (suspended ? "true" : "false") << ", ";
ss << "bw_adapted: " << (bw_limited_resolution ? "true" : "false");
ss << '}';
for (const auto& substream : substreams) {
if (!substream.second.is_rtx && !substream.second.is_flexfec) {
ss << " {ssrc: " << substream.first << ", ";
ss << substream.second.ToString();
ss << '}';
}
}
return ss.str();
}
VideoSendStream::Config::Config(const Config&) = default;
VideoSendStream::Config::Config(Config&&) = default;
VideoSendStream::Config::Config(Transport* send_transport)
    : send_transport(send_transport) {}
VideoSendStream::Config& VideoSendStream::Config::operator=(Config&&) = default;
// Fixed: was 'VideoSendStream::Config::Config::~Config()'. The doubled
// 'Config::' (via the injected-class-name) compiles but is misleading.
VideoSendStream::Config::~Config() = default;

// Human-readable dump of the send-stream configuration, for logging.
std::string VideoSendStream::Config::ToString() const {
  std::stringstream ss;
  ss << "{encoder_settings: " << encoder_settings.ToString();
  ss << ", rtp: " << rtp.ToString();
  ss << ", pre_encode_callback: "
     << (pre_encode_callback ? "(VideoSinkInterface)" : "nullptr");
  ss << ", post_encode_callback: "
     << (post_encode_callback ? "(EncodedFrameObserver)" : "nullptr");
  ss << ", render_delay_ms: " << render_delay_ms;
  ss << ", target_delay_ms: " << target_delay_ms;
  ss << ", suspend_below_min_bitrate: "
     << (suspend_below_min_bitrate ? "on" : "off");
  ss << '}';
  return ss.str();
}
// Renders the encoder settings as a single-line string for logging.
std::string VideoSendStream::Config::EncoderSettings::ToString() const {
  std::stringstream out;
  out << "{payload_name: " << payload_name
      << ", payload_type: " << payload_type
      << ", encoder: " << (encoder != nullptr ? "(VideoEncoder)" : "nullptr")
      << '}';
  return out.str();
}
VideoSendStream::Config::Rtp::Rtp() = default;
VideoSendStream::Config::Rtp::Rtp(const Rtp&) = default;
VideoSendStream::Config::Rtp::~Rtp() = default;
VideoSendStream::Config::Rtp::Flexfec::Flexfec() = default;
VideoSendStream::Config::Rtp::Flexfec::Flexfec(const Flexfec&) = default;
VideoSendStream::Config::Rtp::Flexfec::~Flexfec() = default;
std::string VideoSendStream::Config::Rtp::ToString() const {
std::stringstream ss;
ss << "{ssrcs: [";
for (size_t i = 0; i < ssrcs.size(); ++i) {
ss << ssrcs[i];
if (i != ssrcs.size() - 1)
ss << ", ";
}
ss << ']';
ss << ", rtcp_mode: "
<< (rtcp_mode == RtcpMode::kCompound ? "RtcpMode::kCompound"
: "RtcpMode::kReducedSize");
ss << ", max_packet_size: " << max_packet_size;
ss << ", extensions: [";
for (size_t i = 0; i < extensions.size(); ++i) {
ss << extensions[i].ToString();
if (i != extensions.size() - 1)
ss << ", ";
}
ss << ']';
ss << ", nack: {rtp_history_ms: " << nack.rtp_history_ms << '}';
ss << ", ulpfec: " << ulpfec.ToString();
ss << ", flexfec: {payload_type: " << flexfec.payload_type;
ss << ", ssrc: " << flexfec.ssrc;
ss << ", protected_media_ssrcs: [";
for (size_t i = 0; i < flexfec.protected_media_ssrcs.size(); ++i) {
ss << flexfec.protected_media_ssrcs[i];
if (i != flexfec.protected_media_ssrcs.size() - 1)
ss << ", ";
}
ss << "]}";
ss << ", rtx: " << rtx.ToString();
ss << ", c_name: " << c_name;
ss << '}';
return ss.str();
}
VideoSendStream::Config::Rtp::Rtx::Rtx() = default;
VideoSendStream::Config::Rtp::Rtx::Rtx(const Rtx&) = default;
VideoSendStream::Config::Rtp::Rtx::~Rtx() = default;
std::string VideoSendStream::Config::Rtp::Rtx::ToString() const {
std::stringstream ss;
ss << "{ssrcs: [";
for (size_t i = 0; i < ssrcs.size(); ++i) {
ss << ssrcs[i];
if (i != ssrcs.size() - 1)
ss << ", ";
}
ss << ']';
ss << ", payload_type: " << payload_type;
ss << '}';
return ss.str();
}
} // namespace webrtc

View File

@ -0,0 +1,282 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_CALL_VIDEO_SEND_STREAM_H_
#define WEBRTC_CALL_VIDEO_SEND_STREAM_H_
#include <map>
#include <string>
#include <utility>
#include <vector>
#include "webrtc/api/call/transport.h"
#include "webrtc/common_types.h"
#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/config.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/videosourceinterface.h"
#include "webrtc/rtc_base/platform_file.h"
namespace webrtc {
class VideoEncoder;
// Abstract interface for a single outgoing video stream. All control methods
// are pure virtual; concrete implementations live elsewhere. The nested
// structs define the stream's configuration and the statistics it reports.
class VideoSendStream {
 public:
  // Statistics for one RTP substream (media, RTX or FlexFEC) of this send
  // stream; aggregated per-SSRC in Stats::substreams.
  struct StreamStats {
    StreamStats();
    ~StreamStats();
    std::string ToString() const;

    FrameCounts frame_counts;
    bool is_rtx = false;
    bool is_flexfec = false;
    int width = 0;
    int height = 0;
    // TODO(holmer): Move bitrate_bps out to the webrtc::Call layer.
    int total_bitrate_bps = 0;
    int retransmit_bitrate_bps = 0;
    int avg_delay_ms = 0;
    int max_delay_ms = 0;
    StreamDataCounters rtp_stats;
    RtcpPacketTypeCounter rtcp_packet_type_counts;
    RtcpStatistics rtcp_stats;
  };

  // Snapshot of send-side statistics, returned by GetStats().
  struct Stats {
    Stats();
    ~Stats();
    std::string ToString(int64_t time_ms) const;
    std::string encoder_implementation_name = "unknown";
    int input_frame_rate = 0;
    int encode_frame_rate = 0;
    int avg_encode_time_ms = 0;
    int encode_usage_percent = 0;
    uint32_t frames_encoded = 0;
    rtc::Optional<uint64_t> qp_sum;
    // Bitrate the encoder is currently configured to use due to bandwidth
    // limitations.
    int target_media_bitrate_bps = 0;
    // Bitrate the encoder is actually producing.
    int media_bitrate_bps = 0;
    // Media bitrate this VideoSendStream is configured to prefer if there are
    // no bandwidth limitations.
    int preferred_media_bitrate_bps = 0;
    bool suspended = false;
    bool bw_limited_resolution = false;
    bool cpu_limited_resolution = false;
    bool bw_limited_framerate = false;
    bool cpu_limited_framerate = false;
    // Total number of times resolution as been requested to be changed due to
    // CPU/quality adaptation.
    int number_of_cpu_adapt_changes = 0;
    int number_of_quality_adapt_changes = 0;
    std::map<uint32_t, StreamStats> substreams;
  };

  // Configuration for a send stream. Movable but deliberately not copyable
  // through the public interface; use Copy() when a copy is needed.
  struct Config {
   public:
    Config() = delete;
    Config(Config&&);
    explicit Config(Transport* send_transport);

    Config& operator=(Config&&);
    Config& operator=(const Config&) = delete;

    ~Config();

    // Mostly used by tests. Avoid creating copies if you can.
    Config Copy() const { return Config(*this); }

    std::string ToString() const;

    struct EncoderSettings {
      EncoderSettings() = default;
      EncoderSettings(std::string payload_name,
                      int payload_type,
                      VideoEncoder* encoder)
          : payload_name(std::move(payload_name)),
            payload_type(payload_type),
            encoder(encoder) {}
      std::string ToString() const;

      std::string payload_name;
      int payload_type = -1;

      // TODO(sophiechang): Delete this field when no one is using internal
      // sources anymore.
      bool internal_source = false;

      // Allow 100% encoder utilization. Used for HW encoders where CPU isn't
      // expected to be the limiting factor, but a chip could be running at
      // 30fps (for example) exactly.
      bool full_overuse_time = false;

      // Uninitialized VideoEncoder instance to be used for encoding. Will be
      // initialized from inside the VideoSendStream.
      VideoEncoder* encoder = nullptr;
    } encoder_settings;

    static const size_t kDefaultMaxPacketSize = 1500 - 40;  // TCP over IPv4.
    struct Rtp {
      Rtp();
      Rtp(const Rtp&);
      ~Rtp();
      std::string ToString() const;

      std::vector<uint32_t> ssrcs;

      // See RtcpMode for description.
      RtcpMode rtcp_mode = RtcpMode::kCompound;

      // Max RTP packet size delivered to send transport from VideoEngine.
      size_t max_packet_size = kDefaultMaxPacketSize;

      // RTP header extensions to use for this send stream.
      std::vector<RtpExtension> extensions;

      // See NackConfig for description.
      NackConfig nack;

      // See UlpfecConfig for description.
      UlpfecConfig ulpfec;

      struct Flexfec {
        Flexfec();
        Flexfec(const Flexfec&);
        ~Flexfec();
        // Payload type of FlexFEC. Set to -1 to disable sending FlexFEC.
        int payload_type = -1;

        // SSRC of FlexFEC stream.
        uint32_t ssrc = 0;

        // Vector containing a single element, corresponding to the SSRC of the
        // media stream being protected by this FlexFEC stream.
        // The vector MUST have size 1.
        //
        // TODO(brandtr): Update comment above when we support
        // multistream protection.
        std::vector<uint32_t> protected_media_ssrcs;
      } flexfec;

      // Settings for RTP retransmission payload format, see RFC 4588 for
      // details.
      struct Rtx {
        Rtx();
        Rtx(const Rtx&);
        ~Rtx();
        std::string ToString() const;
        // SSRCs to use for the RTX streams.
        std::vector<uint32_t> ssrcs;

        // Payload type to use for the RTX stream.
        int payload_type = -1;
      } rtx;

      // RTCP CNAME, see RFC 3550.
      std::string c_name;
    } rtp;

    // Transport for outgoing packets.
    Transport* send_transport = nullptr;

    // Called for each I420 frame before encoding the frame. Can be used for
    // effects, snapshots etc. 'nullptr' disables the callback.
    rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback = nullptr;

    // Called for each encoded frame, e.g. used for file storage. 'nullptr'
    // disables the callback. Also measures timing and passes the time
    // spent on encoding. This timing will not fire if encoding takes longer
    // than the measuring window, since the sample data will have been dropped.
    EncodedFrameObserver* post_encode_callback = nullptr;

    // Expected delay needed by the renderer, i.e. the frame will be delivered
    // this many milliseconds, if possible, earlier than expected render time.
    // Only valid if |local_renderer| is set.
    int render_delay_ms = 0;

    // Target delay in milliseconds. A positive value indicates this stream is
    // used for streaming instead of a real-time call.
    int target_delay_ms = 0;

    // True if the stream should be suspended when the available bitrate fall
    // below the minimum configured bitrate. If this variable is false, the
    // stream may send at a rate higher than the estimated available bitrate.
    bool suspend_below_min_bitrate = false;

    // Enables periodic bandwidth probing in application-limited region.
    bool periodic_alr_bandwidth_probing = false;

   private:
    // Access to the copy constructor is private to force use of the Copy()
    // method for those exceptional cases where we do use it.
    Config(const Config&);
  };

  // Starts stream activity.
  // When a stream is active, it can receive, process and deliver packets.
  virtual void Start() = 0;
  // Stops stream activity.
  // When a stream is stopped, it can't receive, process or deliver packets.
  virtual void Stop() = 0;

  // Based on the spec in
  // https://w3c.github.io/webrtc-pc/#idl-def-rtcdegradationpreference.
  // These options are enforced on a best-effort basis. For instance, all of
  // these options may suffer some frame drops in order to avoid queuing.
  // TODO(sprang): Look into possibility of more strictly enforcing the
  // maintain-framerate option.
  enum class DegradationPreference {
    // Don't take any actions based on over-utilization signals.
    kDegradationDisabled,
    // On over-use, request lower frame rate, possibly causing frame drops.
    kMaintainResolution,
    // On over-use, request lower resolution, possibly causing down-scaling.
    kMaintainFramerate,
    // Try to strike a "pleasing" balance between frame rate or resolution.
    kBalanced,
  };

  virtual void SetSource(
      rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
      const DegradationPreference& degradation_preference) = 0;

  // Set which streams to send. Must have at least as many SSRCs as configured
  // in the config. Encoder settings are passed on to the encoder instance along
  // with the VideoStream settings.
  virtual void ReconfigureVideoEncoder(VideoEncoderConfig config) = 0;

  virtual Stats GetStats() = 0;

  // Takes ownership of each file, is responsible for closing them later.
  // Calling this method will close and finalize any current logs.
  // Some codecs produce multiple streams (VP8 only at present), each of these
  // streams will log to a separate file. kMaxSimulcastStreams in common_types.h
  // gives the max number of such streams. If there is no file for a stream, or
  // the file is rtc::kInvalidPlatformFileValue, frames from that stream will
  // not be logged.
  // If a frame to be written would make the log too large the write fails and
  // the log is closed and finalized. A |byte_limit| of 0 means no limit.
  virtual void EnableEncodedFrameRecording(
      const std::vector<rtc::PlatformFile>& files,
      size_t byte_limit) = 0;
  // Convenience wrapper: stops recording by passing an empty file list.
  inline void DisableEncodedFrameRecording() {
    EnableEncodedFrameRecording(std::vector<rtc::PlatformFile>(), 0);
  }

 protected:
  // Protected non-public destructor: callers are not expected to delete
  // through this interface pointer.
  virtual ~VideoSendStream() {}
};
} // namespace webrtc
#endif // WEBRTC_CALL_VIDEO_SEND_STREAM_H_

View File

@ -28,8 +28,8 @@ rtc_source_set("rtc_event_log_api") {
"rtc_event_log/rtc_event_log_factory_interface.h",
]
deps = [
"..:video_stream_api",
"..:webrtc_common",
"../call:video_stream_api",
"../rtc_base:rtc_base_approved",
]
}
@ -95,7 +95,7 @@ if (rtc_enable_protobuf) {
suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
}
deps = [
"..:video_stream_api",
"../call:video_stream_api",
"../rtc_base:protobuf_utils",
"../rtc_base:rtc_base_approved",
]

View File

@ -15,12 +15,12 @@
#include <utility> // pair
#include <vector>
#include "webrtc/call/video_receive_stream.h"
#include "webrtc/call/video_send_stream.h"
#include "webrtc/logging/rtc_event_log/rtc_event_log.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/rtc_base/ignore_wundef.h"
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_send_stream.h"
// Files generated at build-time by the protobuf compiler.
RTC_PUSH_IGNORING_WUNDEF()

View File

@ -93,6 +93,11 @@ rtc_static_library("rtc_media_base") {
"base/videocommon.h",
"base/videosourcebase.cc",
"base/videosourcebase.h",
# TODO(aleloi): add "base/videosinkinterface.h"
"base/videosourceinterface.cc",
# TODO(aleloi): add "base/videosourceinterface.h"
]
if (!build_with_chromium && is_clang) {
@ -217,7 +222,6 @@ rtc_static_library("rtc_audio_video") {
}
deps += [
":rtc_media_base",
"..:video_stream_api",
"..:webrtc_common",
"../api:call_api",
"../api:libjingle_peerconnection_api",
@ -228,6 +232,7 @@ rtc_static_library("rtc_audio_video") {
"../api/audio_codecs:builtin_audio_encoder_factory",
"../api/video_codecs:video_codecs_api",
"../call",
"../call:video_stream_api",
"../common_video:common_video",
"../modules/audio_coding:rent_a_codec",
"../modules/audio_device:audio_device",
@ -322,7 +327,7 @@ if (rtc_include_tests) {
include_dirs = []
public_deps = []
deps = [
"..:video_stream_api",
"../call:video_stream_api",
"../modules/audio_coding:rent_a_codec",
"../modules/audio_processing:audio_processing",
"../modules/rtp_rtcp:rtp_rtcp",

View File

@ -0,0 +1,17 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/media/base/videosourceinterface.h"
namespace rtc {
VideoSinkWants::VideoSinkWants() = default;
VideoSinkWants::~VideoSinkWants() = default;
} // namespace rtc

View File

@ -19,6 +19,8 @@ namespace rtc {
// VideoSinkWants is used for notifying the source of properties a video frame
// should have when it is delivered to a certain sink.
struct VideoSinkWants {
VideoSinkWants();
~VideoSinkWants();
// Tells the source whether the sink wants frames with rotation applied.
// By default, any rotation must be applied by the sink.
bool rotation_applied = false;

View File

@ -21,6 +21,8 @@
#include "webrtc/api/video/video_frame.h"
#include "webrtc/call/call.h"
#include "webrtc/call/flexfec_receive_stream.h"
#include "webrtc/call/video_receive_stream.h"
#include "webrtc/call/video_send_stream.h"
#include "webrtc/media/base/mediaengine.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/videosourceinterface.h"
@ -32,8 +34,6 @@
#include "webrtc/rtc_base/optional.h"
#include "webrtc/rtc_base/thread_annotations.h"
#include "webrtc/rtc_base/thread_checker.h"
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
class VideoDecoder;

View File

@ -95,8 +95,8 @@ rtc_static_library("video_coding") {
":webrtc_vp8",
":webrtc_vp9",
"..:module_api",
"../..:video_stream_api",
"../..:webrtc_common",
"../../call:video_stream_api",
"../../common_video",
"../../rtc_base:rtc_base",
"../../rtc_base:rtc_base_approved",

View File

@ -205,8 +205,8 @@ if (rtc_enable_protobuf) {
}
defines = [ "ENABLE_RTC_EVENT_LOG" ]
deps = [
"..:video_stream_api",
"../call:call_interfaces",
"../call:video_stream_api",
"../logging:rtc_event_log_impl",
"../logging:rtc_event_log_parser",
"../modules:module_api",

View File

@ -20,6 +20,8 @@
#include "webrtc/call/audio_receive_stream.h"
#include "webrtc/call/audio_send_stream.h"
#include "webrtc/call/call.h"
#include "webrtc/call/video_receive_stream.h"
#include "webrtc/call/video_send_stream.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/audio_coding/neteq/tools/audio_sink.h"
#include "webrtc/modules/audio_coding/neteq/tools/fake_decode_from_file.h"
@ -43,8 +45,6 @@
#include "webrtc/rtc_base/logging.h"
#include "webrtc/rtc_base/ptr_util.h"
#include "webrtc/rtc_base/rate_statistics.h"
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
namespace plotting {

View File

@ -59,9 +59,9 @@ rtc_source_set("video_test_common") {
}
deps = [
"..:video_stream_api",
"..:webrtc_common",
"../api/video_codecs:video_codecs_api",
"../call:video_stream_api",
"../common_video",
"../media:rtc_media_base",
"../modules/video_capture:video_capture_module",
@ -488,7 +488,6 @@ rtc_source_set("test_common") {
":rtp_test_utils",
":test_support",
":video_test_common",
"..:video_stream_api",
"..:webrtc_common",
"../api:transport_api",
"../api:video_frame_api",
@ -498,6 +497,7 @@ rtc_source_set("test_common") {
"../audio",
"../call",
"../call:rtp_sender",
"../call:video_stream_api",
"../common_video",
"../logging:rtc_event_log_api",
"../modules/audio_device:mock_audio_device",

View File

@ -12,8 +12,8 @@
#include <vector>
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_send_stream.h"
#include "webrtc/call/video_receive_stream.h"
#include "webrtc/call/video_send_stream.h"
namespace webrtc {
namespace test {

View File

@ -54,12 +54,12 @@ rtc_static_library("video") {
}
deps = [
"..:video_stream_api",
"..:webrtc_common",
"../api:transport_api",
"../api/video_codecs:video_codecs_api",
"../call:call_interfaces",
"../call:rtp_interfaces",
"../call:video_stream_api",
"../common_video",
"../logging:rtc_event_log_api",
"../media:rtc_media_base",
@ -261,13 +261,13 @@ if (rtc_include_tests) {
]
deps = [
":video",
"..:video_stream_api",
"../api:video_frame_api",
"../api/video_codecs:video_codecs_api",
"../call:call_interfaces",
"../call:mock_rtp_interfaces",
"../call:rtp_receiver",
"../call:rtp_sender",
"../call:video_stream_api",
"../common_video",
"../logging:rtc_event_log_api",
"../media:rtc_media",

View File

@ -14,6 +14,7 @@
#include <map>
#include <string>
#include "webrtc/call/video_receive_stream.h"
#include "webrtc/common_types.h"
#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
@ -25,7 +26,6 @@
#include "webrtc/video/report_block_stats.h"
#include "webrtc/video/stats_counter.h"
#include "webrtc/video/video_stream_decoder.h"
#include "webrtc/video_receive_stream.h"
namespace webrtc {

View File

@ -18,6 +18,7 @@
#include <vector>
#include "webrtc/call/rtp_packet_sink_interface.h"
#include "webrtc/call/video_receive_stream.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
@ -33,7 +34,6 @@
#include "webrtc/rtc_base/criticalsection.h"
#include "webrtc/rtc_base/thread_checker.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_receive_stream.h"
namespace webrtc {

View File

@ -37,106 +37,9 @@
#include "webrtc/system_wrappers/include/field_trial.h"
#include "webrtc/video/call_stats.h"
#include "webrtc/video/receive_statistics_proxy.h"
#include "webrtc/video_receive_stream.h"
namespace webrtc {
std::string VideoReceiveStream::Decoder::ToString() const {
std::stringstream ss;
ss << "{decoder: " << (decoder ? "(VideoDecoder)" : "nullptr");
ss << ", payload_type: " << payload_type;
ss << ", payload_name: " << payload_name;
ss << ", codec_params: {";
for (const auto& it : codec_params)
ss << it.first << ": " << it.second;
ss << '}';
ss << '}';
return ss.str();
}
std::string VideoReceiveStream::Config::ToString() const {
std::stringstream ss;
ss << "{decoders: [";
for (size_t i = 0; i < decoders.size(); ++i) {
ss << decoders[i].ToString();
if (i != decoders.size() - 1)
ss << ", ";
}
ss << ']';
ss << ", rtp: " << rtp.ToString();
ss << ", renderer: " << (renderer ? "(renderer)" : "nullptr");
ss << ", render_delay_ms: " << render_delay_ms;
if (!sync_group.empty())
ss << ", sync_group: " << sync_group;
ss << ", pre_decode_callback: "
<< (pre_decode_callback ? "(EncodedFrameObserver)" : "nullptr");
ss << ", target_delay_ms: " << target_delay_ms;
ss << '}';
return ss.str();
}
std::string VideoReceiveStream::Config::Rtp::ToString() const {
std::stringstream ss;
ss << "{remote_ssrc: " << remote_ssrc;
ss << ", local_ssrc: " << local_ssrc;
ss << ", rtcp_mode: "
<< (rtcp_mode == RtcpMode::kCompound ? "RtcpMode::kCompound"
: "RtcpMode::kReducedSize");
ss << ", rtcp_xr: ";
ss << "{receiver_reference_time_report: "
<< (rtcp_xr.receiver_reference_time_report ? "on" : "off");
ss << '}';
ss << ", remb: " << (remb ? "on" : "off");
ss << ", transport_cc: " << (transport_cc ? "on" : "off");
ss << ", nack: {rtp_history_ms: " << nack.rtp_history_ms << '}';
ss << ", ulpfec: " << ulpfec.ToString();
ss << ", rtx_ssrc: " << rtx_ssrc;
ss << ", rtx_payload_types: {";
for (auto& kv : rtx_payload_types) {
ss << kv.first << " (apt) -> " << kv.second << " (pt), ";
}
ss << '}';
ss << ", extensions: [";
for (size_t i = 0; i < extensions.size(); ++i) {
ss << extensions[i].ToString();
if (i != extensions.size() - 1)
ss << ", ";
}
ss << ']';
ss << '}';
return ss.str();
}
std::string VideoReceiveStream::Stats::ToString(int64_t time_ms) const {
std::stringstream ss;
ss << "VideoReceiveStream stats: " << time_ms << ", {ssrc: " << ssrc << ", ";
ss << "total_bps: " << total_bitrate_bps << ", ";
ss << "width: " << width << ", ";
ss << "height: " << height << ", ";
ss << "key: " << frame_counts.key_frames << ", ";
ss << "delta: " << frame_counts.delta_frames << ", ";
ss << "network_fps: " << network_frame_rate << ", ";
ss << "decode_fps: " << decode_frame_rate << ", ";
ss << "render_fps: " << render_frame_rate << ", ";
ss << "decode_ms: " << decode_ms << ", ";
ss << "max_decode_ms: " << max_decode_ms << ", ";
ss << "cur_delay_ms: " << current_delay_ms << ", ";
ss << "targ_delay_ms: " << target_delay_ms << ", ";
ss << "jb_delay_ms: " << jitter_buffer_ms << ", ";
ss << "min_playout_delay_ms: " << min_playout_delay_ms << ", ";
ss << "discarded: " << discarded_packets << ", ";
ss << "sync_offset_ms: " << sync_offset_ms << ", ";
ss << "cum_loss: " << rtcp_stats.packets_lost << ", ";
ss << "max_ext_seq: " << rtcp_stats.extended_highest_sequence_number << ", ";
ss << "nack: " << rtcp_packet_type_counts.nack_packets << ", ";
ss << "fir: " << rtcp_packet_type_counts.fir_packets << ", ";
ss << "pli: " << rtcp_packet_type_counts.pli_packets;
ss << '}';
return ss.str();
}
namespace {
VideoCodec CreateDecoderVideoCodec(const VideoReceiveStream::Decoder& decoder) {
VideoCodec codec;

View File

@ -16,6 +16,7 @@
#include "webrtc/call/rtp_packet_sink_interface.h"
#include "webrtc/call/syncable.h"
#include "webrtc/call/video_receive_stream.h"
#include "webrtc/common_video/include/incoming_video_stream.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/rtp_rtcp/include/flexfec_receiver.h"
@ -28,7 +29,6 @@
#include "webrtc/video/rtp_video_stream_receiver.h"
#include "webrtc/video/transport_adapter.h"
#include "webrtc/video/video_stream_decoder.h"
#include "webrtc/video_receive_stream.h"
namespace webrtc {

View File

@ -145,129 +145,6 @@ std::unique_ptr<FlexfecSender> MaybeCreateFlexfecSender(
} // namespace
std::string
VideoSendStream::Config::EncoderSettings::ToString() const {
std::stringstream ss;
ss << "{payload_name: " << payload_name;
ss << ", payload_type: " << payload_type;
ss << ", encoder: " << (encoder ? "(VideoEncoder)" : "nullptr");
ss << '}';
return ss.str();
}
std::string VideoSendStream::Config::Rtp::Rtx::ToString()
const {
std::stringstream ss;
ss << "{ssrcs: [";
for (size_t i = 0; i < ssrcs.size(); ++i) {
ss << ssrcs[i];
if (i != ssrcs.size() - 1)
ss << ", ";
}
ss << ']';
ss << ", payload_type: " << payload_type;
ss << '}';
return ss.str();
}
std::string VideoSendStream::Config::Rtp::ToString() const {
std::stringstream ss;
ss << "{ssrcs: [";
for (size_t i = 0; i < ssrcs.size(); ++i) {
ss << ssrcs[i];
if (i != ssrcs.size() - 1)
ss << ", ";
}
ss << ']';
ss << ", rtcp_mode: "
<< (rtcp_mode == RtcpMode::kCompound ? "RtcpMode::kCompound"
: "RtcpMode::kReducedSize");
ss << ", max_packet_size: " << max_packet_size;
ss << ", extensions: [";
for (size_t i = 0; i < extensions.size(); ++i) {
ss << extensions[i].ToString();
if (i != extensions.size() - 1)
ss << ", ";
}
ss << ']';
ss << ", nack: {rtp_history_ms: " << nack.rtp_history_ms << '}';
ss << ", ulpfec: " << ulpfec.ToString();
ss << ", flexfec: {payload_type: " << flexfec.payload_type;
ss << ", ssrc: " << flexfec.ssrc;
ss << ", protected_media_ssrcs: [";
for (size_t i = 0; i < flexfec.protected_media_ssrcs.size(); ++i) {
ss << flexfec.protected_media_ssrcs[i];
if (i != flexfec.protected_media_ssrcs.size() - 1)
ss << ", ";
}
ss << "]}";
ss << ", rtx: " << rtx.ToString();
ss << ", c_name: " << c_name;
ss << '}';
return ss.str();
}
std::string VideoSendStream::Config::ToString() const {
std::stringstream ss;
ss << "{encoder_settings: " << encoder_settings.ToString();
ss << ", rtp: " << rtp.ToString();
ss << ", pre_encode_callback: "
<< (pre_encode_callback ? "(VideoSinkInterface)" : "nullptr");
ss << ", post_encode_callback: "
<< (post_encode_callback ? "(EncodedFrameObserver)" : "nullptr");
ss << ", render_delay_ms: " << render_delay_ms;
ss << ", target_delay_ms: " << target_delay_ms;
ss << ", suspend_below_min_bitrate: " << (suspend_below_min_bitrate ? "on"
: "off");
ss << '}';
return ss.str();
}
std::string VideoSendStream::Stats::ToString(int64_t time_ms) const {
std::stringstream ss;
ss << "VideoSendStream stats: " << time_ms << ", {";
ss << "input_fps: " << input_frame_rate << ", ";
ss << "encode_fps: " << encode_frame_rate << ", ";
ss << "encode_ms: " << avg_encode_time_ms << ", ";
ss << "encode_usage_perc: " << encode_usage_percent << ", ";
ss << "target_bps: " << target_media_bitrate_bps << ", ";
ss << "media_bps: " << media_bitrate_bps << ", ";
ss << "preferred_media_bitrate_bps: " << preferred_media_bitrate_bps << ", ";
ss << "suspended: " << (suspended ? "true" : "false") << ", ";
ss << "bw_adapted: " << (bw_limited_resolution ? "true" : "false");
ss << '}';
for (const auto& substream : substreams) {
if (!substream.second.is_rtx && !substream.second.is_flexfec) {
ss << " {ssrc: " << substream.first << ", ";
ss << substream.second.ToString();
ss << '}';
}
}
return ss.str();
}
std::string VideoSendStream::StreamStats::ToString() const {
std::stringstream ss;
ss << "width: " << width << ", ";
ss << "height: " << height << ", ";
ss << "key: " << frame_counts.key_frames << ", ";
ss << "delta: " << frame_counts.delta_frames << ", ";
ss << "total_bps: " << total_bitrate_bps << ", ";
ss << "retransmit_bps: " << retransmit_bitrate_bps << ", ";
ss << "avg_delay_ms: " << avg_delay_ms << ", ";
ss << "max_delay_ms: " << max_delay_ms << ", ";
ss << "cum_loss: " << rtcp_stats.packets_lost << ", ";
ss << "max_ext_seq: " << rtcp_stats.extended_highest_sequence_number << ", ";
ss << "nack: " << rtcp_packet_type_counts.nack_packets << ", ";
ss << "fir: " << rtcp_packet_type_counts.fir_packets << ", ";
ss << "pli: " << rtcp_packet_type_counts.pli_packets;
return ss.str();
}
namespace {
bool PayloadTypeSupportsSkippingFecPackets(const std::string& payload_name) {

View File

@ -16,6 +16,8 @@
#include <vector>
#include "webrtc/call/bitrate_allocator.h"
#include "webrtc/call/video_receive_stream.h"
#include "webrtc/call/video_send_stream.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_coding/protection_bitrate_calculator.h"
#include "webrtc/rtc_base/criticalsection.h"
@ -25,8 +27,6 @@
#include "webrtc/video/send_delay_stats.h"
#include "webrtc/video/send_statistics_proxy.h"
#include "webrtc/video/video_stream_encoder.h"
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {

View File

@ -11,228 +11,8 @@
#ifndef WEBRTC_VIDEO_RECEIVE_STREAM_H_
#define WEBRTC_VIDEO_RECEIVE_STREAM_H_
#include <limits>
#include <map>
#include <string>
#include <vector>
#include "webrtc/api/call/transport.h"
#include "webrtc/common_types.h"
#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/config.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/rtc_base/platform_file.h"
namespace webrtc {
class RtpPacketSinkInterface;
class VideoDecoder;
class VideoReceiveStream {
public:
// TODO(mflodman) Move all these settings to VideoDecoder and move the
// declaration to common_types.h.
struct Decoder {
std::string ToString() const;
// The actual decoder instance.
VideoDecoder* decoder = nullptr;
// Received RTP packets with this payload type will be sent to this decoder
// instance.
int payload_type = 0;
// Name of the decoded payload (such as VP8). Maps back to the depacketizer
// used to unpack incoming packets.
std::string payload_name;
// This map contains the codec specific parameters from SDP, i.e. the "fmtp"
// parameters. It is the same as cricket::CodecParameterMap used in
// cricket::VideoCodec.
std::map<std::string, std::string> codec_params;
};
struct Stats {
std::string ToString(int64_t time_ms) const;
int network_frame_rate = 0;
int decode_frame_rate = 0;
int render_frame_rate = 0;
uint32_t frames_rendered = 0;
// Decoder stats.
std::string decoder_implementation_name = "unknown";
FrameCounts frame_counts;
int decode_ms = 0;
int max_decode_ms = 0;
int current_delay_ms = 0;
int target_delay_ms = 0;
int jitter_buffer_ms = 0;
int min_playout_delay_ms = 0;
int render_delay_ms = 10;
uint64_t interframe_delay_sum_ms = 0;
uint32_t frames_decoded = 0;
rtc::Optional<uint64_t> qp_sum;
int current_payload_type = -1;
int total_bitrate_bps = 0;
int discarded_packets = 0;
int width = 0;
int height = 0;
int sync_offset_ms = std::numeric_limits<int>::max();
uint32_t ssrc = 0;
std::string c_name;
StreamDataCounters rtp_stats;
RtcpPacketTypeCounter rtcp_packet_type_counts;
RtcpStatistics rtcp_stats;
};
struct Config {
private:
// Access to the copy constructor is private to force use of the Copy()
// method for those exceptional cases where we do use it.
Config(const Config&) = default;
public:
Config() = delete;
Config(Config&&) = default;
explicit Config(Transport* rtcp_send_transport)
: rtcp_send_transport(rtcp_send_transport) {}
Config& operator=(Config&&) = default;
Config& operator=(const Config&) = delete;
// Mostly used by tests. Avoid creating copies if you can.
Config Copy() const { return Config(*this); }
std::string ToString() const;
// Decoders for every payload that we can receive.
std::vector<Decoder> decoders;
// Receive-stream specific RTP settings.
struct Rtp {
std::string ToString() const;
// Synchronization source (stream identifier) to be received.
uint32_t remote_ssrc = 0;
// Sender SSRC used for sending RTCP (such as receiver reports).
uint32_t local_ssrc = 0;
// See RtcpMode for description.
RtcpMode rtcp_mode = RtcpMode::kCompound;
// Extended RTCP settings.
struct RtcpXr {
// True if RTCP Receiver Reference Time Report Block extension
// (RFC 3611) should be enabled.
bool receiver_reference_time_report = false;
} rtcp_xr;
// TODO(nisse): This remb setting is currently set but never
// applied. REMB logic is now the responsibility of
// PacketRouter, and it will generate REMB feedback if
// OnReceiveBitrateChanged is used, which depends on how the
// estimators belonging to the ReceiveSideCongestionController
// are configured. Decide if this setting should be deleted, and
// if it needs to be replaced by a setting in PacketRouter to
// disable REMB feedback.
// See draft-alvestrand-rmcat-remb for information.
bool remb = false;
// See draft-holmer-rmcat-transport-wide-cc-extensions for details.
bool transport_cc = false;
// See NackConfig for description.
NackConfig nack;
// See UlpfecConfig for description.
UlpfecConfig ulpfec;
// SSRC for retransmissions.
uint32_t rtx_ssrc = 0;
// Set if the stream is protected using FlexFEC.
bool protected_by_flexfec = false;
// Map from video payload type (apt) -> RTX payload type (pt).
// For RTX to be enabled, both an SSRC and this mapping are needed.
std::map<int, int> rtx_payload_types;
// RTP header extensions used for the received stream.
std::vector<RtpExtension> extensions;
} rtp;
// Transport for outgoing packets (RTCP).
Transport* rtcp_send_transport = nullptr;
// Must not be 'nullptr' when the stream is started.
rtc::VideoSinkInterface<VideoFrame>* renderer = nullptr;
// Expected delay needed by the renderer, i.e. the frame will be delivered
// this many milliseconds, if possible, earlier than the ideal render time.
// Only valid if 'renderer' is set.
int render_delay_ms = 10;
// If set, pass frames on to the renderer as soon as they are
// available.
bool disable_prerenderer_smoothing = false;
// Identifier for an A/V synchronization group. Empty string to disable.
// TODO(pbos): Synchronize streams in a sync group, not just video streams
// to one of the audio streams.
std::string sync_group;
// Called for each incoming video frame, i.e. in encoded state. E.g. used
// when
// saving the stream to a file. 'nullptr' disables the callback.
EncodedFrameObserver* pre_decode_callback = nullptr;
// Target delay in milliseconds. A positive value indicates this stream is
// used for streaming instead of a real-time call.
int target_delay_ms = 0;
};
// Starts stream activity.
// When a stream is active, it can receive, process and deliver packets.
virtual void Start() = 0;
// Stops stream activity.
// When a stream is stopped, it can't receive, process or deliver packets.
virtual void Stop() = 0;
// TODO(pbos): Add info on currently-received codec to Stats.
virtual Stats GetStats() const = 0;
virtual rtc::Optional<TimingFrameInfo> GetAndResetTimingFrameInfo() = 0;
// Takes ownership of the file, is responsible for closing it later.
// Calling this method will close and finalize any current log.
// Giving rtc::kInvalidPlatformFileValue disables logging.
// If a frame to be written would make the log too large the write fails and
// the log is closed and finalized. A |byte_limit| of 0 means no limit.
virtual void EnableEncodedFrameRecording(rtc::PlatformFile file,
size_t byte_limit) = 0;
inline void DisableEncodedFrameRecording() {
EnableEncodedFrameRecording(rtc::kInvalidPlatformFileValue, 0);
}
// RtpDemuxer only forwards a given RTP packet to one sink. However, some
// sinks, such as FlexFEC, might wish to be informed of all of the packets
// a given sink receives (or any set of sinks). They may do so by registering
// themselves as secondary sinks.
virtual void AddSecondarySink(RtpPacketSinkInterface* sink) = 0;
virtual void RemoveSecondarySink(const RtpPacketSinkInterface* sink) = 0;
protected:
virtual ~VideoReceiveStream() {}
};
} // namespace webrtc
#include "webrtc/call/video_receive_stream.h"
// The contents header have moved to webrtc/call/video_receive_stream.h. This
// file is deprecated. See http://bugs.webrtc.org/8107.
#endif // WEBRTC_VIDEO_RECEIVE_STREAM_H_

View File

@ -11,258 +11,8 @@
#ifndef WEBRTC_VIDEO_SEND_STREAM_H_
#define WEBRTC_VIDEO_SEND_STREAM_H_
#include <map>
#include <string>
#include <utility>
#include <vector>
#include <utility>
#include "webrtc/api/call/transport.h"
#include "webrtc/common_types.h"
#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/config.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/videosourceinterface.h"
#include "webrtc/rtc_base/platform_file.h"
namespace webrtc {
class VideoEncoder;
class VideoSendStream {
public:
struct StreamStats {
std::string ToString() const;
FrameCounts frame_counts;
bool is_rtx = false;
bool is_flexfec = false;
int width = 0;
int height = 0;
// TODO(holmer): Move bitrate_bps out to the webrtc::Call layer.
int total_bitrate_bps = 0;
int retransmit_bitrate_bps = 0;
int avg_delay_ms = 0;
int max_delay_ms = 0;
StreamDataCounters rtp_stats;
RtcpPacketTypeCounter rtcp_packet_type_counts;
RtcpStatistics rtcp_stats;
};
struct Stats {
std::string ToString(int64_t time_ms) const;
std::string encoder_implementation_name = "unknown";
int input_frame_rate = 0;
int encode_frame_rate = 0;
int avg_encode_time_ms = 0;
int encode_usage_percent = 0;
uint32_t frames_encoded = 0;
rtc::Optional<uint64_t> qp_sum;
// Bitrate the encoder is currently configured to use due to bandwidth
// limitations.
int target_media_bitrate_bps = 0;
// Bitrate the encoder is actually producing.
int media_bitrate_bps = 0;
// Media bitrate this VideoSendStream is configured to prefer if there are
// no bandwidth limitations.
int preferred_media_bitrate_bps = 0;
bool suspended = false;
bool bw_limited_resolution = false;
bool cpu_limited_resolution = false;
bool bw_limited_framerate = false;
bool cpu_limited_framerate = false;
// Total number of times resolution as been requested to be changed due to
// CPU/quality adaptation.
int number_of_cpu_adapt_changes = 0;
int number_of_quality_adapt_changes = 0;
std::map<uint32_t, StreamStats> substreams;
};
struct Config {
public:
Config() = delete;
Config(Config&&) = default;
explicit Config(Transport* send_transport)
: send_transport(send_transport) {}
Config& operator=(Config&&) = default;
Config& operator=(const Config&) = delete;
// Mostly used by tests. Avoid creating copies if you can.
Config Copy() const { return Config(*this); }
std::string ToString() const;
struct EncoderSettings {
EncoderSettings() = default;
EncoderSettings(std::string payload_name,
int payload_type,
VideoEncoder* encoder)
: payload_name(std::move(payload_name)),
payload_type(payload_type),
encoder(encoder) {}
std::string ToString() const;
std::string payload_name;
int payload_type = -1;
// TODO(sophiechang): Delete this field when no one is using internal
// sources anymore.
bool internal_source = false;
// Allow 100% encoder utilization. Used for HW encoders where CPU isn't
// expected to be the limiting factor, but a chip could be running at
// 30fps (for example) exactly.
bool full_overuse_time = false;
// Uninitialized VideoEncoder instance to be used for encoding. Will be
// initialized from inside the VideoSendStream.
VideoEncoder* encoder = nullptr;
} encoder_settings;
static const size_t kDefaultMaxPacketSize = 1500 - 40; // TCP over IPv4.
struct Rtp {
std::string ToString() const;
std::vector<uint32_t> ssrcs;
// See RtcpMode for description.
RtcpMode rtcp_mode = RtcpMode::kCompound;
// Max RTP packet size delivered to send transport from VideoEngine.
size_t max_packet_size = kDefaultMaxPacketSize;
// RTP header extensions to use for this send stream.
std::vector<RtpExtension> extensions;
// See NackConfig for description.
NackConfig nack;
// See UlpfecConfig for description.
UlpfecConfig ulpfec;
struct Flexfec {
// Payload type of FlexFEC. Set to -1 to disable sending FlexFEC.
int payload_type = -1;
// SSRC of FlexFEC stream.
uint32_t ssrc = 0;
// Vector containing a single element, corresponding to the SSRC of the
// media stream being protected by this FlexFEC stream.
// The vector MUST have size 1.
//
// TODO(brandtr): Update comment above when we support
// multistream protection.
std::vector<uint32_t> protected_media_ssrcs;
} flexfec;
// Settings for RTP retransmission payload format, see RFC 4588 for
// details.
struct Rtx {
std::string ToString() const;
// SSRCs to use for the RTX streams.
std::vector<uint32_t> ssrcs;
// Payload type to use for the RTX stream.
int payload_type = -1;
} rtx;
// RTCP CNAME, see RFC 3550.
std::string c_name;
} rtp;
// Transport for outgoing packets.
Transport* send_transport = nullptr;
// Called for each I420 frame before encoding the frame. Can be used for
// effects, snapshots etc. 'nullptr' disables the callback.
rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback = nullptr;
// Called for each encoded frame, e.g. used for file storage. 'nullptr'
// disables the callback. Also measures timing and passes the time
// spent on encoding. This timing will not fire if encoding takes longer
// than the measuring window, since the sample data will have been dropped.
EncodedFrameObserver* post_encode_callback = nullptr;
// Expected delay needed by the renderer, i.e. the frame will be delivered
// this many milliseconds, if possible, earlier than expected render time.
// Only valid if |local_renderer| is set.
int render_delay_ms = 0;
// Target delay in milliseconds. A positive value indicates this stream is
// used for streaming instead of a real-time call.
int target_delay_ms = 0;
// True if the stream should be suspended when the available bitrate fall
// below the minimum configured bitrate. If this variable is false, the
// stream may send at a rate higher than the estimated available bitrate.
bool suspend_below_min_bitrate = false;
// Enables periodic bandwidth probing in application-limited region.
bool periodic_alr_bandwidth_probing = false;
private:
// Access to the copy constructor is private to force use of the Copy()
// method for those exceptional cases where we do use it.
Config(const Config&) = default;
};
// Starts stream activity.
// When a stream is active, it can receive, process and deliver packets.
virtual void Start() = 0;
// Stops stream activity.
// When a stream is stopped, it can't receive, process or deliver packets.
virtual void Stop() = 0;
// Based on the spec in
// https://w3c.github.io/webrtc-pc/#idl-def-rtcdegradationpreference.
// These options are enforced on a best-effort basis. For instance, all of
// these options may suffer some frame drops in order to avoid queuing.
// TODO(sprang): Look into possibility of more strictly enforcing the
// maintain-framerate option.
enum class DegradationPreference {
// Don't take any actions based on over-utilization signals.
kDegradationDisabled,
// On over-use, request lower frame rate, possibly causing frame drops.
kMaintainResolution,
// On over-use, request lower resolution, possibly causing down-scaling.
kMaintainFramerate,
// Try to strike a "pleasing" balance between frame rate or resolution.
kBalanced,
};
virtual void SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
const DegradationPreference& degradation_preference) = 0;
// Set which streams to send. Must have at least as many SSRCs as configured
// in the config. Encoder settings are passed on to the encoder instance along
// with the VideoStream settings.
virtual void ReconfigureVideoEncoder(VideoEncoderConfig config) = 0;
virtual Stats GetStats() = 0;
// Takes ownership of each file, is responsible for closing them later.
// Calling this method will close and finalize any current logs.
// Some codecs produce multiple streams (VP8 only at present), each of these
// streams will log to a separate file. kMaxSimulcastStreams in common_types.h
// gives the max number of such streams. If there is no file for a stream, or
// the file is rtc::kInvalidPlatformFileValue, frames from that stream will
// not be logged.
// If a frame to be written would make the log too large the write fails and
// the log is closed and finalized. A |byte_limit| of 0 means no limit.
virtual void EnableEncodedFrameRecording(
const std::vector<rtc::PlatformFile>& files,
size_t byte_limit) = 0;
inline void DisableEncodedFrameRecording() {
EnableEncodedFrameRecording(std::vector<rtc::PlatformFile>(), 0);
}
protected:
virtual ~VideoSendStream() {}
};
} // namespace webrtc
#include "webrtc/call/video_send_stream.h"
// The contents header have moved to webrtc/call/video_send_stream.h. This
// file is deprecated. See http://bugs.webrtc.org/8107.
#endif // WEBRTC_VIDEO_SEND_STREAM_H_