Breaking out send side bitrate control cont.

Review URL: https://webrtc-codereview.appspot.com/475004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2135 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
pwestin@webrtc.org 2012-04-27 05:25:53 +00:00
parent e611619f60
commit 49888ce428
33 changed files with 740 additions and 2068 deletions

View File

@ -533,12 +533,6 @@ public:
WebRtc_UWord32* fecRate,
WebRtc_UWord32* nackRate) const = 0;
/*
* Get the send-side estimate of the available bandwidth.
*/
virtual int EstimatedSendBandwidth(
WebRtc_UWord32* available_bandwidth) const = 0;
/*
* Get the receive-side estimate of the available bandwidth.
*/
@ -573,13 +567,14 @@ public:
***************************************************************************/
/*
* RegisterIncomingRTCPCallback
*
* incomingMessagesCallback - callback object that will receive messages from RTCP
*
* return -1 on failure else 0
* Register a callback objects that will receive callbacks for video
* related events such as an incoming key frame request and events that
* could indicate bandwidth overuse.
*/
virtual WebRtc_Word32 RegisterIncomingRTCPCallback(RtcpFeedback* incomingMessagesCallback) = 0;
virtual void RegisterRtcpObservers(
RtcpIntraFrameObserver* intraFrameCallback,
RtcpBandwidthObserver* bandwidthCallback,
RtcpFeedback* callback) = 0;
/*
* Get RTCP status
@ -780,10 +775,6 @@ public:
const WebRtc_UWord8 numberOfSSRC,
const WebRtc_UWord32* SSRC) = 0;
// Used to set maximum bitrate estimate received in a REMB packet.
virtual WebRtc_Word32 SetMaximumBitrateEstimate(
const WebRtc_UWord32 bitrate) = 0;
// Registers an observer to call when the estimate of the incoming channel
// changes.
virtual bool SetRemoteBitrateObserver(
@ -959,14 +950,6 @@ public:
*
***************************************************************************/
/*
* Register a callback object that will receive callbacks for video related events
* such as an incoming key frame request.
*
* return -1 on failure else 0
*/
virtual WebRtc_Word32 RegisterIncomingVideoCallback(RtpVideoFeedback* incomingMessagesCallback) = 0;
/*
* Set the estimated camera delay in MS
*
@ -975,19 +958,9 @@ public:
virtual WebRtc_Word32 SetCameraDelay(const WebRtc_Word32 delayMS) = 0;
/*
* Set the start and max send bitrate
* used by the bandwidth management
*
* Not calling this or setting startBitrateKbit to 0 disables the bandwidth management
*
* minBitrateKbit = 0 equals no min bitrate
* maxBitrateKbit = 0 equals no max bitrate
*
* return -1 on failure else 0
* Set the target send bitrate
*/
virtual void SetSendBitrate(const WebRtc_UWord32 startBitrate,
const WebRtc_UWord16 minBitrateKbit,
const WebRtc_UWord16 maxBitrateKbit) = 0;
virtual void SetTargetSendBitrate(const WebRtc_UWord32 bitrate) = 0;
/*
* Turn on/off generic FEC

View File

@ -151,24 +151,10 @@ public:
virtual void OnXRVoIPMetricReceived(
const WebRtc_Word32 /*id*/,
const RTCPVoIPMetric* /*metric*/,
const WebRtc_Word8 /*VoIPmetricBuffer*/[28]) {};
const RTCPVoIPMetric* /*metric*/) {};
virtual void OnRTCPPacketTimeout(const WebRtc_Word32 /*id*/) {};
virtual void OnTMMBRReceived(const WebRtc_Word32 /*id*/,
const WebRtc_UWord16 /*bwEstimateKbit*/) {};
virtual void OnSLIReceived(const WebRtc_Word32 /*id*/,
const WebRtc_UWord8 /*pictureId*/) {};
virtual void OnRPSIReceived(const WebRtc_Word32 /*id*/,
const WebRtc_UWord64 /*pictureId*/) {};
virtual void OnReceiverEstimatedMaxBitrateReceived(
const WebRtc_Word32 /*id*/,
const WebRtc_UWord32 /*bitRate*/) {};
virtual void OnSendReportReceived(const WebRtc_Word32 id,
const WebRtc_UWord32 senderSSRC) {};
@ -230,7 +216,13 @@ class RtpAudioFeedback {
class RtcpIntraFrameObserver {
public:
virtual void OnReceivedIntraFrameRequest(const uint32_t ssrc) = 0;
protected:
virtual void OnReceivedSLI(const uint32_t ssrc,
const uint8_t picture_id) = 0;
virtual void OnReceivedRPSI(const uint32_t ssrc,
const uint64_t picture_id) = 0;
virtual ~RtcpIntraFrameObserver() {}
};
@ -249,24 +241,6 @@ class RtcpBandwidthObserver {
virtual ~RtcpBandwidthObserver() {}
};
// TODO(pwestin): To be deprecated; this callback interface is being
// superseded by RtcpIntraFrameObserver / RtcpBandwidthObserver.
class RtpVideoFeedback
{
public:
// Request an intra (key) frame for stream |streamIdx|; the implementation
// should forward this to the codec module.
virtual void OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
const FrameType type,
const WebRtc_UWord8 streamIdx) = 0;
// Notifies about updated network conditions (target bitrate in bps,
// fraction lost in Q8, round-trip time in ms).
virtual void OnNetworkChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 bitrateBps,
const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTimeMs) = 0;
protected:
// Non-public dtor: lifetime is managed by the implementing subclass.
virtual ~RtpVideoFeedback() {}
};
// A clock interface that allows reading of absolute and relative
// timestamps in an RTP/RTCP module.
class RtpRtcpClock {
@ -284,16 +258,12 @@ class RtpRtcpClock {
// RtpReceiveBitrateUpdate is used to signal changes in bitrate estimates for
// the incoming stream.
class RtpRemoteBitrateObserver {
public:
public:
// Called when a receive channel has a new bitrate estimate for the incoming
// stream.
virtual void OnReceiveBitrateChanged(uint32_t ssrc,
uint32_t bitrate) = 0;
// TODO(pwestin)To be depricated...
// Called when a REMB packet has been received.
virtual void OnReceivedRemb(uint32_t bitrate) = 0;
virtual ~RtpRemoteBitrateObserver() {}
};
} // namespace webrtc

View File

@ -140,8 +140,6 @@ class MockRtpRtcp : public RtpRtcp {
bool());
MOCK_CONST_METHOD4(BitrateSent,
void(WebRtc_UWord32* totalRate, WebRtc_UWord32* videoRate, WebRtc_UWord32* fecRate, WebRtc_UWord32* nackRate));
MOCK_CONST_METHOD1(EstimatedSendBandwidth,
int(WebRtc_UWord32* available_bandwidth));
MOCK_CONST_METHOD1(EstimatedReceiveBandwidth,
int(WebRtc_UWord32* available_bandwidth));
MOCK_METHOD7(SendOutgoingData,
@ -152,8 +150,10 @@ class MockRtpRtcp : public RtpRtcp {
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader* fragmentation,
const RTPVideoHeader* rtpVideoHdr));
MOCK_METHOD1(RegisterIncomingRTCPCallback,
WebRtc_Word32(RtcpFeedback* incomingMessagesCallback));
MOCK_METHOD3(RegisterRtcpObservers,
void(RtcpIntraFrameObserver* intraFrameCallback,
RtcpBandwidthObserver* bandwidthCallback,
RtcpFeedback* callback));
MOCK_CONST_METHOD0(RTCP,
RTCPMethod());
MOCK_METHOD1(SetRTCPStatus,
@ -210,8 +210,6 @@ class MockRtpRtcp : public RtpRtcp {
WebRtc_Word32(const bool enable));
MOCK_METHOD3(SetREMBData,
WebRtc_Word32(const WebRtc_UWord32 bitrate, const WebRtc_UWord8 numberOfSSRC, const WebRtc_UWord32* SSRC));
MOCK_METHOD1(SetMaximumBitrateEstimate,
WebRtc_Word32(const WebRtc_UWord32 bitrate));
MOCK_METHOD1(SetRemoteBitrateObserver,
bool(RtpRemoteBitrateObserver*));
MOCK_CONST_METHOD0(IJ,
@ -260,12 +258,10 @@ class MockRtpRtcp : public RtpRtcp {
WebRtc_Word32(bool& enable, WebRtc_UWord8& ID));
MOCK_METHOD1(SetAudioLevel,
WebRtc_Word32(const WebRtc_UWord8 level_dBov));
MOCK_METHOD1(RegisterIncomingVideoCallback,
WebRtc_Word32(RtpVideoFeedback* incomingMessagesCallback));
MOCK_METHOD1(SetCameraDelay,
WebRtc_Word32(const WebRtc_Word32 delayMS));
MOCK_METHOD3(SetSendBitrate,
void(const WebRtc_UWord32 startBitrate, const WebRtc_UWord16 minBitrateKbit, const WebRtc_UWord16 maxBitrateKbit));
MOCK_METHOD1(SetTargetSendBitrate,
void(const WebRtc_UWord32 bitrate));
MOCK_METHOD3(SetGenericFECStatus,
WebRtc_Word32(const bool enable, const WebRtc_UWord8 payloadTypeRED, const WebRtc_UWord8 payloadTypeFEC));
MOCK_METHOD3(GenericFECStatus,
@ -277,7 +273,6 @@ class MockRtpRtcp : public RtpRtcp {
WebRtc_Word32(const KeyFrameRequestMethod method));
MOCK_METHOD0(RequestKeyFrame,
WebRtc_Word32());
MOCK_CONST_METHOD3(Version,
int32_t(char* version, uint32_t& remaining_buffer_in_bytes, uint32_t& position));
MOCK_METHOD0(TimeUntilNextProcess,

View File

@ -1,301 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "bandwidth_management.h"
#include "trace.h"
#include "rtp_utility.h"
#include "rtp_rtcp_config.h"
#include <math.h> // sqrt()
namespace webrtc {
// Constructs a disabled bandwidth manager: _bitRate == 0 means "off" and
// makes UpdateBandwidthEstimate()/UpdatePacketLoss() no-ops until
// SetSendBitrate() is called.
BandwidthManagement::BandwidthManagement(const WebRtc_Word32 id) :
_id(id),
_critsect(CriticalSectionWrapper::CreateCriticalSection()),
_lastPacketLossExtendedHighSeqNum(0),
_lastReportAllLost(false),
_lastLoss(0),
_accumulateLostPacketsQ8(0),
_accumulateExpectedPackets(0),
_bitRate(0),
_minBitRateConfigured(0),
_maxBitRateConfigured(0),
_last_fraction_loss(0),
_last_round_trip_time(0),
_bwEstimateIncoming(0),
_timeLastIncrease(0)
{
}
BandwidthManagement::~BandwidthManagement()
{
// _critsect is owned by this class (created in the ctor); release it here.
delete _critsect;
}
// Configures the bitrate window used by the bandwidth manager.
// |startBitrate| becomes the current estimate; min/max arrive in kbps and
// are stored in bps. A zero max means "no cap configured" and is mapped to
// 1 Gbit/s so later clamping logic never has to special-case it.
void
BandwidthManagement::SetSendBitrate(const WebRtc_UWord32 startBitrate,
const WebRtc_UWord16 minBitrateKbit,
const WebRtc_UWord16 maxBitrateKbit)
{
CriticalSectionScoped cs(_critsect);
_bitRate = startBitrate;
_minBitRateConfigured = minBitrateKbit * 1000;
// 0 => unlimited; use 1 Gbit/s as a practical upper bound.
_maxBitRateConfigured =
    (maxBitrateKbit == 0) ? 1000000000 : maxBitrateKbit * 1000;
}
// Reports the configured maximum bitrate in kbps through |maxBitrateKbit|.
// Returns 0 on success, or -1 when no maximum has been configured yet.
WebRtc_Word32
BandwidthManagement::MaxConfiguredBitrate(WebRtc_UWord16* maxBitrateKbit)
{
CriticalSectionScoped cs(_critsect);
if (_maxBitRateConfigured == 0) {
    return -1;
}
*maxBitrateKbit = static_cast<WebRtc_UWord16>(_maxBitRateConfigured / 1000);
return 0;
}
// Handles a new incoming bandwidth estimate (e.g. from TMMBR/REMB),
// given in kbps. If the current send bitrate exceeds the new estimate the
// send bitrate is capped to it and reported via the out-parameters
// (return 0); otherwise -1 is returned and the outputs are untouched
// except for *newBitrate which is always cleared.
WebRtc_Word32
BandwidthManagement::UpdateBandwidthEstimate(const WebRtc_UWord16 bandWidthKbit,
WebRtc_UWord32* newBitrate,
WebRtc_UWord8* fractionLost,
WebRtc_UWord16* roundTripTime)
{
*newBitrate = 0;
CriticalSectionScoped cs(_critsect);
_bwEstimateIncoming = bandWidthKbit * 1000;
if (_bitRate == 0)
{
    // Bandwidth management is disabled.
    return -1;
}
if (_bwEstimateIncoming == 0 || _bitRate <= _bwEstimateIncoming)
{
    // No usable estimate, or we are already at/below it: nothing changes.
    return -1;
}
// Cap the send bitrate to the incoming estimate.
_bitRate = _bwEstimateIncoming;
*newBitrate = _bitRate;
*fractionLost = _last_fraction_loss;
*roundTripTime = _last_round_trip_time;
return 0;
}
// Processes an RTCP receiver-report loss sample. Smooths the reported
// fraction lost (|loss|, Q8: 255 == 100%) over at least 10 expected
// packets, then feeds the smoothed value into ShapeSimple() to produce a
// new target bitrate. Returns 0 with *newBitrate set when the rate
// changed, -1 when management is off or the rate is unchanged.
// |loss| is both input (raw report) and output (smoothed value).
WebRtc_Word32 BandwidthManagement::UpdatePacketLoss(
const WebRtc_UWord32 lastReceivedExtendedHighSeqNum,
WebRtc_UWord32 sentBitrate,
const WebRtc_UWord16 rtt,
WebRtc_UWord8* loss,
WebRtc_UWord32* newBitrate,
WebRtc_Word64 nowMS)
{
CriticalSectionScoped cs(_critsect);
// Remember the raw report so UpdateBandwidthEstimate() can echo it back.
_last_fraction_loss = *loss;
_last_round_trip_time = rtt;
if(_bitRate == 0)
{
// BandwidthManagement off
return -1;
}
// Check sequence number diff and weight loss report
if (_lastPacketLossExtendedHighSeqNum > 0 &&
(lastReceivedExtendedHighSeqNum >= _lastPacketLossExtendedHighSeqNum))
{
// This is not the first loss report and the sequence number is
// non-decreasing. Calculate sequence number diff.
WebRtc_UWord32 seqNumDiff = lastReceivedExtendedHighSeqNum
- _lastPacketLossExtendedHighSeqNum;
// Check if this report and the last was 100% loss, then report
// 100% loss even though seqNumDiff is small.
// If not, go on with the checks.
if (!(_lastReportAllLost && *loss == 255))
{
_lastReportAllLost = (*loss == 255);
// Calculate number of lost packets.
// loss = 256 * numLostPackets / expectedPackets.
const int numLostPacketsQ8 = *loss * seqNumDiff;
// Accumulate reports.
_accumulateLostPacketsQ8 += numLostPacketsQ8;
_accumulateExpectedPackets += seqNumDiff;
// Report loss if the total report is based on sufficiently
// many packets.
const int limitNumPackets = 10;
if (_accumulateExpectedPackets >= limitNumPackets)
{
*loss = _accumulateLostPacketsQ8 / _accumulateExpectedPackets;
// Reset accumulators
_accumulateLostPacketsQ8 = 0;
_accumulateExpectedPackets = 0;
}
else
{
// Report zero loss until we have enough data to estimate
// the loss rate.
*loss = 0;
}
}
}
// Keep for next time.
_lastLoss = *loss;
// Remember the sequence number until next time
_lastPacketLossExtendedHighSeqNum = lastReceivedExtendedHighSeqNum;
// Derive the new target rate from the smoothed loss, RTT and actual
// sent rate; ShapeSimple() returns 0 to signal "no change".
WebRtc_UWord32 bitRate = ShapeSimple(*loss, rtt, sentBitrate, nowMS);
if (bitRate == 0)
{
// no change
return -1;
}
_bitRate = bitRate;
*newBitrate = bitRate;
return 0;
}
// Reports the current send-side estimate through |bandwidthKbit|.
// Fails (-1) when bandwidth management is off (_bitRate == 0) or the out
// pointer is NULL.
// NOTE(review): _bitRate is stored in bps elsewhere in this file while the
// parameter is named "Kbit" — confirm the expected unit with callers.
WebRtc_Word32 BandwidthManagement::AvailableBandwidth(
WebRtc_UWord32* bandwidthKbit) const {
CriticalSectionScoped cs(_critsect);
if (bandwidthKbit == NULL || _bitRate == 0) {
    return -1;
}
*bandwidthKbit = _bitRate;
return 0;
}
/* Calculate the rate that TCP-Friendly Rate Control (TFRC) would apply.
 * The throughput equation from RFC 3448, Section 3.1, is used:
 *   X = s / (R*sqrt(2*b*p/3) + t_RTO*(3*sqrt(3*b*p/8)*p*(1+32*p^2)))
 * Returns the TFRC rate in bits/second, or -1 if any input is out of range.
 */
// protected
WebRtc_Word32 BandwidthManagement::CalcTFRCbps(WebRtc_Word16 avgPackSizeBytes,
WebRtc_Word32 rttMs,
WebRtc_Word32 packetLoss)
{
if (avgPackSizeBytes <= 0 || rttMs <= 0 || packetLoss <= 0)
{
// input variables out of range; return -1
return -1;
}
double R = static_cast<double>(rttMs)/1000; // RTT in seconds
int b = 1; // number of packets acknowledged by a single TCP acknowledgement; recommended = 1
double t_RTO = 4.0 * R; // TCP retransmission timeout value in seconds; recommended = 4*R
double p = static_cast<double>(packetLoss)/255; // packet loss rate in [0, 1)
double s = static_cast<double>(avgPackSizeBytes);
// calculate send rate in bytes/second
double X = s / (R * sqrt(2 * b * p / 3) + (t_RTO * (3 * sqrt( 3 * b * p / 8) * p * (1 + 32 * p * p))));
return (static_cast<WebRtc_Word32>(X*8)); // bits/second
}
/*
 * Simple bandwidth estimation. Depends a lot on bwEstimateIncoming and
 * packetLoss (Q8 fraction: 255 == 100% loss).
 * Policy implemented below:
 *   loss <= ~2%  (<=5/256):  increase 8% + 1 kbps, at most once per
 *                            kBWEUpdateIntervalMs.
 *   ~2% < loss <= ~10%:      hold the current rate.
 *   loss > ~10% (>26/256):   reduce, but never below the TFRC fair rate.
 * The result is finally clamped to [min, max] configured bitrates and to
 * the incoming (receive-side) estimate. Returns the new rate in bps;
 * a return of 0 is never produced here directly but is treated by the
 * caller as "no change".
 */
// protected
WebRtc_UWord32 BandwidthManagement::ShapeSimple(WebRtc_Word32 packetLoss,
WebRtc_Word32 rtt,
WebRtc_UWord32 sentBitrate,
WebRtc_Word64 nowMS)
{
WebRtc_UWord32 newBitRate = 0;
bool reducing = false;
// Limit the rate increases to once a second.
if (packetLoss <= 5)
{
if ((nowMS - _timeLastIncrease) <
kBWEUpdateIntervalMs)
{
return _bitRate;
}
_timeLastIncrease = nowMS;
}
if (packetLoss > 5 && packetLoss <= 26)
{
// 2% - 10%
newBitRate = _bitRate;
}
else if (packetLoss > 26)
{
// 26/256 ~= 10%
// reduce rate: newRate = rate * (1 - 0.5*lossRate)
// packetLoss = 256*lossRate
newBitRate = static_cast<WebRtc_UWord32>(
(sentBitrate * static_cast<double>(512 - packetLoss)) / 512.0);
reducing = true;
}
else
{
// increase rate by 8%
newBitRate = static_cast<WebRtc_UWord32>(_bitRate * 1.08 + 0.5);
// add 1 kbps extra, just to make sure that we do not get stuck
// (gives a little extra increase at low rates, negligible at higher rates)
newBitRate += 1000;
}
// Calculate what rate TFRC would apply in this situation
WebRtc_Word32 tfrcRate = CalcTFRCbps(1000, rtt, packetLoss); // scale loss to Q0 (back to [0, 255])
if (reducing &&
tfrcRate > 0 &&
static_cast<WebRtc_UWord32>(tfrcRate) > newBitRate)
{
// do not reduce further if rate is below TFRC rate
newBitRate = tfrcRate;
}
// Never exceed what the receive side says is available.
if (_bwEstimateIncoming > 0 && newBitRate > _bwEstimateIncoming)
{
newBitRate = _bwEstimateIncoming;
}
// Clamp to the configured window; min wins over the estimate.
if (newBitRate > _maxBitRateConfigured)
{
newBitRate = _maxBitRateConfigured;
}
if (newBitRate < _minBitRateConfigured)
{
WEBRTC_TRACE(kTraceWarning,
kTraceRtpRtcp,
_id,
"The configured min bitrate (%u kbps) is greater than the "
"estimated available bandwidth (%u kbps).\n",
_minBitRateConfigured / 1000, newBitRate / 1000);
newBitRate = _minBitRateConfigured;
}
return newBitRate;
}
} // namespace webrtc

View File

@ -1,92 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_BANDWIDTH_MANAGEMENT_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_BANDWIDTH_MANAGEMENT_H_
#include "typedefs.h"
#include "rtp_rtcp_config.h"
#include "critical_section_wrapper.h"
/*
* FEC and NACK added bitrate is handled outside class
*/
namespace webrtc {
// Send-side bandwidth manager: combines receive-side estimates
// (TMMBR/REMB) with RTCP receiver-report loss statistics to maintain a
// target send bitrate. All public methods are thread-safe via _critsect.
class BandwidthManagement
{
public:
BandwidthManagement(const WebRtc_Word32 id);
~BandwidthManagement();
// Call when we receive a RTCP message with TMMBR or REMB.
// |bandWidthKbit| is the remote estimate in kbps; on success (0) the
// capped bitrate and last known loss/RTT are returned.
WebRtc_Word32 UpdateBandwidthEstimate(const WebRtc_UWord16 bandWidthKbit,
WebRtc_UWord32* newBitrate,
WebRtc_UWord8* fractionLost,
WebRtc_UWord16* roundTripTime);
// Call when we receive a RTCP message with a ReceiveBlock.
// |loss| is in/out: raw Q8 fraction lost in, smoothed value out.
WebRtc_Word32 UpdatePacketLoss(
const WebRtc_UWord32 lastReceivedExtendedHighSeqNum,
WebRtc_UWord32 sentBitrate,
const WebRtc_UWord16 rtt,
WebRtc_UWord8* loss,
WebRtc_UWord32* newBitrate,
WebRtc_Word64 nowMS);
// If no bandwidth estimate is available or if |bandwidthKbit| is NULL,
// -1 is returned.
WebRtc_Word32 AvailableBandwidth(WebRtc_UWord32* bandwidthKbit) const;
// Configure the bitrate window; startBitrate enables the manager
// (0 keeps it disabled), maxBitrateKbit == 0 means no cap.
void SetSendBitrate(const WebRtc_UWord32 startBitrate,
const WebRtc_UWord16 minBitrateKbit,
const WebRtc_UWord16 maxBitrateKbit);
WebRtc_Word32 MaxConfiguredBitrate(WebRtc_UWord16* maxBitrateKbit);
protected:
// Core rate-shaping policy: raise/hold/lower the rate from loss and RTT.
WebRtc_UWord32 ShapeSimple(WebRtc_Word32 packetLoss,
WebRtc_Word32 rtt,
WebRtc_UWord32 sentBitrate,
WebRtc_Word64 nowMS);
// TCP-friendly (RFC 3448) fair-rate computation, bits/second.
WebRtc_Word32 CalcTFRCbps(WebRtc_Word16 avgPackSizeBytes,
WebRtc_Word32 rttMs,
WebRtc_Word32 packetLoss);
private:
enum { kBWEUpdateIntervalMs = 1000 };  // min spacing between rate increases
WebRtc_Word32 _id;
CriticalSectionWrapper* _critsect;  // owned; guards all state below
// incoming filters
WebRtc_UWord32 _lastPacketLossExtendedHighSeqNum;
bool _lastReportAllLost;
WebRtc_UWord8 _lastLoss;
int _accumulateLostPacketsQ8;
int _accumulateExpectedPackets;
// bitrate
WebRtc_UWord32 _bitRate;  // current target; 0 == management disabled
WebRtc_UWord32 _minBitRateConfigured;
WebRtc_UWord32 _maxBitRateConfigured;
WebRtc_UWord8 _last_fraction_loss;
WebRtc_UWord16 _last_round_trip_time;
// bandwidth estimate
WebRtc_UWord32 _bwEstimateIncoming;  // latest receive-side estimate, bps
WebRtc_Word64 _timeLastIncrease;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_BANDWIDTH_MANAGEMENT_H_

View File

@ -30,24 +30,25 @@ using namespace RTCPHelp;
RTCPReceiver::RTCPReceiver(const WebRtc_Word32 id, RtpRtcpClock* clock,
ModuleRtpRtcpImpl* owner)
: TMMBRHelp(),
_id(id),
_clock(*clock),
_method(kRtcpOff),
_lastReceived(0),
_rtpRtcp(*owner),
_id(id),
_clock(*clock),
_method(kRtcpOff),
_lastReceived(0),
_rtpRtcp(*owner),
_criticalSectionFeedbacks(
CriticalSectionWrapper::CreateCriticalSection()),
_cbRtcpFeedback(NULL),
_cbVideoFeedback(NULL),
_criticalSectionRTCPReceiver(
CriticalSectionWrapper::CreateCriticalSection()),
_SSRC(0),
_remoteSSRC(0),
_remoteSenderInfo(),
_lastReceivedSRNTPsecs(0),
_lastReceivedSRNTPfrac(0),
_receivedInfoMap(),
_packetTimeOutMS(0),
_cbRtcpFeedback(NULL),
_cbRtcpBandwidthObserver(NULL),
_cbRtcpIntraFrameObserver(NULL),
_criticalSectionRTCPReceiver(
CriticalSectionWrapper::CreateCriticalSection()),
_SSRC(0),
_remoteSSRC(0),
_remoteSenderInfo(),
_lastReceivedSRNTPsecs(0),
_lastReceivedSRNTPfrac(0),
_receivedInfoMap(),
_packetTimeOutMS(0),
_rtt(0) {
memset(&_remoteSenderInfo, 0, sizeof(_remoteSenderInfo));
WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
@ -121,25 +122,18 @@ RTCPReceiver::SetRemoteSSRC( const WebRtc_UWord32 ssrc)
return 0;
}
WebRtc_Word32
RTCPReceiver::RegisterIncomingRTCPCallback(RtcpFeedback* incomingMessagesCallback)
{
CriticalSectionScoped lock(_criticalSectionFeedbacks);
_cbRtcpFeedback = incomingMessagesCallback;
return 0;
void RTCPReceiver::RegisterRtcpObservers(
RtcpIntraFrameObserver* intra_frame_callback,
RtcpBandwidthObserver* bandwidth_callback,
RtcpFeedback* feedback_callback) {
CriticalSectionScoped lock(_criticalSectionFeedbacks);
_cbRtcpIntraFrameObserver = intra_frame_callback;
_cbRtcpBandwidthObserver = bandwidth_callback;
_cbRtcpFeedback = feedback_callback;
}
WebRtc_Word32
RTCPReceiver::RegisterIncomingVideoCallback(RtpVideoFeedback* incomingMessagesCallback)
{
CriticalSectionScoped lock(_criticalSectionFeedbacks);
_cbVideoFeedback = incomingMessagesCallback;
return 0;
}
void
RTCPReceiver::SetSSRC( const WebRtc_UWord32 ssrc)
{
void RTCPReceiver::SetSSRC( const WebRtc_UWord32 ssrc) {
CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
_SSRC = ssrc;
}
@ -157,7 +151,6 @@ WebRtc_Word32 RTCPReceiver::ResetRTT(const WebRtc_UWord32 remoteSSRC) {
reportBlock->avgRTT = 0;
reportBlock->minRTT = 0;
reportBlock->maxRTT = 0;
return 0;
}
@ -1077,20 +1070,16 @@ RTCPReceiver::HandleRPSI(RTCPUtility::RTCPParserV2& rtcpParser,
}
// no need for critsect we have _criticalSectionRTCPReceiver
void
RTCPReceiver::HandlePsfbApp(RTCPUtility::RTCPParserV2& rtcpParser,
RTCPPacketInformation& rtcpPacketInformation)
{
RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
if (pktType == RTCPUtility::kRtcpPsfbRembCode)
{
pktType = rtcpParser.Iterate();
if (pktType == RTCPUtility::kRtcpPsfbRembItemCode)
{
HandleREMBItem(rtcpParser, rtcpPacketInformation);
rtcpParser.Iterate();
}
void RTCPReceiver::HandlePsfbApp(RTCPUtility::RTCPParserV2& rtcpParser,
RTCPPacketInformation& rtcpPacketInformation) {
RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
if (pktType == RTCPUtility::kRtcpPsfbRembCode) {
pktType = rtcpParser.Iterate();
if (pktType == RTCPUtility::kRtcpPsfbRembItemCode) {
HandleREMBItem(rtcpParser, rtcpPacketInformation);
rtcpParser.Iterate();
}
}
}
// no need for critsect we have _criticalSectionRTCPReceiver
@ -1117,15 +1106,13 @@ RTCPReceiver::HandleIJItem(const RTCPUtility::RTCPPacket& rtcpPacket,
rtcpPacket.ExtendedJitterReportItem.Jitter;
}
void
RTCPReceiver::HandleREMBItem(RTCPUtility::RTCPParserV2& rtcpParser,
RTCPPacketInformation& rtcpPacketInformation)
{
const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpRemb;
rtcpPacketInformation.receiverEstimatedMaxBitrate =
rtcpPacket.REMBItem.BitRate;
void RTCPReceiver::HandleREMBItem(
RTCPUtility::RTCPParserV2& rtcpParser,
RTCPPacketInformation& rtcpPacketInformation) {
const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpRemb;
rtcpPacketInformation.receiverEstimatedMaxBitrate =
rtcpPacket.REMBItem.BitRate;
}
// no need for critsect we have _criticalSectionRTCPReceiver
@ -1204,42 +1191,9 @@ RTCPReceiver::HandleAPPItem(RTCPUtility::RTCPParserV2& rtcpParser,
rtcpParser.Iterate();
}
void
RTCPReceiver::OnReceivedIntraFrameRequest(const FrameType frameType,
const WebRtc_UWord8 streamIdx) const
{
CriticalSectionScoped lock(_criticalSectionFeedbacks);
if(_cbVideoFeedback)
{
_cbVideoFeedback->OnReceivedIntraFrameRequest(_id, frameType, streamIdx);
}
}
void
RTCPReceiver::OnReceivedSliceLossIndication(const WebRtc_UWord8 pitureID) const
{
CriticalSectionScoped lock(_criticalSectionFeedbacks);
if(_cbRtcpFeedback)
{
_cbRtcpFeedback->OnSLIReceived(_id, pitureID);
}
}
void RTCPReceiver::OnReceivedReferencePictureSelectionIndication(
const WebRtc_UWord64 pitureID) const {
CriticalSectionScoped lock(_criticalSectionFeedbacks);
if (_cbRtcpFeedback) {
_cbRtcpFeedback->OnRPSIReceived(_id, pitureID);
}
}
WebRtc_Word32 RTCPReceiver::UpdateTMMBR() {
WebRtc_Word32 numBoundingSet = 0;
WebRtc_UWord32 minBitrateKbit = 0;
WebRtc_UWord32 maxBitrateKbit = 0;
WebRtc_UWord32 bitrate = 0;
WebRtc_UWord32 accNumCandidates = 0;
WebRtc_Word32 size = TMMBRReceived(0, 0, NULL);
@ -1271,179 +1225,131 @@ WebRtc_Word32 RTCPReceiver::UpdateTMMBR() {
return 0;
}
// Get net bitrate from bounding set depending on sent packet rate
if (CalcMinBitRate(&minBitrateKbit)) {
if (CalcMinBitRate(&bitrate)) {
// we have a new bandwidth estimate on this channel
_rtpRtcp.OnReceivedBandwidthEstimateUpdate((WebRtc_UWord16)minBitrateKbit);
WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
"Set TMMBR request min:%d kbps max:%d kbps, channel: %d",
minBitrateKbit, maxBitrateKbit, _id);
CriticalSectionScoped lock(_criticalSectionFeedbacks);
if (_cbRtcpBandwidthObserver) {
_cbRtcpBandwidthObserver->OnReceivedEstimatedBitrate(bitrate * 1000);
WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
"Set TMMBR request:%d kbps", bitrate);
}
}
return 0;
}
// Holding no Critical section
void RTCPReceiver::TriggerCallbacksFromRTCPPacket(
RTCPPacketInformation& rtcpPacketInformation)
{
// Process TMMBR and REMB first to avoid multiple callbacks
// to OnNetworkChanged.
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpTmmbr)
{
WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
"SIG [RTCP] Incoming TMMBR to id:%d", _id);
RTCPPacketInformation& rtcpPacketInformation) {
// Process TMMBR and REMB first to avoid multiple callbacks
// to OnNetworkChanged.
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpTmmbr) {
WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
"SIG [RTCP] Incoming TMMBR to id:%d", _id);
// Might trigger a OnReceivedBandwidthEstimateUpdate.
UpdateTMMBR();
// Might trigger a OnReceivedBandwidthEstimateUpdate.
UpdateTMMBR();
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSr) {
_rtpRtcp.OnReceivedNTP();
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSrReq) {
_rtpRtcp.OnRequestSendReport();
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpNack) {
if (rtcpPacketInformation.nackSequenceNumbersLength > 0) {
WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
"SIG [RTCP] Incoming NACK length:%d",
rtcpPacketInformation.nackSequenceNumbersLength);
_rtpRtcp.OnReceivedNACK(
rtcpPacketInformation.nackSequenceNumbersLength,
rtcpPacketInformation.nackSequenceNumbers);
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRemb)
{
WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
"SIG [RTCP] Incoming REMB to id:%d", _id);
}
{
CriticalSectionScoped lock(_criticalSectionFeedbacks);
// We need to bounce this to the default channel.
_rtpRtcp.OnReceivedEstimatedMaxBitrate(
rtcpPacketInformation.receiverEstimatedMaxBitrate);
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSr ||
rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRr)
{
if (rtcpPacketInformation.reportBlock)
{
_rtpRtcp.OnPacketLossStatisticsUpdate(
rtcpPacketInformation.fractionLost,
rtcpPacketInformation.roundTripTime,
rtcpPacketInformation.lastReceivedExtendedHighSeqNum);
// We need feedback that we have received a report block(s) so that we
// can generate a new packet in a conference relay scenario, one received
// report can generate several RTCP packets, based on number relayed/mixed
// a send report block should go out to all receivers.
if (_cbRtcpIntraFrameObserver) {
if ((rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpPli) ||
(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpFir)) {
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpPli) {
WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
"SIG [RTCP] Incoming PLI from SSRC:0x%x",
rtcpPacketInformation.remoteSSRC);
} else {
WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
"SIG [RTCP] Incoming FIR from SSRC:0x%x",
rtcpPacketInformation.remoteSSRC);
}
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSr)
{
_rtpRtcp.OnReceivedNTP();
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSrReq)
{
_rtpRtcp.OnRequestSendReport();
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpNack)
{
if (rtcpPacketInformation.nackSequenceNumbersLength > 0)
{
WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
"SIG [RTCP] Incoming NACK to id:%d", _id);
_rtpRtcp.OnReceivedNACK(
rtcpPacketInformation.nackSequenceNumbersLength,
rtcpPacketInformation.nackSequenceNumbers);
}
}
if ((rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpPli) ||
(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpFir))
{
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpPli)
{
WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
"SIG [RTCP] Incoming PLI to id:%d", _id);
} else
{
WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
"SIG [RTCP] Incoming FIR to id:%d", _id);
}
_rtpRtcp.OnReceivedIntraFrameRequest(&_rtpRtcp);
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSli)
{
// we need use a bounce it up to handle default channel
_rtpRtcp.OnReceivedSliceLossIndication(
_cbRtcpIntraFrameObserver->OnReceivedIntraFrameRequest(
rtcpPacketInformation.remoteSSRC);
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSli) {
_cbRtcpIntraFrameObserver->OnReceivedSLI(
rtcpPacketInformation.remoteSSRC,
rtcpPacketInformation.sliPictureId);
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRpsi)
{
// we need use a bounce it up to handle default channel
_rtpRtcp.OnReceivedReferencePictureSelectionIndication(
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRpsi) {
_cbRtcpIntraFrameObserver->OnReceivedRPSI(
rtcpPacketInformation.remoteSSRC,
rtcpPacketInformation.rpsiPictureId);
}
}
{
CriticalSectionScoped lock(_criticalSectionFeedbacks);
// we need a feedback that we have received a report block(s) so that we can generate a new packet
// in a conference relay scenario, one received report can generate several RTCP packets, based
// on number relayed/mixed
// a send report block should go out to all receivers
if(_cbRtcpFeedback)
{
if(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSr)
{
_cbRtcpFeedback->OnSendReportReceived(_id, rtcpPacketInformation.remoteSSRC);
} else
{
_cbRtcpFeedback->OnReceiveReportReceived(_id, rtcpPacketInformation.remoteSSRC);
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRemb)
{
_cbRtcpFeedback->OnReceiverEstimatedMaxBitrateReceived(_id,
rtcpPacketInformation.receiverEstimatedMaxBitrate);
}
if(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpXrVoipMetric)
{
WebRtc_Word8 VoIPmetricBuffer[7*4];
VoIPmetricBuffer[0] = rtcpPacketInformation.VoIPMetric->lossRate;
VoIPmetricBuffer[1] = rtcpPacketInformation.VoIPMetric->discardRate;
VoIPmetricBuffer[2] = rtcpPacketInformation.VoIPMetric->burstDensity;
VoIPmetricBuffer[3] = rtcpPacketInformation.VoIPMetric->gapDensity;
VoIPmetricBuffer[4] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->burstDuration >> 8);
VoIPmetricBuffer[5] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->burstDuration);
VoIPmetricBuffer[6] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->gapDuration >> 8);
VoIPmetricBuffer[7] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->gapDuration);
VoIPmetricBuffer[8] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->roundTripDelay >> 8);
VoIPmetricBuffer[9] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->roundTripDelay);
VoIPmetricBuffer[10] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->endSystemDelay >> 8);
VoIPmetricBuffer[11] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->endSystemDelay);
VoIPmetricBuffer[12] = rtcpPacketInformation.VoIPMetric->signalLevel;
VoIPmetricBuffer[13] = rtcpPacketInformation.VoIPMetric->noiseLevel;
VoIPmetricBuffer[14] = rtcpPacketInformation.VoIPMetric->RERL;
VoIPmetricBuffer[15] = rtcpPacketInformation.VoIPMetric->Gmin;
VoIPmetricBuffer[16] = rtcpPacketInformation.VoIPMetric->Rfactor;
VoIPmetricBuffer[17] = rtcpPacketInformation.VoIPMetric->extRfactor;
VoIPmetricBuffer[18] = rtcpPacketInformation.VoIPMetric->MOSLQ;
VoIPmetricBuffer[19] = rtcpPacketInformation.VoIPMetric->MOSCQ;
VoIPmetricBuffer[20] = rtcpPacketInformation.VoIPMetric->RXconfig;
VoIPmetricBuffer[21] = 0; // reserved
VoIPmetricBuffer[22] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->JBnominal >> 8);
VoIPmetricBuffer[23] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->JBnominal);
VoIPmetricBuffer[24] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->JBmax >> 8);
VoIPmetricBuffer[25] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->JBmax);
VoIPmetricBuffer[26] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->JBabsMax >> 8);
VoIPmetricBuffer[27] = (WebRtc_UWord8)(rtcpPacketInformation.VoIPMetric->JBabsMax);
_cbRtcpFeedback->OnXRVoIPMetricReceived(_id, rtcpPacketInformation.VoIPMetric, VoIPmetricBuffer);
}
if(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpApp)
{
_cbRtcpFeedback->OnApplicationDataReceived(_id,
rtcpPacketInformation.applicationSubType,
rtcpPacketInformation.applicationName,
rtcpPacketInformation.applicationLength,
rtcpPacketInformation.applicationData);
}
}
if (_cbRtcpBandwidthObserver) {
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRemb) {
WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
"SIG [RTCP] Incoming REMB:%d",
rtcpPacketInformation.receiverEstimatedMaxBitrate);
_cbRtcpBandwidthObserver->OnReceivedEstimatedBitrate(
rtcpPacketInformation.receiverEstimatedMaxBitrate);
}
if ((rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSr ||
rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRr) &&
rtcpPacketInformation.reportBlock) {
WebRtc_UWord32 now = _clock.GetTimeInMS();
_cbRtcpBandwidthObserver->OnReceivedRtcpReceiverReport(
rtcpPacketInformation.remoteSSRC,
rtcpPacketInformation.fractionLost,
rtcpPacketInformation.roundTripTime,
rtcpPacketInformation.lastReceivedExtendedHighSeqNum,
now);
}
}
if(_cbRtcpFeedback) {
if(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSr) {
_cbRtcpFeedback->OnSendReportReceived(_id,
rtcpPacketInformation.remoteSSRC);
} else {
_cbRtcpFeedback->OnReceiveReportReceived(_id,
rtcpPacketInformation.remoteSSRC);
}
if(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpXrVoipMetric) {
_cbRtcpFeedback->OnXRVoIPMetricReceived(_id,
rtcpPacketInformation.VoIPMetric);
}
if(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpApp) {
_cbRtcpFeedback->OnApplicationDataReceived(_id,
rtcpPacketInformation.applicationSubType,
rtcpPacketInformation.applicationName,
rtcpPacketInformation.applicationLength,
rtcpPacketInformation.applicationData);
}
}
}
}
WebRtc_Word32 RTCPReceiver::CNAME(const WebRtc_UWord32 remoteSSRC,
char cName[RTCP_CNAME_SIZE]) const {
if (cName == NULL) {
WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
"%s invalid argument", __FUNCTION__);
return -1;
}
assert(cName);
CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
RTCPCnameInformation* cnameInfo = GetCnameInformation(remoteSSRC);
assert(cnameInfo);
if (cnameInfo == NULL) {
return -1;
}
cName[RTCP_CNAME_SIZE - 1] = 0;
strncpy(cName, cnameInfo->name, RTCP_CNAME_SIZE - 1);
return 0;

View File

@ -44,9 +44,9 @@ public:
WebRtc_UWord32 RelaySSRC() const;
WebRtc_Word32 RegisterIncomingRTCPCallback(RtcpFeedback* incomingMessagesCallback);
WebRtc_Word32 RegisterIncomingVideoCallback(RtpVideoFeedback* incomingMessagesCallback);
void RegisterRtcpObservers(RtcpIntraFrameObserver* intra_frame_callback,
RtcpBandwidthObserver* bandwidth_callback,
RtcpFeedback* feedback_callback);
WebRtc_Word32 IncomingRTCPPacket(RTCPHelp::RTCPPacketInformation& rtcpPacketInformation,
RTCPUtility::RTCPParserV2 *rtcpParser);
@ -80,13 +80,6 @@ public:
WebRtc_Word32 SenderInfoReceived(RTCPSenderInfo* senderInfo) const;
void OnReceivedIntraFrameRequest(const FrameType frameType,
const WebRtc_UWord8 streamIdx) const;
void OnReceivedSliceLossIndication(const WebRtc_UWord8 pitureID) const;
void OnReceivedReferencePictureSelectionIndication(
const WebRtc_UWord64 pitureID) const;
// get statistics
WebRtc_Word32 StatisticsReceived(
std::vector<RTCPReportBlock>* receiveBlocks) const;
@ -202,7 +195,8 @@ protected:
CriticalSectionWrapper* _criticalSectionFeedbacks;
RtcpFeedback* _cbRtcpFeedback;
RtpVideoFeedback* _cbVideoFeedback;
RtcpBandwidthObserver* _cbRtcpBandwidthObserver;
RtcpIntraFrameObserver* _cbRtcpIntraFrameObserver;
CriticalSectionWrapper* _criticalSectionRTCPReceiver;
WebRtc_UWord32 _SSRC;

View File

@ -277,13 +277,6 @@ void RTCPSender::UpdateRemoteBitrateEstimate(unsigned int target_bitrate) {
}
}
void RTCPSender::ReceivedRemb(unsigned int estimated_bitrate) {
CriticalSectionScoped lock(_criticalSectionRTCPSender);
if (_bitrate_observer) {
_bitrate_observer->OnReceivedRemb(estimated_bitrate);
}
}
bool
RTCPSender::TMMBR() const
{

View File

@ -91,8 +91,6 @@ public:
void UpdateRemoteBitrateEstimate(unsigned int target_bitrate);
void ReceivedRemb(unsigned int estimated_bitrate);
/*
* TMMBR
*/

View File

@ -123,7 +123,7 @@ RTPReceiver::~RTPReceiver() {
WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id, "%s deleted", __FUNCTION__);
}
WebRtc_Word32 RTPReceiver::Init() {
void RTPReceiver::Init() {
CriticalSectionScoped lock(_criticalSectionRTPReceiver);
_lastReceiveTime = 0;
@ -181,7 +181,7 @@ WebRtc_Word32 RTPReceiver::Init() {
Bitrate::Init();
RTPReceiverAudio::Init();
return RTPReceiverVideo::Init();
RTPReceiverVideo::Init();
}
void

View File

@ -26,99 +26,46 @@ WebRtc_UWord32 BitRateBPS(WebRtc_UWord16 x )
return (x & 0x3fff) * WebRtc_UWord32(pow(10.0f,(2 + (x >> 14))));
}
RTPReceiverVideo::RTPReceiverVideo():
_id(0),
_rtpRtcp(NULL),
_criticalSectionFeedback(CriticalSectionWrapper::CreateCriticalSection()),
_cbVideoFeedback(NULL),
_criticalSectionReceiverVideo(
CriticalSectionWrapper::CreateCriticalSection()),
_completeFrame(false),
_packetStartTimeMs(0),
_receivedBW(),
_estimatedBW(0),
_currentFecFrameDecoded(false),
_receiveFEC(NULL),
_overUseDetector(),
_videoBitRate(),
_lastBitRateChange(0),
_packetOverHead(28)
{
memset(_receivedBW, 0,sizeof(_receivedBW));
RTPReceiverVideo::RTPReceiverVideo()
: _id(0),
_rtpRtcp(NULL),
_criticalSectionReceiverVideo(
CriticalSectionWrapper::CreateCriticalSection()),
_currentFecFrameDecoded(false),
_receiveFEC(NULL),
_overUseDetector(),
_videoBitRate(),
_lastBitRateChange(0),
_packetOverHead(28) {
}
RTPReceiverVideo::RTPReceiverVideo(const WebRtc_Word32 id,
ModuleRtpRtcpImpl* owner):
_id(id),
_rtpRtcp(owner),
_criticalSectionFeedback(CriticalSectionWrapper::CreateCriticalSection()),
_cbVideoFeedback(NULL),
_criticalSectionReceiverVideo(
CriticalSectionWrapper::CreateCriticalSection()),
_completeFrame(false),
_packetStartTimeMs(0),
_receivedBW(),
_estimatedBW(0),
_currentFecFrameDecoded(false),
_receiveFEC(NULL),
_overUseDetector(),
_videoBitRate(),
_lastBitRateChange(0),
_packetOverHead(28)
{
memset(_receivedBW, 0,sizeof(_receivedBW));
ModuleRtpRtcpImpl* owner)
: _id(id),
_rtpRtcp(owner),
_criticalSectionReceiverVideo(
CriticalSectionWrapper::CreateCriticalSection()),
_currentFecFrameDecoded(false),
_receiveFEC(NULL),
_overUseDetector(),
_videoBitRate(),
_lastBitRateChange(0),
_packetOverHead(28) {
}
RTPReceiverVideo::~RTPReceiverVideo()
{
delete _criticalSectionFeedback;
RTPReceiverVideo::~RTPReceiverVideo() {
delete _criticalSectionReceiverVideo;
delete _receiveFEC;
}
WebRtc_Word32
RTPReceiverVideo::Init()
{
_completeFrame = false;
_packetStartTimeMs = 0;
_estimatedBW = 0;
_currentFecFrameDecoded = false;
_packetOverHead = 28;
for (int i = 0; i < BW_HISTORY_SIZE; i++)
{
_receivedBW[i] = 0;
}
ResetOverUseDetector();
return 0;
void RTPReceiverVideo::Init() {
_currentFecFrameDecoded = false;
_packetOverHead = 28;
ResetOverUseDetector();
}
void
RTPReceiverVideo::ChangeUniqueId(const WebRtc_Word32 id)
{
_id = id;
}
WebRtc_Word32
RTPReceiverVideo::RegisterIncomingVideoCallback(RtpVideoFeedback* incomingMessagesCallback)
{
CriticalSectionScoped lock(_criticalSectionFeedback);
_cbVideoFeedback = incomingMessagesCallback;
return 0;
}
void
RTPReceiverVideo::UpdateBandwidthManagement(const WebRtc_UWord32 bitrateBps,
const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTimeMs)
{
CriticalSectionScoped lock(_criticalSectionFeedback);
if(_cbVideoFeedback)
{
_cbVideoFeedback->OnNetworkChanged(_id,
bitrateBps,
fractionLost,
roundTripTimeMs);
}
void RTPReceiverVideo::ChangeUniqueId(const WebRtc_Word32 id) {
_id = id;
}
ModuleRTPUtility::Payload* RTPReceiverVideo::RegisterReceiveVideoPayload(
@ -150,231 +97,168 @@ ModuleRTPUtility::Payload* RTPReceiverVideo::RegisterReceiveVideoPayload(
return payload;
}
void RTPReceiverVideo::ResetOverUseDetector()
{
_overUseDetector.Reset();
_videoBitRate.Init();
_lastBitRateChange = 0;
}
// called under _criticalSectionReceiverVideo
WebRtc_UWord16
RTPReceiverVideo::EstimateBandwidth(const WebRtc_UWord16 bandwidth)
{
// received fragments
// estimate BW
WebRtc_UWord16 bwSort[BW_HISTORY_SIZE];
for(int i = 0; i < BW_HISTORY_SIZE-1; i++)
{
_receivedBW[i] = _receivedBW[i+1];
bwSort[i] = _receivedBW[i+1];
}
_receivedBW[BW_HISTORY_SIZE-1] = bandwidth;
bwSort[BW_HISTORY_SIZE-1] = bandwidth;
WebRtc_UWord16 temp;
for (int i = BW_HISTORY_SIZE-1; i >= 0; i--)
{
for (int j = 1; j <= i; j++)
{
if (bwSort[j-1] > bwSort[j])
{
temp = bwSort[j-1];
bwSort[j-1] = bwSort[j];
bwSort[j] = temp;
}
}
}
int zeroCount = 0;
for (; zeroCount < BW_HISTORY_SIZE; zeroCount++)
{
if (bwSort[zeroCount]!= 0)
{
break;
}
}
WebRtc_UWord32 indexMedian = (BW_HISTORY_SIZE -1) - (BW_HISTORY_SIZE-zeroCount)/2;
WebRtc_UWord16 bandwidthMedian = bwSort[indexMedian];
if (bandwidthMedian > 0)
{
if (_estimatedBW == bandwidth)
{
// don't trigger a callback
bandwidthMedian = 0;
} else
{
_estimatedBW = bandwidthMedian;
}
} else
{
// can't be negative
bandwidthMedian = 0;
}
return bandwidthMedian;
void RTPReceiverVideo::ResetOverUseDetector() {
_overUseDetector.Reset();
_videoBitRate.Init();
_lastBitRateChange = 0;
}
// we have no critext when calling this
// we are not allowed to have any critsects when calling CallbackOfReceivedPayloadData
WebRtc_Word32
RTPReceiverVideo::ParseVideoCodecSpecific(WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength,
const RtpVideoCodecTypes videoType,
const bool isRED,
const WebRtc_UWord8* incomingRtpPacket,
const WebRtc_UWord16 incomingRtpPacketSize,
const WebRtc_Word64 nowMS)
{
WebRtc_Word32 retVal = 0;
// we are not allowed to have any critsects when calling
// CallbackOfReceivedPayloadData
WebRtc_Word32 RTPReceiverVideo::ParseVideoCodecSpecific(
WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength,
const RtpVideoCodecTypes videoType,
const bool isRED,
const WebRtc_UWord8* incomingRtpPacket,
const WebRtc_UWord16 incomingRtpPacketSize,
const WebRtc_Word64 nowMS) {
WebRtc_Word32 retVal = 0;
_criticalSectionReceiverVideo->Enter();
_criticalSectionReceiverVideo->Enter();
_videoBitRate.Update(payloadDataLength + rtpHeader->header.paddingLength,
nowMS);
_videoBitRate.Update(payloadDataLength + rtpHeader->header.paddingLength,
nowMS);
// Add headers, ideally we would like to include for instance
// Ethernet header here as well.
const WebRtc_UWord16 packetSize = payloadDataLength + _packetOverHead +
rtpHeader->header.headerLength + rtpHeader->header.paddingLength;
_overUseDetector.Update(*rtpHeader, packetSize, nowMS);
// Add headers, ideally we would like to include for instance
// Ethernet header here as well.
const WebRtc_UWord16 packetSize = payloadDataLength + _packetOverHead +
rtpHeader->header.headerLength + rtpHeader->header.paddingLength;
_overUseDetector.Update(*rtpHeader, packetSize, nowMS);
if (isRED)
{
if(_receiveFEC == NULL)
{
_criticalSectionReceiverVideo->Leave();
return -1;
}
bool FECpacket = false;
retVal = _receiveFEC->AddReceivedFECPacket(
rtpHeader,
incomingRtpPacket,
payloadDataLength,
FECpacket);
if (retVal != -1)
retVal = _receiveFEC->ProcessReceivedFEC();
_criticalSectionReceiverVideo->Leave();
if(retVal == 0 && FECpacket)
{
// Callback with the received FEC packet.
// The normal packets are delivered after parsing.
// This contains the original RTP packet header but with
// empty payload and data length.
rtpHeader->frameType = kFrameEmpty;
// We need this for the routing.
WebRtc_Word32 retVal = SetCodecType(videoType, rtpHeader);
if(retVal != 0)
{
return retVal;
}
retVal = CallbackOfReceivedPayloadData(NULL, 0, rtpHeader);
}
}else
{
// will leave the _criticalSectionReceiverVideo critsect
retVal = ParseVideoCodecSpecificSwitch(rtpHeader,
payloadData,
payloadDataLength,
videoType);
if (isRED) {
if(_receiveFEC == NULL) {
_criticalSectionReceiverVideo->Leave();
return -1;
}
bool FECpacket = false;
retVal = _receiveFEC->AddReceivedFECPacket(
rtpHeader,
incomingRtpPacket,
payloadDataLength,
FECpacket);
if (retVal != -1) {
retVal = _receiveFEC->ProcessReceivedFEC();
}
// Update the remote rate control object and update the overuse
// detector with the current rate control region.
_criticalSectionReceiverVideo->Enter();
const RateControlInput input(_overUseDetector.State(),
_videoBitRate.BitRate(nowMS),
_overUseDetector.NoiseVar());
_criticalSectionReceiverVideo->Leave();
// Call the callback outside critical section
if (_rtpRtcp) {
const RateControlRegion region = _rtpRtcp->OnOverUseStateUpdate(input);
_criticalSectionReceiverVideo->Enter();
_overUseDetector.SetRateControlRegion(region);
_criticalSectionReceiverVideo->Leave();
if(retVal == 0 && FECpacket) {
// Callback with the received FEC packet.
// The normal packets are delivered after parsing.
// This contains the original RTP packet header but with
// empty payload and data length.
rtpHeader->frameType = kFrameEmpty;
// We need this for the routing.
WebRtc_Word32 retVal = SetCodecType(videoType, rtpHeader);
if(retVal != 0) {
return retVal;
}
retVal = CallbackOfReceivedPayloadData(NULL, 0, rtpHeader);
}
} else {
// will leave the _criticalSectionReceiverVideo critsect
retVal = ParseVideoCodecSpecificSwitch(rtpHeader,
payloadData,
payloadDataLength,
videoType);
}
return retVal;
// Update the remote rate control object and update the overuse
// detector with the current rate control region.
_criticalSectionReceiverVideo->Enter();
const RateControlInput input(_overUseDetector.State(),
_videoBitRate.BitRate(nowMS),
_overUseDetector.NoiseVar());
_criticalSectionReceiverVideo->Leave();
// Call the callback outside critical section
if (_rtpRtcp) {
const RateControlRegion region = _rtpRtcp->OnOverUseStateUpdate(input);
_criticalSectionReceiverVideo->Enter();
_overUseDetector.SetRateControlRegion(region);
_criticalSectionReceiverVideo->Leave();
}
return retVal;
}
WebRtc_Word32
RTPReceiverVideo::BuildRTPheader(const WebRtcRTPHeader* rtpHeader,
WebRtc_UWord8* dataBuffer) const
{
dataBuffer[0] = static_cast<WebRtc_UWord8>(0x80); // version 2
dataBuffer[1] = static_cast<WebRtc_UWord8>(rtpHeader->header.payloadType);
if (rtpHeader->header.markerBit)
{
dataBuffer[1] |= kRtpMarkerBitMask; // MarkerBit is 1
WebRtc_Word32 RTPReceiverVideo::BuildRTPheader(
const WebRtcRTPHeader* rtpHeader,
WebRtc_UWord8* dataBuffer) const {
dataBuffer[0] = static_cast<WebRtc_UWord8>(0x80); // version 2
dataBuffer[1] = static_cast<WebRtc_UWord8>(rtpHeader->header.payloadType);
if (rtpHeader->header.markerBit) {
dataBuffer[1] |= kRtpMarkerBitMask; // MarkerBit is 1
}
ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer + 2,
rtpHeader->header.sequenceNumber);
ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer + 4,
rtpHeader->header.timestamp);
ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer + 8,
rtpHeader->header.ssrc);
WebRtc_Word32 rtpHeaderLength = 12;
// Add the CSRCs if any
if (rtpHeader->header.numCSRCs > 0) {
if (rtpHeader->header.numCSRCs > 16) {
// error
assert(false);
}
ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+2, rtpHeader->header.sequenceNumber);
ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+4, rtpHeader->header.timestamp);
ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+8, rtpHeader->header.ssrc);
WebRtc_Word32 rtpHeaderLength = 12;
// Add the CSRCs if any
if (rtpHeader->header.numCSRCs > 0)
{
if(rtpHeader->header.numCSRCs > 16)
{
// error
assert(false);
}
WebRtc_UWord8* ptr = &dataBuffer[rtpHeaderLength];
for (WebRtc_UWord32 i = 0; i < rtpHeader->header.numCSRCs; ++i)
{
ModuleRTPUtility::AssignUWord32ToBuffer(ptr, rtpHeader->header.arrOfCSRCs[i]);
ptr +=4;
}
dataBuffer[0] = (dataBuffer[0]&0xf0) | rtpHeader->header.numCSRCs;
// Update length of header
rtpHeaderLength += sizeof(WebRtc_UWord32)*rtpHeader->header.numCSRCs;
WebRtc_UWord8* ptr = &dataBuffer[rtpHeaderLength];
for (WebRtc_UWord32 i = 0; i < rtpHeader->header.numCSRCs; ++i) {
ModuleRTPUtility::AssignUWord32ToBuffer(ptr,
rtpHeader->header.arrOfCSRCs[i]);
ptr +=4;
}
return rtpHeaderLength;
dataBuffer[0] = (dataBuffer[0]&0xf0) | rtpHeader->header.numCSRCs;
// Update length of header
rtpHeaderLength += sizeof(WebRtc_UWord32)*rtpHeader->header.numCSRCs;
}
return rtpHeaderLength;
}
WebRtc_Word32
RTPReceiverVideo::ReceiveRecoveredPacketCallback(WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength)
{
_criticalSectionReceiverVideo->Enter();
WebRtc_Word32 RTPReceiverVideo::ReceiveRecoveredPacketCallback(
WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength) {
// TODO(pwestin) Re-factor this to avoid the messy critsect handling.
_criticalSectionReceiverVideo->Enter();
_currentFecFrameDecoded = true;
_currentFecFrameDecoded = true;
ModuleRTPUtility::Payload* payload = NULL;
if (PayloadTypeToPayload(rtpHeader->header.payloadType, payload) != 0)
{
return -1;
}
// here we can re-create the original lost packet so that we can use it for the relay
// we need to re-create the RED header too
WebRtc_UWord8 recoveredPacket[IP_PACKET_SIZE];
WebRtc_UWord16 rtpHeaderLength = (WebRtc_UWord16)BuildRTPheader(rtpHeader, recoveredPacket);
ModuleRTPUtility::Payload* payload = NULL;
if (PayloadTypeToPayload(rtpHeader->header.payloadType, payload) != 0) {
_criticalSectionReceiverVideo->Leave();
return -1;
}
// here we can re-create the original lost packet so that we can use it for
// the relay we need to re-create the RED header too
WebRtc_UWord8 recoveredPacket[IP_PACKET_SIZE];
WebRtc_UWord16 rtpHeaderLength = (WebRtc_UWord16)BuildRTPheader(
rtpHeader, recoveredPacket);
const WebRtc_UWord8 REDForFECHeaderLength = 1;
const WebRtc_UWord8 REDForFECHeaderLength = 1;
// replace pltype
recoveredPacket[1] &= 0x80; // reset
recoveredPacket[1] += REDPayloadType(); // replace with RED payload type
// replace pltype
recoveredPacket[1] &= 0x80; // reset
recoveredPacket[1] += REDPayloadType(); // replace with RED payload type
// add RED header
recoveredPacket[rtpHeaderLength] = rtpHeader->header.payloadType; // f-bit always 0
// add RED header
recoveredPacket[rtpHeaderLength] = rtpHeader->header.payloadType;
// f-bit always 0
memcpy(recoveredPacket + rtpHeaderLength + REDForFECHeaderLength, payloadData, payloadDataLength);
memcpy(recoveredPacket + rtpHeaderLength + REDForFECHeaderLength, payloadData,
payloadDataLength);
return ParseVideoCodecSpecificSwitch(rtpHeader,
payloadData,
payloadDataLength,
payload->typeSpecific.Video.videoCodecType);
return ParseVideoCodecSpecificSwitch(
rtpHeader,
payloadData,
payloadDataLength,
payload->typeSpecific.Video.videoCodecType);
}
WebRtc_Word32 RTPReceiverVideo::SetCodecType(const RtpVideoCodecTypes videoType,
@ -400,6 +284,7 @@ WebRtc_Word32 RTPReceiverVideo::ParseVideoCodecSpecificSwitch(
const RtpVideoCodecTypes videoType) {
WebRtc_Word32 retVal = SetCodecType(videoType, rtpHeader);
if (retVal != 0) {
_criticalSectionReceiverVideo->Leave();
return retVal;
}
WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id, "%s(timestamp:%u)",
@ -419,103 +304,94 @@ WebRtc_Word32 RTPReceiverVideo::ParseVideoCodecSpecificSwitch(
return -1;
}
WebRtc_Word32
RTPReceiverVideo::ReceiveVp8Codec(WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength)
{
bool success;
ModuleRTPUtility::RTPPayload parsedPacket;
if (payloadDataLength == 0)
{
success = true;
parsedPacket.info.VP8.dataLength = 0;
} else
{
ModuleRTPUtility::RTPPayloadParser rtpPayloadParser(kRtpVp8Video,
payloadData,
payloadDataLength,
_id);
success = rtpPayloadParser.Parse(parsedPacket);
}
// from here down we only work on local data
_criticalSectionReceiverVideo->Leave();
WebRtc_Word32 RTPReceiverVideo::ReceiveVp8Codec(
WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength) {
bool success;
ModuleRTPUtility::RTPPayload parsedPacket;
if (payloadDataLength == 0) {
success = true;
parsedPacket.info.VP8.dataLength = 0;
} else {
ModuleRTPUtility::RTPPayloadParser rtpPayloadParser(kRtpVp8Video,
payloadData,
payloadDataLength,
_id);
if (!success)
{
return -1;
}
if (parsedPacket.info.VP8.dataLength == 0)
{
// we have an "empty" VP8 packet, it's ok, could be one way video
// Inform the jitter buffer about this packet.
rtpHeader->frameType = kFrameEmpty;
if (CallbackOfReceivedPayloadData(NULL, 0, rtpHeader) != 0)
{
return -1;
}
return 0;
}
rtpHeader->frameType = (parsedPacket.frameType == ModuleRTPUtility::kIFrame) ? kVideoFrameKey : kVideoFrameDelta;
success = rtpPayloadParser.Parse(parsedPacket);
}
// from here down we only work on local data
_criticalSectionReceiverVideo->Leave();
RTPVideoHeaderVP8 *toHeader = &rtpHeader->type.Video.codecHeader.VP8;
ModuleRTPUtility::RTPPayloadVP8 *fromHeader = &parsedPacket.info.VP8;
rtpHeader->type.Video.isFirstPacket = fromHeader->beginningOfPartition
&& (fromHeader->partitionID == 0);
toHeader->pictureId = fromHeader->hasPictureID ? fromHeader->pictureID :
kNoPictureId;
toHeader->tl0PicIdx = fromHeader->hasTl0PicIdx ? fromHeader->tl0PicIdx :
kNoTl0PicIdx;
if (fromHeader->hasTID) {
toHeader->temporalIdx = fromHeader->tID;
toHeader->layerSync = fromHeader->layerSync;
} else {
toHeader->temporalIdx = kNoTemporalIdx;
toHeader->layerSync = false;
}
toHeader->keyIdx = fromHeader->hasKeyIdx ? fromHeader->keyIdx : kNoKeyIdx;
toHeader->frameWidth = fromHeader->frameWidth;
toHeader->frameHeight = fromHeader->frameHeight;
toHeader->partitionId = fromHeader->partitionID;
toHeader->beginningOfPartition = fromHeader->beginningOfPartition;
if(CallbackOfReceivedPayloadData(parsedPacket.info.VP8.data,
parsedPacket.info.VP8.dataLength,
rtpHeader) != 0)
{
return -1;
if (!success) {
return -1;
}
if (parsedPacket.info.VP8.dataLength == 0) {
// we have an "empty" VP8 packet, it's ok, could be one way video
// Inform the jitter buffer about this packet.
rtpHeader->frameType = kFrameEmpty;
if (CallbackOfReceivedPayloadData(NULL, 0, rtpHeader) != 0) {
return -1;
}
return 0;
}
rtpHeader->frameType = (parsedPacket.frameType == ModuleRTPUtility::kIFrame) ?
kVideoFrameKey : kVideoFrameDelta;
RTPVideoHeaderVP8 *toHeader = &rtpHeader->type.Video.codecHeader.VP8;
ModuleRTPUtility::RTPPayloadVP8 *fromHeader = &parsedPacket.info.VP8;
rtpHeader->type.Video.isFirstPacket = fromHeader->beginningOfPartition
&& (fromHeader->partitionID == 0);
toHeader->pictureId = fromHeader->hasPictureID ? fromHeader->pictureID :
kNoPictureId;
toHeader->tl0PicIdx = fromHeader->hasTl0PicIdx ? fromHeader->tl0PicIdx :
kNoTl0PicIdx;
if (fromHeader->hasTID) {
toHeader->temporalIdx = fromHeader->tID;
toHeader->layerSync = fromHeader->layerSync;
} else {
toHeader->temporalIdx = kNoTemporalIdx;
toHeader->layerSync = false;
}
toHeader->keyIdx = fromHeader->hasKeyIdx ? fromHeader->keyIdx : kNoKeyIdx;
toHeader->frameWidth = fromHeader->frameWidth;
toHeader->frameHeight = fromHeader->frameHeight;
toHeader->partitionId = fromHeader->partitionID;
toHeader->beginningOfPartition = fromHeader->beginningOfPartition;
if(CallbackOfReceivedPayloadData(parsedPacket.info.VP8.data,
parsedPacket.info.VP8.dataLength,
rtpHeader) != 0) {
return -1;
}
return 0;
}
WebRtc_Word32
RTPReceiverVideo::ReceiveGenericCodec(WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength)
{
rtpHeader->frameType = kVideoFrameKey;
WebRtc_Word32 RTPReceiverVideo::ReceiveGenericCodec(
WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength) {
rtpHeader->frameType = kVideoFrameKey;
if(((SequenceNumber() + 1) == rtpHeader->header.sequenceNumber) &&
(TimeStamp() != rtpHeader->header.timestamp))
{
rtpHeader->type.Video.isFirstPacket = true;
}
_criticalSectionReceiverVideo->Leave();
if(((SequenceNumber() + 1) == rtpHeader->header.sequenceNumber) &&
(TimeStamp() != rtpHeader->header.timestamp)) {
rtpHeader->type.Video.isFirstPacket = true;
}
_criticalSectionReceiverVideo->Leave();
if(CallbackOfReceivedPayloadData(payloadData, payloadDataLength, rtpHeader) != 0)
{
return -1;
}
return 0;
if(CallbackOfReceivedPayloadData(payloadData, payloadDataLength,
rtpHeader) != 0) {
return -1;
}
return 0;
}
void RTPReceiverVideo::SetPacketOverHead(WebRtc_UWord16 packetOverHead)
{
_packetOverHead = packetOverHead;
void RTPReceiverVideo::SetPacketOverHead(WebRtc_UWord16 packetOverHead) {
_packetOverHead = packetOverHead;
}
} // namespace webrtc

View File

@ -25,115 +25,95 @@ class ReceiverFEC;
class ModuleRtpRtcpImpl;
class CriticalSectionWrapper;
class RTPReceiverVideo
{
public:
RTPReceiverVideo();
RTPReceiverVideo(const WebRtc_Word32 id, ModuleRtpRtcpImpl* owner);
class RTPReceiverVideo {
public:
RTPReceiverVideo();
RTPReceiverVideo(const WebRtc_Word32 id, ModuleRtpRtcpImpl* owner);
virtual ~RTPReceiverVideo();
virtual ~RTPReceiverVideo();
virtual void ChangeUniqueId(const WebRtc_Word32 id);
virtual void ChangeUniqueId(const WebRtc_Word32 id);
WebRtc_Word32 Init();
void Init();
WebRtc_Word32 RegisterIncomingVideoCallback(
RtpVideoFeedback* incomingMessagesCallback);
ModuleRTPUtility::Payload* RegisterReceiveVideoPayload(
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 maxRate);
void UpdateBandwidthManagement(const WebRtc_UWord32 bitrateBps,
const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTimeMs);
WebRtc_Word32 ParseVideoCodecSpecific(
WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength,
const RtpVideoCodecTypes videoType,
const bool isRED,
const WebRtc_UWord8* incomingRtpPacket,
const WebRtc_UWord16 incomingRtpPacketSize,
const WebRtc_Word64 nowMS);
ModuleRTPUtility::Payload* RegisterReceiveVideoPayload(
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 maxRate);
virtual WebRtc_Word32 ReceiveRecoveredPacketCallback(
WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
WebRtc_Word32 ParseVideoCodecSpecific(
WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength,
const RtpVideoCodecTypes videoType,
const bool isRED,
const WebRtc_UWord8* incomingRtpPacket,
const WebRtc_UWord16 incomingRtpPacketSize,
const WebRtc_Word64 nowMS);
void SetPacketOverHead(WebRtc_UWord16 packetOverHead);
virtual WebRtc_Word32 ReceiveRecoveredPacketCallback(
WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
protected:
void ResetOverUseDetector();
void SetPacketOverHead(WebRtc_UWord16 packetOverHead);
virtual WebRtc_Word32 CallbackOfReceivedPayloadData(
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadSize,
const WebRtcRTPHeader* rtpHeader) = 0;
protected:
void ResetOverUseDetector();
virtual WebRtc_UWord32 TimeStamp() const = 0;
virtual WebRtc_UWord16 SequenceNumber() const = 0;
WebRtc_UWord16 EstimateBandwidth( const WebRtc_UWord16 bufferLength);
virtual WebRtc_UWord32 PayloadTypeToPayload(
const WebRtc_UWord8 payloadType,
ModuleRTPUtility::Payload*& payload) const = 0;
virtual WebRtc_Word32 CallbackOfReceivedPayloadData(
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadSize,
const WebRtcRTPHeader* rtpHeader) = 0;
virtual bool RetransmitOfOldPacket(
const WebRtc_UWord16 sequenceNumber,
const WebRtc_UWord32 rtpTimeStamp) const = 0;
virtual WebRtc_UWord32 TimeStamp() const = 0;
virtual WebRtc_UWord16 SequenceNumber() const = 0;
virtual WebRtc_Word8 REDPayloadType() const = 0;
virtual WebRtc_UWord32 PayloadTypeToPayload(
const WebRtc_UWord8 payloadType,
ModuleRTPUtility::Payload*& payload) const = 0;
WebRtc_Word32 SetCodecType(const RtpVideoCodecTypes videoType,
WebRtcRTPHeader* rtpHeader) const;
virtual bool RetransmitOfOldPacket(
const WebRtc_UWord16 sequenceNumber,
const WebRtc_UWord32 rtpTimeStamp) const = 0;
WebRtc_Word32 ParseVideoCodecSpecificSwitch(
WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength,
const RtpVideoCodecTypes videoType);
virtual WebRtc_Word8 REDPayloadType() const = 0;
WebRtc_Word32 ReceiveGenericCodec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
WebRtc_Word32 SetCodecType(const RtpVideoCodecTypes videoType,
WebRtcRTPHeader* rtpHeader) const;
WebRtc_Word32 ReceiveVp8Codec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
WebRtc_Word32 ParseVideoCodecSpecificSwitch(
WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength,
const RtpVideoCodecTypes videoType);
WebRtc_Word32 BuildRTPheader(const WebRtcRTPHeader* rtpHeader,
WebRtc_UWord8* dataBuffer) const;
WebRtc_Word32 ReceiveGenericCodec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
private:
WebRtc_Word32 _id;
ModuleRtpRtcpImpl* _rtpRtcp;
WebRtc_Word32 ReceiveVp8Codec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
CriticalSectionWrapper* _criticalSectionReceiverVideo;
WebRtc_Word32 BuildRTPheader(const WebRtcRTPHeader* rtpHeader,
WebRtc_UWord8* dataBuffer) const;
// FEC
bool _currentFecFrameDecoded;
ReceiverFEC* _receiveFEC;
private:
WebRtc_Word32 _id;
ModuleRtpRtcpImpl* _rtpRtcp;
CriticalSectionWrapper* _criticalSectionFeedback;
RtpVideoFeedback* _cbVideoFeedback;
CriticalSectionWrapper* _criticalSectionReceiverVideo;
// bandwidth
bool _completeFrame;
WebRtc_UWord32 _packetStartTimeMs;
WebRtc_UWord16 _receivedBW[BW_HISTORY_SIZE];
WebRtc_UWord16 _estimatedBW;
// FEC
bool _currentFecFrameDecoded;
ReceiverFEC* _receiveFEC;
// BWE
OverUseDetector _overUseDetector;
BitRateStats _videoBitRate;
WebRtc_Word64 _lastBitRateChange;
WebRtc_UWord16 _packetOverHead;
// BWE
OverUseDetector _overUseDetector;
BitRateStats _videoBitRate;
WebRtc_Word64 _lastBitRateChange;
WebRtc_UWord16 _packetOverHead;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_

View File

@ -61,8 +61,6 @@
'rtp_sender_audio.cc',
'rtp_sender_audio.h',
# Video Files
'bandwidth_management.cc',
'bandwidth_management.h',
'bwe_defines.h',
'fec_private_tables.h',
'forward_error_correction.cc',

View File

@ -86,7 +86,6 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const WebRtc_Word32 id,
_rtpReceiver(id, audio, clock, this),
_rtcpSender(id, audio, clock, this),
_rtcpReceiver(id, clock, this),
_bandwidthManagement(id),
_owns_clock(false),
_clock(*clock),
_id(id),
@ -446,12 +445,9 @@ WebRtc_Word32 ModuleRtpRtcpImpl::InitReceiver() {
_RTCPArrivalTimeSecsAudio = 0;
_RTCPArrivalTimeFracAudio = 0;
WebRtc_Word32 ret = _rtpReceiver.Init();
if (ret < 0) {
return ret;
}
_rtpReceiver.Init();
_rtpReceiver.SetPacketOverHead(_packetOverHead);
return ret;
return 0;
}
void ModuleRtpRtcpImpl::ProcessDeadOrAliveTimer() {
@ -792,31 +788,12 @@ WebRtc_Word32 ModuleRtpRtcpImpl::RegisterIncomingRTPCallback(
return _rtpReceiver.RegisterIncomingRTPCallback(incomingMessagesCallback);
}
WebRtc_Word32 ModuleRtpRtcpImpl::RegisterIncomingRTCPCallback(
RtcpFeedback* incomingMessagesCallback) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
_id,
"RegisterIncomingRTCPCallback(incomingMessagesCallback:0x%x)",
incomingMessagesCallback);
return _rtcpReceiver.RegisterIncomingRTCPCallback(incomingMessagesCallback);
}
WebRtc_Word32 ModuleRtpRtcpImpl::RegisterIncomingVideoCallback(
RtpVideoFeedback* incomingMessagesCallback) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
_id,
"RegisterIncomingVideoCallback(incomingMessagesCallback:0x%x)",
incomingMessagesCallback);
if (_rtcpReceiver.RegisterIncomingVideoCallback(incomingMessagesCallback)
== 0) {
return _rtpReceiver.RegisterIncomingVideoCallback(
incomingMessagesCallback);
}
return -1;
void ModuleRtpRtcpImpl::RegisterRtcpObservers(
RtcpIntraFrameObserver* intra_frame_callback,
RtcpBandwidthObserver* bandwidth_callback,
RtcpFeedback* feedback_callback) {
_rtcpReceiver.RegisterRtcpObservers(intra_frame_callback, bandwidth_callback,
feedback_callback);
}
WebRtc_Word32 ModuleRtpRtcpImpl::RegisterAudioCallback(
@ -1586,18 +1563,6 @@ WebRtc_Word32 ModuleRtpRtcpImpl::SetREMBData(const WebRtc_UWord32 bitrate,
return _rtcpSender.SetREMBData(bitrate, numberOfSSRC, SSRC);
}
WebRtc_Word32 ModuleRtpRtcpImpl::SetMaximumBitrateEstimate(
const WebRtc_UWord32 bitrate) {
if (_defaultModule) {
WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
"SetMaximumBitrateEstimate - Should be called on default "
"module.");
return -1;
}
OnReceivedEstimatedMaxBitrate(bitrate);
return 0;
}
bool ModuleRtpRtcpImpl::SetRemoteBitrateObserver(
RtpRemoteBitrateObserver* observer) {
return _rtcpSender.SetRemoteBitrateObserver(observer);
@ -1947,39 +1912,38 @@ RtpVideoCodecTypes ModuleRtpRtcpImpl::SendVideoCodec() const {
return _rtpSender.VideoCodecType();
}
void ModuleRtpRtcpImpl::SetSendBitrate(const WebRtc_UWord32 startBitrate,
const WebRtc_UWord16 minBitrateKbit,
const WebRtc_UWord16 maxBitrateKbit) {
void ModuleRtpRtcpImpl::SetTargetSendBitrate(const uint32_t bitrate) {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
"SetTargetSendBitrate: %ubit", bitrate);
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
_id,
"SetSendBitrate start:%ubit/s min:%uKbit/s max:%uKbit/s",
startBitrate, minBitrateKbit, maxBitrateKbit);
const bool defaultInstance(_childModules.empty() ? false : true);
if (defaultInstance) {
// for default we need to update all child modules too
const bool haveChildModules(_childModules.empty() ? false : true);
if (haveChildModules) {
CriticalSectionScoped lock(_criticalSectionModulePtrs);
std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
while (it != _childModules.end()) {
RtpRtcp* module = *it;
if (module) {
module->SetSendBitrate(startBitrate,
minBitrateKbit,
maxBitrateKbit);
if (_simulcast) {
uint32_t bitrate_remainder = bitrate;
std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
for (int i = 0; it != _childModules.end() &&
i < _sendVideoCodec.numberOfSimulcastStreams; ++it, ++i) {
RTPSender& rtpSender = (*it)->_rtpSender;
if (_sendVideoCodec.simulcastStream[i].maxBitrate > bitrate_remainder) {
rtpSender.SetTargetSendBitrate(
_sendVideoCodec.simulcastStream[i].maxBitrate);
bitrate_remainder -= _sendVideoCodec.simulcastStream[i].maxBitrate;
} else {
rtpSender.SetTargetSendBitrate(bitrate_remainder);
bitrate_remainder = 0;
}
}
} else {
std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
for (; it != _childModules.end(); ++it) {
RTPSender& rtpSender = (*it)->_rtpSender;
rtpSender.SetTargetSendBitrate(bitrate);
}
it++;
}
} else {
_rtpSender.SetTargetSendBitrate(bitrate);
}
// TODO(henrike): this function also returns a value. It never fails so
// make it return void.
_rtpSender.SetTargetSendBitrate(startBitrate);
_bandwidthManagement.SetSendBitrate(startBitrate, minBitrateKbit,
maxBitrateKbit);
}
WebRtc_Word32 ModuleRtpRtcpImpl::SetKeyFrameRequestMethod(
@ -2211,11 +2175,6 @@ void ModuleRtpRtcpImpl::BitrateSent(WebRtc_UWord32* totalRate,
*nackRate = _rtpSender.NackOverheadRate();
}
int ModuleRtpRtcpImpl::EstimatedSendBandwidth(
WebRtc_UWord32* available_bandwidth) const {
return _bandwidthManagement.AvailableBandwidth(available_bandwidth);
}
int ModuleRtpRtcpImpl::EstimatedReceiveBandwidth(
WebRtc_UWord32* available_bandwidth) const {
if (!_rtcpSender.ValidBitrateEstimate())
@ -2312,356 +2271,6 @@ void ModuleRtpRtcpImpl::OnRequestIntraFrame() {
RequestKeyFrame();
}
// Handles an intra-frame (key frame) request. If this module is a child of a
// default module, the request is forwarded to the default module. Otherwise
// the request is passed to the RTCP receiver together with the simulcast
// stream index of |caller| (0 when not using simulcast).
void ModuleRtpRtcpImpl::OnReceivedIntraFrameRequest(const RtpRtcp* caller) {
  if (_defaultModule) {
    // Double-checked under the lock since _defaultModule may be cleared
    // concurrently.
    CriticalSectionScoped lock(_criticalSectionModulePtrs);
    if (_defaultModule) {
      // if we use a default module pass this info to the default module
      _defaultModule->OnReceivedIntraFrameRequest(caller);
      return;
    }
  }
  WebRtc_UWord8 streamIdx = 0;
  FrameType frameType = kVideoFrameKey;
  if (_simulcast) {
    CriticalSectionScoped lock(_criticalSectionModulePtrs);
    // Loop through the child modules to find which simulcast stream index
    // corresponds to the calling module.
    std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
    while (it != _childModules.end()) {
      ModuleRtpRtcpImpl* childModule = *it;
      if (childModule == caller) {
        break;
      }
      streamIdx++;
      it++;
    }
  }
  _rtcpReceiver.OnReceivedIntraFrameRequest(frameType, streamIdx);
}
// Handles a received REMB (receiver estimated max bitrate) message carrying
// |maxBitrate| in bits per second. A child module forwards the estimate to
// the RTCP sender's REMB handling. The default module converts the estimate
// to kbit/s, runs it through bandwidth management and, on success, updates
// the RTP receiver and redistributes the send bitrate across the simulcast
// child modules (highest-priority streams get their max bitrate first, the
// remainder goes to the next stream).
void ModuleRtpRtcpImpl::OnReceivedEstimatedMaxBitrate(
    const WebRtc_UWord32 maxBitrate) {
  // TODO(mflodman) Split this function in two parts. One for the child module
  // and one for the default module.
  // We received a REMB.
  if (_defaultModule) {
    // Send this update to the REMB instance to take actions.
    _rtcpSender.ReceivedRemb(maxBitrate);
    return;
  }
  WebRtc_UWord32 newBitrate = 0;
  WebRtc_UWord8 fractionLost = 0;
  WebRtc_UWord16 roundTripTime = 0;
  // Bandwidth management works in kbit/s; truncate to 16 bits.
  WebRtc_UWord16 bwEstimateKbit = WebRtc_UWord16(maxBitrate / 1000);
  if (_bandwidthManagement.UpdateBandwidthEstimate(bwEstimateKbit,
                                                   &newBitrate,
                                                   &fractionLost,
                                                   &roundTripTime) == 0) {
    _rtpReceiver.UpdateBandwidthManagement(newBitrate,
                                           fractionLost,
                                           roundTripTime);
    // We've received a new bandwidth estimate lower than the current send
    // bitrate. For simulcast we need to update the sending bitrate for all
    // streams.
    if (_simulcast) {
      CriticalSectionScoped lock(_criticalSectionModulePtrsFeedback);
      WebRtc_UWord8 idx = 0;
      for (std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
           it != _childModules.end(); ++it) {
        // sanity
        if (idx >= (_sendVideoCodec.numberOfSimulcastStreams - 1)) {
          return;
        }
        ModuleRtpRtcpImpl* module = *it;
        if (newBitrate >= _sendVideoCodec.simulcastStream[idx].maxBitrate) {
          // This stream can run at its configured max; give it that and
          // hand the remainder to the following streams.
          module->_bandwidthManagement.SetSendBitrate(
              _sendVideoCodec.simulcastStream[idx].maxBitrate, 0, 0);
          module->_rtpSender.SetTargetSendBitrate(
              _sendVideoCodec.simulcastStream[idx].maxBitrate);
          newBitrate -= _sendVideoCodec.simulcastStream[idx].maxBitrate;
        } else {
          // Not enough left for this stream's max; give it everything that
          // remains (later streams get zero).
          module->_bandwidthManagement.SetSendBitrate(newBitrate, 0, 0);
          module->_rtpSender.SetTargetSendBitrate(newBitrate);
          newBitrate -= newBitrate;
        }
        idx++;
      }
    }
  }
  // For non-simulcast, update all child modules with the new bandwidth estimate
  // regardless of the new estimate.
  if (!_simulcast) {
    // Update all child modules with the new max bitrate before exiting.
    CriticalSectionScoped lock(_criticalSectionModulePtrsFeedback);
    for (std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
         it != _childModules.end(); ++it) {
      // Update all child modules with the maximum bitrate estimate.
      ModuleRtpRtcpImpl* module = *it;
      WebRtc_UWord32 ignoreBitrate = 0;
      WebRtc_UWord8 ignoreFractionLost = 0;
      WebRtc_UWord16 ignoreRoundTripTime = 0;
      module->_bandwidthManagement.UpdateBandwidthEstimate(
          bwEstimateKbit,
          &ignoreBitrate,
          &ignoreFractionLost,
          &ignoreRoundTripTime);
      // We don't need to take care of a possible lowered bitrate, that is
      // handled earlier in this function for the default module.
    }
  }
}
// Received a slice loss indication (SLI) for |pictureID|. If a default
// module exists the SLI is forwarded to it; otherwise it is handed to the
// RTCP receiver for callback dispatch.
void ModuleRtpRtcpImpl::OnReceivedSliceLossIndication(
    const WebRtc_UWord8 pictureID) {
  if (_defaultModule) {
    // Double-checked under the lock since _defaultModule may be cleared
    // concurrently.
    CriticalSectionScoped lock(_criticalSectionModulePtrs);
    if (_defaultModule) {
      // if we use a default module pass this info to the default module
      _defaultModule->OnReceivedSliceLossIndication(pictureID);
      return;
    }
  }
  _rtcpReceiver.OnReceivedSliceLossIndication(pictureID);
}
// Received a reference picture selection indication (RPSI) for |pictureID|.
// Forwarded to the default module when one exists; otherwise handed to the
// RTCP receiver for callback dispatch.
void ModuleRtpRtcpImpl::OnReceivedReferencePictureSelectionIndication(
    const WebRtc_UWord64 pictureID) {
  if (_defaultModule) {
    // Double-checked under the lock since _defaultModule may be cleared
    // concurrently.
    CriticalSectionScoped lock(_criticalSectionModulePtrs);
    if (_defaultModule) {
      // if we use a default module pass this info to the default module
      _defaultModule->OnReceivedReferencePictureSelectionIndication(
          pictureID);
      return;
    }
  }
  _rtcpReceiver.OnReceivedReferencePictureSelectionIndication(pictureID);
}
// Handles a TMMBR-based bandwidth estimate of |bwEstimateKbit| kbit/s.
// Ignored for audio modules. A default (parent) instance recomputes its
// aggregate bandwidth from its children instead. A plain module feeds the
// estimate through bandwidth management, updates the RTP receiver and the
// send target bitrate, then forwards the estimate to its default module
// when one exists.
void ModuleRtpRtcpImpl::OnReceivedBandwidthEstimateUpdate(
    const WebRtc_UWord16 bwEstimateKbit) {
  // We received a TMMBR
  if (_audio) {
    return;
  }
  const bool defaultInstance(_childModules.empty() ? false : true);
  if (defaultInstance) {
    ProcessDefaultModuleBandwidth();
    return;
  }
  WebRtc_UWord32 newBitrate = 0;
  WebRtc_UWord8 fractionLost = 0;
  WebRtc_UWord16 roundTripTime = 0;
  if (_bandwidthManagement.UpdateBandwidthEstimate(bwEstimateKbit,
                                                   &newBitrate,
                                                   &fractionLost,
                                                   &roundTripTime) == 0) {
    if (!_defaultModule) {
      // No default module check if we should trigger OnNetworkChanged
      // via video callback
      _rtpReceiver.UpdateBandwidthManagement(newBitrate,
                                             fractionLost,
                                             roundTripTime);
    }
    if (newBitrate > 0) {
      // update bitrate
      _rtpSender.SetTargetSendBitrate(newBitrate);
    }
  }
  if (_defaultModule) {
    // Double-checked under the lock since _defaultModule may be cleared
    // concurrently.
    CriticalSectionScoped lock(_criticalSectionModulePtrs);
    if (_defaultModule) {
      // if we use a default module pass this info to the default module
      _defaultModule->OnReceivedBandwidthEstimateUpdate(bwEstimateKbit);
      return;
    }
  }
}
// bw estimation
// We received a RTCP report block: update the loss-based bandwidth estimate.
// Non-default instances recompute their bitrate from the new loss/RTT
// statistics, update the sender/receiver, and forward the (filtered) loss to
// the default module when one exists. A default instance either aggregates
// its children (non-simulcast) or recomputes one estimate and redistributes
// it over the simulcast streams (highest-priority streams first).
void ModuleRtpRtcpImpl::OnPacketLossStatisticsUpdate(
    const WebRtc_UWord8 fractionLost,
    const WebRtc_UWord16 roundTripTime,
    const WebRtc_UWord32 lastReceivedExtendedHighSeqNum) {
  const bool defaultInstance(_childModules.empty() ? false : true);
  if (!defaultInstance) {
    WebRtc_UWord32 newBitrate = 0;
    WebRtc_UWord8 loss = fractionLost;  // local copy since it can change
    // Current send-side rates are fed into the estimator as the actual rate.
    WebRtc_UWord32 videoRate = 0;
    WebRtc_UWord32 fecRate = 0;
    WebRtc_UWord32 nackRate = 0;
    BitrateSent(NULL, &videoRate, &fecRate, &nackRate);
    if (_bandwidthManagement.UpdatePacketLoss(
        lastReceivedExtendedHighSeqNum,
        videoRate + fecRate + nackRate,
        roundTripTime,
        &loss,
        &newBitrate,
        _clock.GetTimeInMS()) != 0) {
      // ignore this update
      return;
    }
    // We need to do update RTP sender before calling default module in
    // case we'll strip any layers.
    if (!_simulcast) {
      // the default module will inform all child modules about
      // their bitrate
      _rtpSender.SetTargetSendBitrate(newBitrate);
    }
    if (_defaultModule) {
      // if we have a default module update it
      CriticalSectionScoped lock(_criticalSectionModulePtrs);
      if (_defaultModule) {  // we need to check again inside the critsect
        // if we use a default module pass this info to the
        // default module
        _defaultModule->OnPacketLossStatisticsUpdate(
            loss,  // send in the filtered loss
            roundTripTime,
            lastReceivedExtendedHighSeqNum);
      }
      return;
    }
    _rtpReceiver.UpdateBandwidthManagement(newBitrate,
                                           fractionLost,
                                           roundTripTime);
  } else {
    if (!_simulcast) {
      ProcessDefaultModuleBandwidth();
    } else {
      // default and simulcast
      WebRtc_UWord32 newBitrate = 0;
      WebRtc_UWord8 loss = fractionLost;  // local copy
      WebRtc_UWord32 videoRate = 0;
      WebRtc_UWord32 fecRate = 0;
      WebRtc_UWord32 nackRate = 0;
      BitrateSent(NULL, &videoRate, &fecRate, &nackRate);
      // Sequence number 0: the aggregate module has no single sequence
      // number stream to base loss on.
      if (_bandwidthManagement.UpdatePacketLoss(0,  // we can't use this
                                                videoRate + fecRate + nackRate,
                                                roundTripTime,
                                                &loss,
                                                &newBitrate,
                                                _clock.GetTimeInMS()) != 0) {
        // ignore this update
        return;
      }
      _rtpSender.SetTargetSendBitrate(newBitrate);
      _rtpReceiver.UpdateBandwidthManagement(newBitrate,
                                             loss,
                                             roundTripTime);
      // sanity
      if (_sendVideoCodec.codecType == kVideoCodecUnknown) {
        return;
      }
      CriticalSectionScoped lock(_criticalSectionModulePtrsFeedback);
      std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
      WebRtc_UWord8 idx = 0;
      while (it != _childModules.end()) {
        // sanity
        if (idx >= (_sendVideoCodec.numberOfSimulcastStreams - 1)) {
          return;
        }
        ModuleRtpRtcpImpl* module = *it;
        // update all child modules
        if (newBitrate >=
            _sendVideoCodec.simulcastStream[idx].maxBitrate) {
          // Stream gets its configured max; remainder flows to the next one.
          module->_bandwidthManagement.SetSendBitrate(
              _sendVideoCodec.simulcastStream[idx].maxBitrate, 0, 0);
          module->_rtpSender.SetTargetSendBitrate(
              _sendVideoCodec.simulcastStream[idx].maxBitrate);
          newBitrate -=
              _sendVideoCodec.simulcastStream[idx].maxBitrate;
        } else {
          // Not enough budget for this stream's max; it gets the remainder.
          module->_bandwidthManagement.SetSendBitrate(newBitrate,
                                                      0,
                                                      0);
          module->_rtpSender.SetTargetSendBitrate(newBitrate);
          newBitrate -= newBitrate;
        }
        idx++;
      }
    }
  }
}
// Recomputes the default module's aggregate bandwidth state from its sending
// children: min/max of the children's target bitrates, the average fraction
// lost over all received report blocks, and the maximum RTT. Receive-only
// children are given the max RTT (they have no estimate of their own), and
// the default module's send bitrate is set to the minimum child estimate.
void ModuleRtpRtcpImpl::ProcessDefaultModuleBandwidth() {
  WebRtc_UWord32 minBitrateBps = 0xffffffff;
  WebRtc_UWord32 maxBitrateBps = 0;
  WebRtc_UWord32 count = 0;
  WebRtc_UWord32 fractionLostAcc = 0;
  WebRtc_UWord16 maxRoundTripTime = 0;
  {
    // get min and max for the sending channels
    CriticalSectionScoped lock(_criticalSectionModulePtrs);
    for (std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
         it != _childModules.end(); ++ it) {
      // Get child RTP sender and ask for bitrate estimate.
      ModuleRtpRtcpImpl* childModule = *it;
      if (childModule->Sending()) {
        RTPSender& childRtpSender = (*it)->_rtpSender;
        // Target bitrate is stored in kbit/s; convert to bit/s.
        const WebRtc_UWord32 childEstimateBps =
            1000 * childRtpSender.TargetSendBitrateKbit();
        if (childEstimateBps < minBitrateBps) {
          minBitrateBps = childEstimateBps;
        }
        if (childEstimateBps > maxBitrateBps) {
          maxBitrateBps = childEstimateBps;
        }
        RTCPReceiver& childRtcpReceiver = (*it)->_rtcpReceiver;
        std::vector<RTCPReportBlock> rtcp_blocks;
        childRtcpReceiver.StatisticsReceived(&rtcp_blocks);
        // Accumulate loss over every report block and track the worst RTT.
        for (std::vector<RTCPReportBlock>::iterator rit = rtcp_blocks.begin();
             rit != rtcp_blocks.end(); ++rit) {
          count++;
          fractionLostAcc += rit->fractionLost;
          WebRtc_UWord16 RTT = 0;
          childRtcpReceiver.RTT(rit->remoteSSRC, &RTT, NULL, NULL, NULL);
          maxRoundTripTime = (RTT > maxRoundTripTime) ? RTT : maxRoundTripTime;
        }
      }
    }
  }  // end critsect
  if (count == 0) {
    // No sending modules and no bitrate estimate.
    return;
  }
  // Update RTT to all receive only child modules, they won't have their own RTT
  // estimate. Assume the receive only channels are on similar links as the
  // sending channel and have approximately the same RTT.
  {
    CriticalSectionScoped lock(_criticalSectionModulePtrs);
    for (std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
         it != _childModules.end(); ++it) {
      if (!(*it)->Sending()) {
        (*it)->_rtcpReceiver.SetRTT(maxRoundTripTime);
      }
    }
  }
  _bandwidthManagement.SetSendBitrate(minBitrateBps, 0, 0);
  // Update default module bitrate. Don't care about min max.
  WebRtc_UWord8 fractionLostAvg = WebRtc_UWord8(fractionLostAcc / count);
  _rtpReceiver.UpdateBandwidthManagement(minBitrateBps,
                                         fractionLostAvg ,
                                         maxRoundTripTime);
}
// A send report was requested; trigger transmission of an RTCP SR.
void ModuleRtpRtcpImpl::OnRequestSendReport() {
  _rtcpSender.SendRTCP(kRtcpSr);
}
@ -2720,10 +2329,4 @@ WebRtc_Word32 ModuleRtpRtcpImpl::BoundingSet(bool& tmmbrOwner,
TMMBRSet*& boundingSet) {
return _rtcpReceiver.BoundingSet(tmmbrOwner, boundingSet);
}
// Forces a key frame by re-using the intra-frame-request path with a NULL
// caller (stream index 0).
void ModuleRtpRtcpImpl::SendKeyFrame() {
  WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id, "SendKeyFrame()");
  OnReceivedIntraFrameRequest(0);
}
} // namespace webrtc

View File

@ -13,7 +13,6 @@
#include <list>
#include "bandwidth_management.h"
#include "rtcp_receiver.h"
#include "rtcp_sender.h"
#include "rtp_receiver.h"
@ -132,14 +131,17 @@ public:
const WebRtc_UWord32 audioRTCPArrivalTimeFrac);
// Used by the module to deliver the incoming data to the codec module
virtual WebRtc_Word32 RegisterIncomingDataCallback(RtpData* incomingDataCallback);
virtual WebRtc_Word32 RegisterIncomingDataCallback(
RtpData* incomingDataCallback);
// Used by the module to deliver messages to the codec module/application
virtual WebRtc_Word32 RegisterIncomingRTPCallback(RtpFeedback* incomingMessagesCallback);
virtual WebRtc_Word32 RegisterIncomingRTPCallback(
RtpFeedback* incomingMessagesCallback);
virtual WebRtc_Word32 RegisterIncomingRTCPCallback(RtcpFeedback* incomingMessagesCallback);
virtual WebRtc_Word32 RegisterIncomingVideoCallback(RtpVideoFeedback* incomingMessagesCallback);
virtual void RegisterRtcpObservers(
RtcpIntraFrameObserver* intraFrameCallback,
RtcpBandwidthObserver* bandwidthCallback,
RtcpFeedback* callback);
virtual WebRtc_Word32 RegisterAudioCallback(RtpAudioFeedback* messagesCallback);
@ -323,9 +325,6 @@ public:
const WebRtc_UWord8 numberOfSSRC,
const WebRtc_UWord32* SSRC);
virtual WebRtc_Word32 SetMaximumBitrateEstimate(
const WebRtc_UWord32 bitrate);
virtual bool SetRemoteBitrateObserver(RtpRemoteBitrateObserver* observer);
/*
* (IJ) Extended jitter report.
@ -445,9 +444,7 @@ public:
virtual WebRtc_Word32 SetCameraDelay(const WebRtc_Word32 delayMS);
virtual void SetSendBitrate(const WebRtc_UWord32 startBitrate,
const WebRtc_UWord16 minBitrateKbit,
const WebRtc_UWord16 maxBitrateKbit);
virtual void SetTargetSendBitrate(const WebRtc_UWord32 bitrate);
virtual WebRtc_Word32 SetGenericFECStatus(const bool enable,
const WebRtc_UWord8 payloadTypeRED,
@ -473,9 +470,6 @@ public:
WebRtc_UWord32* fecRate,
WebRtc_UWord32* nackRate) const;
virtual int EstimatedSendBandwidth(
WebRtc_UWord32* available_bandwidth) const;
virtual int EstimatedReceiveBandwidth(
WebRtc_UWord32* available_bandwidth) const;
@ -490,23 +484,11 @@ public:
void OnReceivedNTP() ;
// bw estimation
virtual void OnPacketLossStatisticsUpdate(
const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTime,
const WebRtc_UWord32 lastReceivedExtendedHighSeqNum);
void OnReceivedTMMBR();
void OnReceivedEstimatedMaxBitrate(const WebRtc_UWord32 maxBitrate);
void OnReceivedBandwidthEstimateUpdate(const WebRtc_UWord16 bwEstimateKbit);
// bad state of RTP receiver request a keyframe
void OnRequestIntraFrame();
void OnReceivedIntraFrameRequest(const RtpRtcp* caller);
// received a request for a new SLI
void OnReceivedSliceLossIndication(const WebRtc_UWord8 pictureID);
@ -546,14 +528,9 @@ protected:
RTCPSender _rtcpSender;
RTCPReceiver _rtcpReceiver;
BandwidthManagement _bandwidthManagement;
bool _owns_clock;
RtpRtcpClock& _clock;
private:
void SendKeyFrame();
void ProcessDefaultModuleBandwidth();
WebRtc_Word32 _id;
const bool _audio;
bool _collisionDetected;
@ -574,7 +551,6 @@ private:
WebRtc_UWord32 _deadOrAliveTimeoutMS;
WebRtc_UWord32 _deadOrAliveLastTimer;
// receive side
WebRtc_UWord32 _receivedNTPsecsAudio;
WebRtc_UWord32 _receivedNTPfracAudio;
WebRtc_UWord32 _RTCPArrivalTimeSecsAudio;

View File

@ -187,17 +187,8 @@ RTPSender::ChangeUniqueId(const WebRtc_Word32 id)
}
}
WebRtc_Word32
RTPSender::SetTargetSendBitrate(const WebRtc_UWord32 bits)
{
_targetSendBitrate = (WebRtc_UWord16)(bits/1000);
return 0;
}
WebRtc_UWord16
RTPSender::TargetSendBitrateKbit() const
{
return _targetSendBitrate;
void RTPSender::SetTargetSendBitrate(const WebRtc_UWord32 bits) {
_targetSendBitrate = static_cast<uint16_t>(bits / 1000);
}
WebRtc_UWord16
@ -743,7 +734,7 @@ RTPSender::OnReceivedNACK(const WebRtc_UWord16 nackSequenceNumbersLength,
kTraceRtpRtcp,
_id,
"NACK bitrate reached. Skip sending NACK response. Target %d",
TargetSendBitrateKbit());
_targetSendBitrate);
return;
}
@ -766,10 +757,10 @@ RTPSender::OnReceivedNACK(const WebRtc_UWord16 nackSequenceNumbersLength,
break;
}
// delay bandwidth estimate (RTT * BW)
if (TargetSendBitrateKbit() != 0 && avgRTT) {
if (_targetSendBitrate != 0 && avgRTT) {
// kbits/s * ms = bits => bits/8 = bytes
WebRtc_UWord32 targetBytes =
(static_cast<WebRtc_UWord32>(TargetSendBitrateKbit()) * avgRTT) >> 3;
(static_cast<WebRtc_UWord32>(_targetSendBitrate) * avgRTT) >> 3;
if (bytesReSent > targetBytes) {
break; // ignore the rest of the packets in the list
}

View File

@ -54,7 +54,6 @@ public:
virtual WebRtc_UWord16 MaxPayloadLength() const = 0;
virtual WebRtc_UWord16 MaxDataPayloadLength() const = 0;
virtual WebRtc_UWord16 PacketOverHead() const = 0;
virtual WebRtc_UWord16 TargetSendBitrateKbit() const = 0;
virtual WebRtc_UWord16 ActualSendBitrateKbit() const = 0;
virtual WebRtc_Word32 SendToNetwork(const WebRtc_UWord8* dataBuffer,
@ -75,14 +74,13 @@ public:
void ProcessBitrate();
void ProcessSendToNetwork();
WebRtc_UWord16 TargetSendBitrateKbit() const;
WebRtc_UWord16 ActualSendBitrateKbit() const;
WebRtc_UWord32 VideoBitrateSent() const;
WebRtc_UWord32 FecOverheadRate() const;
WebRtc_UWord32 NackOverheadRate() const;
WebRtc_Word32 SetTargetSendBitrate(const WebRtc_UWord32 bits);
void SetTargetSendBitrate(const WebRtc_UWord32 bits);
WebRtc_UWord16 MaxDataPayloadLength() const; // with RTP and FEC headers

View File

@ -101,12 +101,10 @@ TEST_F(RtpRtcpAPITest, RTCP) {
EXPECT_EQ(kRtcpCompound, module->RTCP());
EXPECT_EQ(0, module->SetCNAME("john.doe@test.test"));
EXPECT_EQ(-1, module->SetCNAME(NULL));
char cName[RTCP_CNAME_SIZE];
EXPECT_EQ(0, module->CNAME(cName));
EXPECT_STRCASEEQ(cName, "john.doe@test.test");
EXPECT_EQ(-1, module->CNAME(NULL));
EXPECT_FALSE(module->TMMBR());
EXPECT_EQ(0, module->SetTMMBRStatus(true));

View File

@ -52,18 +52,6 @@ class VerifyingAudioReceiver : public RtpData {
// first channel data being equal to 0xff.
return 0;
}
} else if (rtpHeader->type.Audio.channel == 2) {
if (payloadData[0] == 0x0) {
// All our test vectors for payload type 100, 101 and 102 have the
// second channel data being equal to 0x00.
return 0;
}
} else if (rtpHeader->type.Audio.channel == 3) {
// All our test vectors for payload type 100, 101 and 102 have the
// third channel data being equal to 0xaa.
if (payloadData[0] == 0xaa) {
return 0;
}
}
ADD_FAILURE() << "This code path should never happen.";
return -1;
@ -335,112 +323,3 @@ TEST_F(RtpRtcpAudioTest, DTMF) {
}
delete audioFeedback;
}
// Verifies sending of multi-channel (3-channel) audio for several sample-
// based codecs with different bit depths (8, 16, 5, 3 and 2 bits/sample).
TEST_F(RtpRtcpAudioTest, Stereo) {
  CodecInst voiceCodec;
  voiceCodec.pltype = 96;
  voiceCodec.plfreq = 8000;
  memcpy(voiceCodec.plname, "PCMU", 5);
  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
  EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
  EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
  voiceCodec.rate = test_rate;
  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
  EXPECT_EQ(0, module1->SetSSRC(test_ssrc));
  EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
  EXPECT_EQ(0, module1->SetSendingStatus(true));
  // Prepare for 3 channel audio 8 bits per sample.
  voiceCodec.pltype = 98;
  voiceCodec.channels = 3;
  memcpy(voiceCodec.plname, "PCMA", 5);
  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
  // Prepare for 3 channel audio 16 bits per sample.
  voiceCodec.pltype = 99;
  memcpy(voiceCodec.plname, "L16", 4);
  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
  // Prepare for 3 channel audio 5 bits per sample.
  voiceCodec.pltype = 100;
  memcpy(voiceCodec.plname, "G726-40",8);
  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
  // Prepare for 3 channel audio 3 bits per sample.
  voiceCodec.pltype = 101;
  memcpy(voiceCodec.plname, "G726-24",8);
  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
  // Prepare for 3 channel audio 2 bits per sample.
  voiceCodec.pltype = 102;
  memcpy(voiceCodec.plname, "G726-16",8);
  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
  // Test sample based multi channel codec, 3 channels 8 bits.
  WebRtc_UWord8 test3channels[15] = "ttteeesssttt";
  WebRtc_UWord32 timeStamp = 160;
  EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 98,
                                         timeStamp, test3channels, 12));
  fake_clock.IncrementTime(20);
  module1->Process();
  timeStamp += 160;  // Prepare for next packet.
  // Test sample based multi channel codec, 3 channels 16 bits.
  const WebRtc_UWord8 test3channels16[13] = "teteteststst";
  EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 99,
                                         timeStamp, test3channels16, 12));
  fake_clock.IncrementTime(20);
  module1->Process();
  timeStamp += 160;  // Prepare for next packet.
  // Test sample based multi channel codec, 3 channels 5 bits.
  test3channels[0] = 0xf8;  // 5 ones 3 zeros.
  test3channels[1] = 0x2b;  // 2 zeros 5 10 1 one.
  test3channels[2] = 0xf0;  // 4 ones 4 zeros.
  test3channels[3] = 0x2b;  // 1 zero 5 01 2 ones.
  test3channels[4] = 0xe0;  // 3 ones 5 zeros.
  test3channels[5] = 0x0;
  test3channels[6] = 0x0;
  test3channels[7] = 0x0;
  test3channels[8] = 0x0;
  test3channels[9] = 0x0;
  test3channels[10] = 0x0;
  test3channels[11] = 0x0;
  test3channels[12] = 0x0;
  test3channels[13] = 0x0;
  test3channels[14] = 0x0;
  EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 100,
                                         timeStamp, test3channels, 15));
  fake_clock.IncrementTime(20);
  module1->Process();
  timeStamp += 160;  // Prepare for next packet.
  // Test sample based multi channel codec, 3 channels 3 bits.
  test3channels[0] = 0xe2;  // 3 ones 3 zeros 2 10
  test3channels[1] = 0xf0;  // 1 1 3 ones 3 zeros 1 0
  test3channels[2] = 0xb8;  // 2 10 3 ones 3 zeros
  test3channels[3] = 0xa0;  // 3 101 5 zeros
  test3channels[4] = 0x0;
  EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 101,
                                         timeStamp, test3channels, 15));
  fake_clock.IncrementTime(20);
  module1->Process();
  timeStamp += 160;  // Prepare for next packet.
  // Test sample based multi channel codec, 3 channels 2 bits.
  test3channels[0] = 0xcb;  // 2 ones 2 zeros 2 10 2 ones
  test3channels[1] = 0x2c;  // 2 zeros 2 10 2 ones 2 zeros
  test3channels[2] = 0xb2;  // 2 10 2 ones 2 zeros 2 10
  test3channels[3] = 0xcb;  // 2 ones 2 zeros 2 10 2 ones
  test3channels[4] = 0x2c;  // 2 zeros 2 10 2 ones 2 zeros
  EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 102,
                                         timeStamp, test3channels, 15));
}

View File

@ -20,9 +20,9 @@
using namespace webrtc;
const WebRtc_UWord64 kTestPictureId = 12345678;
const uint64_t kTestPictureId = 12345678;
class RtcpCallback : public RtcpFeedback {
class RtcpCallback : public RtcpFeedback, public RtcpIntraFrameObserver {
public:
RtcpCallback(RtpRtcp* module) {
_rtpRtcpModule = module;
@ -32,22 +32,9 @@ class RtcpCallback : public RtcpFeedback {
virtual void OnLipSyncUpdate(const WebRtc_Word32 id,
const WebRtc_Word32 audioVideoOffset) {
};
virtual void OnTMMBRReceived(const WebRtc_Word32 id,
const WebRtc_UWord16 bwEstimateKbit) {
};
virtual void OnXRVoIPMetricReceived(
const WebRtc_Word32 id,
const RTCPVoIPMetric* metric,
const WebRtc_Word8 VoIPmetricBuffer[28]) {
};
virtual void OnSLIReceived(const WebRtc_Word32 id,
const WebRtc_UWord8 pictureId) {
EXPECT_EQ(28, pictureId);
};
virtual void OnRPSIReceived(const WebRtc_Word32 id,
const WebRtc_UWord64 pictureId) {
EXPECT_EQ(kTestPictureId, pictureId);
const RTCPVoIPMetric* metric) {
};
virtual void OnApplicationDataReceived(const WebRtc_Word32 id,
const WebRtc_UWord8 subType,
@ -63,16 +50,24 @@ class RtcpCallback : public RtcpFeedback {
EXPECT_STRCASEEQ("test", print_name);
};
virtual void OnSendReportReceived(const WebRtc_Word32 id,
const WebRtc_UWord32 senderSSRC) {
RTCPSenderInfo senderInfo;
EXPECT_EQ(0, _rtpRtcpModule->RemoteRTCPStat(&senderInfo));
};
virtual void OnReceiveReportReceived(const WebRtc_Word32 id,
const WebRtc_UWord32 senderSSRC) {
};
virtual void OnReceivedIntraFrameRequest(const uint32_t ssrc) {
};
virtual void OnReceivedSLI(const uint32_t ssrc,
const uint8_t pictureId) {
EXPECT_EQ(28, pictureId);
};
virtual void OnReceivedRPSI(const uint32_t ssrc,
const uint64_t pictureId) {
EXPECT_EQ(kTestPictureId, pictureId);
};
private:
RtpRtcp* _rtpRtcpModule;
};
@ -114,8 +109,8 @@ class RtpRtcpRtcpTest : public ::testing::Test {
}
void SetUpCallFromModule1(RtcpCallback* feedback1, RtcpCallback* feedback2 ) {
EXPECT_EQ(0, module1->RegisterIncomingRTCPCallback(feedback1));
EXPECT_EQ(0, module2->RegisterIncomingRTCPCallback(feedback2));
module1->RegisterRtcpObservers(feedback1, NULL, feedback1);
module2->RegisterRtcpObservers(feedback2, NULL, feedback2);
EXPECT_EQ(0, module1->SetRTCPStatus(kRtcpCompound));
EXPECT_EQ(0, module2->SetRTCPStatus(kRtcpCompound));
@ -161,7 +156,7 @@ class RtpRtcpRtcpTest : public ::testing::Test {
FakeRtpRtcpClock fake_clock;
};
TEST_F(RtpRtcpRtcpTest, RTCP) {
TEST_F(RtpRtcpRtcpTest, RTCP_PLI_RPSI) {
RtcpCallback* myRTCPFeedback1 = new RtcpCallback(module1);
RtcpCallback* myRTCPFeedback2 = new RtcpCallback(module2);
@ -169,7 +164,13 @@ TEST_F(RtpRtcpRtcpTest, RTCP) {
EXPECT_EQ(0, module1->SendRTCPReferencePictureSelection(kTestPictureId));
EXPECT_EQ(0, module1->SendRTCPSliceLossIndication(156));
}
TEST_F(RtpRtcpRtcpTest, RTCP_CNAME) {
RtcpCallback* myRTCPFeedback1 = new RtcpCallback(module1);
RtcpCallback* myRTCPFeedback2 = new RtcpCallback(module2);
SetUpCallFromModule1(myRTCPFeedback1, myRTCPFeedback2);
WebRtc_UWord32 testOfCSRC[webrtc::kRtpCsrcSize];
EXPECT_EQ(2, module2->RemoteCSRCs(testOfCSRC));
EXPECT_EQ(test_CSRC[0], testOfCSRC[0]);
@ -178,12 +179,41 @@ TEST_F(RtpRtcpRtcpTest, RTCP) {
// Set cname of mixed.
EXPECT_EQ(0, module1->AddMixedCNAME(test_CSRC[0], "john@192.168.0.1"));
EXPECT_EQ(0, module1->AddMixedCNAME(test_CSRC[1], "jane@192.168.0.2"));
EXPECT_EQ(-1, module1->AddMixedCNAME(test_CSRC[0], NULL));
EXPECT_EQ(-1, module1->RemoveMixedCNAME(test_CSRC[0] + 1));
EXPECT_EQ(0, module1->RemoveMixedCNAME(test_CSRC[1]));
EXPECT_EQ(0, module1->AddMixedCNAME(test_CSRC[1], "jane@192.168.0.2"));
// send RTCP packet, triggered by timer
fake_clock.IncrementTime(7500);
module1->Process();
fake_clock.IncrementTime(100);
module2->Process();
char cName[RTCP_CNAME_SIZE];
EXPECT_EQ(-1, module2->RemoteCNAME(module2->RemoteSSRC() + 1, cName));
// Check multiple CNAME.
EXPECT_EQ(0, module2->RemoteCNAME(module2->RemoteSSRC(), cName));
EXPECT_EQ(0, strncmp(cName, "john.doe@test.test", RTCP_CNAME_SIZE));
EXPECT_EQ(0, module2->RemoteCNAME(test_CSRC[0], cName));
EXPECT_EQ(0, strncmp(cName, "john@192.168.0.1", RTCP_CNAME_SIZE));
EXPECT_EQ(0, module2->RemoteCNAME(test_CSRC[1], cName));
EXPECT_EQ(0, strncmp(cName, "jane@192.168.0.2", RTCP_CNAME_SIZE));
EXPECT_EQ(0, module1->SetSendingStatus(false));
// Test that BYE clears the CNAME
EXPECT_EQ(-1, module2->RemoteCNAME(module2->RemoteSSRC(), cName));
}
TEST_F(RtpRtcpRtcpTest, RTCP) {
RtcpCallback* myRTCPFeedback1 = new RtcpCallback(module1);
RtcpCallback* myRTCPFeedback2 = new RtcpCallback(module2);
SetUpCallFromModule1(myRTCPFeedback1, myRTCPFeedback2);
RTCPReportBlock reportBlock;
reportBlock.cumulativeLost = 1;
reportBlock.delaySinceLastSR = 2;
@ -223,23 +253,9 @@ TEST_F(RtpRtcpRtcpTest, RTCP) {
WebRtc_UWord32 receivedNTPfrac = 0;
WebRtc_UWord32 RTCPArrivalTimeSecs = 0;
WebRtc_UWord32 RTCPArrivalTimeFrac = 0;
char cName[RTCP_CNAME_SIZE];
EXPECT_EQ(0, module2->RemoteNTP(&receivedNTPsecs, &receivedNTPfrac,
&RTCPArrivalTimeSecs, &RTCPArrivalTimeFrac));
EXPECT_EQ(-1, module2->RemoteCNAME(module2->RemoteSSRC() + 1, cName));
EXPECT_EQ(-1, module2->RemoteCNAME(module2->RemoteSSRC(), NULL));
// Check multiple CNAME.
EXPECT_EQ(0, module2->RemoteCNAME(module2->RemoteSSRC(), cName));
EXPECT_EQ(0, strncmp(cName, "john.doe@test.test", RTCP_CNAME_SIZE));
EXPECT_EQ(0, module2->RemoteCNAME(test_CSRC[0], cName));
EXPECT_EQ(0, strncmp(cName, "john@192.168.0.1", RTCP_CNAME_SIZE));
EXPECT_EQ(0, module2->RemoteCNAME(test_CSRC[1], cName));
EXPECT_EQ(0, strncmp(cName, "jane@192.168.0.2", RTCP_CNAME_SIZE));
// get all report blocks
std::vector<RTCPReportBlock> report_blocks;
@ -291,16 +307,10 @@ TEST_F(RtpRtcpRtcpTest, RTCP) {
// Test receive report.
EXPECT_EQ(0, module1->SetSendingStatus(false));
// Test that BYE clears the CNAME
EXPECT_EQ(-1, module2->RemoteCNAME(module2->RemoteSSRC(), cName));
// Send RTCP packet, triggered by timer.
fake_clock.IncrementTime(5000);
module1->Process();
module2->Process();
delete myRTCPFeedback1;
delete myRTCPFeedback2;
}
TEST_F(RtpRtcpRtcpTest, RemoteRTCPStatRemote) {

View File

@ -24,6 +24,7 @@
'<(webrtc_root)/modules/modules.gyp:webrtc_utility',
# ModulesVideo
'<(webrtc_root)/modules/modules.gyp:bitrate_controller',
'<(webrtc_root)/modules/modules.gyp:video_capture_module',
'<(webrtc_root)/modules/modules.gyp:webrtc_video_coding',
'<(webrtc_root)/modules/modules.gyp:video_processing',

View File

@ -38,7 +38,9 @@ const int kMaxDecodeWaitTimeMs = 50;
ViEChannel::ViEChannel(WebRtc_Word32 channel_id,
WebRtc_Word32 engine_id,
WebRtc_UWord32 number_of_cores,
ProcessThread& module_process_thread)
ProcessThread& module_process_thread,
RtcpIntraFrameObserver* intra_frame_observer,
RtcpBandwidthObserver* bandwidth_observer)
: ViEFrameProviderBase(channel_id, engine_id),
channel_id_(channel_id),
engine_id_(engine_id),
@ -63,6 +65,8 @@ ViEChannel::ViEChannel(WebRtc_Word32 channel_id,
rtp_observer_(NULL),
rtcp_observer_(NULL),
networkObserver_(NULL),
intra_frame_observer_(intra_frame_observer),
bandwidth_observer_(bandwidth_observer),
rtp_packet_timeout_(false),
using_packet_spread_(false),
external_transport_(NULL),
@ -112,6 +116,10 @@ WebRtc_Word32 ViEChannel::Init() {
"%s: RTP::RegisterSendTransport failure", __FUNCTION__);
return -1;
}
rtp_rtcp_.RegisterRtcpObservers(intra_frame_observer_,
bandwidth_observer_,
this);
if (module_process_thread_.RegisterModule(&rtp_rtcp_) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: RTP::RegisterModule failure", __FUNCTION__);
@ -130,12 +138,6 @@ WebRtc_Word32 ViEChannel::Init() {
"%s: RTP::RegisterIncomingRTPCallback failure", __FUNCTION__);
return -1;
}
if (rtp_rtcp_.RegisterIncomingRTCPCallback(this) != 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: RTP::RegisterIncomingRTCPCallback failure", __FUNCTION__);
return -1;
}
// VCM initialization
if (vcm_.InitializeReceiver() != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
@ -199,7 +201,6 @@ ViEChannel::~ViEChannel() {
while (simulcast_rtp_rtcp_.size() > 0) {
std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
RtpRtcp* rtp_rtcp = *it;
rtp_rtcp->RegisterIncomingRTCPCallback(NULL);
rtp_rtcp->RegisterSendTransport(NULL);
module_process_thread_.DeRegisterModule(rtp_rtcp);
RtpRtcp::DestroyRtpRtcp(rtp_rtcp);
@ -247,13 +248,7 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
rtp_rtcp_.SetSendingStatus(false);
}
if (video_codec.numberOfSimulcastStreams > 0) {
WebRtc_UWord32 start_bitrate = video_codec.startBitrate * 1000;
WebRtc_UWord32 stream_bitrate =
std::min(start_bitrate, video_codec.simulcastStream[0].maxBitrate);
start_bitrate -= stream_bitrate;
// Set correct bitrate to base layer.
rtp_rtcp_.SetSendBitrate(stream_bitrate, video_codec.minBitrate,
video_codec.simulcastStream[0].maxBitrate);
// Create our simulcast RTP modules.
for (int i = simulcast_rtp_rtcp_.size();
i < video_codec.numberOfSimulcastStreams - 1;
@ -275,6 +270,10 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
"%s: RTP::InitReceiver failure", __FUNCTION__);
return -1;
}
rtp_rtcp->RegisterRtcpObservers(intra_frame_observer_,
bandwidth_observer_,
this);
if (rtp_rtcp->RegisterSendTransport(
static_cast<Transport*>(&vie_sender_)) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
@ -294,7 +293,6 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
j > (video_codec.numberOfSimulcastStreams - 1);
j--) {
RtpRtcp* rtp_rtcp = simulcast_rtp_rtcp_.back();
rtp_rtcp->RegisterIncomingRTCPCallback(NULL);
rtp_rtcp->RegisterSendTransport(NULL);
module_process_thread_.DeRegisterModule(rtp_rtcp);
RtpRtcp::DestroyRtpRtcp(rtp_rtcp);
@ -319,12 +317,6 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
if (restart_rtp) {
rtp_rtcp->SetSendingStatus(true);
}
// Configure all simulcast streams min and max bitrates
const WebRtc_UWord32 stream_bitrate =
std::min(start_bitrate, video_codec.simulcastStream[idx].maxBitrate);
start_bitrate -= stream_bitrate;
rtp_rtcp->SetSendBitrate(stream_bitrate, video_codec.minBitrate,
video_codec.simulcastStream[idx].maxBitrate);
}
vie_receiver_.RegisterSimulcastRtpRtcpModules(simulcast_rtp_rtcp_);
} else {
@ -332,7 +324,6 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
// Delete all simulcast rtp modules.
while (!simulcast_rtp_rtcp_.empty()) {
RtpRtcp* rtp_rtcp = simulcast_rtp_rtcp_.back();
rtp_rtcp->RegisterIncomingRTCPCallback(NULL);
rtp_rtcp->RegisterSendTransport(NULL);
module_process_thread_.DeRegisterModule(rtp_rtcp);
RtpRtcp::DestroyRtpRtcp(rtp_rtcp);
@ -341,10 +332,6 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
}
// Clear any previous modules.
vie_receiver_.RegisterSimulcastRtpRtcpModules(simulcast_rtp_rtcp_);
rtp_rtcp_.SetSendBitrate(video_codec.startBitrate * 1000,
video_codec.minBitrate,
video_codec.maxBitrate);
}
// Enable this if H264 is available.
// This sets the wanted packetization mode.

View File

@ -60,7 +60,9 @@ class ViEChannel
ViEChannel(WebRtc_Word32 channel_id,
WebRtc_Word32 engine_id,
WebRtc_UWord32 number_of_cores,
ProcessThread& module_process_thread);
ProcessThread& module_process_thread,
RtcpIntraFrameObserver* intra_frame_observer,
RtcpBandwidthObserver* bandwidth_observer);
~ViEChannel();
WebRtc_Word32 Init();
@ -176,7 +178,6 @@ class ViEChannel
const WebRtc_UWord32 name,
const WebRtc_UWord16 length,
const WebRtc_UWord8* data);
// Implements RtpFeedback.
virtual WebRtc_Word32 OnInitializeDecoder(
const WebRtc_Word32 id,
@ -374,6 +375,8 @@ class ViEChannel
ViERTPObserver* rtp_observer_;
ViERTCPObserver* rtcp_observer_;
ViENetworkObserver* networkObserver_;
RtcpIntraFrameObserver* intra_frame_observer_;
RtcpBandwidthObserver* bandwidth_observer_;
bool rtp_packet_timeout_;
bool using_packet_spread_;

View File

@ -10,6 +10,7 @@
#include "video_engine/vie_channel_group.h"
#include "modules/bitrate_controller/include/bitrate_controller.h"
#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "video_engine/vie_channel.h"
#include "video_engine/vie_encoder.h"
@ -18,7 +19,9 @@
namespace webrtc {
ChannelGroup::ChannelGroup(ProcessThread* process_thread)
: remb_(new VieRemb(process_thread)) {}
: remb_(new VieRemb(process_thread)),
bitrate_controller_(BitrateController::CreateBitrateController()) {
}
ChannelGroup::~ChannelGroup() {
assert(channels_.empty());
@ -53,15 +56,12 @@ bool ChannelGroup::SetChannelRembStatus(int channel_id,
} else if (channel) {
channel->EnableRemb(false);
}
// Update the REMB instance with the necessary RTP modules.
RtpRtcp* rtp_module = channel->rtp_rtcp();
if (sender) {
remb_->AddRembSender(rtp_module);
remb_->AddSendChannel(encoder->SendRtpRtcpModule());
} else {
remb_->RemoveRembSender(rtp_module);
remb_->RemoveSendChannel(encoder->SendRtpRtcpModule());
}
if (receiver) {
remb_->AddReceiveChannel(rtp_module);

View File

@ -17,6 +17,7 @@
namespace webrtc {
class BitrateController;
class ProcessThread;
class ViEChannel;
class ViEEncoder;
@ -40,10 +41,13 @@ class ChannelGroup {
ViEChannel* channel,
ViEEncoder* encoder);
BitrateController* GetBitrateController() { return bitrate_controller_.get();}
private:
typedef std::set<int> ChannelSet;
scoped_ptr<VieRemb> remb_;
scoped_ptr<BitrateController> bitrate_controller_;
ChannelSet channels_;
};

View File

@ -89,11 +89,17 @@ int ViEChannelManager::CreateChannel(int& channel_id) {
// Create a new channel group and add this channel.
ChannelGroup* group = new ChannelGroup(module_process_thread_);
BitrateController* bitrate_controller = group->GetBitrateController();
ViEEncoder* vie_encoder = new ViEEncoder(engine_id_, new_channel_id,
number_of_cores_,
*module_process_thread_);
*module_process_thread_,
bitrate_controller);
RtcpBandwidthObserver* bandwidth_observer =
bitrate_controller->CreateRtcpBandwidthObserver();
if (!(vie_encoder->Init() &&
CreateChannelObject(new_channel_id, vie_encoder))) {
CreateChannelObject(new_channel_id, vie_encoder, bandwidth_observer))) {
delete vie_encoder;
vie_encoder = NULL;
ReturnChannelId(new_channel_id);
@ -122,20 +128,25 @@ int ViEChannelManager::CreateChannel(int& channel_id,
return -1;
}
BitrateController* bitrate_controller = channel_group->GetBitrateController();
RtcpBandwidthObserver* bandwidth_observer =
bitrate_controller->CreateRtcpBandwidthObserver();
ViEEncoder* vie_encoder = NULL;
if (sender) {
// We need to create a new ViEEncoder.
vie_encoder = new ViEEncoder(engine_id_, new_channel_id, number_of_cores_,
*module_process_thread_);
*module_process_thread_,
bitrate_controller);
if (!(vie_encoder->Init() &&
CreateChannelObject(new_channel_id, vie_encoder))) {
CreateChannelObject(new_channel_id, vie_encoder,
bandwidth_observer))) {
delete vie_encoder;
vie_encoder = NULL;
}
} else {
vie_encoder = ViEEncoderPtr(original_channel);
assert(vie_encoder);
if (!CreateChannelObject(new_channel_id, vie_encoder)) {
if (!CreateChannelObject(new_channel_id, vie_encoder, bandwidth_observer)) {
vie_encoder = NULL;
}
}
@ -153,6 +164,7 @@ int ViEChannelManager::CreateChannel(int& channel_id,
int ViEChannelManager::DeleteChannel(int channel_id) {
ViEChannel* vie_channel = NULL;
ViEEncoder* vie_encoder = NULL;
ChannelGroup* group = NULL;
{
// Write lock to make sure no one is using the channel.
ViEManagerWriteScoped wl(*this);
@ -179,14 +191,10 @@ int ViEChannelManager::DeleteChannel(int channel_id) {
assert(e_it != vie_encoder_map_.end());
vie_encoder = e_it->second;
ChannelGroup* group = FindGroup(channel_id);
group = FindGroup(channel_id);
group->SetChannelRembStatus(channel_id, false, false, vie_channel,
vie_encoder);
group->RemoveChannel(channel_id);
if (group->Empty()) {
channel_groups_.remove(group);
delete group;
}
// Check if other channels are using the same encoder.
if (ChannelUsingViEEncoder(channel_id)) {
@ -196,25 +204,39 @@ int ViEChannelManager::DeleteChannel(int channel_id) {
__FUNCTION__, channel_id);
vie_encoder = NULL;
} else {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
"%s ViEEncoder deleted for channel %d", __FUNCTION__,
channel_id);
// Delete later when we've released the critsect.
}
// We can't erase the item before we've checked for other channels using
// same ViEEncoder.
vie_encoder_map_.erase(e_it);
}
if (group->Empty()) {
channel_groups_.remove(group);
} else {
group = NULL; // Prevent group from being deleted.
}
}
// Leave the write critsect before deleting the objects.
// Deleting a channel can cause other objects, such as renderers, to be
// deleted, which might take time.
// If statement just to show that this object is not always deleted.
if (vie_encoder) {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
"%s ViEEncoder deleted for channel %d", __FUNCTION__,
channel_id);
delete vie_encoder;
}
// If statement just to show that this object is not always deleted.
if (group) {
// Delete the group if empty last since the encoder holds a pointer to the
// BitrateController object that the group owns.
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
"%s ChannelGroup deleted for channel %d", __FUNCTION__,
channel_id);
delete group;
}
delete vie_channel;
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
"%s Channel %d deleted", __FUNCTION__, channel_id);
return 0;
@ -296,11 +318,15 @@ bool ViEChannelManager::SetRembStatus(int channel_id, bool sender,
encoder);
}
bool ViEChannelManager::CreateChannelObject(int channel_id,
ViEEncoder* vie_encoder) {
bool ViEChannelManager::CreateChannelObject(
int channel_id,
ViEEncoder* vie_encoder,
RtcpBandwidthObserver* bandwidth_observer) {
ViEChannel* vie_channel = new ViEChannel(channel_id, engine_id_,
number_of_cores_,
*module_process_thread_);
*module_process_thread_,
vie_encoder,
bandwidth_observer);
if (vie_channel->Init() != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
"%s could not init channel", __FUNCTION__, channel_id);

View File

@ -76,7 +76,8 @@ class ViEChannelManager: private ViEManagerBase {
private:
// Creates a channel object connected to |vie_encoder|. Assumed to be called
// protected.
bool CreateChannelObject(int channel_id, ViEEncoder* vie_encoder);
bool CreateChannelObject(int channel_id, ViEEncoder* vie_encoder,
RtcpBandwidthObserver* bandwidth_observer);
// Used by ViEChannelScoped, forcing a manager user to use scoped.
// Returns a pointer to the channel with id 'channelId'.

View File

@ -40,10 +40,26 @@ class QMVideoSettingsCallback : public VCMQMSettingsCallback {
VideoProcessingModule* vpm_;
};
// Adapter that forwards bitrate-controller callbacks to the owning
// ViEEncoder. The encoder registers an instance of this class with the
// BitrateController so it is notified when the send-side estimate of
// available bitrate, loss rate or round-trip time changes.
class ViEBitrateObserver : public BitrateObserver {
 public:
  // |owner| must outlive this observer; ownership is not transferred.
  // explicit: prevents accidental implicit conversion from ViEEncoder*.
  explicit ViEBitrateObserver(ViEEncoder* owner)
      : owner_(owner) {
  }
  // Implements BitrateObserver.
  virtual void OnNetworkChanged(const uint32_t bitrate_bps,
                                const uint8_t fraction_lost,
                                const uint32_t rtt) {
    owner_->OnNetworkChanged(bitrate_bps, fraction_lost, rtt);
  }
 private:
  ViEEncoder* owner_;  // Not owned.
};
ViEEncoder::ViEEncoder(WebRtc_Word32 engine_id, WebRtc_Word32 channel_id,
ViEEncoder::ViEEncoder(WebRtc_Word32 engine_id,
WebRtc_Word32 channel_id,
WebRtc_UWord32 number_of_cores,
ProcessThread& module_process_thread)
ProcessThread& module_process_thread,
BitrateController* bitrate_controller)
: engine_id_(engine_id),
channel_id_(channel_id),
number_of_cores_(number_of_cores),
@ -55,7 +71,9 @@ ViEEncoder::ViEEncoder(WebRtc_Word32 engine_id, WebRtc_Word32 channel_id,
ViEModuleId(engine_id, channel_id), false)),
callback_cs_(CriticalSectionWrapper::CreateCriticalSection()),
data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
bitrate_controller_(bitrate_controller),
paused_(false),
time_last_intra_request_ms_(0),
channels_dropping_delta_frames_(0),
drop_next_frame_(false),
fec_enabled_(false),
@ -74,8 +92,7 @@ ViEEncoder::ViEEncoder(WebRtc_Word32 engine_id, WebRtc_Word32 channel_id,
"%s(engine_id: %d) 0x%p - Constructor", __FUNCTION__, engine_id,
this);
time_last_intra_request_ms_ = 0;
bitrate_observer_.reset(new ViEBitrateObserver(this));
}
bool ViEEncoder::Init() {
@ -102,18 +119,6 @@ bool ViEEncoder::Init() {
"%s InitSender failure", __FUNCTION__);
return false;
}
if (default_rtp_rtcp_.RegisterIncomingVideoCallback(this) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s RegisterIncomingVideoCallback failure", __FUNCTION__);
return false;
}
if (default_rtp_rtcp_.RegisterIncomingRTCPCallback(this) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s RegisterIncomingRTCPCallback failure", __FUNCTION__);
return false;
}
if (module_process_thread_.RegisterModule(&default_rtp_rtcp_) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
@ -318,11 +323,6 @@ WebRtc_Word32 ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
"%s: CodecType: %d, width: %u, height: %u", __FUNCTION__,
video_codec.codecType, video_codec.width, video_codec.height);
// Convert from kbps to bps.
default_rtp_rtcp_.SetSendBitrate(video_codec.startBitrate * 1000,
video_codec.minBitrate,
video_codec.maxBitrate);
// Setting target width and height for VPM.
if (vpm_.SetTargetResolution(video_codec.width, video_codec.height,
video_codec.maxFramerate) != VPM_OK) {
@ -338,6 +338,8 @@ WebRtc_Word32 ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
"Could register RTP module video payload");
return -1;
}
// Convert from kbps to bps.
default_rtp_rtcp_.SetTargetSendBitrate(video_codec.startBitrate * 1000);
WebRtc_UWord16 max_data_payload_length =
default_rtp_rtcp_.MaxDataPayloadLength();
@ -350,10 +352,6 @@ WebRtc_Word32 ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
return -1;
}
data_cs_->Enter();
memcpy(&send_codec_, &video_codec, sizeof(send_codec_));
data_cs_->Leave();
// Set this module as sending right away, let the slave module in the channel
// start and stop sending.
if (default_rtp_rtcp_.Sending() == false) {
@ -364,6 +362,11 @@ WebRtc_Word32 ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
return -1;
}
}
bitrate_controller_->SetBitrateObserver(bitrate_observer_.get(),
video_codec.startBitrate * 1000,
video_codec.minBitrate * 1000,
video_codec.maxBitrate * 1000);
return 0;
}
@ -608,7 +611,11 @@ WebRtc_Word32 ViEEncoder::EstimatedSendBandwidth(
WebRtc_UWord32* available_bandwidth) const {
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
__FUNCTION__);
return default_rtp_rtcp_.EstimatedSendBandwidth(available_bandwidth);
if (!bitrate_controller_->AvailableBandwidth(available_bandwidth)) {
return -1;
}
return 0;
}
int ViEEncoder::CodecTargetBitrate(WebRtc_UWord32* bitrate) const {
@ -779,28 +786,25 @@ WebRtc_Word32 ViEEncoder::RegisterCodecObserver(ViEEncoderObserver* observer) {
return 0;
}
void ViEEncoder::OnSLIReceived(const WebRtc_Word32 id,
const WebRtc_UWord8 picture_id) {
void ViEEncoder::OnReceivedSLI(const uint32_t /*ssrc*/,
const uint8_t picture_id) {
picture_id_sli_ = picture_id;
has_received_sli_ = true;
}
void ViEEncoder::OnRPSIReceived(const WebRtc_Word32 id,
const WebRtc_UWord64 picture_id) {
void ViEEncoder::OnReceivedRPSI(const uint32_t /*ssrc*/,
const uint64_t picture_id) {
picture_id_rpsi_ = picture_id;
has_received_rpsi_ = true;
}
void ViEEncoder::OnReceivedIntraFrameRequest(const WebRtc_Word32 /*id*/,
const FrameType /*type*/,
const WebRtc_UWord8 /*idx*/) {
void ViEEncoder::OnReceivedIntraFrameRequest(const uint32_t /*ssrc*/) {
// Key frame request from remote side, signal to VCM.
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
WebRtc_Word64 now = TickTime::MillisecondTimestamp();
if (time_last_intra_request_ms_ + kViEMinKeyRequestIntervalMs >
now) {
if (time_last_intra_request_ms_ + kViEMinKeyRequestIntervalMs > now) {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: Not not encoding new intra due to timing", __FUNCTION__);
@ -810,10 +814,10 @@ void ViEEncoder::OnReceivedIntraFrameRequest(const WebRtc_Word32 /*id*/,
time_last_intra_request_ms_ = now;
}
void ViEEncoder::OnNetworkChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 bitrate_bps,
const WebRtc_UWord8 fraction_lost,
const WebRtc_UWord16 round_trip_time_ms) {
// Called from ViEBitrateObserver.
void ViEEncoder::OnNetworkChanged(const uint32_t bitrate_bps,
const uint8_t fraction_lost,
const uint32_t round_trip_time_ms) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s(bitrate_bps: %u, fraction_lost: %u, rtt_ms: %u",
@ -821,6 +825,8 @@ void ViEEncoder::OnNetworkChanged(const WebRtc_Word32 id,
vcm_.SetChannelParameters(bitrate_bps / 1000, fraction_lost,
round_trip_time_ms);
default_rtp_rtcp_.SetTargetSendBitrate(bitrate_bps);
}
WebRtc_Word32 ViEEncoder::RegisterEffectFilter(ViEEffectFilter* effect_filter) {

View File

@ -12,13 +12,15 @@
#define WEBRTC_VIDEO_ENGINE_VIE_ENCODER_H_
#include "common_types.h"
#include "rtp_rtcp_defines.h"
#include "typedefs.h"
#include "video_coding_defines.h"
#include "video_processing.h"
#include "vie_defines.h"
#include "vie_file_recorder.h"
#include "vie_frame_provider_base.h"
#include "modules/bitrate_controller/include/bitrate_controller.h"
#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
@ -28,21 +30,24 @@ class ProcessThread;
class QMVideoSettingsCallback;
class RtpRtcp;
class VideoCodingModule;
class ViEBitrateObserver;
class ViEEffectFilter;
class ViEEncoderObserver;
class ViEEncoder
: public RtpVideoFeedback,
public RtcpFeedback,
: public RtcpIntraFrameObserver,
public VCMPacketizationCallback,
public VCMProtectionCallback,
public VCMSendStatisticsCallback,
public ViEFrameCallback {
public:
friend class ViEBitrateObserver;
ViEEncoder(WebRtc_Word32 engine_id,
WebRtc_Word32 channel_id,
WebRtc_UWord32 number_of_cores,
ProcessThread& module_process_thread);
ProcessThread& module_process_thread,
BitrateController* bitrate_controller);
~ViEEncoder();
bool Init();
@ -93,8 +98,10 @@ class ViEEncoder
WebRtc_Word32 SendKeyFrame();
WebRtc_Word32 SendCodecStatistics(WebRtc_UWord32& num_key_frames,
WebRtc_UWord32& num_delta_frames);
WebRtc_Word32 EstimatedSendBandwidth(
WebRtc_UWord32* available_bandwidth) const;
WebRtc_UWord32* available_bandwidth) const;
int CodecTargetBitrate(WebRtc_UWord32* bitrate) const;
// Loss protection.
WebRtc_Word32 UpdateProtectionMethod();
@ -122,21 +129,14 @@ class ViEEncoder
const WebRtc_UWord32 frame_rate);
WebRtc_Word32 RegisterCodecObserver(ViEEncoderObserver* observer);
// Implements RtcpFeedback.
virtual void OnSLIReceived(const WebRtc_Word32 id,
const WebRtc_UWord8 picture_id);
virtual void OnRPSIReceived(const WebRtc_Word32 id,
const WebRtc_UWord64 picture_id);
// Implements RtcpIntraFrameObserver.
virtual void OnReceivedIntraFrameRequest(const uint32_t ssrc);
// Implements RtpVideoFeedback.
virtual void OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
const FrameType type,
const WebRtc_UWord8 stream_idx);
virtual void OnReceivedSLI(const uint32_t ssrc,
const uint8_t picture_id);
virtual void OnNetworkChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 bitrate_bps,
const WebRtc_UWord8 fraction_lost,
const WebRtc_UWord16 round_trip_time_ms);
virtual void OnReceivedRPSI(const uint32_t ssrc,
const uint64_t picture_id);
// Effect filter.
WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effect_filter);
@ -144,6 +144,12 @@ class ViEEncoder
// Recording.
ViEFileRecorder& GetOutgoingFileRecorder();
protected:
// Called by BitrateObserver.
void OnNetworkChanged(const uint32_t bitrate_bps,
const uint8_t fraction_lost,
const uint32_t round_trip_time_ms);
private:
WebRtc_Word32 engine_id_;
const int channel_id_;
@ -154,7 +160,9 @@ class ViEEncoder
RtpRtcp& default_rtp_rtcp_;
scoped_ptr<CriticalSectionWrapper> callback_cs_;
scoped_ptr<CriticalSectionWrapper> data_cs_;
VideoCodec send_codec_;
scoped_ptr<BitrateObserver> bitrate_observer_;
BitrateController* bitrate_controller_;
bool paused_;
WebRtc_Word64 time_last_intra_request_ms_;

View File

@ -101,38 +101,9 @@ void VieRemb::RemoveRembSender(RtpRtcp* rtp_rtcp) {
}
}
// Registers |rtp_rtcp| as a sending channel affected by incoming REMB
// packets. Adding the same module twice is a harmless no-op.
void VieRemb::AddSendChannel(RtpRtcp* rtp_rtcp) {
  assert(rtp_rtcp);
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
               "VieRemb::AddSendChannel(%p)", rtp_rtcp);

  CriticalSectionScoped lock(list_crit_.get());
  // Only add the module if it is not already registered.
  RtpModules::iterator existing =
      std::find(send_modules_.begin(), send_modules_.end(), rtp_rtcp);
  if (existing == send_modules_.end()) {
    send_modules_.push_back(rtp_rtcp);
  }
}
// Unregisters |rtp_rtcp| from the set of sending channels. Removing a
// module that was never added is a harmless no-op.
void VieRemb::RemoveSendChannel(RtpRtcp* rtp_rtcp) {
  assert(rtp_rtcp);
  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
               "VieRemb::RemoveSendChannel(%p)", rtp_rtcp);

  CriticalSectionScoped lock(list_crit_.get());
  // Erase the first (and only) matching entry, if present.
  RtpModules::iterator found =
      std::find(send_modules_.begin(), send_modules_.end(), rtp_rtcp);
  if (found != send_modules_.end()) {
    send_modules_.erase(found);
  }
}
bool VieRemb::InUse() const {
CriticalSectionScoped cs(list_crit_.get());
if(receive_modules_.empty() && send_modules_.empty() && rtcp_sender_.empty())
if(receive_modules_.empty() && rtcp_sender_.empty())
return false;
else
return true;
@ -161,21 +132,6 @@ void VieRemb::OnReceiveBitrateChanged(unsigned int ssrc, unsigned int bitrate) {
TickTime::MillisecondTimestamp(), bitrate);
}
void VieRemb::OnReceivedRemb(unsigned int bitrate) {
WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
"VieRemb::OnReceivedRemb(bitrate: %u)", bitrate);
// TODO(mflodman) Should be extended to allow different split of bitrate.
// TODO(mflodman) Do we want to call |SetMaximumBitrateEstimate| from
// |Process| instead?
// Split the bitrate estimate between all sending channels.
CriticalSectionScoped cs(list_crit_.get());
for (RtpModules::iterator it = send_modules_.begin();
it != send_modules_.end(); ++it) {
(*it)->SetMaximumBitrateEstimate(bitrate / send_modules_.size());
}
}
// Implements Module. VieRemb does not use the unique id, so this is a
// no-op that always reports success.
WebRtc_Word32 VieRemb::ChangeUniqueId(const WebRtc_Word32 id) {
return 0;
}

View File

@ -48,13 +48,6 @@ class VieRemb : public RtpRemoteBitrateObserver, public Module {
// Removes a REMB RTCP sender.
void RemoveRembSender(RtpRtcp* rtp_rtcp);
// Called to add a send channel encoding and sending data, affected by
// received REMB packets.
void AddSendChannel(RtpRtcp* rtp_rtcp);
// Removes the specified channel from receiving REMB packet estimates.
void RemoveSendChannel(RtpRtcp* rtp_rtcp);
// Returns true if the instance is in use, false otherwise.
bool InUse() const;
@ -65,10 +58,6 @@ class VieRemb : public RtpRemoteBitrateObserver, public Module {
// Implements RtpReceiveBitrateUpdate.
virtual void OnReceiveBitrateChanged(unsigned int ssrc, unsigned int bitrate);
// Called for every new received REMB packet and distributes the estimate
// between all sending modules.
virtual void OnReceivedRemb(unsigned int bitrate);
// Implements Module.
virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
virtual WebRtc_Word32 TimeUntilNextProcess();
@ -89,9 +78,6 @@ class VieRemb : public RtpRemoteBitrateObserver, public Module {
// All RtpRtcp modules to include in the REMB packet.
RtpModules receive_modules_;
// All modules encoding and sending data.
RtpModules send_modules_;
// All modules that can send REMB RTCP.
RtpModules rtcp_sender_;

View File

@ -63,7 +63,6 @@ TEST_F(ViERembTest, OneModuleTestForSendingRemb)
{
MockRtpRtcp rtp;
vie_remb_->AddReceiveChannel(&rtp);
vie_remb_->AddSendChannel(&rtp);
vie_remb_->AddRembSender(&rtp);
const unsigned int bitrate_estimate = 456;
@ -86,7 +85,6 @@ TEST_F(ViERembTest, OneModuleTestForSendingRemb)
vie_remb_->Process();
vie_remb_->RemoveReceiveChannel(&rtp);
vie_remb_->RemoveSendChannel(&rtp);
vie_remb_->RemoveRembSender(&rtp);
}
@ -94,7 +92,6 @@ TEST_F(ViERembTest, LowerEstimateToSendRemb)
{
MockRtpRtcp rtp;
vie_remb_->AddReceiveChannel(&rtp);
vie_remb_->AddSendChannel(&rtp);
vie_remb_->AddRembSender(&rtp);
unsigned int bitrate_estimate = 456;
@ -118,7 +115,6 @@ TEST_F(ViERembTest, VerifyCombinedBitrateEstimate)
MockRtpRtcp rtp_0;
MockRtpRtcp rtp_1;
vie_remb_->AddReceiveChannel(&rtp_0);
vie_remb_->AddSendChannel(&rtp_0);
vie_remb_->AddRembSender(&rtp_0);
vie_remb_->AddReceiveChannel(&rtp_1);
@ -143,47 +139,15 @@ TEST_F(ViERembTest, VerifyCombinedBitrateEstimate)
vie_remb_->Process();
vie_remb_->RemoveReceiveChannel(&rtp_0);
vie_remb_->RemoveSendChannel(&rtp_0);
vie_remb_->RemoveRembSender(&rtp_0);
vie_remb_->RemoveReceiveChannel(&rtp_1);
}
// Add two senders, and insert a received REMB estimate. Both sending channels
// should get half of the received value.
TEST_F(ViERembTest, IncomingRemb)
{
MockRtpRtcp rtp_0;
MockRtpRtcp rtp_1;
// Register both mocks as sending channels so they share REMB estimates.
vie_remb_->AddSendChannel(&rtp_0);
vie_remb_->AddSendChannel(&rtp_1);
const unsigned int bitrate_estimate = 1200;
// Fake received REMB and verify both modules get half of the bitrate.
EXPECT_CALL(rtp_0, SetMaximumBitrateEstimate(bitrate_estimate/2))
.Times(1);
EXPECT_CALL(rtp_1, SetMaximumBitrateEstimate(bitrate_estimate/2))
.Times(1);
vie_remb_->OnReceivedRemb(bitrate_estimate);
// Remove one of the modules and verify the other module get the entire
// bitrate.
vie_remb_->RemoveSendChannel(&rtp_0);
// The removed module must receive no further estimate updates.
EXPECT_CALL(rtp_0, SetMaximumBitrateEstimate(_))
.Times(0);
EXPECT_CALL(rtp_1, SetMaximumBitrateEstimate(bitrate_estimate))
.Times(1);
vie_remb_->OnReceivedRemb(bitrate_estimate);
// Clean up the remaining registration.
vie_remb_->RemoveSendChannel(&rtp_1);
}
TEST_F(ViERembTest, NoRembForIncreasedBitrate)
{
MockRtpRtcp rtp_0;
MockRtpRtcp rtp_1;
vie_remb_->AddReceiveChannel(&rtp_0);
vie_remb_->AddSendChannel(&rtp_0);
vie_remb_->AddRembSender(&rtp_0);
vie_remb_->AddReceiveChannel(&rtp_1);
@ -222,7 +186,6 @@ TEST_F(ViERembTest, NoRembForIncreasedBitrate)
vie_remb_->Process();
vie_remb_->RemoveReceiveChannel(&rtp_1);
vie_remb_->RemoveReceiveChannel(&rtp_0);
vie_remb_->RemoveSendChannel(&rtp_0);
vie_remb_->RemoveRembSender(&rtp_0);
}
@ -231,7 +194,6 @@ TEST_F(ViERembTest, ChangeSendRtpModule)
MockRtpRtcp rtp_0;
MockRtpRtcp rtp_1;
vie_remb_->AddReceiveChannel(&rtp_0);
vie_remb_->AddSendChannel(&rtp_0);
vie_remb_->AddRembSender(&rtp_0);
vie_remb_->AddReceiveChannel(&rtp_1);
@ -258,9 +220,7 @@ TEST_F(ViERembTest, ChangeSendRtpModule)
// Remove the sending module, add it again -> should get remb on the second
// module.
vie_remb_->RemoveSendChannel(&rtp_0);
vie_remb_->RemoveRembSender(&rtp_0);
vie_remb_->AddSendChannel(&rtp_1);
vie_remb_->AddRembSender(&rtp_1);
vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate[0]);
@ -273,7 +233,6 @@ TEST_F(ViERembTest, ChangeSendRtpModule)
vie_remb_->RemoveReceiveChannel(&rtp_0);
vie_remb_->RemoveReceiveChannel(&rtp_1);
vie_remb_->RemoveSendChannel(&rtp_1);
}
TEST_F(ViERembTest, OnlyOneRembForDoubleProcess)
@ -283,7 +242,6 @@ TEST_F(ViERembTest, OnlyOneRembForDoubleProcess)
unsigned int ssrc[] = { 1234 };
vie_remb_->AddReceiveChannel(&rtp);
vie_remb_->AddSendChannel(&rtp);
vie_remb_->AddRembSender(&rtp);
vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate);
EXPECT_CALL(rtp, RemoteSSRC())
@ -301,7 +259,6 @@ TEST_F(ViERembTest, OnlyOneRembForDoubleProcess)
.Times(0);
vie_remb_->Process();
vie_remb_->RemoveReceiveChannel(&rtp);
vie_remb_->RemoveSendChannel(&rtp);
vie_remb_->RemoveRembSender(&rtp);
}
@ -312,7 +269,6 @@ TEST_F(ViERembTest, NoOnReceivedBitrateChangedCall)
.WillRepeatedly(Return(1234));
vie_remb_->AddReceiveChannel(&rtp);
vie_remb_->AddSendChannel(&rtp);
vie_remb_->AddRembSender(&rtp);
// TODO(mflodman) Add fake clock.
TestSleep(1010);
@ -322,7 +278,6 @@ TEST_F(ViERembTest, NoOnReceivedBitrateChangedCall)
vie_remb_->Process();
vie_remb_->RemoveReceiveChannel(&rtp);
vie_remb_->RemoveSendChannel(&rtp);
vie_remb_->RemoveRembSender(&rtp);
}

View File

@ -1258,13 +1258,7 @@ Channel::~Channel()
"~Channel() failed to de-register incoming RTP"
" callback (RTP module)");
}
if (_rtpRtcpModule.RegisterIncomingRTCPCallback(NULL) == -1)
{
WEBRTC_TRACE(kTraceWarning, kTraceVoice,
VoEId(_instanceId,_channelId),
"~Channel() failed to de-register incoming RTCP "
"callback (RTP module)");
}
_rtpRtcpModule.RegisterRtcpObservers(NULL, NULL, NULL);
if (_rtpRtcpModule.RegisterAudioCallback(NULL) == -1)
{
WEBRTC_TRACE(kTraceWarning, kTraceVoice,
@ -1413,11 +1407,11 @@ Channel::Init()
}
// --- Register all permanent callbacks
_rtpRtcpModule.RegisterRtcpObservers(NULL, NULL, this);
const bool fail =
(_rtpRtcpModule.RegisterIncomingDataCallback(this) == -1) ||
(_rtpRtcpModule.RegisterIncomingRTPCallback(this) == -1) ||
(_rtpRtcpModule.RegisterIncomingRTCPCallback(this) == -1) ||
(_rtpRtcpModule.RegisterSendTransport(this) == -1) ||
(_rtpRtcpModule.RegisterAudioCallback(this) == -1) ||
(_audioCodingModule.RegisterTransportCallback(this) == -1) ||