From 4d9c07ad6ddbf9ea655eb5b09dc928e284fc5e19 Mon Sep 17 00:00:00 2001
From: "tnakamura@webrtc.org" <tnakamura@webrtc.org>
Date: Fri, 31 May 2013 16:06:01 +0000
Subject: [PATCH] Revert 4127 "Switch frame list implementation to std::map."

We want to revert r4104 for b/9216252, but because r4127 was built on top of
r4104, we need to revert r4127 first. We'll un-revert or re-revert this if we
discover that r4104 is not to blame.

> Switch frame list implementation to std::map.
>
> This reduces the complexity of insert and find (by timestamp) from linear
> to logarithmic, which has a big impact on large frame lists.
>
> BUG=1726
> TEST=trybots, vie_auto_test --automated
> R=mikhal@webrtc.org
>
> Review URL: https://webrtc-codereview.appspot.com/1561005

TBR=stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/1590005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4145 4adac7df-926f-26a2-2b94-8c16560cd09d
---
 .../video_coding/main/source/jitter_buffer.cc | 129 +++++++++++-------
 .../video_coding/main/source/jitter_buffer.h  |  15 +-
 2 files changed, 79 insertions(+), 65 deletions(-)

diff --git a/webrtc/modules/video_coding/main/source/jitter_buffer.cc b/webrtc/modules/video_coding/main/source/jitter_buffer.cc
index 87c3de7600..c0ea9de594 100644
--- a/webrtc/modules/video_coding/main/source/jitter_buffer.cc
+++ b/webrtc/modules/video_coding/main/source/jitter_buffer.cc
@@ -31,71 +31,98 @@ namespace webrtc {
 
 // Use this rtt if no value has been reported.
 static const uint32_t kDefaultRtt = 200;
 
-typedef std::pair<uint32_t, VCMFrameBuffer*> FrameListPair;
+// Predicates used when searching for frames in the frame buffer list
+class FrameSmallerTimestamp {
+ public:
+  explicit FrameSmallerTimestamp(uint32_t timestamp) : timestamp_(timestamp) {}
+  bool operator()(VCMFrameBuffer* frame) {
+    return IsNewerTimestamp(timestamp_, frame->TimeStamp());
+  }
 
-bool IsKeyFrame(FrameListPair pair) {
-  return pair.second->FrameType() == kVideoFrameKey;
-}
+ private:
+  uint32_t timestamp_;
+};
 
-bool HasNonEmptyState(FrameListPair pair) {
-  return pair.second->GetState() != kStateEmpty;
+class FrameEqualTimestamp {
+ public:
+  explicit FrameEqualTimestamp(uint32_t timestamp) : timestamp_(timestamp) {}
+  bool operator()(VCMFrameBuffer* frame) {
+    return (timestamp_ == frame->TimeStamp());
+  }
+
+ private:
+  uint32_t timestamp_;
+};
+
+class KeyFrameCriteria {
+ public:
+  bool operator()(VCMFrameBuffer* frame) {
+    return frame->FrameType() == kVideoFrameKey;
+  }
+};
+
+class CompleteKeyFrameCriteria {
+ public:
+  bool operator()(VCMFrameBuffer* frame) {
+    return (frame->FrameType() == kVideoFrameKey &&
+            frame->GetState() == kStateComplete);
+  }
+};
+
+bool HasNonEmptyState(VCMFrameBuffer* frame) {
+  return frame->GetState() != kStateEmpty;
 }
 
 void FrameList::InsertFrame(VCMFrameBuffer* frame) {
-  insert(rbegin().base(), FrameListPair(frame->TimeStamp(), frame));
+  reverse_iterator rit = std::find_if(
+      rbegin(), rend(), FrameSmallerTimestamp(frame->TimeStamp()));
+  insert(rit.base(), frame);
 }
 
 VCMFrameBuffer* FrameList::FindFrame(uint32_t timestamp) const {
-  FrameList::const_iterator it = find(timestamp);
+  FrameList::const_iterator it = std::find_if(begin(), end(),
+                                              FrameEqualTimestamp(timestamp));
   if (it == end())
     return NULL;
-  return it->second;
+  return *it;
 }
 
 VCMFrameBuffer* FrameList::PopFrame(uint32_t timestamp) {
-  FrameList::iterator it = find(timestamp);
+  FrameList::iterator it = std::find_if(begin(), end(),
+                                        FrameEqualTimestamp(timestamp));
   if (it == end())
     return NULL;
-  VCMFrameBuffer* frame = it->second;
+  VCMFrameBuffer* frame = *it;
   erase(it);
   return frame;
 }
 
-VCMFrameBuffer* FrameList::Front() const {
-  return begin()->second;
-}
-
-VCMFrameBuffer* FrameList::Back() const {
-  return rbegin()->second;
-}
-
 int FrameList::RecycleFramesUntilKeyFrame(FrameList::iterator* key_frame_it) {
   int drop_count = 0;
-  FrameList::iterator it = begin();
+  *key_frame_it = begin();
   while (!empty()) {
     // Throw at least one frame.
     WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding, -1,
                  "Recycling: type=%s, low seqnum=%u",
-                 it->second->FrameType() == kVideoFrameKey ?
-                 "key" : "delta", it->second->GetLowSeqNum());
-    if (it->second->GetState() != kStateDecoding) {
-      it->second->SetState(kStateFree);
+                 (**key_frame_it)->FrameType() == kVideoFrameKey ?
+                 "key" : "delta", (**key_frame_it)->GetLowSeqNum());
+    if ((**key_frame_it)->GetState() != kStateDecoding) {
+      (**key_frame_it)->SetState(kStateFree);
     }
-    erase(it++);
+    *key_frame_it = erase(*key_frame_it);
     ++drop_count;
-    if (it != end() && it->second->FrameType() == kVideoFrameKey) {
-      *key_frame_it = it;
+    if (*key_frame_it != end() &&
+        (**key_frame_it)->FrameType() == kVideoFrameKey) {
       return drop_count;
     }
   }
-  *key_frame_it = end();
   return drop_count;
 }
 
 int FrameList::CleanUpOldOrEmptyFrames(VCMDecodingState* decoding_state) {
   int drop_count = 0;
   while (!empty()) {
-    VCMFrameBuffer* oldest_frame = Front();
+    VCMFrameBuffer* oldest_frame = front();
     bool remove_frame = false;
     if (oldest_frame->GetState() == kStateEmpty && size() > 1) {
       // This frame is empty, try to update the last decoded state and drop it
@@ -107,8 +134,8 @@ int FrameList::CleanUpOldOrEmptyFrames(VCMDecodingState* decoding_state) {
     if (!remove_frame) {
       break;
     }
-    if (oldest_frame->GetState() != kStateDecoding) {
-      oldest_frame->SetState(kStateFree);
+    if (front()->GetState() != kStateDecoding) {
+      front()->SetState(kStateFree);
     }
     ++drop_count;
     TRACE_EVENT_INSTANT1("webrtc", "JB::OldOrEmptyFrameDropped", "timestamp",
@@ -223,19 +250,17 @@ void VCMJitterBuffer::CopyFrom(const VCMJitterBuffer& rhs) {
     }
   }
   decodable_frames_.clear();
+  incomplete_frames_.clear();
   int i = 0;
   for (FrameList::const_iterator it = rhs.decodable_frames_.begin();
        it != rhs.decodable_frames_.end(); ++it, ++i) {
-    frame_buffers_[i] = new VCMFrameBuffer(*it->second);
-    decodable_frames_.insert(decodable_frames_.rbegin().base(),
-        FrameListPair(frame_buffers_[i]->TimeStamp(), frame_buffers_[i]));
+    frame_buffers_[i] = new VCMFrameBuffer(**it);
+    decodable_frames_.push_back(frame_buffers_[i]);
   }
-  incomplete_frames_.clear();
   for (FrameList::const_iterator it = rhs.incomplete_frames_.begin();
        it != rhs.incomplete_frames_.end(); ++it, ++i) {
-    frame_buffers_[i] = new VCMFrameBuffer(*it->second);
-    incomplete_frames_.insert(incomplete_frames_.rbegin().base(),
-        FrameListPair(frame_buffers_[i]->TimeStamp(), frame_buffers_[i]));
+    frame_buffers_[i] = new VCMFrameBuffer(**it);
+    incomplete_frames_.push_back(frame_buffers_[i]);
   }
   rhs.crit_sect_->Leave();
   crit_sect_->Leave();
@@ -475,7 +500,7 @@ bool VCMJitterBuffer::NextCompleteTimestamp(
     crit_sect_->Leave();
     return false;
   }
-  *timestamp = decodable_frames_.Front()->TimeStamp();
+  *timestamp = decodable_frames_.front()->TimeStamp();
   crit_sect_->Leave();
   return true;
 }
@@ -802,7 +827,7 @@ bool VCMJitterBuffer::IsContinuous(const VCMFrameBuffer& frame) const {
   decoding_state.CopyFrom(last_decoded_state_);
   for (FrameList::const_iterator it = decodable_frames_.begin();
        it != decodable_frames_.end(); ++it) {
-    VCMFrameBuffer* decodable_frame = it->second;
+    VCMFrameBuffer* decodable_frame = *it;
     if (IsNewerTimestamp(decodable_frame->TimeStamp(), frame.TimeStamp())) {
       break;
     }
@@ -825,14 +850,14 @@ void VCMJitterBuffer::FindAndInsertContinuousFrames(
   // 2. The end of the list was reached.
   for (FrameList::iterator it = incomplete_frames_.begin();
        it != incomplete_frames_.end();) {
-    VCMFrameBuffer* frame = it->second;
+    VCMFrameBuffer* frame = *it;
     if (IsNewerTimestamp(new_frame.TimeStamp(), frame->TimeStamp())) {
       ++it;
       continue;
     }
     if (IsContinuousInState(*frame, decoding_state)) {
       decodable_frames_.InsertFrame(frame);
-      incomplete_frames_.erase(it++);
+      it = incomplete_frames_.erase(it);
       decoding_state.SetState(frame);
     } else if (frame->TemporalId() <= 0) {
       break;
     }
@@ -912,11 +937,11 @@ int VCMJitterBuffer::NonContinuousOrIncompleteDuration() {
   if (incomplete_frames_.empty()) {
     return 0;
   }
-  uint32_t start_timestamp = incomplete_frames_.Front()->TimeStamp();
+  uint32_t start_timestamp = incomplete_frames_.front()->TimeStamp();
   if (!decodable_frames_.empty()) {
-    start_timestamp = decodable_frames_.Back()->TimeStamp();
+    start_timestamp = decodable_frames_.back()->TimeStamp();
   }
-  return incomplete_frames_.Back()->TimeStamp() - start_timestamp;
+  return incomplete_frames_.back()->TimeStamp() - start_timestamp;
 }
 
 uint16_t VCMJitterBuffer::EstimatedLowSequenceNumber(
@@ -975,7 +1000,7 @@ uint16_t* VCMJitterBuffer::GetNackList(uint16_t* nack_list_size,
         non_continuous_incomplete_duration << " > " <<
         90 * max_incomplete_time_ms_;
     FrameList::reverse_iterator rit = find_if(incomplete_frames_.rbegin(),
-        incomplete_frames_.rend(), IsKeyFrame);
+        incomplete_frames_.rend(), KeyFrameCriteria());
     if (rit == incomplete_frames_.rend()) {
       // Request a key frame if we don't have one already.
       *request_key_frame = true;
@@ -987,7 +1012,7 @@ uint16_t* VCMJitterBuffer::GetNackList(uint16_t* nack_list_size,
       // Note that the estimated low sequence number is correct for VP8
       // streams because only the first packet of a key frame is marked.
       last_decoded_state_.Reset();
-      DropPacketsFromNackList(EstimatedLowSequenceNumber(*rit->second));
+      DropPacketsFromNackList(EstimatedLowSequenceNumber(**rit));
     }
   }
 }
@@ -1002,9 +1027,9 @@ uint16_t* VCMJitterBuffer::GetNackList(uint16_t* nack_list_size,
 
 VCMFrameBuffer* VCMJitterBuffer::NextFrame() const {
   if (!decodable_frames_.empty())
-    return decodable_frames_.Front();
+    return decodable_frames_.front();
   if (!incomplete_frames_.empty())
-    return incomplete_frames_.Front();
+    return incomplete_frames_.front();
   return NULL;
 }
 
@@ -1105,8 +1130,8 @@ void VCMJitterBuffer::RenderBufferSize(uint32_t* timestamp_start,
   if (decodable_frames_.empty()) {
     return;
   }
-  *timestamp_start = decodable_frames_.Front()->TimeStamp();
-  *timestamp_end = decodable_frames_.Back()->TimeStamp();
+  *timestamp_start = decodable_frames_.front()->TimeStamp();
+  *timestamp_end = decodable_frames_.back()->TimeStamp();
 }
 
 // Set the frame state to free and remove it from the sorted
@@ -1174,7 +1199,7 @@ bool VCMJitterBuffer::RecycleFramesUntilKeyFrame() {
     // Reset last decoded state to make sure the next frame decoded is a key
     // frame, and start NACKing from here.
     last_decoded_state_.Reset();
-    DropPacketsFromNackList(EstimatedLowSequenceNumber(*key_frame_it->second));
+    DropPacketsFromNackList(EstimatedLowSequenceNumber(**key_frame_it));
   } else if (decodable_frames_.empty()) {
    last_decoded_state_.Reset();
    // TODO(mikhal): No sync.
    missing_sequence_numbers_.clear();
diff --git a/webrtc/modules/video_coding/main/source/jitter_buffer.h b/webrtc/modules/video_coding/main/source/jitter_buffer.h
index a9ccdc8ad8..16a73e2a4c 100644
--- a/webrtc/modules/video_coding/main/source/jitter_buffer.h
+++ b/webrtc/modules/video_coding/main/source/jitter_buffer.h
@@ -11,7 +11,7 @@
 #ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_JITTER_BUFFER_H_
 #define WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_JITTER_BUFFER_H_
 
-#include <map>
+#include <list>
 #include <set>
 #include <vector>
 
@@ -47,22 +47,11 @@ struct VCMJitterSample {
   int64_t latest_packet_time;
 };
 
-class TimestampLessThan {
- public:
-  bool operator() (const uint32_t& timestamp1,
-                   const uint32_t& timestamp2) const {
-    return IsNewerTimestamp(timestamp2, timestamp1);
-  }
-};
-
-class FrameList :
-    public std::map<uint32_t, VCMFrameBuffer*, TimestampLessThan> {
+class FrameList : public std::list<VCMFrameBuffer*> {
  public:
  void InsertFrame(VCMFrameBuffer* frame);
  VCMFrameBuffer* FindFrame(uint32_t timestamp) const;
  VCMFrameBuffer* PopFrame(uint32_t timestamp);
-  VCMFrameBuffer* Front() const;
-  VCMFrameBuffer* Back() const;
  int RecycleFramesUntilKeyFrame(FrameList::iterator* key_frame_it);
  int CleanUpOldOrEmptyFrames(VCMDecodingState* decoding_state);
 };
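
Appendix (not part of the patch): the quoted rationale for r4127 was that a std::map keyed
by RTP timestamp makes insert and find logarithmic, while the list-based FrameList restored
by this revert scans linearly. The standalone C++ sketch below contrasts the two. Frame and
InsertSorted are simplified, hypothetical stand-ins for VCMFrameBuffer and
FrameList::InsertFrame; only IsNewerTimestamp and TimestampLessThan mirror the real code.

// sorted_insert_sketch.cc -- illustration only, not WebRTC source.
#include <algorithm>
#include <cstdint>
#include <initializer_list>
#include <iostream>
#include <list>
#include <map>

struct Frame {
  uint32_t timestamp;  // RTP timestamp, wraps around at 2^32.
};

// Wrap-aware comparison: true if |a| is newer than |b| (same idea as
// webrtc's IsNewerTimestamp helper).
bool IsNewerTimestamp(uint32_t a, uint32_t b) {
  return a != b && static_cast<uint32_t>(a - b) < 0x80000000u;
}

// Wrap-aware key comparator, mirroring the TimestampLessThan class removed
// from jitter_buffer.h by this revert.
struct TimestampLessThan {
  bool operator()(uint32_t t1, uint32_t t2) const {
    return IsNewerTimestamp(t2, t1);
  }
};

// List-based insert, following the shape of FrameList::InsertFrame after the
// revert: scan from the back for the first older frame, insert after it.
// O(n) per insert, but n (frames in flight) is usually small.
void InsertSorted(std::list<Frame*>* frames, Frame* frame) {
  std::list<Frame*>::reverse_iterator rit = std::find_if(
      frames->rbegin(), frames->rend(),
      [frame](Frame* f) { return IsNewerTimestamp(frame->timestamp, f->timestamp); });
  frames->insert(rit.base(), frame);
}

int main() {
  std::list<Frame*> list_frames;                          // representation restored by the revert
  std::map<uint32_t, Frame*, TimestampLessThan> map_frames;  // representation being reverted

  Frame a = {3000}, b = {6000}, c = {4500};
  for (Frame* f : {&a, &b, &c}) {
    InsertSorted(&list_frames, f);     // linear scan keeps the list ordered
    map_frames[f->timestamp] = f;      // tree insert, O(log n)
  }

  for (Frame* f : list_frames)
    std::cout << f->timestamp << " ";  // prints: 3000 4500 6000
  std::cout << "\n";
  return 0;
}

With only a handful of frames buffered the linear scan is cheap; per the quoted commit
message, the map variant mattered mainly for large frame lists, which is why reverting it is
a low-risk way to isolate b/9216252.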