dcsctp: Optimize SACK generation

Before this CL, a SACK was built from scratch from information about each
received fragment in order to produce correct gap-ack-blocks.

When there was a lot of data in the data tracker (due to packet loss),
this took considerable time, as generating a SACK was O(N), where N is
the number of fragments in the data tracker.

By instead keeping precomputed gap-ack-blocks that are continuously
updated as data is received, generating a SACK is much faster, and memory
usage goes down a bit as well.

Bug: webrtc:12799
Change-Id: I924752c1d6d31f06d27246e10b595e9ccb19320f
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/220763
Commit-Queue: Victor Boivie <boivie@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#34171}
Author: Victor Boivie, 2021-05-29 21:20:12 +02:00 (committed by WebRTC LUCI CQ)
parent 0377bab21b
commit 27d2be3583
4 changed files with 396 additions and 52 deletions
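As a rough sketch of the idea described in the commit message, the received TSNs above the cumulative ack point can be kept as a sorted vector of inclusive, non-overlapping, non-adjacent ranges that is updated on every arrival, so that building the gap-ack-blocks is a single pass over the ranges. This is only an illustration, not the dcsctp implementation: it uses plain uint32_t TSNs, invented names (TsnRangeSketch, AddTsn, GapAckBlocks), and ignores the TSN wrap-around handling that dcsctp's UnwrappedTSN provides.

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <utility>
#include <vector>

// An inclusive range of received TSNs. The vector of ranges is kept sorted,
// non-overlapping and non-adjacent, mirroring the shape of the data this CL
// maintains (but not its actual implementation).
struct TsnRangeSketch {
  uint32_t first;
  uint32_t last;
};

// Adds `tsn`, expanding or merging neighbouring ranges so that the invariant
// holds. Returns false if `tsn` was already covered.
bool AddTsn(std::vector<TsnRangeSketch>& blocks, uint32_t tsn) {
  auto it = std::lower_bound(
      blocks.begin(), blocks.end(), tsn,
      [](const TsnRangeSketch& b, uint32_t t) { return b.last + 1 < t; });
  if (it == blocks.end()) {
    blocks.push_back({tsn, tsn});  // Greater than every existing range.
    return true;
  }
  if (tsn >= it->first && tsn <= it->last) {
    return false;  // Already covered.
  }
  if (it->last + 1 == tsn) {
    auto next = it + 1;
    if (next != blocks.end() && tsn + 1 == next->first) {
      it->last = next->last;  // Filling the hole merges two ranges.
      blocks.erase(next);
    } else {
      it->last = tsn;  // Extend to the right.
    }
    return true;
  }
  if (it->first == tsn + 1) {
    it->first = tsn;  // Extend to the left.
    return true;
  }
  blocks.insert(it, {tsn, tsn});  // New range strictly between two others.
  return true;
}

// With precomputed ranges, the gap-ack blocks (offsets relative to the
// cumulative ack TSN) are produced in O(number of ranges) rather than
// O(number of buffered TSNs).
std::vector<std::pair<uint16_t, uint16_t>> GapAckBlocks(
    uint32_t cumulative_ack_tsn, const std::vector<TsnRangeSketch>& blocks) {
  std::vector<std::pair<uint16_t, uint16_t>> gaps;
  gaps.reserve(blocks.size());
  for (const TsnRangeSketch& b : blocks) {
    gaps.emplace_back(static_cast<uint16_t>(b.first - cumulative_ack_tsn),
                      static_cast<uint16_t>(b.last - cumulative_ack_tsn));
  }
  return gaps;
}

int main() {
  std::vector<TsnRangeSketch> blocks;
  for (uint32_t tsn : {12u, 13u, 14u, 20u, 22u, 21u}) {
    AddTsn(blocks, tsn);
  }
  // Prints "(2,4) (10,12)" for a cumulative ack TSN of 10.
  for (const auto& gap : GapAckBlocks(10, blocks)) {
    std::printf("(%d,%d) ", gap.first, gap.second);
  }
  std::printf("\n");
  return 0;
}

The printed offsets match what the unit tests below expect for received ranges [12, 14] and [20, 22] with a cumulative ack TSN of 10.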

net/dcsctp/rx/BUILD.gn

@@ -24,6 +24,7 @@ rtc_library("data_tracker") {
"data_tracker.h",
]
absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]

net/dcsctp/rx/data_tracker.cc

@@ -9,6 +9,7 @@
*/
#include "net/dcsctp/rx/data_tracker.h"
#include <algorithm>
#include <cstdint>
#include <iterator>
#include <set>
@@ -16,6 +17,7 @@
#include <utility>
#include <vector>
#include "absl/algorithm/container.h"
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "net/dcsctp/common/sequence_numbers.h"
@@ -29,6 +31,83 @@ namespace dcsctp {
constexpr size_t DataTracker::kMaxDuplicateTsnReported;
constexpr size_t DataTracker::kMaxGapAckBlocksReported;
bool DataTracker::AdditionalTsnBlocks::Add(UnwrappedTSN tsn) {
// Find a block to expand. This looks for a block that includes (also when
// expanded by one) the provided `tsn`, i.e. the first block that is greater
// than or equal to `tsn`.
auto it = absl::c_lower_bound(
blocks_, tsn, [&](const TsnRange& elem, const UnwrappedTSN& t) {
return elem.last.next_value() < t;
});
if (it == blocks_.end()) {
// No matching block found; there is no block greater than or equal to `tsn`,
// which means that this TSN is greater than all existing blocks. It can then
// be inserted at the end.
blocks_.emplace_back(tsn, tsn);
return true;
}
if (tsn >= it->first && tsn <= it->last) {
// It's already in this block.
return false;
}
if (it->last.next_value() == tsn) {
// This block can be expanded to the right, or merged with the next.
auto next_it = it + 1;
if (next_it != blocks_.end() && tsn.next_value() == next_it->first) {
// Expanding it would make it adjacent to next block - merge those.
it->last = next_it->last;
blocks_.erase(next_it);
return true;
}
// Expand to the right
it->last = tsn;
return true;
}
if (it->first == tsn.next_value()) {
// This block can be expanded to the left. Merging to the left would've been
// covered by the above "merge to the right". Both blocks (expand a
// right-most block to the left and expand a left-most block to the right)
// would match, but the left-most would be returned by std::lower_bound.
RTC_DCHECK(it == blocks_.begin() || (it - 1)->last.next_value() != tsn);
// Expand to the left.
it->first = tsn;
return true;
}
// Need to create a new block in the middle.
blocks_.emplace(it, tsn, tsn);
return true;
}
void DataTracker::AdditionalTsnBlocks::EraseTo(UnwrappedTSN tsn) {
// Find the block that is greater than or equal to `tsn`.
auto it = absl::c_lower_bound(
blocks_, tsn, [&](const TsnRange& elem, const UnwrappedTSN& t) {
return elem.last < t;
});
// The block found is greater than or equal to `tsn` (or possibly ::end, when
// no such block exists). All blocks before this block can be safely removed.
// The TSN might be within this block, so possibly truncate it.
bool tsn_is_within_block = it != blocks_.end() && tsn >= it->first;
blocks_.erase(blocks_.begin(), it);
if (tsn_is_within_block) {
blocks_.front().first = tsn.next_value();
}
}
void DataTracker::AdditionalTsnBlocks::PopFront() {
RTC_DCHECK(!blocks_.empty());
blocks_.erase(blocks_.begin());
}
bool DataTracker::IsTSNValid(TSN tsn) const {
UnwrappedTSN unwrapped_tsn = tsn_unwrapper_.PeekUnwrap(tsn);
@@ -69,14 +148,14 @@ void DataTracker::Observe(TSN tsn,
last_cumulative_acked_tsn_ = unwrapped_tsn;
// The cumulative acked tsn may be moved even further, if a gap was
// filled.
while (!additional_tsns_.empty() &&
*additional_tsns_.begin() ==
last_cumulative_acked_tsn_.next_value()) {
last_cumulative_acked_tsn_.Increment();
additional_tsns_.erase(additional_tsns_.begin());
if (!additional_tsn_blocks_.empty() &&
additional_tsn_blocks_.front().first ==
last_cumulative_acked_tsn_.next_value()) {
last_cumulative_acked_tsn_ = additional_tsn_blocks_.front().last;
additional_tsn_blocks_.PopFront();
}
} else {
bool inserted = additional_tsns_.insert(unwrapped_tsn).second;
bool inserted = additional_tsn_blocks_.Add(unwrapped_tsn);
if (!inserted) {
// Already seen before.
if (duplicate_tsns_.size() < kMaxDuplicateTsnReported) {
@@ -98,7 +177,7 @@ void DataTracker::Observe(TSN tsn,
// the received DATA chunk sequence, it SHOULD send a SACK with Gap Ack
// Blocks immediately. The data receiver continues sending a SACK after
// receipt of each SCTP packet that doesn't fill the gap."
if (!additional_tsns_.empty()) {
if (!additional_tsn_blocks_.empty()) {
UpdateAckState(AckState::kImmediate, "packet loss");
}
@@ -162,24 +241,20 @@ void DataTracker::HandleForwardTsn(TSN new_cumulative_ack) {
// `last_cumulative_acked_tsn_`, and if there have been prior "gaps" that are
// now overlapping with the new value, remove them.
last_cumulative_acked_tsn_ = unwrapped_tsn;
int erased_additional_tsns = std::distance(
additional_tsns_.begin(), additional_tsns_.upper_bound(unwrapped_tsn));
additional_tsns_.erase(additional_tsns_.begin(),
additional_tsns_.upper_bound(unwrapped_tsn));
additional_tsn_blocks_.EraseTo(unwrapped_tsn);
// See if the `last_cumulative_acked_tsn_` can be moved even further:
while (!additional_tsns_.empty() &&
*additional_tsns_.begin() == last_cumulative_acked_tsn_.next_value()) {
last_cumulative_acked_tsn_.Increment();
additional_tsns_.erase(additional_tsns_.begin());
++erased_additional_tsns;
if (!additional_tsn_blocks_.empty() &&
additional_tsn_blocks_.front().first ==
last_cumulative_acked_tsn_.next_value()) {
last_cumulative_acked_tsn_ = additional_tsn_blocks_.front().last;
additional_tsn_blocks_.PopFront();
}
RTC_DLOG(LS_VERBOSE) << log_prefix_ << "FORWARD_TSN, cum_ack_tsn="
<< *prev_last_cum_ack_tsn.Wrap() << "->"
<< *new_cumulative_ack << "->"
<< *last_cumulative_acked_tsn_.Wrap() << ", removed "
<< erased_additional_tsns << " additional TSNs";
<< *last_cumulative_acked_tsn_.Wrap();
// https://tools.ietf.org/html/rfc3758#section-3.6
// "Any time a FORWARD TSN chunk arrives, for the purposes of sending a
@@ -209,41 +284,18 @@ SackChunk DataTracker::CreateSelectiveAck(size_t a_rwnd) {
}
std::vector<SackChunk::GapAckBlock> DataTracker::CreateGapAckBlocks() const {
// This method will calculate the gaps between blocks of contiguous values in
// `additional_tsns_`, in the same format as the SACK chunk expects it;
// offsets from the "cumulative ack TSN value".
const auto& blocks = additional_tsn_blocks_.blocks();
std::vector<SackChunk::GapAckBlock> gap_ack_blocks;
absl::optional<UnwrappedTSN> first_tsn_in_block = absl::nullopt;
absl::optional<UnwrappedTSN> last_tsn_in_block = absl::nullopt;
auto flush = [&]() {
if (first_tsn_in_block.has_value()) {
if (gap_ack_blocks.size() < kMaxGapAckBlocksReported) {
auto start_diff = UnwrappedTSN::Difference(*first_tsn_in_block,
last_cumulative_acked_tsn_);
auto end_diff = UnwrappedTSN::Difference(*last_tsn_in_block,
last_cumulative_acked_tsn_);
gap_ack_blocks.emplace_back(static_cast<uint16_t>(start_diff),
static_cast<uint16_t>(end_diff));
}
first_tsn_in_block = absl::nullopt;
last_tsn_in_block = absl::nullopt;
}
};
for (UnwrappedTSN tsn : additional_tsns_) {
if (last_tsn_in_block.has_value() &&
last_tsn_in_block->next_value() == tsn) {
// Continuing the same block.
last_tsn_in_block = tsn;
} else {
// New block, or a gap from the old block's last value.
flush();
first_tsn_in_block = tsn;
last_tsn_in_block = tsn;
}
gap_ack_blocks.reserve(std::min(blocks.size(), kMaxGapAckBlocksReported));
for (size_t i = 0; i < blocks.size() && i < kMaxGapAckBlocksReported; ++i) {
auto start_diff =
UnwrappedTSN::Difference(blocks[i].first, last_cumulative_acked_tsn_);
auto end_diff =
UnwrappedTSN::Difference(blocks[i].last, last_cumulative_acked_tsn_);
gap_ack_blocks.emplace_back(static_cast<uint16_t>(start_diff),
static_cast<uint16_t>(end_diff));
}
flush();
return gap_ack_blocks;
}
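As a small worked example of the offset computation above (the blocks and the expected values mirror the GapAckBlockRemoveBeforeCumAckTsn test further down; offsets are relative to the cumulative ack TSN):

// last_cumulative_acked_tsn_ = 10
// blocks() = { [12, 14], [20, 22] }
//   [12, 14] -> GapAckBlock(12 - 10, 14 - 10) = GapAckBlock(2, 4)
//   [20, 22] -> GapAckBlock(20 - 10, 22 - 10) = GapAckBlock(10, 12)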

net/dcsctp/rx/data_tracker.h

@@ -16,6 +16,7 @@
#include <cstdint>
#include <set>
#include <string>
#include <utility>
#include <vector>
#include "absl/strings/string_view.h"
@@ -116,6 +117,49 @@ class DataTracker {
// Send a SACK immediately after handling this packet.
kImmediate,
};
// Represents ranges of received TSNs that do not directly follow the last
// cumulative acked TSN. This information is returned to the sender in the
// "gap ack blocks" of the SACK chunk. The blocks are always non-overlapping
// and non-adjacent.
class AdditionalTsnBlocks {
public:
// Represents an inclusive range of received TSNs, i.e. [first, last].
struct TsnRange {
TsnRange(UnwrappedTSN first, UnwrappedTSN last)
: first(first), last(last) {}
UnwrappedTSN first;
UnwrappedTSN last;
};
// Adds a TSN to the set. This will try to expand any existing block and
// might merge blocks to ensure that all blocks are non-adjacent. If a
// current block can't be expanded, a new block is created.
//
// The return value indicates if `tsn` was added. If false is returned, the
// `tsn` was already represented in one of the blocks.
bool Add(UnwrappedTSN tsn);
// Erases all TSNs up to and including `tsn`. This will remove all blocks
// that are completely below `tsn` and may truncate a block that contains
// `tsn`. In that case, the frontmost block's start TSN will be the TSN
// directly following `tsn`.
void EraseTo(UnwrappedTSN tsn);
// Removes the first block. Must not be called on an empty set.
void PopFront();
const std::vector<TsnRange>& blocks() const { return blocks_; }
bool empty() const { return blocks_.empty(); }
const TsnRange& front() const { return blocks_.front(); }
private:
// A sorted vector of non-overlapping and non-adjacent blocks.
std::vector<TsnRange> blocks_;
};
std::vector<SackChunk::GapAckBlock> CreateGapAckBlocks() const;
void UpdateAckState(AckState new_state, absl::string_view reason);
static absl::string_view ToString(AckState ack_state);
@@ -130,7 +174,7 @@ class DataTracker {
// All TSNs up until (and including) this value have been seen.
UnwrappedTSN last_cumulative_acked_tsn_;
// Received TSNs that are not directly following `last_cumulative_acked_tsn_`.
std::set<UnwrappedTSN> additional_tsns_;
AdditionalTsnBlocks additional_tsn_blocks_;
std::set<TSN> duplicate_tsns_;
};
} // namespace dcsctp
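A short worked example of the AdditionalTsnBlocks semantics documented above; the TSN values are chosen purely for illustration and are not output produced by the class:

// blocks_ = { [12, 14], [20, 22], [30, 31] }
//
// Add(15)     -> expands the first block to the right:
//                { [12, 15], [20, 22], [30, 31] }
// Add(19)     -> expands the second block to the left:
//                { [12, 15], [19, 22], [30, 31] }
// Add(21)     -> already covered by [19, 22]; returns false, no change.
// Add(16), Add(17), Add(18)
//             -> the last Add is adjacent to both neighbours and merges them:
//                { [12, 22], [30, 31] }
// EraseTo(20) -> erases everything up to and including 20; 20 lies within
//                [12, 22], so that block is truncated to start at 21:
//                { [21, 22], [30, 31] }
// PopFront()  -> removes the first block: { [30, 31] }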

net/dcsctp/rx/data_tracker_test.cc

@@ -385,5 +385,252 @@ TEST_F(DataTrackerTest, SendsSackOnDuplicateDataChunks) {
EXPECT_FALSE(timer_->is_running());
}
TEST_F(DataTrackerTest, GapAckBlockAddSingleBlock) {
Observer({12});
SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
EXPECT_THAT(sack.gap_ack_blocks(), ElementsAre(SackChunk::GapAckBlock(2, 2)));
}
TEST_F(DataTrackerTest, GapAckBlockAddsAnother) {
Observer({12});
Observer({14});
SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
EXPECT_THAT(sack.gap_ack_blocks(), ElementsAre(SackChunk::GapAckBlock(2, 2),
SackChunk::GapAckBlock(4, 4)));
}
TEST_F(DataTrackerTest, GapAckBlockAddsDuplicate) {
Observer({12});
Observer({12});
SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
EXPECT_THAT(sack.gap_ack_blocks(), ElementsAre(SackChunk::GapAckBlock(2, 2)));
EXPECT_THAT(sack.duplicate_tsns(), ElementsAre(TSN(12)));
}
TEST_F(DataTrackerTest, GapAckBlockExpandsToRight) {
Observer({12});
Observer({13});
SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
EXPECT_THAT(sack.gap_ack_blocks(), ElementsAre(SackChunk::GapAckBlock(2, 3)));
}
TEST_F(DataTrackerTest, GapAckBlockExpandsToRightWithOther) {
Observer({12});
Observer({20});
Observer({30});
Observer({21});
SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
EXPECT_THAT(sack.gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(2, 2), //
SackChunk::GapAckBlock(10, 11), //
SackChunk::GapAckBlock(20, 20)));
}
TEST_F(DataTrackerTest, GapAckBlockExpandsToLeft) {
Observer({13});
Observer({12});
SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
EXPECT_THAT(sack.gap_ack_blocks(), ElementsAre(SackChunk::GapAckBlock(2, 3)));
}
TEST_F(DataTrackerTest, GapAckBlockExpandsToLeftWithOther) {
Observer({12});
Observer({21});
Observer({30});
Observer({20});
SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
EXPECT_THAT(sack.gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(2, 2), //
SackChunk::GapAckBlock(10, 11), //
SackChunk::GapAckBlock(20, 20)));
}
TEST_F(DataTrackerTest, GapAckBlockExpandsToRightAndMerges) {
Observer({12});
Observer({20});
Observer({22});
Observer({30});
Observer({21});
SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
EXPECT_THAT(sack.gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(2, 2), //
SackChunk::GapAckBlock(10, 12), //
SackChunk::GapAckBlock(20, 20)));
}
TEST_F(DataTrackerTest, GapAckBlockMergesManyBlocksIntoOne) {
Observer({22});
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(12, 12)));
Observer({30});
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(12, 12), //
SackChunk::GapAckBlock(20, 20)));
Observer({24});
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(12, 12), //
SackChunk::GapAckBlock(14, 14), //
SackChunk::GapAckBlock(20, 20)));
Observer({28});
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(12, 12), //
SackChunk::GapAckBlock(14, 14), //
SackChunk::GapAckBlock(18, 18), //
SackChunk::GapAckBlock(20, 20)));
Observer({26});
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(12, 12), //
SackChunk::GapAckBlock(14, 14), //
SackChunk::GapAckBlock(16, 16), //
SackChunk::GapAckBlock(18, 18), //
SackChunk::GapAckBlock(20, 20)));
Observer({29});
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(12, 12), //
SackChunk::GapAckBlock(14, 14), //
SackChunk::GapAckBlock(16, 16), //
SackChunk::GapAckBlock(18, 20)));
Observer({23});
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(12, 14), //
SackChunk::GapAckBlock(16, 16), //
SackChunk::GapAckBlock(18, 20)));
Observer({27});
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(12, 14), //
SackChunk::GapAckBlock(16, 20)));
Observer({25});
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(12, 20)));
Observer({20});
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(10, 10), //
SackChunk::GapAckBlock(12, 20)));
Observer({32});
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(10, 10), //
SackChunk::GapAckBlock(12, 20), //
SackChunk::GapAckBlock(22, 22)));
Observer({21});
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(10, 20), //
SackChunk::GapAckBlock(22, 22)));
Observer({31});
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(10, 22)));
}
TEST_F(DataTrackerTest, GapAckBlockRemoveBeforeCumAckTsn) {
Observer({12, 13, 14, 20, 21, 22, 30, 31});
buf_.HandleForwardTsn(TSN(8));
EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(10));
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(2, 4), //
SackChunk::GapAckBlock(10, 12),
SackChunk::GapAckBlock(20, 21)));
}
TEST_F(DataTrackerTest, GapAckBlockRemoveBeforeFirstBlock) {
Observer({12, 13, 14, 20, 21, 22, 30, 31});
buf_.HandleForwardTsn(TSN(11));
EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(14));
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(6, 8), //
SackChunk::GapAckBlock(16, 17)));
}
TEST_F(DataTrackerTest, GapAckBlockRemoveAtBeginningOfFirstBlock) {
Observer({12, 13, 14, 20, 21, 22, 30, 31});
buf_.HandleForwardTsn(TSN(12));
EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(14));
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(6, 8), //
SackChunk::GapAckBlock(16, 17)));
}
TEST_F(DataTrackerTest, GapAckBlockRemoveAtMiddleOfFirstBlock) {
Observer({12, 13, 14, 20, 21, 22, 30, 31});
buf_.HandleForwardTsn(TSN(13));
EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(14));
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(6, 8), //
SackChunk::GapAckBlock(16, 17)));
}
TEST_F(DataTrackerTest, GapAckBlockRemoveAtEndOfFirstBlock) {
Observer({12, 13, 14, 20, 21, 22, 30, 31});
buf_.HandleForwardTsn(TSN(14));
EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(14));
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(6, 8), //
SackChunk::GapAckBlock(16, 17)));
}
TEST_F(DataTrackerTest, GapAckBlockRemoveRightAfterFirstBlock) {
Observer({12, 13, 14, 20, 21, 22, 30, 31});
buf_.HandleForwardTsn(TSN(18));
EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(18));
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(2, 4), //
SackChunk::GapAckBlock(12, 13)));
}
TEST_F(DataTrackerTest, GapAckBlockRemoveRightBeforeSecondBlock) {
Observer({12, 13, 14, 20, 21, 22, 30, 31});
buf_.HandleForwardTsn(TSN(19));
EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(22));
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(8, 9)));
}
TEST_F(DataTrackerTest, GapAckBlockRemoveRightAtStartOfSecondBlock) {
Observer({12, 13, 14, 20, 21, 22, 30, 31});
buf_.HandleForwardTsn(TSN(20));
EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(22));
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(8, 9)));
}
TEST_F(DataTrackerTest, GapAckBlockRemoveRightAtMiddleOfSecondBlock) {
Observer({12, 13, 14, 20, 21, 22, 30, 31});
buf_.HandleForwardTsn(TSN(21));
EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(22));
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(8, 9)));
}
TEST_F(DataTrackerTest, GapAckBlockRemoveRightAtEndOfSecondBlock) {
Observer({12, 13, 14, 20, 21, 22, 30, 31});
buf_.HandleForwardTsn(TSN(22));
EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(22));
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
ElementsAre(SackChunk::GapAckBlock(8, 9)));
}
TEST_F(DataTrackerTest, GapAckBlockRemoveFarAfterAllBlocks) {
Observer({12, 13, 14, 20, 21, 22, 30, 31});
buf_.HandleForwardTsn(TSN(40));
EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(40));
EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(), IsEmpty());
}
} // namespace
} // namespace dcsctp