Ensures render time is at least the decode time in VideoFrameMatcher.
Without this, we can end up with negative capture-to-render delays if the jitter buffer sets the render time to an earlier time. Bug: webrtc:11017 Change-Id: I590509136f630d025cde6e5e13d4a3ee620267ae Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/156081 Commit-Queue: Sebastian Jansson <srte@webrtc.org> Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org> Cr-Commit-Position: refs/heads/master@{#29409}
This commit is contained in:
parent
46b0140172
commit
f77b939d44
@@ -112,7 +112,9 @@ void VideoFrameMatcher::HandleMatch(VideoFrameMatcher::CapturedFrame captured,
|
||||
frame_pair.decode_id = captured.best_decode->id;
|
||||
frame_pair.decoded = captured.best_decode->frame;
|
||||
frame_pair.decoded_time = captured.best_decode->decoded_time;
|
||||
frame_pair.render_time = captured.best_decode->render_time;
|
||||
// We can't render frames before they have been decoded.
|
||||
frame_pair.render_time = std::max(captured.best_decode->render_time,
|
||||
captured.best_decode->decoded_time);
|
||||
frame_pair.repeated = captured.best_decode->repeat_count++;
|
||||
}
|
||||
for (auto& handler : frame_pair_handlers_)
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user