Clean up VideoRenderFrames

It's possible to clean up VideoRenderFrames now that I420VideoFrame holds a
reference-counted frame buffer.

BUG=1128
R=mflodman@webrtc.org, pbos@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/48459004

Cr-Commit-Position: refs/heads/master@{#8695}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8695 4adac7df-926f-26a2-2b94-8c16560cd09d
parent d71fc87225
commit b73758d57a
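Before reading the diff, a minimal self-contained sketch of the pattern this commit moves to may help. It is not WebRTC code: Frame and RenderQueue below are invented stand-ins for I420VideoFrame and VideoRenderFrames, and the shared_ptr buffer only models the reference-counted frame buffer mentioned in the commit message. The point it illustrates is that a frame becomes a cheap-to-copy value, so the render queue can hold frames by value, hand them out by value, and signal "nothing to render" with a zero-size frame instead of a NULL pointer or a pool of reusable frames.

// Illustrative model only; Frame and RenderQueue are invented names, not the
// classes touched by this commit.
#include <cstdint>
#include <cstdio>
#include <list>
#include <memory>
#include <vector>

// Stand-in for a frame whose pixel data sits behind a reference-counted
// buffer: copying the frame copies a pointer, not the pixels.
struct Frame {
  std::shared_ptr<std::vector<uint8_t>> buffer;  // shared, ref-counted pixels
  int64_t render_time_ms = 0;
  bool IsZeroSize() const { return buffer == nullptr; }
};

// Stand-in for the render queue: frames are stored by value, so there is no
// manual delete and no pool of reusable empty frames.
class RenderQueue {
 public:
  int32_t AddFrame(const Frame& f) {  // cheap copy into the queue
    incoming_.push_back(f);
    return static_cast<int32_t>(incoming_.size());
  }

  // Hand out the newest frame that is due at |now_ms|; a default-constructed
  // (zero-size) frame means "nothing to render yet".
  Frame FrameToRender(int64_t now_ms) {
    Frame out;
    while (!incoming_.empty() && incoming_.front().render_time_ms <= now_ms) {
      out = incoming_.front();
      incoming_.pop_front();
    }
    return out;
  }

 private:
  std::list<Frame> incoming_;
};

int main() {
  RenderQueue queue;
  Frame f;
  f.buffer = std::make_shared<std::vector<uint8_t>>(640 * 480 * 3 / 2);
  f.render_time_ms = 40;
  queue.AddFrame(f);  // shares the buffer, no deep copy

  Frame due = queue.FrameToRender(/*now_ms=*/50);
  std::printf("got frame: %d, render_time_ms: %lld\n",
              !due.IsZeroSize(), static_cast<long long>(due.render_time_ms));
  return 0;
}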
@@ -110,7 +110,7 @@ int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id,
 
   // Insert frame.
   CriticalSectionScoped csB(&buffer_critsect_);
-  if (render_buffers_.AddFrame(&video_frame) == 1)
+  if (render_buffers_.AddFrame(video_frame) == 1)
     deliver_buffer_event_.Set();
 
   return 0;
@@ -269,12 +269,9 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() {
       thread_critsect_.Leave();
       return false;
     }
 
-    I420VideoFrame* frame_to_render = NULL;
-
     // Get a new frame to render and the time for the frame after this one.
     buffer_critsect_.Enter();
-    frame_to_render = render_buffers_.FrameToRender();
+    I420VideoFrame frame_to_render = render_buffers_.FrameToRender();
     uint32_t wait_time = render_buffers_.TimeToNextFrameRelease();
     buffer_critsect_.Leave();
 
@@ -284,7 +281,7 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() {
     }
     deliver_buffer_event_.StartTimer(false, wait_time);
 
-    if (!frame_to_render) {
+    if (frame_to_render.IsZeroSize()) {
       if (render_callback_) {
         if (last_render_time_ms_ == 0 && !start_image_.IsZeroSize()) {
           // We have not rendered anything and have a start image.
@@ -308,25 +305,24 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() {
     if (external_callback_) {
       WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
                    "%s: executing external renderer callback to deliver frame",
-                   __FUNCTION__, frame_to_render->render_time_ms());
-      external_callback_->RenderFrame(stream_id_, *frame_to_render);
+                   __FUNCTION__, frame_to_render.render_time_ms());
+      external_callback_->RenderFrame(stream_id_, frame_to_render);
     } else {
       if (render_callback_) {
         WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
                      "%s: Render frame, time: ", __FUNCTION__,
-                     frame_to_render->render_time_ms());
-        render_callback_->RenderFrame(stream_id_, *frame_to_render);
+                     frame_to_render.render_time_ms());
+        render_callback_->RenderFrame(stream_id_, frame_to_render);
       }
     }
 
     // Release critsect before calling the module user.
     thread_critsect_.Leave();
 
-    // We're done with this frame, delete it.
-    if (frame_to_render) {
+    // We're done with this frame.
+    if (!frame_to_render.IsZeroSize()) {
       CriticalSectionScoped cs(&buffer_critsect_);
-      last_render_time_ms_= frame_to_render->render_time_ms();
-      render_buffers_.ReturnFrame(frame_to_render);
+      last_render_time_ms_= frame_to_render.render_time_ms();
     }
   }
   return true;
@@ -26,125 +26,45 @@ VideoRenderFrames::VideoRenderFrames()
     : render_delay_ms_(10) {
 }
 
-VideoRenderFrames::~VideoRenderFrames() {
-  ReleaseAllFrames();
-}
-
-int32_t VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) {
+int32_t VideoRenderFrames::AddFrame(const I420VideoFrame& new_frame) {
   const int64_t time_now = TickTime::MillisecondTimestamp();
 
   // Drop old frames only when there are other frames in the queue, otherwise, a
   // really slow system never renders any frames.
   if (!incoming_frames_.empty() &&
-      new_frame->render_time_ms() + KOldRenderTimestampMS < time_now) {
+      new_frame.render_time_ms() + KOldRenderTimestampMS < time_now) {
     WEBRTC_TRACE(kTraceWarning,
                  kTraceVideoRenderer,
                  -1,
                  "%s: too old frame, timestamp=%u.",
                  __FUNCTION__,
-                 new_frame->timestamp());
+                 new_frame.timestamp());
     return -1;
   }
 
-  if (new_frame->render_time_ms() > time_now + KFutureRenderTimestampMS) {
+  if (new_frame.render_time_ms() > time_now + KFutureRenderTimestampMS) {
     WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
                  "%s: frame too long into the future, timestamp=%u.",
-                 __FUNCTION__, new_frame->timestamp());
+                 __FUNCTION__, new_frame.timestamp());
     return -1;
   }
 
-  if (new_frame->native_handle() != NULL) {
-    incoming_frames_.push_back(new_frame->CloneFrame());
-    return static_cast<int32_t>(incoming_frames_.size());
-  }
-
-  // Get an empty frame
-  I420VideoFrame* frame_to_add = NULL;
-  if (!empty_frames_.empty()) {
-    frame_to_add = empty_frames_.front();
-    empty_frames_.pop_front();
-  }
-  if (!frame_to_add) {
-    if (empty_frames_.size() + incoming_frames_.size() >
-        KMaxNumberOfFrames) {
-      // Already allocated too many frames.
-      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer,
-                   -1, "%s: too many frames, timestamp=%u, limit=%d",
-                   __FUNCTION__, new_frame->timestamp(), KMaxNumberOfFrames);
-      return -1;
-    }
-
-    // Allocate new memory.
-    WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1,
-                 "%s: allocating buffer %d", __FUNCTION__,
-                 empty_frames_.size() + incoming_frames_.size());
-
-    frame_to_add = new I420VideoFrame();
-    if (!frame_to_add) {
-      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
-                   "%s: could not create new frame for", __FUNCTION__);
-      return -1;
-    }
-  }
-
-  frame_to_add->CreateEmptyFrame(new_frame->width(), new_frame->height(),
-                                 new_frame->stride(kYPlane),
-                                 new_frame->stride(kUPlane),
-                                 new_frame->stride(kVPlane));
-  // TODO(mflodman) Change this!
-  // Remove const ness. Copying will be costly.
-  frame_to_add->SwapFrame(new_frame);
-  incoming_frames_.push_back(frame_to_add);
-
+  incoming_frames_.push_back(new_frame);
   return static_cast<int32_t>(incoming_frames_.size());
 }
 
-I420VideoFrame* VideoRenderFrames::FrameToRender() {
-  I420VideoFrame* render_frame = NULL;
-  FrameList::iterator iter = incoming_frames_.begin();
-  while(iter != incoming_frames_.end()) {
-    I420VideoFrame* oldest_frame_in_list = *iter;
-    if (oldest_frame_in_list->render_time_ms() <=
-        TickTime::MillisecondTimestamp() + render_delay_ms_) {
-      // This is the oldest one so far and it's OK to render.
-      if (render_frame) {
-        // This one is older than the newly found frame, remove this one.
-        ReturnFrame(render_frame);
-      }
-      render_frame = oldest_frame_in_list;
-      iter = incoming_frames_.erase(iter);
-    } else {
-      // We can't release this one yet, we're done here.
-      break;
-    }
+I420VideoFrame VideoRenderFrames::FrameToRender() {
+  I420VideoFrame render_frame;
+  // Get the newest frame that can be released for rendering.
+  while (!incoming_frames_.empty() && TimeToNextFrameRelease() <= 0) {
+    render_frame = incoming_frames_.front();
+    incoming_frames_.pop_front();
   }
   return render_frame;
 }
 
-int32_t VideoRenderFrames::ReturnFrame(I420VideoFrame* old_frame) {
-  // No need to reuse texture frames because they do not allocate memory.
-  if (old_frame->native_handle() == NULL) {
-    old_frame->set_timestamp(0);
-    old_frame->set_render_time_ms(0);
-    empty_frames_.push_back(old_frame);
-  } else {
-    delete old_frame;
-  }
-  return 0;
-}
-
 int32_t VideoRenderFrames::ReleaseAllFrames() {
-  for (FrameList::iterator iter = incoming_frames_.begin();
-       iter != incoming_frames_.end(); ++iter) {
-    delete *iter;
-  }
   incoming_frames_.clear();
-
-  for (FrameList::iterator iter = empty_frames_.begin();
-       iter != empty_frames_.end(); ++iter) {
-    delete *iter;
-  }
-  empty_frames_.clear();
   return 0;
 }
 
@@ -152,13 +72,10 @@ uint32_t VideoRenderFrames::TimeToNextFrameRelease() {
   if (incoming_frames_.empty()) {
     return KEventMaxWaitTimeMs;
   }
-  I420VideoFrame* oldest_frame = incoming_frames_.front();
-  int64_t time_to_release = oldest_frame->render_time_ms() - render_delay_ms_
-      - TickTime::MillisecondTimestamp();
-  if (time_to_release < 0) {
-    time_to_release = 0;
-  }
-  return static_cast<uint32_t>(time_to_release);
+  const int64_t time_to_release = incoming_frames_.front().render_time_ms() -
+                                  render_delay_ms_ -
+                                  TickTime::MillisecondTimestamp();
+  return time_to_release < 0 ? 0u : static_cast<uint32_t>(time_to_release);
 }
 
 int32_t VideoRenderFrames::SetRenderDelay(
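As a quick sanity check of the rewritten TimeToNextFrameRelease() above: the wait is the frame's render_time_ms minus render_delay_ms_ minus the current time, clamped at zero. The snippet below is standalone arithmetic with made-up sample numbers, not the WebRTC function itself; it only mirrors the clamp shown in the hunk.

// Worked example of the clamp in the rewritten TimeToNextFrameRelease()
// (illustrative values only).
#include <cstdint>
#include <cstdio>

int main() {
  const int64_t render_time_ms = 1000;  // when the oldest frame should be shown
  const uint32_t render_delay_ms = 10;  // estimated delay until it hits the screen
  for (int64_t now_ms : {950, 1005}) {
    const int64_t time_to_release = render_time_ms - render_delay_ms - now_ms;
    const uint32_t wait_ms =
        time_to_release < 0 ? 0u : static_cast<uint32_t>(time_to_release);
    // Prints: now=950 -> wait 40 ms, then now=1005 -> wait 0 ms.
    std::printf("now=%lld -> wait %u ms\n",
                static_cast<long long>(now_ms), wait_ms);
  }
  return 0;
}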
@@ -21,16 +21,12 @@ namespace webrtc {
 class VideoRenderFrames {
  public:
   VideoRenderFrames();
-  ~VideoRenderFrames();
 
   // Add a frame to the render queue
-  int32_t AddFrame(I420VideoFrame* new_frame);
+  int32_t AddFrame(const I420VideoFrame& new_frame);
 
-  // Get a frame for rendering, if it's time to render.
-  I420VideoFrame* FrameToRender();
-
-  // Return an old frame
-  int32_t ReturnFrame(I420VideoFrame* old_frame);
+  // Get a frame for rendering, or a zero-size frame if it's not time to render.
+  I420VideoFrame FrameToRender();
 
   // Releases all frames
   int32_t ReleaseAllFrames();
@@ -42,8 +38,6 @@ class VideoRenderFrames {
   int32_t SetRenderDelay(const uint32_t render_delay);
 
  private:
-  typedef std::list<I420VideoFrame*> FrameList;
-
   // 10 seconds for 30 fps.
   enum { KMaxNumberOfFrames = 300 };
   // Don't render frames with timestamp older than 500ms from now.
@@ -52,9 +46,7 @@ class VideoRenderFrames {
   enum { KFutureRenderTimestampMS = 10000 };
 
   // Sorted list with framed to be rendered, oldest first.
-  FrameList incoming_frames_;
-  // Empty frames.
-  FrameList empty_frames_;
+  std::list<I420VideoFrame> incoming_frames_;
 
   // Estimated delay from a frame is released until it's rendered.
   uint32_t render_delay_ms_;