Remove cricket::VideoFrame::Set/GetElapsedTime()

This CL is a baby step towards consolidating the timestamps in cricket::VideoFrame and webrtc::VideoFrame, so that we can unify the frame classes in the future.

The elapsed time functionality is not really used. If a video sink wants to know the elapsed time since the first frame, it can store the first timestamp itself and calculate the time delta to later frames. This is already done in all video sinks that need the elapsed time. Having redundant timestamps in the frame classes is confusing and error-prone.

TBR=pthatcher@webrtc.org

Review URL: https://codereview.webrtc.org/1324263004

Cr-Commit-Position: refs/heads/master@{#10131}
This commit is contained in:
magjed 2015-10-01 03:02:44 -07:00 committed by Commit bot
parent dfc8f4ff87
commit b09b660c53
21 changed files with 66 additions and 124 deletions

View File

@ -43,7 +43,7 @@ namespace webrtc {
class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
public:
FrameFactory(const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
: start_time_(rtc::TimeNanos()), delegate_(delegate) {
: delegate_(delegate) {
// Create a CapturedFrame that only contains header information, not the
// actual pixel data.
captured_frame_.pixel_height = 1;
@ -60,7 +60,6 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
buffer_ = buffer;
captured_frame_.width = buffer->width();
captured_frame_.height = buffer->height();
captured_frame_.elapsed_time = rtc::TimeNanos() - start_time_;
captured_frame_.time_stamp = time_stamp_in_ns;
captured_frame_.rotation = rotation;
}
@ -69,7 +68,6 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
buffer_ = nullptr;
captured_frame_.width = 0;
captured_frame_.height = 0;
captured_frame_.elapsed_time = 0;
captured_frame_.time_stamp = 0;
}
@ -85,8 +83,7 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
RTC_CHECK(captured_frame == &captured_frame_);
rtc::scoped_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
ShallowCenterCrop(buffer_, dst_width, dst_height),
captured_frame->elapsed_time, captured_frame->time_stamp,
captured_frame->GetRotation()));
captured_frame->time_stamp, captured_frame->GetRotation()));
// Caller takes ownership.
// TODO(magjed): Change CreateAliasedFrame() to return a rtc::scoped_ptr.
return apply_rotation_ ? frame->GetCopyWithRotationApplied()->Copy()
@ -94,7 +91,6 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
}
private:
uint64 start_time_;
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer_;
cricket::CapturedFrame captured_frame_;
rtc::scoped_refptr<AndroidVideoCapturerDelegate> delegate_;

View File

@ -306,7 +306,7 @@ static dispatch_queue_t kBackgroundQueue = nil;
namespace webrtc {
AVFoundationVideoCapturer::AVFoundationVideoCapturer()
: _capturer(nil), _startThread(nullptr), _startTime(0) {
: _capturer(nil), _startThread(nullptr) {
// Set our supported formats. This matches kDefaultPreset.
std::vector<cricket::VideoFormat> supportedFormats;
supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
@ -344,7 +344,6 @@ cricket::CaptureState AVFoundationVideoCapturer::Start(
// to spin up, and this call returns async.
// TODO(tkchin): make this better.
[_capturer startCaptureAsync];
_startTime = rtc::TimeNanos();
SetCaptureState(cricket::CaptureState::CS_RUNNING);
return cricket::CaptureState::CS_STARTING;
@ -424,7 +423,6 @@ void AVFoundationVideoCapturer::CaptureSampleBuffer(
frame.pixel_height = 1;
frame.fourcc = static_cast<uint32>(cricket::FOURCC_NV12);
frame.time_stamp = currentTime;
frame.elapsed_time = currentTime - _startTime;
frame.data = yPlaneAddress;
frame.data_size = frameSize;

View File

@ -82,7 +82,7 @@ TEST_F(VideoTrackTest, RenderVideo) {
ASSERT_FALSE(renderer_input == NULL);
cricket::WebRtcVideoFrame frame;
frame.InitToBlack(123, 123, 1, 1, 0, 0);
frame.InitToBlack(123, 123, 1, 1, 0);
renderer_input->RenderFrame(&frame);
EXPECT_EQ(1, renderer_1->num_rendered_frames());

View File

@ -114,7 +114,6 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
frame.height = height;
frame.fourcc = fourcc;
frame.data_size = size;
frame.elapsed_time = next_timestamp_;
frame.time_stamp = initial_unix_timestamp_ + next_timestamp_;
next_timestamp_ += timestamp_interval;

View File

@ -44,8 +44,7 @@ class FakeVideoRenderer : public VideoRenderer {
height_(0),
num_set_sizes_(0),
num_rendered_frames_(0),
black_frame_(false),
last_frame_elapsed_time_ns_(-1) {
black_frame_(false) {
}
virtual bool SetSize(int width, int height, int reserved) {
@ -76,7 +75,6 @@ class FakeVideoRenderer : public VideoRenderer {
++errors_;
return false;
}
last_frame_elapsed_time_ns_ = frame->GetElapsedTime();
++num_rendered_frames_;
SignalRenderFrame(frame);
return true;
@ -104,11 +102,6 @@ class FakeVideoRenderer : public VideoRenderer {
return black_frame_;
}
int64_t last_frame_elapsed_time_ns() const {
rtc::CritScope cs(&crit_);
return last_frame_elapsed_time_ns_;
}
sigslot::signal3<int, int, int> SignalSetSize;
sigslot::signal1<const VideoFrame*> SignalRenderFrame;
@ -167,7 +160,6 @@ class FakeVideoRenderer : public VideoRenderer {
int num_set_sizes_;
int num_rendered_frames_;
bool black_frame_;
int64_t last_frame_elapsed_time_ns_;
mutable rtc::CriticalSection crit_;
};

View File

@ -80,7 +80,6 @@ CapturedFrame::CapturedFrame()
fourcc(0),
pixel_width(0),
pixel_height(0),
elapsed_time(0),
time_stamp(0),
data_size(0),
rotation(0),
@ -323,8 +322,7 @@ std::string VideoCapturer::ToString(const CapturedFrame* captured_frame) const {
}
std::ostringstream ss;
ss << fourcc_name << captured_frame->width << "x" << captured_frame->height
<< "x" << VideoFormat::IntervalToFpsFloat(captured_frame->elapsed_time);
ss << fourcc_name << captured_frame->width << "x" << captured_frame->height;
return ss.str();
}

View File

@ -90,8 +90,8 @@ struct CapturedFrame {
uint32 fourcc; // compression
uint32 pixel_width; // width of a pixel, default is 1
uint32 pixel_height; // height of a pixel, default is 1
int64 elapsed_time; // elapsed time since the creation of the frame
// source (that is, the camera), in nanoseconds.
// TODO(magjed): |elapsed_time| is deprecated - remove once not used anymore.
int64 elapsed_time;
int64 time_stamp; // timestamp of when the frame was captured, in unix
// time with nanosecond units.
uint32 data_size; // number of bytes of the frame data

View File

@ -55,7 +55,6 @@ class VideoCapturerTest
: capture_state_(cricket::CS_STOPPED),
num_state_changes_(0),
video_frames_received_(0),
last_frame_elapsed_time_(0),
expects_rotation_applied_(true) {
capturer_.SignalVideoFrame.connect(this, &VideoCapturerTest::OnVideoFrame);
capturer_.SignalStateChange.connect(this,
@ -69,7 +68,6 @@ class VideoCapturerTest
protected:
void OnVideoFrame(cricket::VideoCapturer*, const cricket::VideoFrame* frame) {
++video_frames_received_;
last_frame_elapsed_time_ = frame->GetElapsedTime();
if (expects_rotation_applied_) {
EXPECT_EQ(webrtc::kVideoRotation_0, frame->GetRotation());
} else {
@ -87,13 +85,11 @@ class VideoCapturerTest
int video_frames_received() const {
return video_frames_received_;
}
int64 last_frame_elapsed_time() const { return last_frame_elapsed_time_; }
cricket::FakeVideoCapturer capturer_;
cricket::CaptureState capture_state_;
int num_state_changes_;
int video_frames_received_;
int64 last_frame_elapsed_time_;
cricket::FakeVideoRenderer renderer_;
bool expects_rotation_applied_;
};

View File

@ -201,7 +201,6 @@ void VideoFrame::StretchToFrame(VideoFrame* dst,
dst->GetYPitch(), dst->GetUPitch(), dst->GetVPitch(),
dst->GetWidth(), dst->GetHeight(),
interpolate, vert_crop);
dst->SetElapsedTime(GetElapsedTime());
dst->SetTimeStamp(GetTimeStamp());
// Stretched frame should have the same rotation as the source.
dst->SetRotation(GetVideoRotation());
@ -212,7 +211,7 @@ VideoFrame* VideoFrame::Stretch(size_t dst_width, size_t dst_height,
VideoFrame* dest = CreateEmptyFrame(static_cast<int>(dst_width),
static_cast<int>(dst_height),
GetPixelWidth(), GetPixelHeight(),
GetElapsedTime(), GetTimeStamp());
GetTimeStamp());
if (dest) {
StretchToFrame(dest, interpolate, vert_crop);
}

View File

@ -42,8 +42,7 @@ class VideoFrame {
virtual ~VideoFrame() {}
virtual bool InitToBlack(int w, int h, size_t pixel_width,
size_t pixel_height, int64_t elapsed_time,
int64_t time_stamp) = 0;
size_t pixel_height, int64_t time_stamp) = 0;
// Creates a frame from a raw sample with FourCC |format| and size |w| x |h|.
// |h| can be negative indicating a vertically flipped image.
// |dw| is destination width; can be less than |w| if cropping is desired.
@ -59,7 +58,6 @@ class VideoFrame {
size_t sample_size,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp,
webrtc::VideoRotation rotation,
bool apply_rotation) = 0;
@ -74,11 +72,10 @@ class VideoFrame {
size_t sample_size,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp,
int rotation) {
return Reset(fourcc, w, h, dw, dh, sample, sample_size, pixel_width,
pixel_height, elapsed_time, time_stamp,
pixel_height, time_stamp,
static_cast<webrtc::VideoRotation>(rotation), true);
}
@ -117,9 +114,7 @@ class VideoFrame {
virtual size_t GetPixelWidth() const = 0;
virtual size_t GetPixelHeight() const = 0;
virtual int64_t GetElapsedTime() const = 0;
virtual int64_t GetTimeStamp() const = 0;
virtual void SetElapsedTime(int64_t elapsed_time) = 0;
virtual void SetTimeStamp(int64_t time_stamp) = 0;
// Indicates the rotation angle in degrees.
@ -218,7 +213,6 @@ class VideoFrame {
// Creates an empty frame.
virtual VideoFrame *CreateEmptyFrame(int w, int h, size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp) const = 0;
virtual void SetRotation(webrtc::VideoRotation rotation) = 0;
};

View File

@ -142,7 +142,7 @@ class VideoFrameTest : public testing::Test {
bool ret = false;
for (int i = 0; i < repeat_; ++i) {
ret = frame->Init(format, width, height, dw, dh,
sample, sample_size, 1, 1, 0, 0, rotation);
sample, sample_size, 1, 1, 0, rotation);
}
return ret;
}
@ -280,7 +280,7 @@ class VideoFrameTest : public testing::Test {
const uint8* start = reinterpret_cast<const uint8*>(ms->GetBuffer());
int awidth = (width + 1) & ~1;
frame->InitToBlack(width, height, 1, 1, 0, 0);
frame->InitToBlack(width, height, 1, 1, 0);
int stride_y = frame->GetYPitch();
int stride_u = frame->GetUPitch();
int stride_v = frame->GetVPitch();
@ -322,7 +322,7 @@ class VideoFrameTest : public testing::Test {
start = start + pitch * (height - 1);
pitch = -pitch;
}
frame->InitToBlack(width, height, 1, 1, 0, 0);
frame->InitToBlack(width, height, 1, 1, 0);
int stride_y = frame->GetYPitch();
int stride_u = frame->GetUPitch();
int stride_v = frame->GetVPitch();
@ -435,7 +435,7 @@ class VideoFrameTest : public testing::Test {
static bool IsEqual(const cricket::VideoFrame& frame,
size_t width, size_t height,
size_t pixel_width, size_t pixel_height,
int64 elapsed_time, int64 time_stamp,
int64 time_stamp,
const uint8* y, uint32 ypitch,
const uint8* u, uint32 upitch,
const uint8* v, uint32 vpitch,
@ -445,7 +445,6 @@ class VideoFrameTest : public testing::Test {
static_cast<uint32>(height)) &&
frame.GetPixelWidth() == pixel_width &&
frame.GetPixelHeight() == pixel_height &&
frame.GetElapsedTime() == elapsed_time &&
frame.GetTimeStamp() == time_stamp &&
IsPlaneEqual("y", frame.GetYPlane(), frame.GetYPitch(), y, ypitch,
static_cast<uint32>(width),
@ -464,7 +463,7 @@ class VideoFrameTest : public testing::Test {
return IsEqual(frame1,
frame2.GetWidth(), frame2.GetHeight(),
frame2.GetPixelWidth(), frame2.GetPixelHeight(),
frame2.GetElapsedTime(), frame2.GetTimeStamp(),
frame2.GetTimeStamp(),
frame2.GetYPlane(), frame2.GetYPitch(),
frame2.GetUPlane(), frame2.GetUPitch(),
frame2.GetVPlane(), frame2.GetVPitch(),
@ -480,7 +479,7 @@ class VideoFrameTest : public testing::Test {
frame2.GetWidth() - hcrop * 2,
frame2.GetHeight() - vcrop * 2,
frame2.GetPixelWidth(), frame2.GetPixelHeight(),
frame2.GetElapsedTime(), frame2.GetTimeStamp(),
frame2.GetTimeStamp(),
frame2.GetYPlane() + vcrop * frame2.GetYPitch()
+ hcrop,
frame2.GetYPitch(),
@ -516,7 +515,7 @@ class VideoFrameTest : public testing::Test {
const uint8* y = reinterpret_cast<uint8*>(ms.get()->GetBuffer());
const uint8* u = y + kWidth * kHeight;
const uint8* v = u + kWidth * kHeight / 4;
EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 1, 1, 0, 0,
EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 1, 1, 0,
y, kWidth, u, kWidth / 2, v, kWidth / 2, 0));
}
@ -531,7 +530,7 @@ class VideoFrameTest : public testing::Test {
const uint8* y = reinterpret_cast<uint8*>(ms.get()->GetBuffer());
const uint8* v = y + kWidth * kHeight;
const uint8* u = v + kWidth * kHeight / 4;
EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 1, 1, 0, 0,
EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 1, 1, 0,
y, kWidth, u, kWidth / 2, v, kWidth / 2, 0));
}
@ -795,10 +794,10 @@ class VideoFrameTest : public testing::Test {
EXPECT_TRUE(frame2.Init(cricket::FOURCC_##FOURCC, kWidth, kHeight, kWidth, \
kHeight, \
reinterpret_cast<uint8*>(ms->GetBuffer()), \
data_size, 1, 1, 0, 0, webrtc::kVideoRotation_0)); \
data_size, 1, 1, 0, webrtc::kVideoRotation_0)); \
int width_rotate = static_cast<int>(frame1.GetWidth()); \
int height_rotate = static_cast<int>(frame1.GetHeight()); \
EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 1, 1, 0, 0)); \
EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 1, 1, 0)); \
libyuv::I420Mirror( \
frame2.GetYPlane(), frame2.GetYPitch(), frame2.GetUPlane(), \
frame2.GetUPitch(), frame2.GetVPlane(), frame2.GetVPitch(), \
@ -826,10 +825,10 @@ class VideoFrameTest : public testing::Test {
EXPECT_TRUE(frame2.Init(cricket::FOURCC_##FOURCC, kWidth, kHeight, kWidth, \
kHeight, \
reinterpret_cast<uint8*>(ms->GetBuffer()), \
data_size, 1, 1, 0, 0, webrtc::kVideoRotation_0)); \
data_size, 1, 1, 0, webrtc::kVideoRotation_0)); \
int width_rotate = static_cast<int>(frame1.GetWidth()); \
int height_rotate = static_cast<int>(frame1.GetHeight()); \
EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 1, 1, 0, 0)); \
EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 1, 1, 0)); \
libyuv::I420Rotate( \
frame2.GetYPlane(), frame2.GetYPitch(), frame2.GetUPlane(), \
frame2.GetUPitch(), frame2.GetVPlane(), frame2.GetVPitch(), \
@ -935,13 +934,12 @@ class VideoFrameTest : public testing::Test {
uint8 pixel[3] = { 1, 2, 3 };
for (int i = 0; i < repeat_; ++i) {
EXPECT_TRUE(frame.Init(cricket::FOURCC_I420, 1, 1, 1, 1, pixel,
sizeof(pixel), 1, 1, 0, 0,
webrtc::kVideoRotation_0));
sizeof(pixel), 1, 1, 0, webrtc::kVideoRotation_0));
}
const uint8* y = pixel;
const uint8* u = y + 1;
const uint8* v = u + 1;
EXPECT_TRUE(IsEqual(frame, 1, 1, 1, 1, 0, 0,
EXPECT_TRUE(IsEqual(frame, 1, 1, 1, 1, 0,
y, 1, u, 1, v, 1, 0));
}
@ -952,7 +950,7 @@ class VideoFrameTest : public testing::Test {
memset(pixels5x5, 1, 5 * 5 + ((5 + 1) / 2 * (5 + 1) / 2) * 2);
for (int i = 0; i < repeat_; ++i) {
EXPECT_TRUE(frame.Init(cricket::FOURCC_I420, 5, 5, 5, 5, pixels5x5,
sizeof(pixels5x5), 1, 1, 0, 0,
sizeof(pixels5x5), 1, 1, 0,
webrtc::kVideoRotation_0));
}
EXPECT_EQ(5u, frame.GetWidth());
@ -968,7 +966,7 @@ class VideoFrameTest : public testing::Test {
uint8 pixel[4] = { 64, 128, 192, 255 };
for (int i = 0; i < repeat_; ++i) {
EXPECT_TRUE(frame.Init(cricket::FOURCC_ARGB, 1, 1, 1, 1, pixel,
sizeof(pixel), 1, 1, 0, 0,
sizeof(pixel), 1, 1, 0,
webrtc::kVideoRotation_0));
}
// Convert back to ARGB.
@ -1005,7 +1003,7 @@ class VideoFrameTest : public testing::Test {
for (int i = 0; i < repeat_; ++i) {
EXPECT_TRUE(frame.Init(cricket::FOURCC_ARGB, 10, 1, 10, 1, pixel,
sizeof(pixel), 1, 1, 0, 0,
sizeof(pixel), 1, 1, 0,
webrtc::kVideoRotation_0));
}
// Convert back to ARGB
@ -1314,7 +1312,7 @@ class VideoFrameTest : public testing::Test {
void ConstructBlack() {
T frame;
for (int i = 0; i < repeat_; ++i) {
EXPECT_TRUE(frame.InitToBlack(kWidth, kHeight, 1, 1, 0, 0));
EXPECT_TRUE(frame.InitToBlack(kWidth, kHeight, 1, 1, 0));
}
EXPECT_TRUE(IsSize(frame, kWidth, kHeight));
EXPECT_TRUE(IsBlack(frame));
@ -1380,13 +1378,13 @@ class VideoFrameTest : public testing::Test {
ASSERT_TRUE(ms.get() != NULL);
size_t data_size;
ms->GetSize(&data_size);
EXPECT_TRUE(frame1.InitToBlack(kWidth, kHeight, 1, 1, 0, 0));
EXPECT_TRUE(frame2.InitToBlack(kWidth, kHeight, 1, 1, 0, 0));
EXPECT_TRUE(frame1.InitToBlack(kWidth, kHeight, 1, 1, 0));
EXPECT_TRUE(frame2.InitToBlack(kWidth, kHeight, 1, 1, 0));
EXPECT_TRUE(IsBlack(frame1));
EXPECT_TRUE(IsEqual(frame1, frame2, 0));
EXPECT_TRUE(frame1.Reset(cricket::FOURCC_I420, kWidth, kHeight, kWidth,
kHeight, reinterpret_cast<uint8*>(ms->GetBuffer()),
data_size, 1, 1, 0, 0, rotation,
data_size, 1, 1, 0, rotation,
apply_rotation));
if (apply_rotation)
EXPECT_EQ(webrtc::kVideoRotation_0, frame1.GetVideoRotation());
@ -1450,7 +1448,7 @@ class VideoFrameTest : public testing::Test {
out,
out_size, stride));
}
EXPECT_TRUE(frame2.InitToBlack(kWidth, kHeight, 1, 1, 0, 0));
EXPECT_TRUE(frame2.InitToBlack(kWidth, kHeight, 1, 1, 0));
for (int i = 0; i < repeat_from; ++i) {
EXPECT_EQ(0, RGBToI420(out, stride,
frame2.GetYPlane(), frame2.GetYPitch(),
@ -1767,7 +1765,7 @@ class VideoFrameTest : public testing::Test {
kWidth, kHeight));
}
EXPECT_TRUE(frame2.Init(cricket::FOURCC_I422, kWidth, kHeight, kWidth,
kHeight, y, out_size, 1, 1, 0, 0,
kHeight, y, out_size, 1, 1, 0,
webrtc::kVideoRotation_0));
EXPECT_TRUE(IsEqual(frame1, frame2, 1));
}
@ -1871,7 +1869,7 @@ class VideoFrameTest : public testing::Test {
uint8 pixel[3] = { 1, 2, 3 };
T frame;
EXPECT_TRUE(frame.Init(cricket::FOURCC_I420, 1, 1, 1, 1, pixel,
sizeof(pixel), 1, 1, 0, 0,
sizeof(pixel), 1, 1, 0,
webrtc::kVideoRotation_0));
for (int i = 0; i < repeat_; ++i) {
EXPECT_EQ(out_size, frame.CopyToBuffer(out.get(), out_size));
@ -1885,7 +1883,7 @@ class VideoFrameTest : public testing::Test {
void StretchToFrame() {
// Create the source frame as a black frame.
T source;
EXPECT_TRUE(source.InitToBlack(kWidth * 2, kHeight * 2, 1, 1, 0, 0));
EXPECT_TRUE(source.InitToBlack(kWidth * 2, kHeight * 2, 1, 1, 0));
EXPECT_TRUE(IsSize(source, kWidth * 2, kHeight * 2));
// Create the target frame by loading from a file.
@ -1902,7 +1900,6 @@ class VideoFrameTest : public testing::Test {
ASSERT_TRUE(LoadFrameNoRepeat(&target2));
source.StretchToFrame(&target2, true, true);
EXPECT_TRUE(IsBlack(target2));
EXPECT_EQ(source.GetElapsedTime(), target2.GetElapsedTime());
EXPECT_EQ(source.GetTimeStamp(), target2.GetTimeStamp());
}

View File

@ -70,7 +70,6 @@ VideoFrame* VideoFrameFactory::CreateAliasedFrame(
}
} else {
cropped_input_frame->StretchToFrame(output_frame_.get(), true, true);
output_frame_->SetElapsedTime(cropped_input_frame->GetElapsedTime());
output_frame_->SetTimeStamp(cropped_input_frame->GetTimeStamp());
}
return output_frame_->Copy();

View File

@ -74,7 +74,9 @@ bool VideoRecorder::RecordFrame(const CapturedFrame& frame) {
buffer.WriteUInt32(frame.fourcc);
buffer.WriteUInt32(frame.pixel_width);
buffer.WriteUInt32(frame.pixel_height);
buffer.WriteUInt64(frame.elapsed_time);
// Elapsed time is deprecated.
const uint64_t dummy_elapsed_time = 0;
buffer.WriteUInt64(dummy_elapsed_time);
buffer.WriteUInt64(frame.time_stamp);
buffer.WriteUInt32(size);
@ -163,7 +165,6 @@ FileVideoCapturer::FileVideoCapturer()
: frame_buffer_size_(0),
file_read_thread_(NULL),
repeat_(0),
start_time_ns_(0),
last_frame_timestamp_ns_(0),
ignore_framerate_(false) {
}
@ -243,8 +244,6 @@ CaptureState FileVideoCapturer::Start(const VideoFormat& capture_format) {
SetCaptureFormat(&capture_format);
// Create a thread to read the file.
file_read_thread_ = new FileReadThread(this);
start_time_ns_ = kNumNanoSecsPerMilliSec *
static_cast<int64>(rtc::Time());
bool ret = file_read_thread_->Start();
if (ret) {
LOG(LS_INFO) << "File video capturer '" << GetId() << "' started";
@ -302,7 +301,9 @@ rtc::StreamResult FileVideoCapturer::ReadFrameHeader(
buffer.ReadUInt32(&frame->fourcc);
buffer.ReadUInt32(&frame->pixel_width);
buffer.ReadUInt32(&frame->pixel_height);
buffer.ReadUInt64(reinterpret_cast<uint64*>(&frame->elapsed_time));
// Elapsed time is deprecated.
uint64 dummy_elapsed_time;
buffer.ReadUInt64(&dummy_elapsed_time);
buffer.ReadUInt64(reinterpret_cast<uint64*>(&frame->time_stamp));
buffer.ReadUInt32(&frame->data_size);
}
@ -318,7 +319,6 @@ bool FileVideoCapturer::ReadFrame(bool first_frame, int* wait_time_ms) {
if (!first_frame) {
captured_frame_.time_stamp = kNumNanoSecsPerMilliSec *
static_cast<int64>(start_read_time_ms);
captured_frame_.elapsed_time = captured_frame_.time_stamp - start_time_ns_;
SignalFrameCaptured(this, &captured_frame_);
}

View File

@ -149,7 +149,6 @@ class FileVideoCapturer : public VideoCapturer {
uint32 frame_buffer_size_;
FileReadThread* file_read_thread_;
int repeat_; // How many times to repeat the file.
int64 start_time_ns_; // Time when the file video capturer starts.
int64 last_frame_timestamp_ns_; // Timestamp of last read frame.
bool ignore_framerate_;

View File

@ -434,8 +434,7 @@ WebRtcCapturedFrame::WebRtcCapturedFrame(const webrtc::VideoFrame& sample,
pixel_width = 1;
pixel_height = 1;
// Convert units from VideoFrame RenderTimeMs to CapturedFrame (nanoseconds).
elapsed_time = sample.render_time_ms() * rtc::kNumNanosecsPerMillisec;
time_stamp = elapsed_time;
time_stamp = sample.render_time_ms() * rtc::kNumNanosecsPerMillisec;
data_size = rtc::checked_cast<uint32>(length);
data = buffer;
rotation = sample.rotation();

View File

@ -2537,7 +2537,6 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RenderFrame(
const WebRtcVideoFrame render_frame(
frame.video_frame_buffer(),
elapsed_time_ms * rtc::kNumNanosecsPerMillisec,
frame.render_time_ms() * rtc::kNumNanosecsPerMillisec, frame.rotation());
renderer_->RenderFrame(&render_frame);
}

View File

@ -492,7 +492,6 @@ TEST_F(WebRtcVideoEngine2Test,
rtc::scoped_ptr<char[]> data(new char[frame.data_size]);
frame.data = data.get();
memset(frame.data, 1, frame.data_size);
frame.elapsed_time = 0;
const int kInitialTimestamp = 123456;
frame.time_stamp = kInitialTimestamp;
@ -1810,7 +1809,7 @@ void WebRtcVideoChannel2Test::TestCpuAdaptation(bool enable_overuse,
EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
}
TEST_F(WebRtcVideoChannel2Test, EstimatesNtpStartTimeAndElapsedTimeCorrectly) {
TEST_F(WebRtcVideoChannel2Test, EstimatesNtpStartTimeCorrectly) {
// Start at last timestamp to verify that wraparounds are estimated correctly.
static const uint32_t kInitialTimestamp = 0xFFFFFFFFu;
static const int64_t kInitialNtpTimeMs = 1247891230;
@ -1829,7 +1828,6 @@ TEST_F(WebRtcVideoChannel2Test, EstimatesNtpStartTimeAndElapsedTimeCorrectly) {
stream->InjectFrame(video_frame, 0);
EXPECT_EQ(1, renderer.num_rendered_frames());
EXPECT_EQ(0, renderer.last_frame_elapsed_time_ns());
// This timestamp is kInitialTimestamp (-1) + kFrameOffsetMs * 90, which
// triggers a constant-overflow warning, hence we're calculating it explicitly
@ -1839,8 +1837,6 @@ TEST_F(WebRtcVideoChannel2Test, EstimatesNtpStartTimeAndElapsedTimeCorrectly) {
stream->InjectFrame(video_frame, 0);
EXPECT_EQ(2, renderer.num_rendered_frames());
EXPECT_EQ(kFrameOffsetMs * rtc::kNumNanosecsPerMillisec,
renderer.last_frame_elapsed_time_ns());
// Verify that NTP time has been correctly deduced.
cricket::VideoMediaInfo info;

View File

@ -42,19 +42,16 @@ namespace cricket {
WebRtcVideoFrame::WebRtcVideoFrame():
pixel_width_(0),
pixel_height_(0),
elapsed_time_ns_(0),
time_stamp_ns_(0),
rotation_(webrtc::kVideoRotation_0) {}
WebRtcVideoFrame::WebRtcVideoFrame(
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
int64_t elapsed_time_ns,
int64_t time_stamp_ns,
webrtc::VideoRotation rotation)
: video_frame_buffer_(buffer),
pixel_width_(1),
pixel_height_(1),
elapsed_time_ns_(elapsed_time_ns),
time_stamp_ns_(time_stamp_ns),
rotation_(rotation) {
}
@ -66,7 +63,6 @@ WebRtcVideoFrame::WebRtcVideoFrame(
: video_frame_buffer_(buffer),
pixel_width_(1),
pixel_height_(1),
elapsed_time_ns_(elapsed_time_ns),
time_stamp_ns_(time_stamp_ns),
rotation_(webrtc::kVideoRotation_0) {
}
@ -82,11 +78,10 @@ bool WebRtcVideoFrame::Init(uint32 format,
size_t sample_size,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time_ns,
int64_t time_stamp_ns,
webrtc::VideoRotation rotation) {
return Reset(format, w, h, dw, dh, sample, sample_size, pixel_width,
pixel_height, elapsed_time_ns, time_stamp_ns, rotation,
pixel_height, time_stamp_ns, rotation,
true /*apply_rotation*/);
}
@ -94,17 +89,21 @@ bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh,
bool apply_rotation) {
return Reset(frame->fourcc, frame->width, frame->height, dw, dh,
static_cast<uint8*>(frame->data), frame->data_size,
frame->pixel_width, frame->pixel_height, frame->elapsed_time,
frame->pixel_width, frame->pixel_height,
frame->time_stamp,
frame->GetRotation(),
apply_rotation);
}
bool WebRtcVideoFrame::InitToBlack(int w, int h, size_t pixel_width,
size_t pixel_height, int64_t elapsed_time_ns,
size_t pixel_height, int64_t,
int64_t time_stamp_ns) {
InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time_ns,
time_stamp_ns);
return InitToBlack(w, h, pixel_width, pixel_height, time_stamp_ns);
}
bool WebRtcVideoFrame::InitToBlack(int w, int h, size_t pixel_width,
size_t pixel_height, int64_t time_stamp_ns) {
InitToEmptyBuffer(w, h, pixel_width, pixel_height, time_stamp_ns);
return SetToBlack();
}
@ -170,7 +169,7 @@ WebRtcVideoFrame::GetVideoFrameBuffer() const {
VideoFrame* WebRtcVideoFrame::Copy() const {
WebRtcVideoFrame* new_frame = new WebRtcVideoFrame(
video_frame_buffer_, elapsed_time_ns_, time_stamp_ns_, rotation_);
video_frame_buffer_, time_stamp_ns_, rotation_);
new_frame->pixel_width_ = pixel_width_;
new_frame->pixel_height_ = pixel_height_;
return new_frame;
@ -216,7 +215,6 @@ bool WebRtcVideoFrame::Reset(uint32 format,
size_t sample_size,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time_ns,
int64_t time_stamp_ns,
webrtc::VideoRotation rotation,
bool apply_rotation) {
@ -237,7 +235,7 @@ bool WebRtcVideoFrame::Reset(uint32 format,
}
InitToEmptyBuffer(new_width, new_height, pixel_width, pixel_height,
elapsed_time_ns, time_stamp_ns);
time_stamp_ns);
rotation_ = apply_rotation ? webrtc::kVideoRotation_0 : rotation;
int horiz_crop = ((w - dw) / 2) & ~1;
@ -267,21 +265,18 @@ bool WebRtcVideoFrame::Reset(uint32 format,
VideoFrame* WebRtcVideoFrame::CreateEmptyFrame(
int w, int h, size_t pixel_width, size_t pixel_height,
int64_t elapsed_time_ns, int64_t time_stamp_ns) const {
int64_t time_stamp_ns) const {
WebRtcVideoFrame* frame = new WebRtcVideoFrame();
frame->InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time_ns,
time_stamp_ns);
frame->InitToEmptyBuffer(w, h, pixel_width, pixel_height, time_stamp_ns);
return frame;
}
void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h, size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time_ns,
int64_t time_stamp_ns) {
video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
pixel_width_ = pixel_width;
pixel_height_ = pixel_height;
elapsed_time_ns_ = elapsed_time_ns;
time_stamp_ns_ = time_stamp_ns;
rotation_ = webrtc::kVideoRotation_0;
}
@ -315,7 +310,7 @@ const VideoFrame* WebRtcVideoFrame::GetCopyWithRotationApplied() const {
rotated_frame_.reset(CreateEmptyFrame(rotated_width, rotated_height,
GetPixelWidth(), GetPixelHeight(),
GetElapsedTime(), GetTimeStamp()));
GetTimeStamp()));
// TODO(guoweis): Add a function in webrtc_libyuv.cc to convert from
// VideoRotation to libyuv::RotationMode.

View File

@ -43,7 +43,6 @@ class WebRtcVideoFrame : public VideoFrame {
public:
WebRtcVideoFrame();
WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
int64_t elapsed_time_ns,
int64_t time_stamp_ns,
webrtc::VideoRotation rotation);
@ -67,18 +66,21 @@ class WebRtcVideoFrame : public VideoFrame {
size_t sample_size,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time_ns,
int64_t time_stamp_ns,
webrtc::VideoRotation rotation);
bool Init(const CapturedFrame* frame, int dw, int dh, bool apply_rotation);
void InitToEmptyBuffer(int w, int h, size_t pixel_width, size_t pixel_height,
int64_t elapsed_time_ns, int64_t time_stamp_ns);
int64_t time_stamp_ns);
// TODO(magjed): Remove once Chromium is updated.
bool InitToBlack(int w, int h, size_t pixel_width, size_t pixel_height,
int64_t elapsed_time_ns, int64_t time_stamp_ns);
bool InitToBlack(int w, int h, size_t pixel_width, size_t pixel_height,
int64_t time_stamp_ns);
// From base class VideoFrame.
virtual bool Reset(uint32 format,
int w,
@ -89,7 +91,6 @@ class WebRtcVideoFrame : public VideoFrame {
size_t sample_size,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time_ns,
int64_t time_stamp_ns,
webrtc::VideoRotation rotation,
bool apply_rotation);
@ -111,11 +112,7 @@ class WebRtcVideoFrame : public VideoFrame {
virtual size_t GetPixelWidth() const { return pixel_width_; }
virtual size_t GetPixelHeight() const { return pixel_height_; }
virtual int64_t GetElapsedTime() const { return elapsed_time_ns_; }
virtual int64_t GetTimeStamp() const { return time_stamp_ns_; }
virtual void SetElapsedTime(int64_t elapsed_time_ns) {
elapsed_time_ns_ = elapsed_time_ns;
}
virtual void SetTimeStamp(int64_t time_stamp_ns) {
time_stamp_ns_ = time_stamp_ns;
}
@ -138,14 +135,12 @@ class WebRtcVideoFrame : public VideoFrame {
private:
virtual VideoFrame* CreateEmptyFrame(int w, int h, size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time_ns,
int64_t time_stamp_ns) const;
// An opaque reference counted handle that stores the pixel data.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
size_t pixel_width_;
size_t pixel_height_;
int64_t elapsed_time_ns_;
int64_t time_stamp_ns_;
webrtc::VideoRotation rotation_;

View File

@ -41,11 +41,9 @@ class WebRtcVideoTestFrame : public cricket::WebRtcVideoFrame {
int h,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp) const override {
WebRtcVideoTestFrame* frame = new WebRtcVideoTestFrame();
frame->InitToBlack(w, h, pixel_width, pixel_height, elapsed_time,
time_stamp);
frame->InitToBlack(w, h, pixel_width, pixel_height, time_stamp);
return frame;
}
};
@ -68,7 +66,6 @@ class WebRtcVideoFrameTest : public VideoFrameTest<cricket::WebRtcVideoFrame> {
captured_frame.fourcc = cricket::FOURCC_I420;
captured_frame.pixel_width = 1;
captured_frame.pixel_height = 1;
captured_frame.elapsed_time = 1234;
captured_frame.time_stamp = 5678;
captured_frame.rotation = frame_rotation;
captured_frame.width = frame_width;
@ -90,7 +87,6 @@ class WebRtcVideoFrameTest : public VideoFrameTest<cricket::WebRtcVideoFrame> {
// Verify the new frame.
EXPECT_EQ(1u, frame.GetPixelWidth());
EXPECT_EQ(1u, frame.GetPixelHeight());
EXPECT_EQ(1234, frame.GetElapsedTime());
EXPECT_EQ(5678, frame.GetTimeStamp());
if (apply_rotation)
EXPECT_EQ(webrtc::kVideoRotation_0, frame.GetRotation());
@ -303,14 +299,11 @@ TEST_F(WebRtcVideoFrameTest, TextureInitialValues) {
webrtc::NativeHandleBuffer* buffer =
new rtc::RefCountedObject<webrtc::test::FakeNativeHandleBuffer>(
dummy_handle, 640, 480);
cricket::WebRtcVideoFrame frame(buffer, 100, 200, webrtc::kVideoRotation_0);
cricket::WebRtcVideoFrame frame(buffer, 200, webrtc::kVideoRotation_0);
EXPECT_EQ(dummy_handle, frame.GetNativeHandle());
EXPECT_EQ(640u, frame.GetWidth());
EXPECT_EQ(480u, frame.GetHeight());
EXPECT_EQ(100, frame.GetElapsedTime());
EXPECT_EQ(200, frame.GetTimeStamp());
frame.SetElapsedTime(300);
EXPECT_EQ(300, frame.GetElapsedTime());
frame.SetTimeStamp(400);
EXPECT_EQ(400, frame.GetTimeStamp());
}
@ -321,12 +314,11 @@ TEST_F(WebRtcVideoFrameTest, CopyTextureFrame) {
webrtc::NativeHandleBuffer* buffer =
new rtc::RefCountedObject<webrtc::test::FakeNativeHandleBuffer>(
dummy_handle, 640, 480);
cricket::WebRtcVideoFrame frame1(buffer, 100, 200, webrtc::kVideoRotation_0);
cricket::WebRtcVideoFrame frame1(buffer, 200, webrtc::kVideoRotation_0);
cricket::VideoFrame* frame2 = frame1.Copy();
EXPECT_EQ(frame1.GetNativeHandle(), frame2->GetNativeHandle());
EXPECT_EQ(frame1.GetWidth(), frame2->GetWidth());
EXPECT_EQ(frame1.GetHeight(), frame2->GetHeight());
EXPECT_EQ(frame1.GetElapsedTime(), frame2->GetElapsedTime());
EXPECT_EQ(frame1.GetTimeStamp(), frame2->GetTimeStamp());
delete frame2;
}

View File

@ -44,7 +44,6 @@ class WebRtcVideoFrameFactoryTest
captured_frame_.fourcc = cricket::FOURCC_I420;
captured_frame_.pixel_width = 1;
captured_frame_.pixel_height = 1;
captured_frame_.elapsed_time = 1234;
captured_frame_.time_stamp = 5678;
captured_frame_.rotation = frame_rotation;
captured_frame_.width = frame_width;