Delete RawVideoType enum, use the VideoType enum instead.

BUG=webrtc:7385

Review-Url: https://codereview.webrtc.org/2765243002
Cr-Commit-Position: refs/heads/master@{#17930}
nisse authored 2017-04-28 07:18:05 -07:00, committed by Commit bot
parent aec49d2b49
commit eb44b39a21
31 changed files with 261 additions and 309 deletions
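For orientation (not part of the change itself), a minimal sketch of what a call site looks like after the rename; the include path and the 640x480 dimensions are illustrative assumptions:

#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"  // assumed header for CalcBufferSize

// After this change there is a single scoped enum, webrtc::VideoType, instead
// of the old unscoped webrtc::RawVideoType constants (kVideoI420 etc.).
size_t I420CaptureBufferBytes() {
  // I420 needs width * height * 3 / 2 bytes; 640x480 is just an example size.
  return webrtc::CalcBufferSize(webrtc::VideoType::kI420, 640, 480);
}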


@ -416,23 +416,25 @@ struct AudioDecodingCallStats {
// Video specific types
// ==================================================================
// Raw video types
enum RawVideoType {
kVideoI420 = 0,
kVideoYV12 = 1,
kVideoYUY2 = 2,
kVideoUYVY = 3,
kVideoIYUV = 4,
kVideoARGB = 5,
kVideoRGB24 = 6,
kVideoRGB565 = 7,
kVideoARGB4444 = 8,
kVideoARGB1555 = 9,
kVideoMJPEG = 10,
kVideoNV12 = 11,
kVideoNV21 = 12,
kVideoBGRA = 13,
kVideoUnknown = 99
// TODO(nisse): Delete, and switch to fourcc values everywhere?
// Supported video types.
enum class VideoType {
kUnknown,
kI420,
kIYUV,
kRGB24,
kABGR,
kARGB,
kARGB4444,
kRGB565,
kARGB1555,
kYUY2,
kYV12,
kUYVY,
kMJPEG,
kNV21,
kNV12,
kBGRA,
};
// Video codec
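A side effect worth noting (my reading, not stated in the change description): because VideoType is an enum class, its values no longer convert implicitly to int, which is why logging sites later in this diff add static_cast<int>. A standalone illustration using an abbreviated stand-in enum:

#include <iostream>

// Abbreviated stand-in for webrtc::VideoType, just to show the cast.
enum class VideoType { kUnknown, kI420, kNV12 };

int main() {
  VideoType type = VideoType::kNV12;
  // Without the cast this would not compile; operator<< has no overload for
  // the scoped enum.
  std::cout << "color format: " << static_cast<int>(type) << std::endl;
  return 0;
}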


@ -19,39 +19,19 @@
#include <vector>
#include "webrtc/api/video/video_frame.h"
#include "webrtc/common_types.h" // RawVideoTypes.
#include "webrtc/common_types.h" // VideoTypes.
#include "webrtc/typedefs.h"
namespace webrtc {
class I420Buffer;
// Supported video types.
enum VideoType {
kUnknown,
kI420,
kIYUV,
kRGB24,
kABGR,
kARGB,
kARGB4444,
kRGB565,
kARGB1555,
kYUY2,
kYV12,
kUYVY,
kMJPG,
kNV21,
kNV12,
kBGRA,
};
// This is the max PSNR value our algorithms can return.
const double kPerfectPSNR = 48.0f;
// Conversion between the RawVideoType and the LibYuv videoType.
// TODO(wu): Consolidate types into one type throughout WebRtc.
VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type);
// TODO(nisse): Some downstream apps call CalcBufferSize with
// ::webrtc::kI420 as the first argument. Delete after they are updated.
const VideoType kI420 = VideoType::kI420;
// Calculate the required buffer size.
// Input:
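The kI420 compatibility constant above exists so downstream calls spelled CalcBufferSize(::webrtc::kI420, ...) keep compiling during the transition. A hedged sketch (the include path is an assumption) of why both spellings agree:

#include <cassert>

#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"  // assumed path

void CheckCompatibilityAlias() {
  // Old spelling, via the transitional constant.
  size_t old_style = webrtc::CalcBufferSize(webrtc::kI420, 352, 288);
  // New spelling, via the scoped enumerator.
  size_t new_style = webrtc::CalcBufferSize(webrtc::VideoType::kI420, 352, 288);
  assert(old_style == new_style);  // both: 352 * 288 * 3 / 2 = 152064 bytes
}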


@ -53,7 +53,7 @@ TestLibYuv::TestLibYuv()
height_(288),
size_y_(width_ * height_),
size_uv_(((width_ + 1) / 2) * ((height_ + 1) / 2)),
frame_length_(CalcBufferSize(kI420, 352, 288)) {}
frame_length_(CalcBufferSize(VideoType::kI420, 352, 288)) {}
void TestLibYuv::SetUp() {
const std::string input_file_name = webrtc::test::ResourcePath("foreman_cif",
@ -94,10 +94,11 @@ TEST_F(TestLibYuv, ConvertTest) {
printf("\nConvert #%d I420 <-> I420 \n", j);
std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kI420, 0, out_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0,
res_i420_buffer.get()));
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kI420, 0,
out_i420_buffer.get()));
EXPECT_EQ(0,
ConvertToI420(VideoType::kI420, out_i420_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, res_i420_buffer.get()));
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
@ -114,11 +115,12 @@ TEST_F(TestLibYuv, ConvertTest) {
Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
res_i420_buffer =
I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kRGB24, 0, res_rgb_buffer2.get()));
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kRGB24, 0,
res_rgb_buffer2.get()));
EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2.get(), 0, 0, width_,
height_, 0, kVideoRotation_0,
res_i420_buffer.get()));
EXPECT_EQ(
0, ConvertToI420(VideoType::kRGB24, res_rgb_buffer2.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, res_i420_buffer.get()));
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
@ -131,10 +133,11 @@ TEST_F(TestLibYuv, ConvertTest) {
printf("\nConvert #%d I420 <-> UYVY\n", j);
std::unique_ptr<uint8_t[]> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0,
res_i420_buffer.get()));
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kUYVY, 0,
out_uyvy_buffer.get()));
EXPECT_EQ(0,
ConvertToI420(VideoType::kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, res_i420_buffer.get()));
psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
EXPECT_EQ(48.0, psnr);
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
@ -144,11 +147,12 @@ TEST_F(TestLibYuv, ConvertTest) {
printf("\nConvert #%d I420 <-> YUY2\n", j);
std::unique_ptr<uint8_t[]> out_yuy2_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kYUY2, 0,
out_yuy2_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
height_, 0,
kVideoRotation_0, res_i420_buffer.get()));
EXPECT_EQ(0,
ConvertToI420(VideoType::kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, res_i420_buffer.get()));
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
@ -160,12 +164,12 @@ TEST_F(TestLibYuv, ConvertTest) {
printf("\nConvert #%d I420 <-> RGB565\n", j);
std::unique_ptr<uint8_t[]> out_rgb565_buffer(
new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0,
ConvertFromI420(*orig_frame_, kRGB565, 0, out_rgb565_buffer.get()));
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kRGB565, 0,
out_rgb565_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer.get(), 0, 0, width_,
height_, 0,
kVideoRotation_0, res_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(VideoType::kRGB565, out_rgb565_buffer.get(), 0, 0,
width_, height_, 0, kVideoRotation_0,
res_i420_buffer.get()));
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
}
@ -180,11 +184,11 @@ TEST_F(TestLibYuv, ConvertTest) {
printf("\nConvert #%d I420 <-> ARGB8888\n", j);
std::unique_ptr<uint8_t[]> out_argb8888_buffer(
new uint8_t[width_ * height_ * 4]);
EXPECT_EQ(0,
ConvertFromI420(*orig_frame_, kARGB, 0, out_argb8888_buffer.get()));
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kARGB, 0,
out_argb8888_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0,
EXPECT_EQ(0, ConvertToI420(VideoType::kARGB, out_argb8888_buffer.get(), 0, 0,
width_, height_, 0, kVideoRotation_0,
res_i420_buffer.get()));
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
@ -215,11 +219,11 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {
rtc::scoped_refptr<I420Buffer> res_i420_buffer =
I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kI420, 0,
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kI420, 0,
out_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0,
res_i420_buffer.get()));
EXPECT_EQ(0,
ConvertToI420(VideoType::kI420, out_i420_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, res_i420_buffer.get()));
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
@ -242,17 +246,17 @@ TEST_F(TestLibYuv, RotateTest) {
Calc16ByteAlignedStride(rotated_width, &stride_y, &stride_uv);
rtc::scoped_refptr<I420Buffer> rotated_res_i420_buffer = I420Buffer::Create(
rotated_width, rotated_height, stride_y, stride_uv, stride_uv);
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
0, kVideoRotation_90,
rotated_res_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
0, kVideoRotation_270,
rotated_res_i420_buffer.get()));
EXPECT_EQ(
0, ConvertToI420(VideoType::kI420, orig_buffer, 0, 0, width_, height_, 0,
kVideoRotation_90, rotated_res_i420_buffer.get()));
EXPECT_EQ(
0, ConvertToI420(VideoType::kI420, orig_buffer, 0, 0, width_, height_, 0,
kVideoRotation_270, rotated_res_i420_buffer.get()));
rotated_res_i420_buffer = I420Buffer::Create(
width_, height_, width_, (width_ + 1) / 2, (width_ + 1) / 2);
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
0, kVideoRotation_180,
rotated_res_i420_buffer.get()));
EXPECT_EQ(
0, ConvertToI420(VideoType::kI420, orig_buffer, 0, 0, width_, height_, 0,
kVideoRotation_180, rotated_res_i420_buffer.get()));
}
static uint8_t Average(int a, int b, int c, int d) {


@ -21,69 +21,33 @@
namespace webrtc {
VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type) {
switch (type) {
case kVideoI420:
return kI420;
case kVideoIYUV:
return kIYUV;
case kVideoRGB24:
return kRGB24;
case kVideoARGB:
return kARGB;
case kVideoARGB4444:
return kARGB4444;
case kVideoRGB565:
return kRGB565;
case kVideoARGB1555:
return kARGB1555;
case kVideoYUY2:
return kYUY2;
case kVideoYV12:
return kYV12;
case kVideoUYVY:
return kUYVY;
case kVideoNV21:
return kNV21;
case kVideoNV12:
return kNV12;
case kVideoBGRA:
return kBGRA;
case kVideoMJPEG:
return kMJPG;
default:
RTC_NOTREACHED();
}
return kUnknown;
}
size_t CalcBufferSize(VideoType type, int width, int height) {
RTC_DCHECK_GE(width, 0);
RTC_DCHECK_GE(height, 0);
size_t buffer_size = 0;
switch (type) {
case kI420:
case kNV12:
case kNV21:
case kIYUV:
case kYV12: {
case VideoType::kI420:
case VideoType::kNV12:
case VideoType::kNV21:
case VideoType::kIYUV:
case VideoType::kYV12: {
int half_width = (width + 1) >> 1;
int half_height = (height + 1) >> 1;
buffer_size = width * height + half_width * half_height * 2;
break;
}
case kARGB4444:
case kRGB565:
case kARGB1555:
case kYUY2:
case kUYVY:
case VideoType::kARGB4444:
case VideoType::kRGB565:
case VideoType::kARGB1555:
case VideoType::kYUY2:
case VideoType::kUYVY:
buffer_size = width * height * 2;
break;
case kRGB24:
case VideoType::kRGB24:
buffer_size = width * height * 3;
break;
case kBGRA:
case kARGB:
case VideoType::kBGRA:
case VideoType::kARGB:
buffer_size = width * height * 4;
break;
default:
@ -141,7 +105,7 @@ int ExtractBuffer(const rtc::scoped_refptr<VideoFrameBuffer>& input_frame,
return -1;
int width = input_frame->width();
int height = input_frame->height();
size_t length = CalcBufferSize(kI420, width, height);
size_t length = CalcBufferSize(VideoType::kI420, width, height);
if (size < length) {
return -1;
}
@ -207,36 +171,36 @@ libyuv::RotationMode ConvertRotationMode(VideoRotation rotation) {
int ConvertVideoType(VideoType video_type) {
switch (video_type) {
case kUnknown:
case VideoType::kUnknown:
return libyuv::FOURCC_ANY;
case kI420:
case VideoType::kI420:
return libyuv::FOURCC_I420;
case kIYUV: // same as KYV12
case kYV12:
case VideoType::kIYUV: // same as VideoType::kYV12
case VideoType::kYV12:
return libyuv::FOURCC_YV12;
case kRGB24:
case VideoType::kRGB24:
return libyuv::FOURCC_24BG;
case kABGR:
case VideoType::kABGR:
return libyuv::FOURCC_ABGR;
case kRGB565:
case VideoType::kRGB565:
return libyuv::FOURCC_RGBP;
case kYUY2:
case VideoType::kYUY2:
return libyuv::FOURCC_YUY2;
case kUYVY:
case VideoType::kUYVY:
return libyuv::FOURCC_UYVY;
case kMJPG:
case VideoType::kMJPEG:
return libyuv::FOURCC_MJPG;
case kNV21:
case VideoType::kNV21:
return libyuv::FOURCC_NV21;
case kNV12:
case VideoType::kNV12:
return libyuv::FOURCC_NV12;
case kARGB:
case VideoType::kARGB:
return libyuv::FOURCC_ARGB;
case kBGRA:
case VideoType::kBGRA:
return libyuv::FOURCC_BGRA;
case kARGB4444:
case VideoType::kARGB4444:
return libyuv::FOURCC_R444;
case kARGB1555:
case VideoType::kARGB1555:
return libyuv::FOURCC_RGBO;
}
RTC_NOTREACHED();
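The ConvertVideoType() switch above now has no default case; since every enumerator is handled, compilers that warn on non-exhaustive switches can flag any newly added VideoType that is left unmapped, and RTC_NOTREACHED() only guards the fall-through. A hedged usage sketch of the conversion path (the two-argument I420Buffer::Create overload, the helper name Nv12ToI420, and the include paths are assumptions):

#include "webrtc/api/video/i420_buffer.h"                      // assumed path
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"  // assumed path

// Convert a captured NV12 frame into an I420Buffer using the new enum.
rtc::scoped_refptr<webrtc::I420Buffer> Nv12ToI420(const uint8_t* nv12_frame,
                                                  int width, int height) {
  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
      webrtc::I420Buffer::Create(width, height);
  // Arguments as used by the call sites in this diff: no cropping, a
  // sample_size of 0, and no rotation.
  webrtc::ConvertToI420(webrtc::VideoType::kNV12, nv12_frame, 0, 0, width,
                        height, 0, webrtc::kVideoRotation_0, buffer.get());
  return buffer;
}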


@ -25,44 +25,31 @@
namespace cricket {
namespace {
struct kVideoFourCCEntry {
uint32_t fourcc;
webrtc::RawVideoType webrtc_type;
webrtc::VideoType webrtc_type;
};
// This indicates our format preferences and defines a mapping between
// webrtc::RawVideoType (from video_capture_defines.h) to our FOURCCs.
static kVideoFourCCEntry kSupportedFourCCs[] = {
{ FOURCC_I420, webrtc::kVideoI420 }, // 12 bpp, no conversion.
{ FOURCC_YV12, webrtc::kVideoYV12 }, // 12 bpp, no conversion.
{ FOURCC_YUY2, webrtc::kVideoYUY2 }, // 16 bpp, fast conversion.
{ FOURCC_UYVY, webrtc::kVideoUYVY }, // 16 bpp, fast conversion.
{ FOURCC_NV12, webrtc::kVideoNV12 }, // 12 bpp, fast conversion.
{ FOURCC_NV21, webrtc::kVideoNV21 }, // 12 bpp, fast conversion.
{ FOURCC_MJPG, webrtc::kVideoMJPEG }, // compressed, slow conversion.
{ FOURCC_ARGB, webrtc::kVideoARGB }, // 32 bpp, slow conversion.
{ FOURCC_24BG, webrtc::kVideoRGB24 }, // 24 bpp, slow conversion.
kVideoFourCCEntry kSupportedFourCCs[] = {
{FOURCC_I420, webrtc::VideoType::kI420}, // 12 bpp, no conversion.
{FOURCC_YV12, webrtc::VideoType::kYV12}, // 12 bpp, no conversion.
{FOURCC_YUY2, webrtc::VideoType::kYUY2}, // 16 bpp, fast conversion.
{FOURCC_UYVY, webrtc::VideoType::kUYVY}, // 16 bpp, fast conversion.
{FOURCC_NV12, webrtc::VideoType::kNV12}, // 12 bpp, fast conversion.
{FOURCC_NV21, webrtc::VideoType::kNV21}, // 12 bpp, fast conversion.
{FOURCC_MJPG, webrtc::VideoType::kMJPEG}, // compressed, slow conversion.
{FOURCC_ARGB, webrtc::VideoType::kARGB}, // 32 bpp, slow conversion.
{FOURCC_24BG, webrtc::VideoType::kRGB24}, // 24 bpp, slow conversion.
};
class WebRtcVcmFactory : public WebRtcVcmFactoryInterface {
public:
virtual rtc::scoped_refptr<webrtc::VideoCaptureModule> Create(
const char* device) {
return webrtc::VideoCaptureFactory::Create(device);
}
virtual webrtc::VideoCaptureModule::DeviceInfo* CreateDeviceInfo() {
return webrtc::VideoCaptureFactory::CreateDeviceInfo();
}
virtual void DestroyDeviceInfo(webrtc::VideoCaptureModule::DeviceInfo* info) {
delete info;
}
};
static bool CapabilityToFormat(const webrtc::VideoCaptureCapability& cap,
VideoFormat* format) {
bool CapabilityToFormat(const webrtc::VideoCaptureCapability& cap,
VideoFormat* format) {
uint32_t fourcc = 0;
for (size_t i = 0; i < arraysize(kSupportedFourCCs); ++i) {
if (kSupportedFourCCs[i].webrtc_type == cap.rawType) {
if (kSupportedFourCCs[i].webrtc_type == cap.videoType) {
fourcc = kSupportedFourCCs[i].fourcc;
break;
}
@ -78,27 +65,43 @@ static bool CapabilityToFormat(const webrtc::VideoCaptureCapability& cap,
return true;
}
static bool FormatToCapability(const VideoFormat& format,
webrtc::VideoCaptureCapability* cap) {
webrtc::RawVideoType webrtc_type = webrtc::kVideoUnknown;
bool FormatToCapability(const VideoFormat& format,
webrtc::VideoCaptureCapability* cap) {
webrtc::VideoType webrtc_type = webrtc::VideoType::kUnknown;
for (size_t i = 0; i < arraysize(kSupportedFourCCs); ++i) {
if (kSupportedFourCCs[i].fourcc == format.fourcc) {
webrtc_type = kSupportedFourCCs[i].webrtc_type;
break;
}
}
if (webrtc_type == webrtc::kVideoUnknown) {
if (webrtc_type == webrtc::VideoType::kUnknown) {
return false;
}
cap->width = format.width;
cap->height = format.height;
cap->maxFPS = VideoFormat::IntervalToFps(format.interval);
cap->rawType = webrtc_type;
cap->videoType = webrtc_type;
cap->interlaced = false;
return true;
}
} // namespace
class WebRtcVcmFactory : public WebRtcVcmFactoryInterface {
public:
virtual rtc::scoped_refptr<webrtc::VideoCaptureModule> Create(
const char* device) {
return webrtc::VideoCaptureFactory::Create(device);
}
virtual webrtc::VideoCaptureModule::DeviceInfo* CreateDeviceInfo() {
return webrtc::VideoCaptureFactory::CreateDeviceInfo();
}
virtual void DestroyDeviceInfo(webrtc::VideoCaptureModule::DeviceInfo* info) {
delete info;
}
};
///////////////////////////////////////////////////////////////////////////
// Implementation of class WebRtcVideoCapturer
///////////////////////////////////////////////////////////////////////////
@ -165,7 +168,7 @@ bool WebRtcVideoCapturer::Init(const Device& device) {
supported.push_back(format);
} else {
LOG(LS_WARNING) << "Ignoring unsupported WebRTC capture format "
<< cap.rawType;
<< static_cast<int>(cap.videoType);
}
}
}


@ -42,7 +42,7 @@ class WebRtcVideoCapturerTest : public testing::Test {
vga.width = 640;
vga.height = 480;
vga.maxFPS = 30;
vga.rawType = webrtc::kVideoI420;
vga.videoType = webrtc::VideoType::kI420;
factory_->device_info.AddCapability(kTestDeviceId, vga);
}


@ -153,7 +153,7 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability(
int32_t bestWidth = 0;
int32_t bestHeight = 0;
int32_t bestFrameRate = 0;
RawVideoType bestRawType = kVideoUnknown;
VideoType bestVideoType = VideoType::kUnknown;
const int32_t numberOfCapabilies =
static_cast<int32_t>(_captureCapabilities.size());
@ -193,15 +193,15 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability(
if ((currentbestDiffFrameRate == diffFrameRate) // Same frame rate as previous or frame rate already good enough
|| (currentbestDiffFrameRate >= 0))
{
if (bestRawType != requested.rawType
&& requested.rawType != kVideoUnknown
&& (capability.rawType == requested.rawType
|| capability.rawType == kVideoI420
|| capability.rawType == kVideoYUY2
|| capability.rawType == kVideoYV12))
{
bestRawType = capability.rawType;
bestformatIndex = tmp;
if (bestVideoType != requested.videoType &&
requested.videoType != VideoType::kUnknown &&
(capability.videoType ==
requested.videoType ||
capability.videoType == VideoType::kI420 ||
capability.videoType == VideoType::kYUY2 ||
capability.videoType == VideoType::kYV12)) {
bestVideoType = capability.videoType;
bestformatIndex = tmp;
}
// If width, height and frame rate are fulfilled we can use the camera for encoding if it is supported.
if (capability.height == requested.height
@ -216,7 +216,7 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability(
bestWidth = capability.width;
bestHeight = capability.height;
bestFrameRate = capability.maxFPS;
bestRawType = capability.rawType;
bestVideoType = capability.videoType;
bestformatIndex = tmp;
}
}
@ -226,7 +226,7 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability(
bestWidth = capability.width;
bestHeight = capability.height;
bestFrameRate = capability.maxFPS;
bestRawType = capability.rawType;
bestVideoType = capability.videoType;
bestformatIndex = tmp;
}
}// else width no good
@ -236,7 +236,7 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability(
bestWidth = capability.width;
bestHeight = capability.height;
bestFrameRate = capability.maxFPS;
bestRawType = capability.rawType;
bestVideoType = capability.videoType;
bestformatIndex = tmp;
}
}// else height not good
@ -244,7 +244,7 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability(
LOG(LS_VERBOSE) << "Best camera format: " << bestWidth << "x" << bestHeight
<< "@" << bestFrameRate
<< "fps, color format: " << bestRawType;
<< "fps, color format: " << static_cast<int>(bestVideoType);
// Copy the capability
if (bestformatIndex < 0)


@ -287,26 +287,26 @@ int32_t DeviceInfoLinux::FillCapabilities(int fd)
cap.height = video_fmt.fmt.pix.height;
if (videoFormats[fmts] == V4L2_PIX_FMT_YUYV)
{
cap.rawType = kVideoYUY2;
cap.videoType = VideoType::kYUY2;
}
else if (videoFormats[fmts] == V4L2_PIX_FMT_YUV420)
{
cap.rawType = kVideoI420;
cap.videoType = VideoType::kI420;
}
else if (videoFormats[fmts] == V4L2_PIX_FMT_MJPEG)
{
cap.rawType = kVideoMJPEG;
cap.videoType = VideoType::kMJPEG;
}
else if (videoFormats[fmts] == V4L2_PIX_FMT_UYVY)
{
cap.rawType = kVideoUYVY;
cap.videoType = VideoType::kUYVY;
}
// get fps of current camera mode
// V4l2 does not have a stable method of knowing so we just guess.
if(cap.width >= 800 && cap.rawType != kVideoMJPEG)
{
cap.maxFPS = 15;
if (cap.width >= 800 &&
cap.videoType != VideoType::kMJPEG) {
cap.maxFPS = 15;
}
else
{
@ -318,7 +318,7 @@ int32_t DeviceInfoLinux::FillCapabilities(int fd)
WEBRTC_TRACE(
webrtc::kTraceInfo, webrtc::kTraceVideoCapture, 0,
"Camera capability, width:%d height:%d type:%d fps:%d",
cap.width, cap.height, cap.rawType, cap.maxFPS);
cap.width, cap.height, cap.videoType, cap.maxFPS);
}
}
}


@ -48,10 +48,8 @@ VideoCaptureModuleV4L2::VideoCaptureModuleV4L2()
_currentHeight(-1),
_currentFrameRate(-1),
_captureStarted(false),
_captureVideoType(kVideoI420),
_pool(NULL)
{
}
_captureVideoType(VideoType::kI420),
_pool(NULL) {}
int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8)
{
@ -114,11 +112,10 @@ int32_t VideoCaptureModuleV4L2::StartCapture(
{
if (_captureStarted)
{
if (capability.width == _currentWidth &&
capability.height == _currentHeight &&
_captureVideoType == capability.rawType)
{
return 0;
if (capability.width == _currentWidth &&
capability.height == _currentHeight &&
_captureVideoType == capability.videoType) {
return 0;
}
else
{
@ -201,14 +198,14 @@ int32_t VideoCaptureModuleV4L2::StartCapture(
video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx];
if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
_captureVideoType = kVideoYUY2;
_captureVideoType = VideoType::kYUY2;
else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
_captureVideoType = kVideoI420;
_captureVideoType = VideoType::kI420;
else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY)
_captureVideoType = kVideoUYVY;
_captureVideoType = VideoType::kUYVY;
else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG ||
video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG)
_captureVideoType = kVideoMJPEG;
_captureVideoType = VideoType::kMJPEG;
//set format and frame size now
if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0)
@ -252,7 +249,7 @@ int32_t VideoCaptureModuleV4L2::StartCapture(
// If driver doesn't support framerate control, need to hardcode.
// Hardcoding the value based on the frame size.
if (!driver_framerate_support) {
if(_currentWidth >= 800 && _captureVideoType != kVideoMJPEG) {
if (_currentWidth >= 800 && _captureVideoType != VideoType::kMJPEG) {
_currentFrameRate = 15;
} else {
_currentFrameRate = 30;
@ -447,7 +444,7 @@ bool VideoCaptureModuleV4L2::CaptureProcess()
VideoCaptureCapability frameInfo;
frameInfo.width = _currentWidth;
frameInfo.height = _currentHeight;
frameInfo.rawType = _captureVideoType;
frameInfo.videoType = _captureVideoType;
// convert to I420 if needed
IncomingFrame((unsigned char*) _pool[buf.index].start,
@ -468,7 +465,7 @@ int32_t VideoCaptureModuleV4L2::CaptureSettings(VideoCaptureCapability& settings
settings.width = _currentWidth;
settings.height = _currentHeight;
settings.maxFPS = _currentFrameRate;
settings.rawType=_captureVideoType;
settings.videoType = _captureVideoType;
return 0;
}


@ -53,7 +53,7 @@ private:
int32_t _currentHeight;
int32_t _currentFrameRate;
bool _captureStarted;
RawVideoType _captureVideoType;
VideoType _captureVideoType;
struct Buffer
{
void *start;


@ -60,19 +60,19 @@
capability.width = 352;
capability.height = 288;
capability.maxFPS = 30;
capability.rawType = webrtc::kVideoNV12;
capability.videoType = webrtc::VideoType::kNV12;
capability.interlaced = false;
} else if ([preset isEqualToString:AVCaptureSessionPreset640x480]) {
capability.width = 640;
capability.height = 480;
capability.maxFPS = 30;
capability.rawType = webrtc::kVideoNV12;
capability.videoType = webrtc::VideoType::kNV12;
capability.interlaced = false;
} else if ([preset isEqualToString:AVCaptureSessionPreset1280x720]) {
capability.width = 1280;
capability.height = 720;
capability.maxFPS = 30;
capability.rawType = webrtc::kVideoNV12;
capability.videoType = webrtc::VideoType::kNV12;
capability.interlaced = false;
}


@ -359,7 +359,7 @@ using namespace webrtc::videocapturemodule;
tempCaptureCapability.width = CVPixelBufferGetWidth(videoFrame);
tempCaptureCapability.height = CVPixelBufferGetHeight(videoFrame);
tempCaptureCapability.maxFPS = _capability.maxFPS;
tempCaptureCapability.rawType = kVideoNV12;
tempCaptureCapability.videoType = VideoType::kNV12;
_owner->IncomingFrame(baseAddress, frameSize, tempCaptureCapability, 0);


@ -100,6 +100,6 @@ bool VideoCaptureIos::CaptureStarted() {
int32_t VideoCaptureIos::CaptureSettings(VideoCaptureCapability& settings) {
settings = capability_;
settings.rawType = kVideoNV12;
settings.videoType = VideoType::kNV12;
return 0;
}


@ -212,7 +212,7 @@ TEST_F(VideoCaptureTest, MAYBE_CreateDelete) {
capability.width = kTestWidth;
capability.height = kTestHeight;
capability.maxFPS = kTestFramerate;
capability.rawType = webrtc::kVideoUnknown;
capability.videoType = webrtc::VideoType::kUnknown;
#endif
capture_observer.SetExpectedCapability(capability);
ASSERT_NO_FATAL_FAILURE(StartCapture(module.get(), capability));
@ -319,7 +319,7 @@ TEST_F(VideoCaptureTest, DISABLED_TestTwoCameras) {
capability1.width = kTestWidth;
capability1.height = kTestHeight;
capability1.maxFPS = kTestFramerate;
capability1.rawType = webrtc::kVideoUnknown;
capability1.videoType = webrtc::VideoType::kUnknown;
#endif
capture_observer1.SetExpectedCapability(capability1);
@ -336,7 +336,7 @@ TEST_F(VideoCaptureTest, DISABLED_TestTwoCameras) {
capability2.width = kTestWidth;
capability2.height = kTestHeight;
capability2.maxFPS = kTestFramerate;
capability2.rawType = webrtc::kVideoUnknown;
capability2.videoType = webrtc::VideoType::kUnknown;
#endif
capture_observer2.SetExpectedCapability(capability2);
@ -358,7 +358,7 @@ class VideoCaptureExternalTest : public testing::Test {
VideoCaptureCapability capability;
capability.width = kTestWidth;
capability.height = kTestHeight;
capability.rawType = webrtc::kVideoYV12;
capability.videoType = webrtc::VideoType::kYV12;
capability.maxFPS = kTestFramerate;
capture_callback_.SetExpectedCapability(capability);
@ -390,9 +390,8 @@ class VideoCaptureExternalTest : public testing::Test {
// Test input of external video frames.
TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
size_t length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_->width(),
test_frame_->height());
size_t length = webrtc::CalcBufferSize(
webrtc::VideoType::kI420, test_frame_->width(), test_frame_->height());
std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
@ -402,9 +401,8 @@ TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
TEST_F(VideoCaptureExternalTest, Rotation) {
EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kVideoRotation_0));
size_t length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_->width(),
test_frame_->height());
size_t length = webrtc::CalcBufferSize(
webrtc::VideoType::kI420, test_frame_->width(), test_frame_->height());
std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),


@ -31,7 +31,7 @@ struct VideoCaptureCapability
int32_t width;
int32_t height;
int32_t maxFPS;
RawVideoType rawType;
VideoType videoType;
bool interlaced;
VideoCaptureCapability()
@ -39,7 +39,7 @@ struct VideoCaptureCapability
width = 0;
height = 0;
maxFPS = 0;
rawType = kVideoUnknown;
videoType = VideoType::kUnknown;
interlaced = false;
}
;
@ -51,8 +51,8 @@ struct VideoCaptureCapability
return true;
if (maxFPS != other.maxFPS)
return true;
if (rawType != other.rawType)
return true;
if (videoType != other.videoType)
return true;
if (interlaced != other.interlaced)
return true;
return false;


@ -89,7 +89,7 @@ VideoCaptureImpl::VideoCaptureImpl()
_requestedCapability.width = kDefaultWidth;
_requestedCapability.height = kDefaultHeight;
_requestedCapability.maxFPS = 30;
_requestedCapability.rawType = kVideoI420;
_requestedCapability.videoType = VideoType::kI420;
memset(_incomingFrameTimesNanos, 0, sizeof(_incomingFrameTimesNanos));
}
@ -134,11 +134,8 @@ int32_t VideoCaptureImpl::IncomingFrame(
TRACE_EVENT1("webrtc", "VC::IncomingFrame", "capture_time", captureTime);
// Not encoded, convert to I420.
const VideoType commonVideoType =
RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
if (frameInfo.rawType != kVideoMJPEG &&
CalcBufferSize(commonVideoType, width, abs(height)) !=
if (frameInfo.videoType != VideoType::kMJPEG &&
CalcBufferSize(frameInfo.videoType, width, abs(height)) !=
videoFrameLength) {
LOG(LS_ERROR) << "Wrong incoming frame length.";
return -1;
@ -169,12 +166,12 @@ int32_t VideoCaptureImpl::IncomingFrame(
rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
target_width, abs(target_height), stride_y, stride_uv, stride_uv);
const int conversionResult = ConvertToI420(
commonVideoType, videoFrame, 0, 0, // No cropping
frameInfo.videoType, videoFrame, 0, 0, // No cropping
width, height, videoFrameLength,
apply_rotation ? _rotateFrame : kVideoRotation_0, buffer.get());
if (conversionResult < 0) {
LOG(LS_ERROR) << "Failed to convert capture frame from type "
<< frameInfo.rawType << "to I420.";
<< static_cast<int>(frameInfo.videoType) << "to I420.";
return -1;
}


@ -589,44 +589,45 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
// can't switch MEDIATYPE :~(
if (pmt->subtype == MEDIASUBTYPE_I420)
{
capability.rawType = kVideoI420;
capability.videoType = VideoType::kI420;
}
else if (pmt->subtype == MEDIASUBTYPE_IYUV)
{
capability.rawType = kVideoIYUV;
capability.videoType = VideoType::kIYUV;
}
else if (pmt->subtype == MEDIASUBTYPE_RGB24)
{
capability.rawType = kVideoRGB24;
capability.videoType = VideoType::kRGB24;
}
else if (pmt->subtype == MEDIASUBTYPE_YUY2)
{
capability.rawType = kVideoYUY2;
capability.videoType = VideoType::kYUY2;
}
else if (pmt->subtype == MEDIASUBTYPE_RGB565)
{
capability.rawType = kVideoRGB565;
capability.videoType = VideoType::kRGB565;
}
else if (pmt->subtype == MEDIASUBTYPE_MJPG)
{
capability.rawType = kVideoMJPEG;
capability.videoType = VideoType::kMJPEG;
}
else if (pmt->subtype == MEDIASUBTYPE_dvsl
|| pmt->subtype == MEDIASUBTYPE_dvsd
|| pmt->subtype == MEDIASUBTYPE_dvhd) // If this is an external DV camera
{
capability.rawType = kVideoYUY2;// MS DV filter seems to create this type
capability.videoType =
VideoType::kYUY2; // MS DV filter seems to create this type
}
else if (pmt->subtype == MEDIASUBTYPE_UYVY) // Seen used by Declink capture cards
{
capability.rawType = kVideoUYVY;
capability.videoType = VideoType::kUYVY;
}
else if (pmt->subtype == MEDIASUBTYPE_HDYC) // Seen used by Declink capture cards. Uses BT. 709 color. Not entiry correct to use UYVY. http://en.wikipedia.org/wiki/YCbCr
{
WEBRTC_TRACE(webrtc::kTraceWarning,
webrtc::kTraceVideoCapture, 0,
"Device support HDYC.");
capability.rawType = kVideoUYVY;
capability.videoType = VideoType::kUYVY;
}
else
{
@ -641,10 +642,11 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
_captureCapabilities.push_back(capability);
_captureCapabilitiesWindows.push_back(capability);
WEBRTC_TRACE( webrtc::kTraceInfo, webrtc::kTraceVideoCapture, 0,
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, 0,
"Camera capability, width:%d height:%d type:%d fps:%d",
capability.width, capability.height,
capability.rawType, capability.maxFPS);
static_cast<int>(capability.videoType),
capability.maxFPS);
}
DeleteMediaType(pmt);
pmt = NULL;


@ -205,39 +205,39 @@ CaptureInputPin::CheckMediaType ( IN const CMediaType * pMediaType)
if(*SubType == MEDIASUBTYPE_MJPG
&& pvi->bmiHeader.biCompression == MAKEFOURCC('M','J','P','G'))
{
_resultingCapability.rawType = kVideoMJPEG;
return S_OK; // This format is acceptable.
_resultingCapability.videoType = VideoType::kMJPEG;
return S_OK; // This format is acceptable.
}
if(*SubType == MEDIASUBTYPE_I420
&& pvi->bmiHeader.biCompression == MAKEFOURCC('I','4','2','0'))
{
_resultingCapability.rawType = kVideoI420;
return S_OK; // This format is acceptable.
_resultingCapability.videoType = VideoType::kI420;
return S_OK; // This format is acceptable.
}
if(*SubType == MEDIASUBTYPE_YUY2
&& pvi->bmiHeader.biCompression == MAKEFOURCC('Y','U','Y','2'))
{
_resultingCapability.rawType = kVideoYUY2;
::Sleep(60); // workaround for bad driver
return S_OK; // This format is acceptable.
_resultingCapability.videoType = VideoType::kYUY2;
::Sleep(60); // workaround for bad driver
return S_OK; // This format is acceptable.
}
if(*SubType == MEDIASUBTYPE_UYVY
&& pvi->bmiHeader.biCompression == MAKEFOURCC('U','Y','V','Y'))
{
_resultingCapability.rawType = kVideoUYVY;
return S_OK; // This format is acceptable.
_resultingCapability.videoType = VideoType::kUYVY;
return S_OK; // This format is acceptable.
}
if(*SubType == MEDIASUBTYPE_HDYC)
{
_resultingCapability.rawType = kVideoUYVY;
return S_OK; // This format is acceptable.
_resultingCapability.videoType = VideoType::kUYVY;
return S_OK; // This format is acceptable.
}
if(*SubType == MEDIASUBTYPE_RGB24
&& pvi->bmiHeader.biCompression == BI_RGB)
{
_resultingCapability.rawType = kVideoRGB24;
return S_OK; // This format is acceptable.
_resultingCapability.videoType = VideoType::kRGB24;
return S_OK; // This format is acceptable.
}
}
if(*formatType == FORMAT_VideoInfo2)
@ -272,38 +272,38 @@ CaptureInputPin::CheckMediaType ( IN const CMediaType * pMediaType)
if(*SubType == MEDIASUBTYPE_MJPG
&& pvi->bmiHeader.biCompression == MAKEFOURCC('M','J','P','G'))
{
_resultingCapability.rawType = kVideoMJPEG;
return S_OK; // This format is acceptable.
_resultingCapability.videoType = VideoType::kMJPEG;
return S_OK; // This format is acceptable.
}
if(*SubType == MEDIASUBTYPE_I420
&& pvi->bmiHeader.biCompression == MAKEFOURCC('I','4','2','0'))
{
_resultingCapability.rawType = kVideoI420;
return S_OK; // This format is acceptable.
_resultingCapability.videoType = VideoType::kI420;
return S_OK; // This format is acceptable.
}
if(*SubType == MEDIASUBTYPE_YUY2
&& pvi->bmiHeader.biCompression == MAKEFOURCC('Y','U','Y','2'))
{
_resultingCapability.rawType = kVideoYUY2;
return S_OK; // This format is acceptable.
_resultingCapability.videoType = VideoType::kYUY2;
return S_OK; // This format is acceptable.
}
if(*SubType == MEDIASUBTYPE_UYVY
&& pvi->bmiHeader.biCompression == MAKEFOURCC('U','Y','V','Y'))
{
_resultingCapability.rawType = kVideoUYVY;
return S_OK; // This format is acceptable.
_resultingCapability.videoType = VideoType::kUYVY;
return S_OK; // This format is acceptable.
}
if(*SubType == MEDIASUBTYPE_HDYC)
{
_resultingCapability.rawType = kVideoUYVY;
return S_OK; // This format is acceptable.
_resultingCapability.videoType = VideoType::kUYVY;
return S_OK; // This format is acceptable.
}
if(*SubType == MEDIASUBTYPE_RGB24
&& pvi->bmiHeader.biCompression == BI_RGB)
{
_resultingCapability.rawType = kVideoRGB24;
return S_OK; // This format is acceptable.
_resultingCapability.videoType = VideoType::kRGB24;
return S_OK; // This format is acceptable.
}
}
return E_INVALIDARG;


@ -107,8 +107,8 @@ static void RtpFragmentize(EncodedImage* encoded_image,
// should be more than enough to hold any encoded data of future frames of
// the same size (avoiding possible future reallocation due to variations in
// required size).
encoded_image->_size =
CalcBufferSize(kI420, frame_buffer.width(), frame_buffer.height());
encoded_image->_size = CalcBufferSize(
VideoType::kI420, frame_buffer.width(), frame_buffer.height());
if (encoded_image->_size < required_size) {
// Encoded data > unencoded data. Allocate required bytes.
LOG(LS_WARNING) << "Encoding produced more bytes than the original image "
@ -254,8 +254,8 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* codec_settings,
&video_format);
// Initialize encoded image. Default buffer size: size of unencoded data.
encoded_image_._size =
CalcBufferSize(kI420, codec_settings->width, codec_settings->height);
encoded_image_._size = CalcBufferSize(VideoType::kI420, codec_settings->width,
codec_settings->height);
encoded_image_._buffer = new uint8_t[encoded_image_._size];
encoded_image_buffer_.reset(encoded_image_._buffer);
encoded_image_._completeFrame = true;


@ -57,9 +57,9 @@ int I420Encoder::InitEncode(const VideoCodec* codecSettings,
_encodedImage._buffer = NULL;
_encodedImage._size = 0;
}
const size_t newSize =
CalcBufferSize(kI420, codecSettings->width, codecSettings->height) +
kI420HeaderSize;
const size_t newSize = CalcBufferSize(VideoType::kI420, codecSettings->width,
codecSettings->height) +
kI420HeaderSize;
uint8_t* newBuffer = new uint8_t[newSize];
if (newBuffer == NULL) {
return WEBRTC_VIDEO_CODEC_MEMORY;
@ -96,9 +96,9 @@ int I420Encoder::Encode(const VideoFrame& inputImage,
return WEBRTC_VIDEO_CODEC_ERR_SIZE;
}
size_t req_length =
CalcBufferSize(kI420, inputImage.width(), inputImage.height()) +
kI420HeaderSize;
size_t req_length = CalcBufferSize(VideoType::kI420, inputImage.width(),
inputImage.height()) +
kI420HeaderSize;
if (_encodedImage._size > req_length) {
// Reallocate buffer.
delete[] _encodedImage._buffer;
@ -193,7 +193,8 @@ int I420Decoder::Decode(const EncodedImage& inputImage,
_height = height;
// Verify that the available length is sufficient:
size_t req_length = CalcBufferSize(kI420, _width, _height) + kI420HeaderSize;
size_t req_length =
CalcBufferSize(VideoType::kI420, _width, _height) + kI420HeaderSize;
if (req_length > inputImage._length) {
return WEBRTC_VIDEO_CODEC_ERROR;
@ -204,7 +205,7 @@ int I420Decoder::Decode(const EncodedImage& inputImage,
I420Buffer::Create(_width, _height, _width, half_width, half_width);
// Converting from raw buffer I420Buffer.
int ret = ConvertToI420(kI420, buffer, 0, 0, _width, _height, 0,
int ret = ConvertToI420(VideoType::kI420, buffer, 0, 0, _width, _height, 0,
kVideoRotation_0, frame_buffer.get());
if (ret < 0) {
return WEBRTC_VIDEO_CODEC_MEMORY;


@ -251,7 +251,8 @@ bool VideoProcessorImpl::ProcessFrame(int frame_number) {
}
if (source_frame_writer_) {
size_t length = CalcBufferSize(kI420, buffer->width(), buffer->height());
size_t length =
CalcBufferSize(VideoType::kI420, buffer->width(), buffer->height());
rtc::Buffer extracted_buffer(length);
int extracted_length =
ExtractBuffer(buffer, length, extracted_buffer.data());
@ -467,14 +468,15 @@ void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
scaled_buffer->ScaleFrom(*image.video_frame_buffer());
}
size_t length =
CalcBufferSize(kI420, scaled_buffer->width(), scaled_buffer->height());
size_t length = CalcBufferSize(VideoType::kI420, scaled_buffer->width(),
scaled_buffer->height());
extracted_buffer.SetSize(length);
extracted_length =
ExtractBuffer(scaled_buffer, length, extracted_buffer.data());
} else {
// No resize.
size_t length = CalcBufferSize(kI420, image.width(), image.height());
size_t length =
CalcBufferSize(VideoType::kI420, image.width(), image.height());
extracted_buffer.SetSize(length);
if (image.video_frame_buffer()->native_handle()) {
extracted_length =


@ -248,7 +248,7 @@ class VideoProcessorIntegrationTest : public testing::Test {
test::OutputPath(), "videoprocessor_integrationtest");
config_.frame_length_in_bytes =
CalcBufferSize(kI420, process.width, process.height);
CalcBufferSize(VideoType::kI420, process.width, process.height);
config_.verbose = process.verbose_logging;
config_.use_single_core = process.use_single_core;
// Key frame interval and packet loss are set for each test.


@ -198,7 +198,7 @@ class TestVp8Impl : public ::testing::Test {
int64_t startTime = rtc::TimeMillis();
while (rtc::TimeMillis() - startTime < kMaxWaitDecTimeMs) {
if (decode_complete_callback_->DecodeComplete()) {
return CalcBufferSize(kI420, decoded_frame_->width(),
return CalcBufferSize(VideoType::kI420, decoded_frame_->width(),
decoded_frame_->height());
}
}


@ -368,7 +368,7 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
delete[] encoded_images_[i]._buffer;
}
encoded_images_[i]._size =
CalcBufferSize(kI420, codec_.width, codec_.height);
CalcBufferSize(VideoType::kI420, codec_.width, codec_.height);
encoded_images_[i]._buffer = new uint8_t[encoded_images_[i]._size];
encoded_images_[i]._completeFrame = true;
}


@ -284,7 +284,8 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
if (encoded_image_._buffer != NULL) {
delete[] encoded_image_._buffer;
}
encoded_image_._size = CalcBufferSize(kI420, codec_.width, codec_.height);
encoded_image_._size =
CalcBufferSize(VideoType::kI420, codec_.width, codec_.height);
encoded_image_._buffer = new uint8_t[encoded_image_._size];
encoded_image_._completeFrame = true;
// Creating a wrapper to the image - setting image data to NULL. Actual


@ -129,7 +129,7 @@ class YuvFileGenerator : public FrameGenerator {
files_(files),
width_(width),
height_(height),
frame_size_(CalcBufferSize(kI420,
frame_size_(CalcBufferSize(VideoType::kI420,
static_cast<int>(width_),
static_cast<int>(height_))),
frame_buffer_(new uint8_t[frame_size_]),


@ -78,7 +78,7 @@ void GlRenderer::OnFrame(const webrtc::VideoFrame& frame) {
ResizeVideo(frame.width(), frame.height());
}
webrtc::ConvertFromI420(frame, kBGRA, 0, buffer_);
webrtc::ConvertFromI420(frame, VideoType::kBGRA, 0, buffer_);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, texture_);


@ -40,7 +40,7 @@ bool VcmCapturer::Init(size_t width, size_t height, size_t target_fps) {
capability_.width = static_cast<int32_t>(width);
capability_.height = static_cast<int32_t>(height);
capability_.maxFPS = static_cast<int32_t>(target_fps);
capability_.rawType = kVideoI420;
capability_.videoType = VideoType::kI420;
if (vcm_->StartCapture(capability_) != 0) {
Destroy();


@ -203,7 +203,8 @@ void D3dRenderer::OnFrame(const webrtc::VideoFrame& frame) {
if (texture_->LockRect(0, &lock_rect, NULL, 0) != D3D_OK)
return;
ConvertFromI420(frame, kARGB, 0, static_cast<uint8_t*>(lock_rect.pBits));
ConvertFromI420(frame, VideoType::kARGB, 0,
static_cast<uint8_t*>(lock_rect.pBits));
texture_->UnlockRect(0);
d3d_device_->BeginScene();


@ -35,7 +35,7 @@ int EditFrames(const std::string& in_path, int width, int height,
}
// Frame size of I420.
size_t frame_length = CalcBufferSize(kI420, width, height);
size_t frame_length = CalcBufferSize(VideoType::kI420, width, height);
std::unique_ptr<uint8_t[]> temp_buffer(new uint8_t[frame_length]);


@ -24,7 +24,7 @@ namespace test {
const int kWidth = 352;
const int kHeight = 288;
const size_t kFrameSize = CalcBufferSize(kI420, kWidth, kHeight);
const size_t kFrameSize = CalcBufferSize(VideoType::kI420, kWidth, kHeight);
class FrameEditingTest : public ::testing::Test {
protected: