Deprecate GetWidth() and GetHeight() methods. Replaced by width() and height().
Delete GetChromaWidth, GetChromaHeight, and GetChromaSize.
Delete unused function VideoFrameEqual.

BUG=webrtc:5682
Review URL: https://codereview.webrtc.org/1838353004
Cr-Commit-Position: refs/heads/master@{#12213}
Parent: 9266cc0668
Commit: 71a0c2f9a6
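With the chroma helpers gone, call sites derive the chroma plane geometry from width() and height() directly (see the Java wrapper and FakeVideoRenderer hunks below). A minimal C++ sketch of that arithmetic, with illustrative helper names that are not part of this change:

#include <cstddef>

// I420 chroma planes are subsampled by two in each dimension, rounding up
// for odd luma dimensions; this mirrors what the deleted GetChromaWidth()
// and GetChromaHeight() computed.
inline int ChromaWidth(int width) { return (width + 1) / 2; }
inline int ChromaHeight(int height) { return (height + 1) / 2; }
// One chroma plane's byte count, as the deleted GetChromaSize() returned:
// the U (or V) pitch times the chroma height.
inline size_t ChromaSize(int u_pitch, int height) {
  return static_cast<size_t>(u_pitch) * ChromaHeight(height);
}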
@@ -36,23 +36,22 @@
 }

 - (NSUInteger)width {
-return _videoFrame->GetWidth();
+return _videoFrame->width();
 }

 - (NSUInteger)height {
-return _videoFrame->GetHeight();
+return _videoFrame->height();
 }

+// TODO(nisse): chromaWidth and chromaHeight are used only in
+// RTCOpenGLVideoRenderer.mm. Update, and then delete these
+// properties.
 - (NSUInteger)chromaWidth {
-return _videoFrame->GetChromaWidth();
+return (self.width + 1) / 2;
 }

 - (NSUInteger)chromaHeight {
-return _videoFrame->GetChromaHeight();
-}
-
-- (NSUInteger)chromaSize {
-return _videoFrame->GetChromaSize();
+return (self.height + 1) / 2;
 }

 - (const uint8_t*)yPlane {
@@ -46,7 +46,7 @@ class RTCVideoRendererNativeAdapter

 void OnFrame(const cricket::VideoFrame& videoFrame) override {
 const cricket::VideoFrame* frame = videoFrame.GetCopyWithRotationApplied();
-CGSize currentSize = CGSizeMake(frame->GetWidth(), frame->GetHeight());
+CGSize currentSize = CGSizeMake(frame->width(), frame->height());
 if (!CGSizeEqualToSize(_size, currentSize)) {
 _size = currentSize;
 [_adapter.videoRenderer setSize:_size];
@@ -34,7 +34,6 @@
 @property(nonatomic, readonly) NSUInteger height;
 @property(nonatomic, readonly) NSUInteger chromaWidth;
 @property(nonatomic, readonly) NSUInteger chromaHeight;
-@property(nonatomic, readonly) NSUInteger chromaSize;
 // These can return NULL if the object is not backed by a buffer.
 @property(nonatomic, readonly) const uint8_t* yPlane;
 @property(nonatomic, readonly) const uint8_t* uPlane;
@@ -771,16 +771,18 @@ class JavaVideoRendererWrapper
 jobject y_buffer =
 jni()->NewDirectByteBuffer(const_cast<uint8_t*>(frame->GetYPlane()),
 frame->GetYPitch() * frame->GetHeight());
+size_t chroma_size =
+((frame->width() + 1) / 2) * ((frame->height() + 1) / 2);
 jobject u_buffer = jni()->NewDirectByteBuffer(
-const_cast<uint8_t*>(frame->GetUPlane()), frame->GetChromaSize());
+const_cast<uint8_t*>(frame->GetUPlane()), chroma_size);
 jobject v_buffer = jni()->NewDirectByteBuffer(
-const_cast<uint8_t*>(frame->GetVPlane()), frame->GetChromaSize());
+const_cast<uint8_t*>(frame->GetVPlane()), chroma_size);
 jni()->SetObjectArrayElement(planes, 0, y_buffer);
 jni()->SetObjectArrayElement(planes, 1, u_buffer);
 jni()->SetObjectArrayElement(planes, 2, v_buffer);
 return jni()->NewObject(
 *j_frame_class_, j_i420_frame_ctor_id_,
-frame->GetWidth(), frame->GetHeight(),
+frame->width(), frame->height(),
 static_cast<int>(frame->GetVideoRotation()),
 strides, planes, javaShallowCopy(frame));
 }
@@ -793,7 +795,7 @@ class JavaVideoRendererWrapper
 jni()->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
 return jni()->NewObject(
 *j_frame_class_, j_texture_frame_ctor_id_,
-frame->GetWidth(), frame->GetHeight(),
+frame->width(), frame->height(),
 static_cast<int>(frame->GetVideoRotation()),
 handle->oes_texture_id, sampling_matrix, javaShallowCopy(frame));
 }
@@ -22,7 +22,6 @@ NS_ASSUME_NONNULL_BEGIN
 @property(nonatomic, readonly) size_t height;
 @property(nonatomic, readonly) size_t chromaWidth;
 @property(nonatomic, readonly) size_t chromaHeight;
-@property(nonatomic, readonly) size_t chromaSize;
 // These can return NULL if the object is not backed by a buffer.
 @property(nonatomic, readonly, nullable) const uint8_t *yPlane;
 @property(nonatomic, readonly, nullable) const uint8_t *uPlane;
@@ -19,23 +19,22 @@
 }

 - (size_t)width {
-return _videoFrame->GetWidth();
+return _videoFrame->width();
 }

 - (size_t)height {
-return _videoFrame->GetHeight();
+return _videoFrame->height();
 }

+// TODO(nisse): chromaWidth and chromaHeight are used only in
+// RTCOpenGLVideoRenderer.mm. Update, and then delete these
+// properties.
 - (size_t)chromaWidth {
-return _videoFrame->GetChromaWidth();
+return (self.width + 1) / 2;
 }

 - (size_t)chromaHeight {
-return _videoFrame->GetChromaHeight();
-}
-
-- (size_t)chromaSize {
-return _videoFrame->GetChromaSize();
+return (self.height + 1) / 2;
 }

 - (const uint8_t *)yPlane {
@@ -26,7 +26,7 @@ class VideoRendererAdapter
 void OnFrame(const cricket::VideoFrame& nativeVideoFrame) override {
 const cricket::VideoFrame *frame =
 nativeVideoFrame.GetCopyWithRotationApplied();
-CGSize current_size = CGSizeMake(frame->GetWidth(), frame->GetHeight());
+CGSize current_size = CGSizeMake(frame->width(), frame->height());
 if (!CGSizeEqualToSize(size_, current_size)) {
 size_ = current_size;
 [adapter_.videoRenderer setSize:size_];
@@ -486,8 +486,7 @@ void GtkMainWnd::VideoRenderer::OnFrame(

 const cricket::VideoFrame* frame = video_frame.GetCopyWithRotationApplied();

-SetSize(static_cast<int>(frame->GetWidth()),
-static_cast<int>(frame->GetHeight()));
+SetSize(frame->width(), frame->height());

 int size = width_ * height_ * 4;
 // TODO(henrike): Convert directly to RGBA
@@ -607,8 +607,7 @@ void MainWnd::VideoRenderer::OnFrame(
 const cricket::VideoFrame* frame =
 video_frame.GetCopyWithRotationApplied();

-SetSize(static_cast<int>(frame->GetWidth()),
-static_cast<int>(frame->GetHeight()));
+SetSize(frame->width(), frame->height());

 ASSERT(image_.get() != NULL);
 frame->ConvertToRgbBuffer(cricket::FOURCC_ARGB,
@@ -37,8 +37,8 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<cricket::VideoFrame> {
 black_frame_ = CheckFrameColorYuv(6, 48, 128, 128, 128, 128, &frame);
 // Treat unexpected frame size as error.
 ++num_rendered_frames_;
-width_ = static_cast<int>(frame.GetWidth());
-height_ = static_cast<int>(frame.GetHeight());
+width_ = frame.width();
+height_ = frame.height();
 rotation_ = frame.GetVideoRotation();
 timestamp_ = frame.GetTimeStamp();
 SignalRenderFrame(&frame);
@@ -86,13 +86,13 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<cricket::VideoFrame> {
 return false;
 }
 // Y
-size_t y_width = frame->GetWidth();
-size_t y_height = frame->GetHeight();
+int y_width = frame->width();
+int y_height = frame->height();
 const uint8_t* y_plane = frame->GetYPlane();
 const uint8_t* y_pos = y_plane;
 int32_t y_pitch = frame->GetYPitch();
-for (size_t i = 0; i < y_height; ++i) {
-for (size_t j = 0; j < y_width; ++j) {
+for (int i = 0; i < y_height; ++i) {
+for (int j = 0; j < y_width; ++j) {
 uint8_t y_value = *(y_pos + j);
 if (y_value < y_min || y_value > y_max) {
 return false;
@@ -101,16 +101,16 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<cricket::VideoFrame> {
 y_pos += y_pitch;
 }
 // U and V
-size_t chroma_width = frame->GetChromaWidth();
-size_t chroma_height = frame->GetChromaHeight();
+int chroma_width = (frame->width() + 1)/2;
+int chroma_height = (frame->height() + 1)/2;
 const uint8_t* u_plane = frame->GetUPlane();
 const uint8_t* v_plane = frame->GetVPlane();
 const uint8_t* u_pos = u_plane;
 const uint8_t* v_pos = v_plane;
 int32_t u_pitch = frame->GetUPitch();
 int32_t v_pitch = frame->GetVPitch();
-for (size_t i = 0; i < chroma_height; ++i) {
-for (size_t j = 0; j < chroma_width; ++j) {
+for (int i = 0; i < chroma_height; ++i) {
+for (int j = 0; j < chroma_width; ++j) {
 uint8_t u_value = *(u_pos + j);
 if (u_value < u_min || u_value > u_max) {
 return false;
@@ -306,38 +306,6 @@ void DumpPlanarArgbTestImage(const std::string& prefix,
 fs.Write(img, ARGB_SIZE(w, h), NULL, NULL);
 }

-bool VideoFrameEqual(const VideoFrame* frame0, const VideoFrame* frame1) {
-const uint8_t* y0 = frame0->GetYPlane();
-const uint8_t* u0 = frame0->GetUPlane();
-const uint8_t* v0 = frame0->GetVPlane();
-const uint8_t* y1 = frame1->GetYPlane();
-const uint8_t* u1 = frame1->GetUPlane();
-const uint8_t* v1 = frame1->GetVPlane();
-
-for (size_t i = 0; i < frame0->GetHeight(); ++i) {
-if (0 != memcmp(y0, y1, frame0->GetWidth())) {
-return false;
-}
-y0 += frame0->GetYPitch();
-y1 += frame1->GetYPitch();
-}
-
-for (size_t i = 0; i < frame0->GetChromaHeight(); ++i) {
-if (0 != memcmp(u0, u1, frame0->GetChromaWidth())) {
-return false;
-}
-if (0 != memcmp(v0, v1, frame0->GetChromaWidth())) {
-return false;
-}
-u0 += frame0->GetUPitch();
-v0 += frame0->GetVPitch();
-u1 += frame1->GetUPitch();
-v1 += frame1->GetVPitch();
-}
-
-return true;
-}
-
 cricket::StreamParams CreateSimStreamParams(
 const std::string& cname,
 const std::vector<uint32_t>& ssrcs) {
@@ -203,9 +203,6 @@ void DumpPlanarArgbTestImage(const std::string& prefix,
 int w,
 int h);

-// Compare two I420 frames.
-bool VideoFrameEqual(const VideoFrame* frame0, const VideoFrame* frame1);
-
 // Checks whether |codecs| contains |codec|; checks using Codec::Matches().
 template <class C>
 bool ContainsMatchingCodec(const std::vector<C>& codecs, const C& codec) {
@@ -96,16 +96,15 @@ void VideoBroadcaster::UpdateWants() {

 const cricket::VideoFrame& VideoBroadcaster::GetBlackFrame(
 const cricket::VideoFrame& frame) {
-if (black_frame_ && black_frame_->GetWidth() == frame.GetWidth() &&
-black_frame_->GetHeight() == frame.GetHeight() &&
+if (black_frame_ && black_frame_->width() == frame.width() &&
+black_frame_->height() == frame.height() &&
 black_frame_->GetVideoRotation() == frame.GetVideoRotation()) {
 black_frame_->SetTimeStamp(frame.GetTimeStamp());
 return *black_frame_;
 }
 black_frame_.reset(new cricket::WebRtcVideoFrame(
 new rtc::RefCountedObject<webrtc::I420Buffer>(
-static_cast<int>(frame.GetWidth()),
-static_cast<int>(frame.GetHeight())),
+frame.width(), frame.height()),
 frame.GetTimeStamp(), frame.GetVideoRotation()));
 black_frame_->SetToBlack();
 return *black_frame_;
@@ -35,8 +35,8 @@ bool VideoFrame::CopyToPlanes(uint8_t* dst_y,
 LOG(LS_ERROR) << "NULL plane pointer.";
 return false;
 }
-int32_t src_width = static_cast<int>(GetWidth());
-int32_t src_height = static_cast<int>(GetHeight());
+int32_t src_width = width();
+int32_t src_height = height();
 return libyuv::I420Copy(GetYPlane(), GetYPitch(),
 GetUPlane(), GetUPitch(),
 GetVPlane(), GetVPitch(),
@@ -50,7 +50,7 @@ size_t VideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc,
 uint8_t* buffer,
 size_t size,
 int stride_rgb) const {
-const size_t needed = std::abs(stride_rgb) * GetHeight();
+const size_t needed = std::abs(stride_rgb) * static_cast<size_t>(height());
 if (size < needed) {
 LOG(LS_WARNING) << "RGB buffer is not large enough";
 return needed;
@@ -58,8 +58,7 @@ size_t VideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc,

 if (libyuv::ConvertFromI420(GetYPlane(), GetYPitch(), GetUPlane(),
 GetUPitch(), GetVPlane(), GetVPitch(), buffer,
-stride_rgb, static_cast<int>(GetWidth()),
-static_cast<int>(GetHeight()), to_fourcc)) {
+stride_rgb, width(), height(), to_fourcc)) {
 LOG(LS_ERROR) << "RGB type not supported: " << to_fourcc;
 return 0; // 0 indicates error
 }
@@ -67,14 +66,16 @@ size_t VideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc,
 }

 // TODO(fbarchard): Handle odd width/height with rounding.
+// TODO(nisse): If method is kept, switch to using int instead of
+// size_t and int32_t.
 void VideoFrame::StretchToPlanes(uint8_t* dst_y,
 uint8_t* dst_u,
 uint8_t* dst_v,
 int32_t dst_pitch_y,
 int32_t dst_pitch_u,
 int32_t dst_pitch_v,
-size_t width,
-size_t height,
+size_t dst_width,
+size_t dst_height,
 bool interpolate,
 bool vert_crop) const {
 if (!GetYPlane() || !GetUPlane() || !GetVPlane()) {
@@ -82,9 +83,9 @@ void VideoFrame::StretchToPlanes(uint8_t* dst_y,
 return;
 }

-size_t src_width = GetWidth();
-size_t src_height = GetHeight();
-if (width == src_width && height == src_height) {
+size_t src_width = width();
+size_t src_height = height();
+if (dst_width == src_width && dst_height == src_height) {
 CopyToPlanes(dst_y, dst_u, dst_v, dst_pitch_y, dst_pitch_u, dst_pitch_v);
 return;
 }
@@ -94,18 +95,18 @@ void VideoFrame::StretchToPlanes(uint8_t* dst_y,

 if (vert_crop) {
 // Adjust the input width:height ratio to be the same as the output ratio.
-if (src_width * height > src_height * width) {
+if (src_width * dst_height > src_height * dst_width) {
 // Reduce the input width, but keep size/position aligned for YuvScaler
-src_width = ROUNDTO2(src_height * width / height);
-int32_t iwidth_offset = ROUNDTO2((GetWidth() - src_width) / 2);
+src_width = ROUNDTO2(src_height * dst_width / dst_height);
+int32_t iwidth_offset = ROUNDTO2((width() - src_width) / 2);
 src_y += iwidth_offset;
 src_u += iwidth_offset / 2;
 src_v += iwidth_offset / 2;
-} else if (src_width * height < src_height * width) {
+} else if (src_width * dst_height < src_height * dst_width) {
 // Reduce the input height.
-src_height = src_width * height / width;
+src_height = src_width * dst_height / dst_width;
 int32_t iheight_offset =
-static_cast<int32_t>((GetHeight() - src_height) >> 2);
+static_cast<int32_t>((height() - src_height) >> 2);
 iheight_offset <<= 1; // Ensure that iheight_offset is even.
 src_y += iheight_offset * GetYPitch();
 src_u += iheight_offset / 2 * GetUPitch();
@@ -118,7 +119,8 @@ void VideoFrame::StretchToPlanes(uint8_t* dst_y,
 GetYPitch(), GetUPitch(), GetVPitch(),
 static_cast<int>(src_width), static_cast<int>(src_height),
 dst_y, dst_u, dst_v, dst_pitch_y, dst_pitch_u, dst_pitch_v,
-static_cast<int>(width), static_cast<int>(height), interpolate);
+static_cast<int>(dst_width), static_cast<int>(dst_height),
+interpolate);
 }

 void VideoFrame::StretchToFrame(VideoFrame* dst,
@@ -130,7 +132,7 @@ void VideoFrame::StretchToFrame(VideoFrame* dst,

 StretchToPlanes(dst->GetYPlane(), dst->GetUPlane(), dst->GetVPlane(),
 dst->GetYPitch(), dst->GetUPitch(), dst->GetVPitch(),
-dst->GetWidth(), dst->GetHeight(),
+dst->width(), dst->height(),
 interpolate, vert_crop);
 dst->SetTimeStamp(GetTimeStamp());
 // Stretched frame should have the same rotation as the source.
@@ -153,8 +155,7 @@ bool VideoFrame::SetToBlack() {
 GetUPlane(), GetUPitch(),
 GetVPlane(), GetVPitch(),
 0, 0,
-static_cast<int>(GetWidth()),
-static_cast<int>(GetHeight()),
+width(), height(),
 16, 128, 128) == 0;
 }
@@ -45,12 +45,15 @@ class VideoFrame {

 // Basic accessors.
+// Note this is the width and height without rotation applied.
-virtual size_t GetWidth() const = 0;
-virtual size_t GetHeight() const = 0;
+virtual int width() const = 0;
+virtual int height() const = 0;
+
+// Deprecated methods, for backwards compatibility.
+// TODO(nisse): Delete when usage in Chrome and other applications
+// have been replaced by width() and height().
+virtual size_t GetWidth() const final { return width(); }
+virtual size_t GetHeight() const final { return height(); }

-size_t GetChromaWidth() const { return (GetWidth() + 1) / 2; }
-size_t GetChromaHeight() const { return (GetHeight() + 1) / 2; }
-size_t GetChromaSize() const { return GetUPitch() * GetChromaHeight(); }
 // These can return NULL if the object is not backed by a buffer.
 virtual const uint8_t* GetYPlane() const = 0;
 virtual const uint8_t* GetUPlane() const = 0;
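The hunk above keeps the old getters as final forwarding shims, so existing callers keep compiling while implementers are forced onto width() and height(); marking the shims final means a subclass can no longer override the deprecated names. A small compilable sketch of the same pattern (the Frame class below is a stand-in, not the cricket::VideoFrame declaration itself):

#include <cstddef>

class Frame {
 public:
  virtual ~Frame() = default;
  // New int-based accessors are the only pure-virtual size API.
  virtual int width() const = 0;
  virtual int height() const = 0;
  // Deprecated shims forward to the new accessors and cannot be overridden.
  virtual size_t GetWidth() const final { return static_cast<size_t>(width()); }
  virtual size_t GetHeight() const final { return static_cast<size_t>(height()); }
};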
@@ -399,12 +399,12 @@ class VideoFrameTest : public testing::Test {
 }

 static bool IsSize(const cricket::VideoFrame& frame,
-uint32_t width,
-uint32_t height) {
-return !IsNull(frame) && frame.GetYPitch() >= static_cast<int32_t>(width) &&
-frame.GetUPitch() >= static_cast<int32_t>(width) / 2 &&
-frame.GetVPitch() >= static_cast<int32_t>(width) / 2 &&
-frame.GetWidth() == width && frame.GetHeight() == height;
+int width,
+int height) {
+return !IsNull(frame) && frame.GetYPitch() >= width &&
+frame.GetUPitch() >= width / 2 &&
+frame.GetVPitch() >= width / 2 &&
+frame.width() == width && frame.height() == height;
 }

 static bool IsPlaneEqual(const std::string& name,
@@ -434,8 +434,8 @@ class VideoFrameTest : public testing::Test {
 }

 static bool IsEqual(const cricket::VideoFrame& frame,
-size_t width,
-size_t height,
+int width,
+int height,
 int64_t time_stamp,
 const uint8_t* y,
 uint32_t ypitch,
@@ -444,8 +444,7 @@ class VideoFrameTest : public testing::Test {
 const uint8_t* v,
 uint32_t vpitch,
 int max_error) {
-return IsSize(frame, static_cast<uint32_t>(width),
-static_cast<uint32_t>(height)) &&
+return IsSize(frame, width, height) &&
 frame.GetTimeStamp() == time_stamp &&
 IsPlaneEqual("y", frame.GetYPlane(), frame.GetYPitch(), y, ypitch,
 static_cast<uint32_t>(width),
@@ -462,7 +461,7 @@ class VideoFrameTest : public testing::Test {
 const cricket::VideoFrame& frame2,
 int max_error) {
 return IsEqual(frame1,
-frame2.GetWidth(), frame2.GetHeight(),
+frame2.width(), frame2.height(),
 frame2.GetTimeStamp(),
 frame2.GetYPlane(), frame2.GetYPitch(),
 frame2.GetUPlane(), frame2.GetUPitch(),
@@ -473,11 +472,11 @@ class VideoFrameTest : public testing::Test {
 static bool IsEqualWithCrop(const cricket::VideoFrame& frame1,
 const cricket::VideoFrame& frame2,
 int hcrop, int vcrop, int max_error) {
-return frame1.GetWidth() <= frame2.GetWidth() &&
-frame1.GetHeight() <= frame2.GetHeight() &&
+return frame1.width() <= frame2.width() &&
+frame1.height() <= frame2.height() &&
 IsEqual(frame1,
-frame2.GetWidth() - hcrop * 2,
-frame2.GetHeight() - vcrop * 2,
+frame2.width() - hcrop * 2,
+frame2.height() - vcrop * 2,
 frame2.GetTimeStamp(),
 frame2.GetYPlane() + vcrop * frame2.GetYPitch()
 + hcrop,
@@ -794,8 +793,8 @@ class VideoFrameTest : public testing::Test {
 kHeight, \
 reinterpret_cast<uint8_t*>(ms->GetBuffer()), \
 data_size, 0, webrtc::kVideoRotation_0)); \
-int width_rotate = static_cast<int>(frame1.GetWidth()); \
-int height_rotate = static_cast<int>(frame1.GetHeight()); \
+int width_rotate = frame1.width(); \
+int height_rotate = frame1.height(); \
 EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
 libyuv::I420Mirror( \
 frame2.GetYPlane(), frame2.GetYPitch(), frame2.GetUPlane(), \
@@ -825,8 +824,8 @@ class VideoFrameTest : public testing::Test {
 kHeight, \
 reinterpret_cast<uint8_t*>(ms->GetBuffer()), \
 data_size, 0, webrtc::kVideoRotation_0)); \
-int width_rotate = static_cast<int>(frame1.GetWidth()); \
-int height_rotate = static_cast<int>(frame1.GetHeight()); \
+int width_rotate = frame1.width(); \
+int height_rotate = frame1.height(); \
 EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
 libyuv::I420Rotate( \
 frame2.GetYPlane(), frame2.GetYPitch(), frame2.GetUPlane(), \
@@ -951,8 +950,8 @@ class VideoFrameTest : public testing::Test {
 sizeof(pixels5x5), 0,
 webrtc::kVideoRotation_0));
 }
-EXPECT_EQ(5u, frame.GetWidth());
-EXPECT_EQ(5u, frame.GetHeight());
+EXPECT_EQ(5, frame.width());
+EXPECT_EQ(5, frame.height());
 EXPECT_EQ(5, frame.GetYPitch());
 EXPECT_EQ(3, frame.GetUPitch());
 EXPECT_EQ(3, frame.GetVPitch());
@@ -1396,11 +1395,11 @@ class VideoFrameTest : public testing::Test {
 // Swapp width and height if the frame is rotated 90 or 270 degrees.
 if (apply_rotation && (rotation == webrtc::kVideoRotation_90
 || rotation == webrtc::kVideoRotation_270)) {
-EXPECT_TRUE(kHeight == frame1.GetWidth());
-EXPECT_TRUE(kWidth == frame1.GetHeight());
+EXPECT_TRUE(kHeight == frame1.width());
+EXPECT_TRUE(kWidth == frame1.height());
 } else {
-EXPECT_TRUE(kWidth == frame1.GetWidth());
-EXPECT_TRUE(kHeight == frame1.GetHeight());
+EXPECT_TRUE(kWidth == frame1.width());
+EXPECT_TRUE(kHeight == frame1.height());
 }
 EXPECT_FALSE(IsBlack(frame1));
 EXPECT_FALSE(IsEqual(frame1, frame2, 0));
@@ -42,8 +42,8 @@ VideoFrame* VideoFrameFactory::CreateAliasedFrame(
 // Create and stretch the output frame if it has not been created yet, is
 // still in use by others, or its size is not same as the expected.
 if (!output_frame_ || !output_frame_->IsExclusive() ||
-output_frame_->GetWidth() != static_cast<size_t>(output_width) ||
-output_frame_->GetHeight() != static_cast<size_t>(output_height)) {
+output_frame_->width() != output_width ||
+output_frame_->height() != output_height) {
 output_frame_.reset(
 cropped_input_frame->Stretch(output_width, output_height, true, true));
 if (!output_frame_) {
@@ -108,7 +108,7 @@ void CarbonVideoRenderer::OnFrame(const VideoFrame& video_frame) {
 {
 const VideoFrame* frame = video_frame->GetCopyWithRotationApplied();

-if (!SetSize(frame->GetWidth(), frame->GetHeight(), 0)) {
+if (!SetSize(frame->width(), frame->height())) {
 return false;
 }

@@ -116,8 +116,9 @@ void CarbonVideoRenderer::OnFrame(const VideoFrame& video_frame) {
 rtc::CritScope cs(&image_crit_);
 frame->ConvertToRgbBuffer(cricket::FOURCC_ABGR,
 image_.get(),
-frame->GetWidth() * frame->GetHeight() * 4,
-frame->GetWidth() * 4);
+static_cast<size_t>(frame->width()) *
+frame->height() * 4,
+frame->width() * 4);
 }

 // Trigger a repaint event for the whole window.
@@ -137,8 +137,7 @@ void GdiVideoRenderer::VideoWindow::OnFrame(const VideoFrame& video_frame) {

 const VideoFrame* frame = video_frame.GetCopyWithRotationApplied();

-if (SetSize(static_cast<int>(frame->GetWidth()),
-static_cast<int>(frame->GetHeight()))) {
+if (SetSize(frame->width(), frame->height())) {
 SendMessage(handle(), kRenderFrameMsg, reinterpret_cast<WPARAM>(frame), 0);
 }
 }
@@ -244,9 +243,8 @@ GdiVideoRenderer::~GdiVideoRenderer() {}

 void GdiVideoRenderer::OnFrame(const VideoFrame& frame) {
 if (!window_.get()) { // Create the window for the first frame
-window_.reset(new VideoWindow(initial_x_, initial_y_,
-static_cast<int>(frame.GetWidth()),
-static_cast<int>(frame.GetHeight())));
+window_.reset(
+new VideoWindow(initial_x_, initial_y_, frame.width(), frame.height()));
 }
 window_->OnFrame(frame);
 }
@@ -84,15 +84,16 @@ void GtkVideoRenderer::OnFrame(const VideoFrame& video_frame) {
 const VideoFrame* frame = video_frame.GetCopyWithRotationApplied();

 // Need to set size as the frame might be rotated.
-if (!SetSize(frame->GetWidth(), frame->GetHeight())) {
+if (!SetSize(frame->width(), frame->height())) {
 return;
 }

 // convert I420 frame to ABGR format, which is accepted by GTK
 frame->ConvertToRgbBuffer(cricket::FOURCC_ABGR,
 image_.get(),
-frame->GetWidth() * frame->GetHeight() * 4,
-frame->GetWidth() * 4);
+static_cast<size_t>(frame->width()) *
+frame->height() * 4,
+frame->width() * 4);

 ScopedGdkLock lock;

@@ -105,11 +106,11 @@ void GtkVideoRenderer::OnFrame(const VideoFrame& video_frame) {
 draw_area_->style->fg_gc[GTK_STATE_NORMAL],
 0,
 0,
-frame->GetWidth(),
-frame->GetHeight(),
+frame->width(),
+frame->height(),
 GDK_RGB_DITHER_MAX,
 image_.get(),
-frame->GetWidth() * 4);
+frame->width() * 4);

 // Run the Gtk main loop to refresh the window.
 Pump();
@@ -1590,8 +1590,8 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame(
 if (muted_) {
 // Create a black frame to transmit instead.
 CreateBlackFrame(&video_frame,
-static_cast<int>(frame.GetWidth()),
-static_cast<int>(frame.GetHeight()),
+frame.width(),
+frame.height(),
 video_frame.rotation());
 }
@@ -65,11 +65,11 @@ bool WebRtcVideoFrame::InitToBlack(int w, int h,
 return SetToBlack();
 }

-size_t WebRtcVideoFrame::GetWidth() const {
+int WebRtcVideoFrame::width() const {
 return video_frame_buffer_ ? video_frame_buffer_->width() : 0;
 }

-size_t WebRtcVideoFrame::GetHeight() const {
+int WebRtcVideoFrame::height() const {
 return video_frame_buffer_ ? video_frame_buffer_->height() : 0;
 }
@@ -226,15 +226,15 @@ const VideoFrame* WebRtcVideoFrame::GetCopyWithRotationApplied() const {
 return rotated_frame_.get();
 }

-int width = static_cast<int>(GetWidth());
-int height = static_cast<int>(GetHeight());
+int orig_width = width();
+int orig_height = height();

-int rotated_width = width;
-int rotated_height = height;
+int rotated_width = orig_width;
+int rotated_height = orig_height;
 if (GetVideoRotation() == webrtc::kVideoRotation_90 ||
 GetVideoRotation() == webrtc::kVideoRotation_270) {
-rotated_width = height;
-rotated_height = width;
+rotated_width = orig_height;
+rotated_height = orig_width;
 }

 rotated_frame_.reset(CreateEmptyFrame(rotated_width, rotated_height,
@@ -246,7 +246,8 @@ const VideoFrame* WebRtcVideoFrame::GetCopyWithRotationApplied() const {
 GetYPlane(), GetYPitch(), GetUPlane(), GetUPitch(), GetVPlane(),
 GetVPitch(), rotated_frame_->GetYPlane(), rotated_frame_->GetYPitch(),
 rotated_frame_->GetUPlane(), rotated_frame_->GetUPitch(),
-rotated_frame_->GetVPlane(), rotated_frame_->GetVPitch(), width, height,
+rotated_frame_->GetVPlane(), rotated_frame_->GetVPitch(),
+orig_width, orig_height,
 static_cast<libyuv::RotationMode>(GetVideoRotation()));
 if (ret == 0) {
 return rotated_frame_.get();
@@ -65,8 +65,9 @@ class WebRtcVideoFrame : public VideoFrame {
 webrtc::VideoRotation rotation,
 bool apply_rotation) override;

-size_t GetWidth() const override;
-size_t GetHeight() const override;
+int width() const override;
+int height() const override;

 const uint8_t* GetYPlane() const override;
 const uint8_t* GetUPlane() const override;
 const uint8_t* GetVPlane() const override;
@@ -75,11 +75,11 @@ class WebRtcVideoFrameTest : public VideoFrameTest<cricket::WebRtcVideoFrame> {
 // height are flipped.
 if (apply_rotation && (frame_rotation == webrtc::kVideoRotation_90
 || frame_rotation == webrtc::kVideoRotation_270)) {
-EXPECT_EQ(static_cast<size_t>(cropped_width), frame.GetHeight());
-EXPECT_EQ(static_cast<size_t>(cropped_height), frame.GetWidth());
+EXPECT_EQ(cropped_width, frame.height());
+EXPECT_EQ(cropped_height, frame.width());
 } else {
-EXPECT_EQ(static_cast<size_t>(cropped_width), frame.GetWidth());
-EXPECT_EQ(static_cast<size_t>(cropped_height), frame.GetHeight());
+EXPECT_EQ(cropped_width, frame.width());
+EXPECT_EQ(cropped_height, frame.height());
 }
 }
 };
@@ -275,8 +275,8 @@ TEST_F(WebRtcVideoFrameTest, TextureInitialValues) {
 dummy_handle, 640, 480);
 cricket::WebRtcVideoFrame frame(buffer, 200, webrtc::kVideoRotation_0);
 EXPECT_EQ(dummy_handle, frame.GetNativeHandle());
-EXPECT_EQ(640u, frame.GetWidth());
-EXPECT_EQ(480u, frame.GetHeight());
+EXPECT_EQ(640, frame.width());
+EXPECT_EQ(480, frame.height());
 EXPECT_EQ(200, frame.GetTimeStamp());
 frame.SetTimeStamp(400);
 EXPECT_EQ(400, frame.GetTimeStamp());
@@ -291,8 +291,8 @@ TEST_F(WebRtcVideoFrameTest, CopyTextureFrame) {
 cricket::WebRtcVideoFrame frame1(buffer, 200, webrtc::kVideoRotation_0);
 cricket::VideoFrame* frame2 = frame1.Copy();
 EXPECT_EQ(frame1.GetNativeHandle(), frame2->GetNativeHandle());
-EXPECT_EQ(frame1.GetWidth(), frame2->GetWidth());
-EXPECT_EQ(frame1.GetHeight(), frame2->GetHeight());
+EXPECT_EQ(frame1.width(), frame2->width());
+EXPECT_EQ(frame1.height(), frame2->height());
 EXPECT_EQ(frame1.GetTimeStamp(), frame2->GetTimeStamp());
 delete frame2;
 }
@@ -49,17 +49,17 @@ class WebRtcVideoFrameFactoryTest
 bool apply_rotation) {
 if (!apply_rotation) {
 EXPECT_EQ(dest_frame->GetVideoRotation(), src_rotation);
-EXPECT_EQ(dest_frame->GetWidth(), src_width);
-EXPECT_EQ(dest_frame->GetHeight(), src_height);
+EXPECT_EQ(dest_frame->width(), src_width);
+EXPECT_EQ(dest_frame->height(), src_height);
 } else {
 EXPECT_EQ(dest_frame->GetVideoRotation(), webrtc::kVideoRotation_0);
 if (src_rotation == webrtc::kVideoRotation_90 ||
 src_rotation == webrtc::kVideoRotation_270) {
-EXPECT_EQ(dest_frame->GetWidth(), src_height);
-EXPECT_EQ(dest_frame->GetHeight(), src_width);
+EXPECT_EQ(dest_frame->width(), src_height);
+EXPECT_EQ(dest_frame->height(), src_width);
 } else {
-EXPECT_EQ(dest_frame->GetWidth(), src_width);
-EXPECT_EQ(dest_frame->GetHeight(), src_height);
+EXPECT_EQ(dest_frame->width(), src_width);
+EXPECT_EQ(dest_frame->height(), src_height);
 }
 }
 }