Delete RGB to I420 and YUV to IVF converters

Use external tools like ffmpeg instead.

Bug: webrtc:10138
Change-Id: Ia73b2724aefb0688d002e7d6d8bb04df2d284a25
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/285781
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#38791}
This commit is contained in:
Sergey Silkin 2022-12-01 16:30:38 +01:00 committed by WebRTC LUCI CQ
parent cb885923d8
commit b1bdadcabd
5 changed files with 1 addition and 691 deletions

View File

@ -19,7 +19,6 @@ group("rtc_tools") {
deps += [
":frame_analyzer",
":psnr_ssim_analyzer",
":rgba_to_i420_converter",
":video_quality_analysis",
]
}
@ -27,10 +26,7 @@ group("rtc_tools") {
deps += [ ":chart_proto" ]
}
if (!build_with_chromium && rtc_include_tests) {
deps += [
":tools_unittests",
":yuv_to_ivf_converter",
]
deps += [ ":tools_unittests" ]
}
if (rtc_include_tests && rtc_enable_protobuf) {
deps += [
@ -340,24 +336,6 @@ if (!build_with_chromium) {
]
}
# Test-only command-line tool that converts a directory of raw RGBA frame
# files into a single raw I420 (.yuv) video file.
rtc_executable("rgba_to_i420_converter") {
  visibility = [ "*" ]
  testonly = true
  sources = [
    "converter/converter.cc",
    "converter/converter.h",
    "converter/rgba_to_i420_converter.cc",
  ]
  deps = [
    "../common_video",
    "//third_party/abseil-cpp/absl/flags:flag",
    "//third_party/abseil-cpp/absl/flags:parse",
    "//third_party/abseil-cpp/absl/flags:usage",
    "//third_party/libyuv",
  ]
}
if (rtc_enable_protobuf) {
proto_library("chart_proto") {
visibility = [ "*" ]
@ -439,41 +417,6 @@ if (!build_with_chromium) {
if (rtc_include_tests) {
if (!build_with_chromium) {
# Test-only command-line tool that encodes a raw YUV (I420) file into an IVF
# bitstream using the bundled VP8/VP9 (and optionally H264) encoders.
rtc_executable("yuv_to_ivf_converter") {
  visibility = [ "*" ]
  testonly = true
  sources = [ "converter/yuv_to_ivf_converter.cc" ]
  deps = [
    "../api:create_frame_generator",
    "../api:frame_generator_api",
    "../api/task_queue:default_task_queue_factory",
    "../api/video:encoded_image",
    "../api/video:video_frame",
    "../api/video_codecs:video_codecs_api",
    "../media:rtc_media_base",
    "../modules/rtp_rtcp:rtp_rtcp_format",
    "../modules/video_coding:video_codec_interface",
    "../modules/video_coding:video_coding_utility",
    "../modules/video_coding:webrtc_h264",
    "../modules/video_coding:webrtc_vp8",
    "../modules/video_coding:webrtc_vp9",
    "../rtc_base:checks",
    "../rtc_base:criticalsection",
    "../rtc_base:logging",
    "../rtc_base:rtc_event",
    "../rtc_base:rtc_task_queue",
    "../rtc_base/synchronization:mutex",
    "../rtc_base/system:file_wrapper",
    "../test:video_test_common",
    "../test:video_test_support",
    "//third_party/abseil-cpp/absl/debugging:failure_signal_handler",
    "//third_party/abseil-cpp/absl/debugging:symbolize",
    "//third_party/abseil-cpp/absl/flags:flag",
    "//third_party/abseil-cpp/absl/flags:parse",
    "//third_party/abseil-cpp/absl/strings",
  ]
}
if (rtc_enable_protobuf) {
rtc_executable("event_log_visualizer") {
# TODO(bugs.webrtc.org/14248): Remove once usage of std::tmpnam

View File

@ -1,175 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "rtc_tools/converter/converter.h"

#include <stdio.h>
#include <sys/stat.h>

#include <iomanip>
#include <sstream>
#include <vector>

#include "third_party/libyuv/include/libyuv/compare.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#ifdef WIN32
#define SEPARATOR '\\'
#define STAT _stat
#else
#define SEPARATOR '/'
#define STAT stat
#endif
namespace webrtc {
namespace test {
// `width`/`height` describe both the input RGBA frames and the output video.
Converter::Converter(int width, int height) : width_(width), height_(height) {}
// Converts the frame_NNNN files found in `frames_dir` into one raw I420
// video written to `output_file_name`. Returns true only if at least one
// frame was found and every frame was read, converted and written.
bool Converter::ConvertRGBAToI420Video(std::string frames_dir,
                                       std::string output_file_name,
                                       bool delete_frames) {
  // "wb" truncates/creates the output file for binary writing.
  FILE* output_file = fopen(output_file_name.c_str(), "wb");
  if (output_file == NULL) {
    // Fixed: the original message said "input file for reading" here even
    // though this is the output file being opened for writing.
    fprintf(stderr, "Couldn't open output file for writing: %s\n",
            output_file_name.c_str());
    return false;
  }

  // RAII buffers replace the original raw new[]/delete[] pairs; they are
  // released automatically on every return path.
  const int input_frame_size = InputFrameSize();
  std::vector<uint8_t> rgba_buffer(input_frame_size);
  const int y_plane_size = YPlaneSize();
  std::vector<uint8_t> dst_y(y_plane_size);
  const int u_plane_size = UPlaneSize();
  std::vector<uint8_t> dst_u(u_plane_size);
  const int v_plane_size = VPlaneSize();
  std::vector<uint8_t> dst_v(v_plane_size);

  int counter = 0;       // Counter to form frame names.
  bool success = false;  // Is conversion successful.

  while (true) {
    // Input frames are named frame_0000, frame_0001, ...
    std::string file_name = FormFrameName(4, counter);
    // Get full path file name.
    std::string input_file_name = FindFullFileName(frames_dir, file_name);
    if (FileExists(input_file_name)) {
      ++counter;  // Update counter for the next round.
    } else {
      fprintf(stdout, "Reached end of frames list\n");
      break;
    }

    // Read the RGBA frame into rgba_buffer. The original ignored read
    // failures and converted stale buffer contents; abort instead.
    if (!ReadRGBAFrame(input_file_name.c_str(), input_frame_size,
                       rgba_buffer.data())) {
      success = false;
      break;
    }

    // Delete the input frame if requested; failure to delete is non-fatal.
    if (delete_frames) {
      if (remove(input_file_name.c_str()) != 0) {
        fprintf(stderr, "Cannot delete file %s\n", input_file_name.c_str());
      }
    }

    // Convert to I420 frame. NOTE(review): despite the RGBA naming, this has
    // always called libyuv::ABGRToI420 -- presumably deliberate byte-order
    // handling; confirm against the frame producer before changing.
    libyuv::ABGRToI420(rgba_buffer.data(), SrcStrideFrame(), dst_y.data(),
                       DstStrideY(), dst_u.data(), DstStrideU(), dst_v.data(),
                       DstStrideV(), width_, height_);

    // Append the I420 frame (Y, U, V planes) to the YUV video file.
    success = AddYUVToFile(dst_y.data(), y_plane_size, dst_u.data(),
                           u_plane_size, dst_v.data(), v_plane_size,
                           output_file);
    if (!success) {
      fprintf(stderr, "LibYUV error during RGBA to I420 frame conversion\n");
      break;
    }
  }

  fclose(output_file);
  return success;
}
// Appends one raw I420 frame to `output_file` by writing the Y, U and V
// planes in that order. Returns false as soon as any plane fails to write.
bool Converter::AddYUVToFile(uint8_t* y_plane,
                             int y_plane_size,
                             uint8_t* u_plane,
                             int u_plane_size,
                             uint8_t* v_plane,
                             int v_plane_size,
                             FILE* output_file) {
  // && short-circuits, so a failed write skips the remaining planes.
  return AddYUVPlaneToFile(y_plane, y_plane_size, output_file) &&
         AddYUVPlaneToFile(u_plane, u_plane_size, output_file) &&
         AddYUVPlaneToFile(v_plane, v_plane_size, output_file);
}
// Writes one plane to `file` and verifies the full plane made it to disk.
bool Converter::AddYUVPlaneToFile(uint8_t* yuv_plane,
                                  int yuv_plane_size,
                                  FILE* file) {
  const size_t expected = static_cast<size_t>(yuv_plane_size);
  const size_t written = fwrite(yuv_plane, 1, yuv_plane_size, file);
  if (written == expected) {
    return true;
  }
  fprintf(stderr,
          "Number of bytes written (%d) doesn't match size of y plane"
          " (%d)\n",
          static_cast<int>(written), yuv_plane_size);
  return false;
}
// Reads exactly `input_frame_size` bytes from `input_file_name` into
// `buffer`. Returns false if the file cannot be opened or is short.
bool Converter::ReadRGBAFrame(const char* input_file_name,
                              int input_frame_size,
                              unsigned char* buffer) {
  FILE* input_file = fopen(input_file_name, "rb");
  if (input_file == NULL) {
    fprintf(stderr, "Couldn't open input file for reading: %s\n",
            input_file_name);
    return false;
  }
  const size_t num_read = fread(buffer, 1, input_frame_size, input_file);
  // The file is no longer needed regardless of whether the read succeeded.
  fclose(input_file);
  if (num_read != static_cast<size_t>(input_frame_size)) {
    fprintf(stderr, "Error reading from input file: %s\n", input_file_name);
    return false;
  }
  return true;
}
// Joins a directory and a file name with the platform path separator.
std::string Converter::FindFullFileName(std::string dir_name,
                                        std::string file_name) {
  std::string full_path = dir_name;
  full_path += SEPARATOR;
  full_path += file_name;
  return full_path;
}
// Returns true if `file_name_to_check` exists and can be stat'ed.
bool Converter::FileExists(std::string file_name_to_check) {
  // stat() returns 0 when the path exists and is accessible.
  struct STAT file_info;
  return STAT(file_name_to_check.c_str(), &file_info) == 0;
}
// Builds the name of frame number `number`, zero-padded to `width` digits,
// e.g. FormFrameName(4, 7) -> "frame_0007".
std::string Converter::FormFrameName(int width, int number) {
  std::stringstream padded;
  padded << std::setw(width) << std::setfill('0') << number;
  return "frame_" + padded.str();
}
} // namespace test
} // namespace webrtc

View File

@ -1,94 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef RTC_TOOLS_CONVERTER_CONVERTER_H_
#define RTC_TOOLS_CONVERTER_CONVERTER_H_
#include <stdio.h>
#include <string>
namespace webrtc {
namespace test {
// Handles a conversion between a set of RGBA frames to a YUV (I420) video.
class Converter {
 public:
  // `width`/`height` are the dimensions of every input RGBA frame and of the
  // output video.
  Converter(int width, int height);
  // Converts RGBA to YUV video. If the delete_frames argument is true, the
  // method will delete the input frames after conversion. Returns true on
  // success.
  bool ConvertRGBAToI420Video(std::string frames_dir,
                              std::string output_file_name,
                              bool delete_frames);

 private:
  int width_;   // Width of the video (respectively of the RGBA frames).
  int height_;  // Height of the video (respectively of the RGBA frames).
  // Returns the size of the Y plane in bytes.
  int YPlaneSize() const { return width_ * height_; }
  // Returns the size of the U plane in bytes.
  // NOTE(review): width is rounded up for odd sizes but height is not --
  // presumably inputs always have even height; confirm before reusing.
  int UPlaneSize() const { return ((width_ + 1) / 2) * ((height_) / 2); }
  // Returns the size of the V plane in bytes (same rounding as UPlaneSize).
  int VPlaneSize() const { return ((width_ + 1) / 2) * ((height_) / 2); }
  // Returns the stride (bytes per row) of the RGBA frame (4 bytes/pixel).
  int SrcStrideFrame() const { return width_ * 4; }
  // Returns the stride (bytes per row) of the destination Y plane.
  int DstStrideY() const { return width_; }
  // Returns the stride (bytes per row) of the destination U plane.
  int DstStrideU() const { return (width_ + 1) / 2; }
  // Returns the stride (bytes per row) of the destination V plane.
  int DstStrideV() const { return (width_ + 1) / 2; }
  // Returns the size in bytes of one input RGBA frame (4 bytes/pixel).
  int InputFrameSize() const { return width_ * height_ * 4; }
  // Writes the Y, U and V (in this order) planes to the file, thus adding a
  // raw YUV frame to the file.
  bool AddYUVToFile(uint8_t* y_plane,
                    int y_plane_size,
                    uint8_t* u_plane,
                    int u_plane_size,
                    uint8_t* v_plane,
                    int v_plane_size,
                    FILE* output_file);
  // Adds the Y, U or V plane to the file.
  bool AddYUVPlaneToFile(uint8_t* yuv_plane, int yuv_plane_size, FILE* file);
  // Reads a RGBA frame from input_file_name with input_frame_size size in
  // bytes into the buffer.
  bool ReadRGBAFrame(const char* input_file_name,
                     int input_frame_size,
                     unsigned char* buffer);
  // Finds the full path name of the file - concatenates the directory and
  // file names.
  std::string FindFullFileName(std::string dir_name, std::string file_name);
  // Checks if a file exists.
  bool FileExists(std::string file_name_to_check);
  // Returns the name of the file in the form frame_<number>, where <number>
  // is zero-padded to `width` digits (i.e. frame_0000, frame_0001, etc. for
  // width 4).
  std::string FormFrameName(int width, int number);
};
} // namespace test
} // namespace webrtc
#endif // RTC_TOOLS_CONVERTER_CONVERTER_H_

View File

@ -1,79 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdio.h>
#include <stdlib.h>
#include <string>
#include "absl/flags/flag.h"
#include "absl/flags/parse.h"
#include "absl/flags/usage.h"
#include "rtc_tools/converter/converter.h"
// Command-line flags. Width/height default to -1 so main() can detect when
// the caller did not provide them; the remaining flags have usable defaults.
ABSL_FLAG(int, width, -1, "Width in pixels of the frames in the input file");
ABSL_FLAG(int, height, -1, "Height in pixels of the frames in the input file");
ABSL_FLAG(std::string,
          frames_dir,
          ".",
          "The path to the directory where the frames reside");
ABSL_FLAG(std::string,
          output_file,
          "output.yuv",
          " The output file to which frames are written");
ABSL_FLAG(bool,
          delete_frames,
          false,
          " Whether or not to delete the input frames after the conversion");
/*
* A command-line tool based on libyuv to convert a set of RGBA files to a YUV
* video.
* Usage:
* rgba_to_i420_converter --frames_dir=<directory_to_rgba_frames>
* --output_file=<output_yuv_file> --width=<width_of_input_frames>
* --height=<height_of_input_frames>
*/
int main(int argc, char* argv[]) {
absl::SetProgramUsageMessage(
"Converts RGBA raw image files to I420 frames "
"for YUV.\n"
"Example usage:\n"
"./rgba_to_i420_converter --frames_dir=. "
"--output_file=output.yuv --width=320 "
"--height=240\n"
"IMPORTANT: If you pass the --delete_frames "
"command line parameter, the tool will delete "
"the input frames after conversion.\n");
absl::ParseCommandLine(argc, argv);
int width = absl::GetFlag(FLAGS_width);
int height = absl::GetFlag(FLAGS_height);
if (width <= 0 || height <= 0) {
fprintf(stderr, "Error: width or height cannot be <= 0!\n");
return -1;
}
bool del_frames = absl::GetFlag(FLAGS_delete_frames);
webrtc::test::Converter converter(width, height);
bool success = converter.ConvertRGBAToI420Video(
absl::GetFlag(FLAGS_frames_dir), absl::GetFlag(FLAGS_output_file),
del_frames);
if (success) {
fprintf(stdout, "Successful conversion of RGBA frames to YUV video!\n");
return 0;
} else {
fprintf(stdout, "Unsuccessful conversion of RGBA frames to YUV video!\n");
return -1;
}
}

View File

@ -1,285 +0,0 @@
/*
* Copyright 2019 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory>
#include <string>
#include "absl/debugging/failure_signal_handler.h"
#include "absl/debugging/symbolize.h"
#include "absl/flags/flag.h"
#include "absl/flags/parse.h"
#include "absl/strings/match.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "api/test/create_frame_generator.h"
#include "api/test/frame_generator_interface.h"
#include "api/video/encoded_image.h"
#include "api/video/video_codec_type.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
#include "media/base/media_constants.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "modules/video_coding/utility/ivf_file_writer.h"
#include "rtc_base/checks.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/system/file_wrapper.h"
#include "rtc_base/task_queue.h"
#include "test/testsupport/frame_reader.h"
#include "test/video_codec_settings.h"
#if defined(WEBRTC_USE_H264)
#include "modules/video_coding/codecs/h264/include/h264.h"
#endif
// Command-line flags. --input/--width/--height describe the raw input video;
// --codec selects VP8 (default), VP9 or H264; --output names the IVF file.
ABSL_FLAG(std::string, input, "", "Input YUV file to convert to IVF");
ABSL_FLAG(int, width, 0, "Input frame width");
ABSL_FLAG(int, height, 0, "Input frame height");
ABSL_FLAG(std::string, codec, cricket::kVp8CodecName, "Codec to use");
ABSL_FLAG(std::string, output, "", "Output IVF file");
namespace webrtc {
namespace test {
namespace {
// Encoding frame rate; also used to derive the per-frame RTP timestamp step.
constexpr int kMaxFramerate = 30;
// We use very big value here to ensure that codec won't hit any limits.
constexpr uint32_t kBitrateBps = 100000000;
// Key frame interval passed to the codec-specific settings, in milliseconds.
constexpr int kKeyFrameIntervalMs = 30000;
// Upper bound on how long WriteVideoFile() waits for a single frame to be
// encoded and written before CHECK-failing.
constexpr TimeDelta kMaxFrameEncodeWaitTimeout = TimeDelta::Seconds(2);
// Progress is logged every `kFrameLogInterval` frames.
constexpr int kFrameLogInterval = 100;
// Encoder capabilities, constructed with `false` -- presumably disabling the
// optional capability flag. NOTE(review): confirm against the
// VideoEncoder::Capabilities constructor.
static const VideoEncoder::Capabilities kCapabilities(false);
// EncodedImageCallback that writes every encoded frame it receives to an IVF
// file, and signals an event after each write so the producer can pace
// encoding one frame at a time via WaitNextFrameWritten().
class IvfFileWriterEncodedCallback : public EncodedImageCallback {
 public:
  // `expected_frames_count` is used for progress logging and as an upper
  // bound: receiving more frames than expected is a CHECK failure.
  IvfFileWriterEncodedCallback(const std::string& file_name,
                               VideoCodecType video_codec_type,
                               int expected_frames_count)
      : file_writer_(
            IvfFileWriter::Wrap(FileWrapper::OpenWriteOnly(file_name), 0)),
        video_codec_type_(video_codec_type),
        expected_frames_count_(expected_frames_count) {
    RTC_CHECK(file_writer_.get());
  }
  // Closing the writer finalizes the IVF file; failure to close is fatal.
  ~IvfFileWriterEncodedCallback() { RTC_CHECK(file_writer_->Close()); }
  // Invoked by the encoder for each encoded image: writes the frame, bumps
  // the counter under `lock_`, and wakes any waiter.
  Result OnEncodedImage(const EncodedImage& encoded_image,
                        const CodecSpecificInfo* codec_specific_info) override {
    RTC_CHECK(file_writer_->WriteFrame(encoded_image, video_codec_type_));
    MutexLock lock(&lock_);
    received_frames_count_++;
    RTC_CHECK_LE(received_frames_count_, expected_frames_count_);
    if (received_frames_count_ % kFrameLogInterval == 0) {
      RTC_LOG(LS_INFO) << received_frames_count_ << " out of "
                       << expected_frames_count_ << " frames written";
    }
    next_frame_written_.Set();
    return Result(Result::Error::OK);
  }
  // Blocks until the next frame has been written; CHECK-fails on timeout.
  // Resets the event so the next call waits for the following frame.
  void WaitNextFrameWritten(TimeDelta timeout) {
    RTC_CHECK(next_frame_written_.Wait(timeout));
    next_frame_written_.Reset();
  }

 private:
  std::unique_ptr<IvfFileWriter> file_writer_;
  const VideoCodecType video_codec_type_;
  const int expected_frames_count_;
  Mutex lock_;
  // Frames written so far; guarded because writes happen on the encoder's
  // callback thread while main() observes progress indirectly.
  int received_frames_count_ RTC_GUARDED_BY(lock_) = 0;
  rtc::Event next_frame_written_;
};
// Owns a VideoEncoder and drives it from a dedicated task queue: encoder
// initialization and every Encode() call are posted to `task_queue_`, so all
// interaction with the encoder happens on a single sequence.
class Encoder {
 public:
  Encoder(int width,
          int height,
          int frames_count,
          const std::string& output_file_name,
          VideoCodecType video_codec_type,
          std::unique_ptr<VideoEncoder> video_encoder)
      : video_encoder_(std::move(video_encoder)),
        task_queue_(CreateDefaultTaskQueueFactory()->CreateTaskQueue(
            "Encoder",
            TaskQueueFactory::Priority::HIGH)) {
    ivf_writer_callback_ = std::make_unique<IvfFileWriterEncodedCallback>(
        output_file_name, video_codec_type, frames_count);
    // Configure and initialize the encoder asynchronously on the task queue;
    // this task is posted before any Encode() task, so it runs first.
    task_queue_.PostTask([width, height, video_codec_type, this]() {
      VideoCodec codec_settings;
      CodecSettings(video_codec_type, &codec_settings);
      codec_settings.width = width;
      codec_settings.height = height;
      codec_settings.maxFramerate = kMaxFramerate;
      // Same very large value for start/min/max bitrate so rate control
      // never constrains quality; frame dropping is disabled for the same
      // reason.
      codec_settings.startBitrate = kBitrateBps;
      codec_settings.minBitrate = kBitrateBps;
      codec_settings.maxBitrate = kBitrateBps;
      codec_settings.SetFrameDropEnabled(false);
      // Codec-specific tweaks: denoising off (deterministic output) and a
      // long key frame interval.
      switch (video_codec_type) {
        case VideoCodecType::kVideoCodecVP8: {
          VideoCodecVP8* vp8_settings = codec_settings.VP8();
          vp8_settings->keyFrameInterval = kKeyFrameIntervalMs;
          vp8_settings->denoisingOn = false;
        } break;
        case VideoCodecType::kVideoCodecVP9: {
          VideoCodecVP9* vp9_settings = codec_settings.VP9();
          vp9_settings->denoisingOn = false;
          vp9_settings->keyFrameInterval = kKeyFrameIntervalMs;
          vp9_settings->automaticResizeOn = false;
        } break;
        case VideoCodecType::kVideoCodecH264: {
          VideoCodecH264* h264_settings = codec_settings.H264();
          h264_settings->keyFrameInterval = kKeyFrameIntervalMs;
        } break;
        default:
          RTC_CHECK(false) << "Unsupported codec type";
      }
      VideoBitrateAllocation bitrate_allocation;
      bitrate_allocation.SetBitrate(0, 0, kBitrateBps);
      video_encoder_->RegisterEncodeCompleteCallback(
          ivf_writer_callback_.get());
      RTC_CHECK_EQ(
          WEBRTC_VIDEO_CODEC_OK,
          video_encoder_->InitEncode(
              &codec_settings,
              VideoEncoder::Settings(kCapabilities, /*number_of_cores=*/4,
                                     /*max_payload_size=*/0)));
      video_encoder_->SetRates(VideoEncoder::RateControlParameters(
          bitrate_allocation,
          static_cast<double>(codec_settings.maxFramerate)));
    });
  }
  // Posts an asynchronous encode of `frame` (captured by value); the encoded
  // result is delivered to the IVF writer callback.
  void Encode(const VideoFrame& frame) {
    task_queue_.PostTask([frame, this]() {
      RTC_CHECK_EQ(WEBRTC_VIDEO_CODEC_OK,
                   video_encoder_->Encode(frame, nullptr));
    });
  }
  // Blocks the caller until the previously submitted frame has been written.
  void WaitNextFrameWritten(TimeDelta timeout) {
    ivf_writer_callback_->WaitNextFrameWritten(timeout);
  }

 private:
  std::unique_ptr<VideoEncoder> video_encoder_;
  std::unique_ptr<IvfFileWriterEncodedCallback> ivf_writer_callback_;
  rtc::TaskQueue task_queue_;
};
// Opens `yuv_file_name` just long enough to count how many `width`x`height`
// I420 frames it contains.
int GetFrameCount(std::string yuv_file_name, int width, int height) {
  auto reader = std::make_unique<YuvFrameReaderImpl>(std::move(yuv_file_name),
                                                     width, height);
  RTC_CHECK(reader->Init());
  const int num_frames = reader->NumberOfFrames();
  reader->Close();
  return num_frames;
}
// Wraps generated frame data into a VideoFrame stamped with `rtp_timestamp`.
VideoFrame BuildFrame(FrameGeneratorInterface::VideoFrameData frame_data,
                      uint32_t rtp_timestamp) {
  VideoFrame::Builder builder;
  builder.set_video_frame_buffer(frame_data.buffer);
  builder.set_update_rect(frame_data.update_rect);
  builder.set_timestamp_rtp(rtp_timestamp);
  return builder.build();
}
// Reads raw I420 frames from `input_file_name`, encodes them one at a time
// with `video_encoder`, and writes the bitstream to `output_file_name` in
// IVF format. Encoding is paced: each frame must be written to the output
// before the next one is submitted.
void WriteVideoFile(std::string input_file_name,
                    int width,
                    int height,
                    std::string output_file_name,
                    VideoCodecType video_codec_type,
                    std::unique_ptr<VideoEncoder> video_encoder) {
  int frames_count = GetFrameCount(input_file_name, width, height);
  std::unique_ptr<FrameGeneratorInterface> frame_generator =
      CreateFromYuvFileFrameGenerator({input_file_name}, width, height,
                                      /*frame_repeat_count=*/1);
  Encoder encoder(width, height, frames_count, output_file_name,
                  video_codec_type, std::move(video_encoder));
  uint32_t last_frame_timestamp = 0;
  for (int i = 0; i < frames_count; ++i) {
    // Advance the RTP timestamp by one frame interval (90 kHz RTP video
    // clock divided by the frame rate).
    const uint32_t timestamp =
        last_frame_timestamp + kVideoPayloadTypeFrequency / kMaxFramerate;
    VideoFrame frame = BuildFrame(frame_generator->NextFrame(), timestamp);
    last_frame_timestamp = timestamp;
    encoder.Encode(frame);
    // Block until this frame hits the output file before encoding the next;
    // CHECK-fails if the encoder stalls past the timeout.
    encoder.WaitNextFrameWritten(kMaxFrameEncodeWaitTimeout);
    if ((i + 1) % kFrameLogInterval == 0) {
      RTC_LOG(LS_INFO) << i + 1 << " out of " << frames_count
                       << " frames are sent for encoding";
    }
  }
  RTC_LOG(LS_INFO) << "All " << frames_count << " frame are sent for encoding";
}
} // namespace
} // namespace test
} // namespace webrtc
// Parses and validates command-line flags, then encodes the input YUV file
// into an IVF file with the selected codec. Invalid arguments or an
// unsupported codec cause a CHECK failure (with a symbolized stack trace).
int main(int argc, char* argv[]) {
  // Initialize the symbolizer to get a human-readable stack trace.
  absl::InitializeSymbolizer(argv[0]);
  absl::FailureSignalHandlerOptions options;
  absl::InstallFailureSignalHandler(options);
  absl::ParseCommandLine(argc, argv);
  std::string codec_name = absl::GetFlag(FLAGS_codec);
  std::string input_file_name = absl::GetFlag(FLAGS_input);
  std::string output_file_name = absl::GetFlag(FLAGS_output);
  int width = absl::GetFlag(FLAGS_width);
  int height = absl::GetFlag(FLAGS_height);
  RTC_CHECK_NE(input_file_name, "") << "--input is required";
  RTC_CHECK_NE(output_file_name, "") << "--output is required";
  // Fixed typo in the two failure messages below: "then" -> "than".
  RTC_CHECK_GT(width, 0) << "width must be greater than 0";
  RTC_CHECK_GT(height, 0) << "height must be greater than 0";
  // Dispatch on the (case-insensitive) codec name.
  if (absl::EqualsIgnoreCase(codec_name, cricket::kVp8CodecName)) {
    webrtc::test::WriteVideoFile(
        input_file_name, width, height, output_file_name,
        webrtc::VideoCodecType::kVideoCodecVP8, webrtc::VP8Encoder::Create());
    return 0;
  }
  if (absl::EqualsIgnoreCase(codec_name, cricket::kVp9CodecName)) {
    webrtc::test::WriteVideoFile(
        input_file_name, width, height, output_file_name,
        webrtc::VideoCodecType::kVideoCodecVP9, webrtc::VP9Encoder::Create());
    return 0;
  }
#if defined(WEBRTC_USE_H264)
  if (absl::EqualsIgnoreCase(codec_name, cricket::kH264CodecName)) {
    webrtc::test::WriteVideoFile(
        input_file_name, width, height, output_file_name,
        webrtc::VideoCodecType::kVideoCodecH264,
        webrtc::H264Encoder::Create(
            cricket::VideoCodec(cricket::kH264CodecName)));
    return 0;
  }
#endif
  RTC_CHECK(false) << "Unsupported codec: " << codec_name;
  // Unreachable when RTC_CHECK aborts; kept for compilers that require a
  // return value on all paths.
  return 1;
}