Replace rtc::Optional with absl::optional

This is a no-op change because rtc::Optional is an alias for absl::optional

This CL was generated by running the script below, passing all top-level directories except rtc_base and api:

find $@ -type f \( -name \*.h -o -name \*.cc -o -name \*.mm \) \
-exec sed -i 's|rtc::Optional|absl::optional|g' {} \+ \
-exec sed -i 's|rtc::nullopt|absl::nullopt|g' {} \+ \
-exec sed -i 's|#include "api/optional.h"|#include "absl/types/optional.h"|' {} \+

find $@ -type f -name BUILD.gn \
-exec sed -r -i 's|"[\./api]*:optional"|"//third_party/abseil-cpp/absl/types:optional"|' {} \+;

git cl format

Bug: webrtc:9078
Change-Id: I9465c172e65ba6e6ed4e4fdc35b0b265038d6f71
Reviewed-on: https://webrtc-review.googlesource.com/84584
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Commit-Queue: Danil Chapovalov <danilchap@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23697}
This commit is contained in:
Danil Chapovalov 2018-06-21 10:17:24 +02:00 committed by Commit Bot
parent ae810c10b4
commit 196100efa6
71 changed files with 189 additions and 188 deletions

View File

@ -64,7 +64,6 @@ rtc_static_library("common_audio") {
":sinc_resampler", ":sinc_resampler",
"..:webrtc_common", "..:webrtc_common",
"../:typedefs", "../:typedefs",
"../api:optional",
"../rtc_base:checks", "../rtc_base:checks",
"../rtc_base:gtest_prod", "../rtc_base:gtest_prod",
"../rtc_base:rtc_base_approved", "../rtc_base:rtc_base_approved",
@ -72,6 +71,7 @@ rtc_static_library("common_audio") {
"../rtc_base/memory:aligned_malloc", "../rtc_base/memory:aligned_malloc",
"../system_wrappers", "../system_wrappers",
"../system_wrappers:cpu_features_api", "../system_wrappers:cpu_features_api",
"//third_party/abseil-cpp/absl/types:optional",
] ]
defines = [] defines = []

View File

@ -19,7 +19,7 @@ namespace webrtc {
class MockSmoothingFilter : public SmoothingFilter { class MockSmoothingFilter : public SmoothingFilter {
public: public:
MOCK_METHOD1(AddSample, void(float)); MOCK_METHOD1(AddSample, void(float));
MOCK_METHOD0(GetAverage, rtc::Optional<float>()); MOCK_METHOD0(GetAverage, absl::optional<float>());
MOCK_METHOD1(SetTimeConstantMs, bool(int)); MOCK_METHOD1(SetTimeConstantMs, bool(int));
}; };

View File

@ -52,10 +52,10 @@ void SmoothingFilterImpl::AddSample(float sample) {
last_sample_ = sample; last_sample_ = sample;
} }
rtc::Optional<float> SmoothingFilterImpl::GetAverage() { absl::optional<float> SmoothingFilterImpl::GetAverage() {
if (!init_end_time_ms_) { if (!init_end_time_ms_) {
// |init_end_time_ms_| undefined since we have not received any sample. // |init_end_time_ms_| undefined since we have not received any sample.
return rtc::nullopt; return absl::nullopt;
} }
ExtrapolateLastSample(rtc::TimeMillis()); ExtrapolateLastSample(rtc::TimeMillis());
return state_; return state_;

View File

@ -11,7 +11,7 @@
#ifndef COMMON_AUDIO_SMOOTHING_FILTER_H_ #ifndef COMMON_AUDIO_SMOOTHING_FILTER_H_
#define COMMON_AUDIO_SMOOTHING_FILTER_H_ #define COMMON_AUDIO_SMOOTHING_FILTER_H_
#include "api/optional.h" #include "absl/types/optional.h"
#include "rtc_base/constructormagic.h" #include "rtc_base/constructormagic.h"
#include "system_wrappers/include/clock.h" #include "system_wrappers/include/clock.h"
@ -21,7 +21,7 @@ class SmoothingFilter {
public: public:
virtual ~SmoothingFilter() = default; virtual ~SmoothingFilter() = default;
virtual void AddSample(float sample) = 0; virtual void AddSample(float sample) = 0;
virtual rtc::Optional<float> GetAverage() = 0; virtual absl::optional<float> GetAverage() = 0;
virtual bool SetTimeConstantMs(int time_constant_ms) = 0; virtual bool SetTimeConstantMs(int time_constant_ms) = 0;
}; };
@ -44,7 +44,7 @@ class SmoothingFilterImpl final : public SmoothingFilter {
~SmoothingFilterImpl() override; ~SmoothingFilterImpl() override;
void AddSample(float sample) override; void AddSample(float sample) override;
rtc::Optional<float> GetAverage() override; absl::optional<float> GetAverage() override;
bool SetTimeConstantMs(int time_constant_ms) override; bool SetTimeConstantMs(int time_constant_ms) override;
// Methods used for unittests. // Methods used for unittests.
@ -58,7 +58,7 @@ class SmoothingFilterImpl final : public SmoothingFilter {
const float init_factor_; const float init_factor_;
const float init_const_; const float init_const_;
rtc::Optional<int64_t> init_end_time_ms_; absl::optional<int64_t> init_end_time_ms_;
float last_sample_; float last_sample_;
float alpha_; float alpha_;
float state_; float state_;

View File

@ -57,7 +57,6 @@ rtc_static_library("common_video") {
deps = [ deps = [
"..:webrtc_common", "..:webrtc_common",
"../:typedefs", "../:typedefs",
"../api:optional",
"../api/video:video_bitrate_allocation", "../api/video:video_bitrate_allocation",
"../api/video:video_frame", "../api/video:video_frame",
"../api/video:video_frame_i420", "../api/video:video_frame_i420",
@ -67,6 +66,7 @@ rtc_static_library("common_video") {
"../rtc_base:rtc_base", "../rtc_base:rtc_base",
"../rtc_base:rtc_task_queue", "../rtc_base:rtc_task_queue",
"../rtc_base:safe_minmax", "../rtc_base:safe_minmax",
"//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv", "//third_party/libyuv",
] ]
} }

View File

@ -68,7 +68,7 @@ uint32_t BitrateAdjuster::GetAdjustedBitrateBps() const {
return adjusted_bitrate_bps_; return adjusted_bitrate_bps_;
} }
rtc::Optional<uint32_t> BitrateAdjuster::GetEstimatedBitrateBps() { absl::optional<uint32_t> BitrateAdjuster::GetEstimatedBitrateBps() {
rtc::CritScope cs(&crit_); rtc::CritScope cs(&crit_);
return bitrate_tracker_.Rate(rtc::TimeMillis()); return bitrate_tracker_.Rate(rtc::TimeMillis());
} }

View File

@ -44,7 +44,7 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu(
if (!sps_ || !pps_) if (!sps_ || !pps_)
return kInvalidStream; return kInvalidStream;
last_slice_qp_delta_ = rtc::nullopt; last_slice_qp_delta_ = absl::nullopt;
const std::vector<uint8_t> slice_rbsp = const std::vector<uint8_t> slice_rbsp =
H264::ParseRbsp(source, source_length); H264::ParseRbsp(source, source_length);
if (slice_rbsp.size() < H264::kNaluTypeSize) if (slice_rbsp.size() < H264::kNaluTypeSize)

View File

@ -13,7 +13,7 @@
#include <stddef.h> #include <stddef.h>
#include <stdint.h> #include <stdint.h>
#include "api/optional.h" #include "absl/types/optional.h"
#include "common_video/h264/pps_parser.h" #include "common_video/h264/pps_parser.h"
#include "common_video/h264/sps_parser.h" #include "common_video/h264/sps_parser.h"
@ -53,11 +53,11 @@ class H264BitstreamParser {
uint8_t nalu_type); uint8_t nalu_type);
// SPS/PPS state, updated when parsing new SPS/PPS, used to parse slices. // SPS/PPS state, updated when parsing new SPS/PPS, used to parse slices.
rtc::Optional<SpsParser::SpsState> sps_; absl::optional<SpsParser::SpsState> sps_;
rtc::Optional<PpsParser::PpsState> pps_; absl::optional<PpsParser::PpsState> pps_;
// Last parsed slice QP. // Last parsed slice QP.
rtc::Optional<int32_t> last_slice_qp_delta_; absl::optional<int32_t> last_slice_qp_delta_;
}; };
} // namespace webrtc } // namespace webrtc

View File

@ -19,7 +19,7 @@
#define RETURN_EMPTY_ON_FAIL(x) \ #define RETURN_EMPTY_ON_FAIL(x) \
if (!(x)) { \ if (!(x)) { \
return rtc::nullopt; \ return absl::nullopt; \
} }
namespace { namespace {
@ -33,7 +33,7 @@ namespace webrtc {
// You can find it on this page: // You can find it on this page:
// http://www.itu.int/rec/T-REC-H.264 // http://www.itu.int/rec/T-REC-H.264
rtc::Optional<PpsParser::PpsState> PpsParser::ParsePps(const uint8_t* data, absl::optional<PpsParser::PpsState> PpsParser::ParsePps(const uint8_t* data,
size_t length) { size_t length) {
// First, parse out rbsp, which is basically the source buffer minus emulation // First, parse out rbsp, which is basically the source buffer minus emulation
// bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in // bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in
@ -57,7 +57,7 @@ bool PpsParser::ParsePpsIds(const uint8_t* data,
return ParsePpsIdsInternal(&bit_buffer, pps_id, sps_id); return ParsePpsIdsInternal(&bit_buffer, pps_id, sps_id);
} }
rtc::Optional<uint32_t> PpsParser::ParsePpsIdFromSlice(const uint8_t* data, absl::optional<uint32_t> PpsParser::ParsePpsIdFromSlice(const uint8_t* data,
size_t length) { size_t length) {
std::vector<uint8_t> unpacked_buffer = H264::ParseRbsp(data, length); std::vector<uint8_t> unpacked_buffer = H264::ParseRbsp(data, length);
rtc::BitBuffer slice_reader(unpacked_buffer.data(), unpacked_buffer.size()); rtc::BitBuffer slice_reader(unpacked_buffer.data(), unpacked_buffer.size());
@ -65,18 +65,18 @@ rtc::Optional<uint32_t> PpsParser::ParsePpsIdFromSlice(const uint8_t* data,
uint32_t golomb_tmp; uint32_t golomb_tmp;
// first_mb_in_slice: ue(v) // first_mb_in_slice: ue(v)
if (!slice_reader.ReadExponentialGolomb(&golomb_tmp)) if (!slice_reader.ReadExponentialGolomb(&golomb_tmp))
return rtc::nullopt; return absl::nullopt;
// slice_type: ue(v) // slice_type: ue(v)
if (!slice_reader.ReadExponentialGolomb(&golomb_tmp)) if (!slice_reader.ReadExponentialGolomb(&golomb_tmp))
return rtc::nullopt; return absl::nullopt;
// pic_parameter_set_id: ue(v) // pic_parameter_set_id: ue(v)
uint32_t slice_pps_id; uint32_t slice_pps_id;
if (!slice_reader.ReadExponentialGolomb(&slice_pps_id)) if (!slice_reader.ReadExponentialGolomb(&slice_pps_id))
return rtc::nullopt; return absl::nullopt;
return slice_pps_id; return slice_pps_id;
} }
rtc::Optional<PpsParser::PpsState> PpsParser::ParseInternal( absl::optional<PpsParser::PpsState> PpsParser::ParseInternal(
rtc::BitBuffer* bit_buffer) { rtc::BitBuffer* bit_buffer) {
PpsState pps; PpsState pps;

View File

@ -11,7 +11,7 @@
#ifndef COMMON_VIDEO_H264_PPS_PARSER_H_ #ifndef COMMON_VIDEO_H264_PPS_PARSER_H_
#define COMMON_VIDEO_H264_PPS_PARSER_H_ #define COMMON_VIDEO_H264_PPS_PARSER_H_
#include "api/optional.h" #include "absl/types/optional.h"
namespace rtc { namespace rtc {
class BitBuffer; class BitBuffer;
@ -38,20 +38,20 @@ class PpsParser {
}; };
// Unpack RBSP and parse PPS state from the supplied buffer. // Unpack RBSP and parse PPS state from the supplied buffer.
static rtc::Optional<PpsState> ParsePps(const uint8_t* data, size_t length); static absl::optional<PpsState> ParsePps(const uint8_t* data, size_t length);
static bool ParsePpsIds(const uint8_t* data, static bool ParsePpsIds(const uint8_t* data,
size_t length, size_t length,
uint32_t* pps_id, uint32_t* pps_id,
uint32_t* sps_id); uint32_t* sps_id);
static rtc::Optional<uint32_t> ParsePpsIdFromSlice(const uint8_t* data, static absl::optional<uint32_t> ParsePpsIdFromSlice(const uint8_t* data,
size_t length); size_t length);
protected: protected:
// Parse the PPS state, for a bit buffer where RBSP decoding has already been // Parse the PPS state, for a bit buffer where RBSP decoding has already been
// performed. // performed.
static rtc::Optional<PpsState> ParseInternal(rtc::BitBuffer* bit_buffer); static absl::optional<PpsState> ParseInternal(rtc::BitBuffer* bit_buffer);
static bool ParsePpsIdsInternal(rtc::BitBuffer* bit_buffer, static bool ParsePpsIdsInternal(rtc::BitBuffer* bit_buffer,
uint32_t* pps_id, uint32_t* pps_id,
uint32_t* sps_id); uint32_t* sps_id);

View File

@ -192,7 +192,7 @@ class PpsParserTest : public ::testing::Test {
PpsParser::PpsState generated_pps_; PpsParser::PpsState generated_pps_;
rtc::Buffer buffer_; rtc::Buffer buffer_;
rtc::Optional<PpsParser::PpsState> parsed_pps_; absl::optional<PpsParser::PpsState> parsed_pps_;
}; };
TEST_F(PpsParserTest, ZeroPps) { TEST_F(PpsParserTest, ZeroPps) {
@ -215,7 +215,7 @@ TEST_F(PpsParserTest, MaxPps) {
} }
TEST_F(PpsParserTest, PpsIdFromSlice) { TEST_F(PpsParserTest, PpsIdFromSlice) {
rtc::Optional<uint32_t> pps_id = PpsParser::ParsePpsIdFromSlice( absl::optional<uint32_t> pps_id = PpsParser::ParsePpsIdFromSlice(
kH264BitstreamChunk, sizeof(kH264BitstreamChunk)); kH264BitstreamChunk, sizeof(kH264BitstreamChunk));
ASSERT_TRUE(pps_id); ASSERT_TRUE(pps_id);
EXPECT_EQ(2u, *pps_id); EXPECT_EQ(2u, *pps_id);

View File

@ -125,7 +125,7 @@ TEST(H264ProfileLevelId, TestToStringInvalid) {
} }
TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdEmpty) { TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdEmpty) {
const rtc::Optional<ProfileLevelId> profile_level_id = const absl::optional<ProfileLevelId> profile_level_id =
ParseSdpProfileLevelId(CodecParameterMap()); ParseSdpProfileLevelId(CodecParameterMap());
EXPECT_TRUE(profile_level_id); EXPECT_TRUE(profile_level_id);
EXPECT_EQ(kProfileConstrainedBaseline, profile_level_id->profile); EXPECT_EQ(kProfileConstrainedBaseline, profile_level_id->profile);
@ -135,7 +135,7 @@ TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdEmpty) {
TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdConstrainedHigh) { TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdConstrainedHigh) {
CodecParameterMap params; CodecParameterMap params;
params["profile-level-id"] = "640c2a"; params["profile-level-id"] = "640c2a";
const rtc::Optional<ProfileLevelId> profile_level_id = const absl::optional<ProfileLevelId> profile_level_id =
ParseSdpProfileLevelId(params); ParseSdpProfileLevelId(params);
EXPECT_TRUE(profile_level_id); EXPECT_TRUE(profile_level_id);
EXPECT_EQ(kProfileConstrainedHigh, profile_level_id->profile); EXPECT_EQ(kProfileConstrainedHigh, profile_level_id->profile);

View File

@ -18,7 +18,7 @@
#include "rtc_base/logging.h" #include "rtc_base/logging.h"
namespace { namespace {
typedef rtc::Optional<webrtc::SpsParser::SpsState> OptionalSps; typedef absl::optional<webrtc::SpsParser::SpsState> OptionalSps;
#define RETURN_EMPTY_ON_FAIL(x) \ #define RETURN_EMPTY_ON_FAIL(x) \
if (!(x)) { \ if (!(x)) { \
@ -38,14 +38,14 @@ SpsParser::SpsState::SpsState() = default;
// http://www.itu.int/rec/T-REC-H.264 // http://www.itu.int/rec/T-REC-H.264
// Unpack RBSP and parse SPS state from the supplied buffer. // Unpack RBSP and parse SPS state from the supplied buffer.
rtc::Optional<SpsParser::SpsState> SpsParser::ParseSps(const uint8_t* data, absl::optional<SpsParser::SpsState> SpsParser::ParseSps(const uint8_t* data,
size_t length) { size_t length) {
std::vector<uint8_t> unpacked_buffer = H264::ParseRbsp(data, length); std::vector<uint8_t> unpacked_buffer = H264::ParseRbsp(data, length);
rtc::BitBuffer bit_buffer(unpacked_buffer.data(), unpacked_buffer.size()); rtc::BitBuffer bit_buffer(unpacked_buffer.data(), unpacked_buffer.size());
return ParseSpsUpToVui(&bit_buffer); return ParseSpsUpToVui(&bit_buffer);
} }
rtc::Optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui( absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
rtc::BitBuffer* buffer) { rtc::BitBuffer* buffer) {
// Now, we need to use a bit buffer to parse through the actual AVC SPS // Now, we need to use a bit buffer to parse through the actual AVC SPS
// format. See Section 7.3.2.1.1 ("Sequence parameter set data syntax") of the // format. See Section 7.3.2.1.1 ("Sequence parameter set data syntax") of the

View File

@ -11,7 +11,7 @@
#ifndef COMMON_VIDEO_H264_SPS_PARSER_H_ #ifndef COMMON_VIDEO_H264_SPS_PARSER_H_
#define COMMON_VIDEO_H264_SPS_PARSER_H_ #define COMMON_VIDEO_H264_SPS_PARSER_H_
#include "api/optional.h" #include "absl/types/optional.h"
namespace rtc { namespace rtc {
class BitBuffer; class BitBuffer;
@ -41,12 +41,12 @@ class SpsParser {
}; };
// Unpack RBSP and parse SPS state from the supplied buffer. // Unpack RBSP and parse SPS state from the supplied buffer.
static rtc::Optional<SpsState> ParseSps(const uint8_t* data, size_t length); static absl::optional<SpsState> ParseSps(const uint8_t* data, size_t length);
protected: protected:
// Parse the SPS state, up till the VUI part, for a bit buffer where RBSP // Parse the SPS state, up till the VUI part, for a bit buffer where RBSP
// decoding has already been performed. // decoding has already been performed.
static rtc::Optional<SpsState> ParseSpsUpToVui(rtc::BitBuffer* buffer); static absl::optional<SpsState> ParseSpsUpToVui(rtc::BitBuffer* buffer);
}; };
} // namespace webrtc } // namespace webrtc

View File

@ -112,7 +112,7 @@ class H264SpsParserTest : public ::testing::Test {
H264SpsParserTest() {} H264SpsParserTest() {}
virtual ~H264SpsParserTest() {} virtual ~H264SpsParserTest() {}
rtc::Optional<SpsParser::SpsState> sps_; absl::optional<SpsParser::SpsState> sps_;
}; };
TEST_F(H264SpsParserTest, TestSampleSPSHdLandscape) { TEST_F(H264SpsParserTest, TestSampleSPSHdLandscape) {

View File

@ -72,13 +72,13 @@ bool CopyRemainingBits(rtc::BitBuffer* source,
SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps( SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps(
const uint8_t* buffer, const uint8_t* buffer,
size_t length, size_t length,
rtc::Optional<SpsParser::SpsState>* sps, absl::optional<SpsParser::SpsState>* sps,
rtc::Buffer* destination) { rtc::Buffer* destination) {
// Create temporary RBSP decoded buffer of the payload (excluding the // Create temporary RBSP decoded buffer of the payload (excluding the
// leading nalu type header byte (the SpsParser uses only the payload). // leading nalu type header byte (the SpsParser uses only the payload).
std::vector<uint8_t> rbsp_buffer = H264::ParseRbsp(buffer, length); std::vector<uint8_t> rbsp_buffer = H264::ParseRbsp(buffer, length);
rtc::BitBuffer source_buffer(rbsp_buffer.data(), rbsp_buffer.size()); rtc::BitBuffer source_buffer(rbsp_buffer.data(), rbsp_buffer.size());
rtc::Optional<SpsParser::SpsState> sps_state = absl::optional<SpsParser::SpsState> sps_state =
SpsParser::ParseSpsUpToVui(&source_buffer); SpsParser::ParseSpsUpToVui(&source_buffer);
if (!sps_state) if (!sps_state)
return ParseResult::kFailure; return ParseResult::kFailure;

View File

@ -12,7 +12,7 @@
#ifndef COMMON_VIDEO_H264_SPS_VUI_REWRITER_H_ #ifndef COMMON_VIDEO_H264_SPS_VUI_REWRITER_H_
#define COMMON_VIDEO_H264_SPS_VUI_REWRITER_H_ #define COMMON_VIDEO_H264_SPS_VUI_REWRITER_H_
#include "api/optional.h" #include "absl/types/optional.h"
#include "common_video/h264/sps_parser.h" #include "common_video/h264/sps_parser.h"
#include "rtc_base/buffer.h" #include "rtc_base/buffer.h"
@ -43,9 +43,10 @@ class SpsVuiRewriter : private SpsParser {
// SPS state. This function assumes that any previous headers // SPS state. This function assumes that any previous headers
// (NALU start, type, Stap-A, etc) have already been parsed and that RBSP // (NALU start, type, Stap-A, etc) have already been parsed and that RBSP
// decoding has been performed. // decoding has been performed.
static ParseResult ParseAndRewriteSps(const uint8_t* buffer, static ParseResult ParseAndRewriteSps(
const uint8_t* buffer,
size_t length, size_t length,
rtc::Optional<SpsParser::SpsState>* sps, absl::optional<SpsParser::SpsState>* sps,
rtc::Buffer* destination); rtc::Buffer* destination);
}; };

View File

@ -159,7 +159,7 @@ void TestSps(SpsMode mode, SpsVuiRewriter::ParseResult expected_parse_result) {
index.payload_start_offset += H264::kNaluTypeSize; index.payload_start_offset += H264::kNaluTypeSize;
index.payload_size -= H264::kNaluTypeSize; index.payload_size -= H264::kNaluTypeSize;
rtc::Optional<SpsParser::SpsState> sps; absl::optional<SpsParser::SpsState> sps;
rtc::Buffer out_buffer; rtc::Buffer out_buffer;
SpsVuiRewriter::ParseResult result = SpsVuiRewriter::ParseResult result =
SpsVuiRewriter::ParseAndRewriteSps(&buffer[index.payload_start_offset], SpsVuiRewriter::ParseAndRewriteSps(&buffer[index.payload_start_offset],

View File

@ -44,7 +44,7 @@ class BitrateAdjuster {
uint32_t GetAdjustedBitrateBps() const; uint32_t GetAdjustedBitrateBps() const;
// Returns what we think the current bitrate is. // Returns what we think the current bitrate is.
rtc::Optional<uint32_t> GetEstimatedBitrateBps(); absl::optional<uint32_t> GetEstimatedBitrateBps();
// This should be called after each frame is encoded. The timestamp at which // This should be called after each frame is encoded. The timestamp at which
// it is called is used to estimate the output bitrate of the encoder. // it is called is used to estimate the output bitrate of the encoder.

View File

@ -69,7 +69,7 @@ void IncomingVideoStream::OnFrame(const VideoFrame& video_frame) {
void IncomingVideoStream::Dequeue() { void IncomingVideoStream::Dequeue() {
TRACE_EVENT0("webrtc", "IncomingVideoStream::Dequeue"); TRACE_EVENT0("webrtc", "IncomingVideoStream::Dequeue");
RTC_DCHECK(incoming_render_queue_.IsCurrent()); RTC_DCHECK(incoming_render_queue_.IsCurrent());
rtc::Optional<VideoFrame> frame_to_render = render_buffers_.FrameToRender(); absl::optional<VideoFrame> frame_to_render = render_buffers_.FrameToRender();
if (frame_to_render) if (frame_to_render)
callback_->OnFrame(*frame_to_render); callback_->OnFrame(*frame_to_render);

View File

@ -73,8 +73,8 @@ int32_t VideoRenderFrames::AddFrame(VideoFrame&& new_frame) {
return static_cast<int32_t>(incoming_frames_.size()); return static_cast<int32_t>(incoming_frames_.size());
} }
rtc::Optional<VideoFrame> VideoRenderFrames::FrameToRender() { absl::optional<VideoFrame> VideoRenderFrames::FrameToRender() {
rtc::Optional<VideoFrame> render_frame; absl::optional<VideoFrame> render_frame;
// Get the newest frame that can be released for rendering. // Get the newest frame that can be released for rendering.
while (!incoming_frames_.empty() && TimeToNextFrameRelease() <= 0) { while (!incoming_frames_.empty() && TimeToNextFrameRelease() <= 0) {
render_frame = std::move(incoming_frames_.front()); render_frame = std::move(incoming_frames_.front());

View File

@ -15,7 +15,7 @@
#include <list> #include <list>
#include "api/optional.h" #include "absl/types/optional.h"
#include "api/video/video_frame.h" #include "api/video/video_frame.h"
namespace webrtc { namespace webrtc {
@ -30,7 +30,7 @@ class VideoRenderFrames {
int32_t AddFrame(VideoFrame&& new_frame); int32_t AddFrame(VideoFrame&& new_frame);
// Get a frame for rendering, or false if it's not time to render. // Get a frame for rendering, or false if it's not time to render.
rtc::Optional<VideoFrame> FrameToRender(); absl::optional<VideoFrame> FrameToRender();
// Returns the number of ms to next frame to render // Returns the number of ms to next frame to render
uint32_t TimeToNextFrameRelease(); uint32_t TimeToNextFrameRelease();

View File

@ -270,7 +270,7 @@ void Conductor::OnMessageFromPeer(int peer_id, const std::string& message) {
} }
return; return;
} }
rtc::Optional<webrtc::SdpType> type_maybe = absl::optional<webrtc::SdpType> type_maybe =
webrtc::SdpTypeFromString(type_str); webrtc::SdpTypeFromString(type_str);
if (!type_maybe) { if (!type_maybe) {
RTC_LOG(LS_ERROR) << "Unknown SDP type: " << type_str; RTC_LOG(LS_ERROR) << "Unknown SDP type: " << type_str;

View File

@ -57,8 +57,8 @@ class ParsedRtcEventLog {
struct BweProbeResultEvent { struct BweProbeResultEvent {
uint64_t timestamp; uint64_t timestamp;
uint32_t id; uint32_t id;
rtc::Optional<uint64_t> bitrate_bps; absl::optional<uint64_t> bitrate_bps;
rtc::Optional<ProbeFailureReason> failure_reason; absl::optional<ProbeFailureReason> failure_reason;
}; };
struct BweDelayBasedUpdate { struct BweDelayBasedUpdate {

View File

@ -748,8 +748,8 @@ TEST(RtcEventLogTest, CircularBufferKeepsMostRecentEvents) {
EXPECT_GT(parsed_log.GetNumberOfEvents(), 2u); EXPECT_GT(parsed_log.GetNumberOfEvents(), 2u);
RtcEventLogTestHelper::VerifyLogStartEvent(parsed_log, 0); RtcEventLogTestHelper::VerifyLogStartEvent(parsed_log, 0);
rtc::Optional<int64_t> last_timestamp; absl::optional<int64_t> last_timestamp;
rtc::Optional<uint32_t> last_ssrc; absl::optional<uint32_t> last_ssrc;
for (size_t i = 1; i < parsed_log.GetNumberOfEvents() - 1; i++) { for (size_t i = 1; i < parsed_log.GetNumberOfEvents() - 1; i++) {
EXPECT_EQ(parsed_log.GetEventType(i), EXPECT_EQ(parsed_log.GetEventType(i),
ParsedRtcEventLogNew::EventType::AUDIO_PLAYOUT_EVENT); ParsedRtcEventLogNew::EventType::AUDIO_PLAYOUT_EVENT);

View File

@ -37,7 +37,7 @@ rtc_source_set("module_api_public") {
deps = [ deps = [
"..:webrtc_common", "..:webrtc_common",
"../:typedefs", "../:typedefs",
"../api:optional", "//third_party/abseil-cpp/absl/types:optional",
] ]
} }
@ -53,13 +53,13 @@ rtc_source_set("module_api") {
"..:webrtc_common", "..:webrtc_common",
"../:typedefs", "../:typedefs",
"../api:libjingle_peerconnection_api", "../api:libjingle_peerconnection_api",
"../api:optional",
"../api/transport:network_control", "../api/transport:network_control",
"../api/video:video_frame", "../api/video:video_frame",
"../api/video:video_frame_i420", "../api/video:video_frame_i420",
"../rtc_base:deprecation", "../rtc_base:deprecation",
"../rtc_base:rtc_base_approved", "../rtc_base:rtc_base_approved",
"video_coding:codec_globals_headers", "video_coding:codec_globals_headers",
"//third_party/abseil-cpp/absl/types:optional",
] ]
} }

View File

@ -204,9 +204,9 @@ rtc_source_set("audio_device_module_from_input_and_output") {
":audio_device_api", ":audio_device_api",
":audio_device_buffer", ":audio_device_buffer",
":windows_core_audio_utility", ":windows_core_audio_utility",
"../../api:optional",
"../../rtc_base:checks", "../../rtc_base:checks",
"../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_base_approved",
"//third_party/abseil-cpp/absl/types:optional",
] ]
} }
} }
@ -483,7 +483,6 @@ if (rtc_include_tests) {
":audio_device_impl", ":audio_device_impl",
":mock_audio_device", ":mock_audio_device",
"../../api:array_view", "../../api:array_view",
"../../api:optional",
"../../common_audio", "../../common_audio",
"../../rtc_base:checks", "../../rtc_base:checks",
"../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_base_approved",
@ -491,6 +490,7 @@ if (rtc_include_tests) {
"../../test:fileutils", "../../test:fileutils",
"../../test:test_support", "../../test:test_support",
"../utility:utility", "../utility:utility",
"//third_party/abseil-cpp/absl/types:optional",
] ]
if (is_linux || is_mac || is_win) { if (is_linux || is_mac || is_win) {
sources += [ "audio_device_unittest.cc" ] sources += [ "audio_device_unittest.cc" ]

View File

@ -13,8 +13,8 @@
#include <memory> #include <memory>
#include <numeric> #include <numeric>
#include "absl/types/optional.h"
#include "api/array_view.h" #include "api/array_view.h"
#include "api/optional.h"
#include "modules/audio_device/audio_device_impl.h" #include "modules/audio_device/audio_device_impl.h"
#include "modules/audio_device/include/audio_device.h" #include "modules/audio_device/include/audio_device.h"
#include "modules/audio_device/include/mock_audio_transport.h" #include "modules/audio_device/include/mock_audio_transport.h"
@ -294,7 +294,7 @@ class LatencyAudioStream : public AudioStream {
rtc::ThreadChecker read_thread_checker_; rtc::ThreadChecker read_thread_checker_;
rtc::ThreadChecker write_thread_checker_; rtc::ThreadChecker write_thread_checker_;
rtc::Optional<int64_t> pulse_time_ RTC_GUARDED_BY(lock_); absl::optional<int64_t> pulse_time_ RTC_GUARDED_BY(lock_);
std::vector<int> latencies_ RTC_GUARDED_BY(race_checker_); std::vector<int> latencies_ RTC_GUARDED_BY(race_checker_);
size_t read_count_ RTC_GUARDED_BY(read_thread_checker_) = 0; size_t read_count_ RTC_GUARDED_BY(read_thread_checker_) = 0;
size_t write_count_ RTC_GUARDED_BY(write_thread_checker_) = 0; size_t write_count_ RTC_GUARDED_BY(write_thread_checker_) = 0;

View File

@ -315,10 +315,10 @@ bool CoreAudioInput::OnDataCallback(uint64_t device_frequency) {
return true; return true;
} }
rtc::Optional<int> CoreAudioInput::EstimateLatencyMillis( absl::optional<int> CoreAudioInput::EstimateLatencyMillis(
uint64_t capture_time_100ns) { uint64_t capture_time_100ns) {
if (!qpc_to_100ns_) { if (!qpc_to_100ns_) {
return rtc::nullopt; return absl::nullopt;
} }
// Input parameter |capture_time_100ns| contains the performance counter at // Input parameter |capture_time_100ns| contains the performance counter at
// the time that the audio endpoint device recorded the device position of // the time that the audio endpoint device recorded the device position of
@ -329,7 +329,7 @@ rtc::Optional<int> CoreAudioInput::EstimateLatencyMillis(
// - subtracting |capture_time_100ns| from now_time_100ns. // - subtracting |capture_time_100ns| from now_time_100ns.
LARGE_INTEGER perf_counter_now = {}; LARGE_INTEGER perf_counter_now = {};
if (!::QueryPerformanceCounter(&perf_counter_now)) { if (!::QueryPerformanceCounter(&perf_counter_now)) {
return rtc::nullopt; return absl::nullopt;
} }
uint64_t qpc_now_raw = perf_counter_now.QuadPart; uint64_t qpc_now_raw = perf_counter_now.QuadPart;
uint64_t now_time_100ns = qpc_now_raw * (*qpc_to_100ns_); uint64_t now_time_100ns = qpc_now_raw * (*qpc_to_100ns_);

View File

@ -14,7 +14,7 @@
#include <memory> #include <memory>
#include <string> #include <string>
#include "api/optional.h" #include "absl/types/optional.h"
#include "modules/audio_device/win/audio_device_module_win.h" #include "modules/audio_device/win/audio_device_module_win.h"
#include "modules/audio_device/win/core_audio_base_win.h" #include "modules/audio_device/win/core_audio_base_win.h"
@ -53,11 +53,11 @@ class CoreAudioInput final : public CoreAudioBase, public AudioInput {
private: private:
bool OnDataCallback(uint64_t device_frequency); bool OnDataCallback(uint64_t device_frequency);
rtc::Optional<int> EstimateLatencyMillis(uint64_t capture_time_100ns); absl::optional<int> EstimateLatencyMillis(uint64_t capture_time_100ns);
std::unique_ptr<FineAudioBuffer> fine_audio_buffer_; std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;
Microsoft::WRL::ComPtr<IAudioCaptureClient> audio_capture_client_; Microsoft::WRL::ComPtr<IAudioCaptureClient> audio_capture_client_;
rtc::Optional<double> qpc_to_100ns_; absl::optional<double> qpc_to_100ns_;
}; };
} // namespace webrtc_win } // namespace webrtc_win

View File

@ -17,7 +17,7 @@
#include <algorithm> #include <algorithm>
#include <limits> #include <limits>
#include "api/optional.h" #include "absl/types/optional.h"
#include "api/rtp_headers.h" #include "api/rtp_headers.h"
#include "api/transport/network_types.h" #include "api/transport/network_types.h"
#include "api/video/video_rotation.h" #include "api/video/video_rotation.h"

View File

@ -13,7 +13,7 @@
#include <limits> #include <limits>
#include "api/optional.h" #include "absl/types/optional.h"
#include "typedefs.h" // NOLINT(build/include) #include "typedefs.h" // NOLINT(build/include)
namespace webrtc { namespace webrtc {
@ -78,7 +78,7 @@ class Unwrapper {
} }
private: private:
rtc::Optional<int64_t> last_value_; absl::optional<int64_t> last_value_;
}; };
using SequenceNumberUnwrapper = Unwrapper<uint16_t>; using SequenceNumberUnwrapper = Unwrapper<uint16_t>;

View File

@ -304,12 +304,12 @@ if (is_ios || is_mac) {
":videoframebuffer_objc", ":videoframebuffer_objc",
":videosource_objc", ":videosource_objc",
"../api:libjingle_peerconnection_api", "../api:libjingle_peerconnection_api",
"../api:optional",
"../api/video:video_frame", "../api/video:video_frame",
"../common_video", "../common_video",
"../media:rtc_media_base", "../media:rtc_media_base",
"../rtc_base:checks", "../rtc_base:checks",
"../rtc_base:rtc_base", "../rtc_base:rtc_base",
"//third_party/abseil-cpp/absl/types:optional",
] ]
configs += [ configs += [

View File

@ -87,11 +87,11 @@ rtc_source_set("base_jni") {
":internal_jni", ":internal_jni",
":native_api_jni", ":native_api_jni",
"../../api:libjingle_peerconnection_api", "../../api:libjingle_peerconnection_api",
"../../api:optional",
"../../rtc_base:checks", "../../rtc_base:checks",
"../../rtc_base:rtc_base", "../../rtc_base:rtc_base",
"../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_base_approved",
"../../system_wrappers:metrics_api", "../../system_wrappers:metrics_api",
"//third_party/abseil-cpp/absl/types:optional",
] ]
} }
@ -145,11 +145,11 @@ rtc_source_set("audio_device_module_base") {
":base_jni", ":base_jni",
":generated_audio_device_module_base_jni", ":generated_audio_device_module_base_jni",
":native_api_jni", ":native_api_jni",
"../../api:optional",
"../../modules/audio_device:audio_device_buffer", "../../modules/audio_device:audio_device_buffer",
"../../rtc_base:checks", "../../rtc_base:checks",
"../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_base_approved",
"../../system_wrappers:metrics_api", "../../system_wrappers:metrics_api",
"//third_party/abseil-cpp/absl/types:optional",
] ]
} }
@ -170,13 +170,13 @@ if (rtc_enable_android_aaudio) {
":audio_device_module_base", ":audio_device_module_base",
":base_jni", ":base_jni",
"../../api:array_view", "../../api:array_view",
"../../api:optional",
"../../modules/audio_device:audio_device", "../../modules/audio_device:audio_device",
"../../modules/audio_device:audio_device_buffer", "../../modules/audio_device:audio_device_buffer",
"../../rtc_base:checks", "../../rtc_base:checks",
"../../rtc_base:rtc_base", "../../rtc_base:rtc_base",
"../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_base_approved",
"../../system_wrappers", "../../system_wrappers",
"//third_party/abseil-cpp/absl/types:optional",
] ]
} }
} }
@ -196,11 +196,11 @@ rtc_source_set("opensles_audio_device_module") {
":audio_device_module_base", ":audio_device_module_base",
":base_jni", ":base_jni",
"../../api:array_view", "../../api:array_view",
"../../api:optional",
"../../modules/audio_device:audio_device", "../../modules/audio_device:audio_device",
"../../modules/audio_device:audio_device_buffer", "../../modules/audio_device:audio_device_buffer",
"../../rtc_base:checks", "../../rtc_base:checks",
"../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_base_approved",
"//third_party/abseil-cpp/absl/types:optional",
] ]
} }
@ -217,12 +217,12 @@ rtc_source_set("java_audio_device_module") {
":audio_device_module_base", ":audio_device_module_base",
":base_jni", ":base_jni",
":generated_java_audio_device_module_native_jni", ":generated_java_audio_device_module_native_jni",
"../../api:optional",
"../../modules/audio_device:audio_device", "../../modules/audio_device:audio_device",
"../../modules/audio_device:audio_device_buffer", "../../modules/audio_device:audio_device_buffer",
"../../rtc_base:checks", "../../rtc_base:checks",
"../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_base_approved",
"../../system_wrappers:metrics_api", "../../system_wrappers:metrics_api",
"//third_party/abseil-cpp/absl/types:optional",
] ]
} }
@ -1148,9 +1148,9 @@ rtc_static_library("native_api_jni") {
":generated_external_classes_jni", ":generated_external_classes_jni",
":generated_native_api_jni", ":generated_native_api_jni",
":internal_jni", ":internal_jni",
"//api:optional",
"//rtc_base:checks", "//rtc_base:checks",
"//rtc_base:rtc_base_approved", "//rtc_base:rtc_base_approved",
"//third_party/abseil-cpp/absl/types:optional",
] ]
} }

View File

@ -125,18 +125,18 @@ int64_t JavaToNativeLong(JNIEnv* env, const JavaRef<jobject>& j_long) {
return JNI_Long::Java_Long_longValue(env, j_long); return JNI_Long::Java_Long_longValue(env, j_long);
} }
rtc::Optional<bool> JavaToNativeOptionalBool(JNIEnv* jni, absl::optional<bool> JavaToNativeOptionalBool(JNIEnv* jni,
const JavaRef<jobject>& boolean) { const JavaRef<jobject>& boolean) {
if (IsNull(jni, boolean)) if (IsNull(jni, boolean))
return rtc::nullopt; return absl::nullopt;
return JNI_Boolean::Java_Boolean_booleanValue(jni, boolean); return JNI_Boolean::Java_Boolean_booleanValue(jni, boolean);
} }
rtc::Optional<int32_t> JavaToNativeOptionalInt( absl::optional<int32_t> JavaToNativeOptionalInt(
JNIEnv* jni, JNIEnv* jni,
const JavaRef<jobject>& integer) { const JavaRef<jobject>& integer) {
if (IsNull(jni, integer)) if (IsNull(jni, integer))
return rtc::nullopt; return absl::nullopt;
return JNI_Integer::Java_Integer_intValue(jni, integer); return JNI_Integer::Java_Integer_intValue(jni, integer);
} }
@ -196,13 +196,13 @@ ScopedJavaLocalRef<jstring> NativeToJavaString(JNIEnv* jni,
ScopedJavaLocalRef<jobject> NativeToJavaInteger( ScopedJavaLocalRef<jobject> NativeToJavaInteger(
JNIEnv* jni, JNIEnv* jni,
const rtc::Optional<int32_t>& optional_int) { const absl::optional<int32_t>& optional_int) {
return optional_int ? NativeToJavaInteger(jni, *optional_int) : nullptr; return optional_int ? NativeToJavaInteger(jni, *optional_int) : nullptr;
} }
ScopedJavaLocalRef<jstring> NativeToJavaString( ScopedJavaLocalRef<jstring> NativeToJavaString(
JNIEnv* jni, JNIEnv* jni,
const rtc::Optional<std::string>& str) { const absl::optional<std::string>& str) {
return str ? NativeToJavaString(jni, *str) : nullptr; return str ? NativeToJavaString(jni, *str) : nullptr;
} }

View File

@ -22,7 +22,7 @@
#include <string> #include <string>
#include <vector> #include <vector>
#include "api/optional.h" #include "absl/types/optional.h"
#include "rtc_base/checks.h" #include "rtc_base/checks.h"
#include "rtc_base/thread_checker.h" #include "rtc_base/thread_checker.h"
#include "sdk/android/native_api/jni/scoped_java_ref.h" #include "sdk/android/native_api/jni/scoped_java_ref.h"
@ -126,9 +126,10 @@ ScopedJavaLocalRef<jobject> GetJavaMapEntryValue(
int64_t JavaToNativeLong(JNIEnv* env, const JavaRef<jobject>& j_long); int64_t JavaToNativeLong(JNIEnv* env, const JavaRef<jobject>& j_long);
rtc::Optional<bool> JavaToNativeOptionalBool(JNIEnv* jni, absl::optional<bool> JavaToNativeOptionalBool(JNIEnv* jni,
const JavaRef<jobject>& boolean); const JavaRef<jobject>& boolean);
rtc::Optional<int32_t> JavaToNativeOptionalInt(JNIEnv* jni, absl::optional<int32_t> JavaToNativeOptionalInt(
JNIEnv* jni,
const JavaRef<jobject>& integer); const JavaRef<jobject>& integer);
// Given a (UTF-16) jstring return a new UTF-8 native string. // Given a (UTF-16) jstring return a new UTF-8 native string.
@ -196,10 +197,10 @@ ScopedJavaLocalRef<jstring> NativeToJavaString(JNIEnv* jni,
ScopedJavaLocalRef<jobject> NativeToJavaInteger( ScopedJavaLocalRef<jobject> NativeToJavaInteger(
JNIEnv* jni, JNIEnv* jni,
const rtc::Optional<int32_t>& optional_int); const absl::optional<int32_t>& optional_int);
ScopedJavaLocalRef<jstring> NativeToJavaString( ScopedJavaLocalRef<jstring> NativeToJavaString(
JNIEnv* jni, JNIEnv* jni,
const rtc::Optional<std::string>& str); const absl::optional<std::string>& str);
// Helper function for converting std::vector<T> into a Java array. // Helper function for converting std::vector<T> into a Java array.
template <typename T, typename Convert> template <typename T, typename Convert>

View File

@ -74,7 +74,7 @@ class JavaVideoTrackSourceImpl : public JavaVideoTrackSourceInterface {
return android_video_track_source_->is_screencast(); return android_video_track_source_->is_screencast();
} }
rtc::Optional<bool> needs_denoising() const override { absl::optional<bool> needs_denoising() const override {
return android_video_track_source_->needs_denoising(); return android_video_track_source_->needs_denoising();
} }

View File

@ -124,7 +124,7 @@ class MediaCodecVideoDecoder : public VideoDecoder, public rtc::MessageHandler {
int current_delay_time_ms_; // Overall delay time in the current second. int current_delay_time_ms_; // Overall delay time in the current second.
int32_t max_pending_frames_; // Maximum number of pending input frames. int32_t max_pending_frames_; // Maximum number of pending input frames.
H264BitstreamParser h264_bitstream_parser_; H264BitstreamParser h264_bitstream_parser_;
std::deque<rtc::Optional<uint8_t>> pending_frame_qps_; std::deque<absl::optional<uint8_t>> pending_frame_qps_;
// State that is constant for the lifetime of this object once the ctor // State that is constant for the lifetime of this object once the ctor
// returns. // returns.
@ -506,7 +506,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
// Save input image timestamps for later output. // Save input image timestamps for later output.
frames_received_++; frames_received_++;
current_bytes_ += inputImage._length; current_bytes_ += inputImage._length;
rtc::Optional<uint8_t> qp; absl::optional<uint8_t> qp;
if (codecType_ == kVideoCodecVP8) { if (codecType_ == kVideoCodecVP8) {
int qp_int; int qp_int;
if (vp8::GetQp(inputImage._buffer, inputImage._length, &qp_int)) { if (vp8::GetQp(inputImage._buffer, inputImage._length, &qp_int)) {
@ -743,7 +743,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(JNIEnv* jni,
decoded_frame.set_timestamp(output_timestamps_ms); decoded_frame.set_timestamp(output_timestamps_ms);
decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms); decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms);
rtc::Optional<uint8_t> qp = pending_frame_qps_.front(); absl::optional<uint8_t> qp = pending_frame_qps_.front();
pending_frame_qps_.pop_front(); pending_frame_qps_.pop_front();
callback_->Decoded(decoded_frame, decode_time_ms, qp); callback_->Decoded(decoded_frame, decode_time_ms, qp);
} }

View File

@ -347,7 +347,7 @@ int32_t MediaCodecVideoEncoder::InitEncode(const VideoCodec* codec_settings,
// Check allowed H.264 profile // Check allowed H.264 profile
profile_ = H264::Profile::kProfileBaseline; profile_ = H264::Profile::kProfileBaseline;
if (codec_type == kVideoCodecH264) { if (codec_type == kVideoCodecH264) {
const rtc::Optional<H264::ProfileLevelId> profile_level_id = const absl::optional<H264::ProfileLevelId> profile_level_id =
H264::ParseSdpProfileLevelId(codec_.params); H264::ParseSdpProfileLevelId(codec_.params);
RTC_DCHECK(profile_level_id); RTC_DCHECK(profile_level_id);
profile_ = profile_level_id->profile; profile_ = profile_level_id->profile;

View File

@ -38,7 +38,7 @@ bool AndroidVideoTrackSource::is_screencast() const {
return is_screencast_; return is_screencast_;
} }
rtc::Optional<bool> AndroidVideoTrackSource::needs_denoising() const { absl::optional<bool> AndroidVideoTrackSource::needs_denoising() const {
return false; return false;
} }

View File

@ -37,7 +37,7 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
// Indicates that the encoder should denoise video before encoding it. // Indicates that the encoder should denoise video before encoding it.
// If it is not set, the default configuration is used which is different // If it is not set, the default configuration is used which is different
// depending on video codec. // depending on video codec.
rtc::Optional<bool> needs_denoising() const override; absl::optional<bool> needs_denoising() const override;
// Called by the native capture observer // Called by the native capture observer
void SetState(SourceState state); void SetState(SourceState state);

View File

@ -135,16 +135,16 @@ int AAudioPlayer::SetSpeakerVolume(uint32_t volume) {
return -1; return -1;
} }
rtc::Optional<uint32_t> AAudioPlayer::SpeakerVolume() const { absl::optional<uint32_t> AAudioPlayer::SpeakerVolume() const {
return rtc::nullopt; return absl::nullopt;
} }
rtc::Optional<uint32_t> AAudioPlayer::MaxSpeakerVolume() const { absl::optional<uint32_t> AAudioPlayer::MaxSpeakerVolume() const {
return rtc::nullopt; return absl::nullopt;
} }
rtc::Optional<uint32_t> AAudioPlayer::MinSpeakerVolume() const { absl::optional<uint32_t> AAudioPlayer::MinSpeakerVolume() const {
return rtc::nullopt; return absl::nullopt;
} }
void AAudioPlayer::OnErrorCallback(aaudio_result_t error) { void AAudioPlayer::OnErrorCallback(aaudio_result_t error) {

View File

@ -14,7 +14,7 @@
#include <aaudio/AAudio.h> #include <aaudio/AAudio.h>
#include <memory> #include <memory>
#include "api/optional.h" #include "absl/types/optional.h"
#include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/include/audio_device_defines.h" #include "modules/audio_device/include/audio_device_defines.h"
#include "rtc_base/messagehandler.h" #include "rtc_base/messagehandler.h"
@ -73,9 +73,9 @@ class AAudioPlayer final : public AudioOutput,
// Not implemented in AAudio. // Not implemented in AAudio.
bool SpeakerVolumeIsAvailable() override; bool SpeakerVolumeIsAvailable() override;
int SetSpeakerVolume(uint32_t volume) override; int SetSpeakerVolume(uint32_t volume) override;
rtc::Optional<uint32_t> SpeakerVolume() const override; absl::optional<uint32_t> SpeakerVolume() const override;
rtc::Optional<uint32_t> MaxSpeakerVolume() const override; absl::optional<uint32_t> MaxSpeakerVolume() const override;
rtc::Optional<uint32_t> MinSpeakerVolume() const override; absl::optional<uint32_t> MinSpeakerVolume() const override;
protected: protected:
// AAudioObserverInterface implementation. // AAudioObserverInterface implementation.

View File

@ -341,7 +341,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
RTC_LOG(INFO) << __FUNCTION__; RTC_LOG(INFO) << __FUNCTION__;
if (!initialized_) if (!initialized_)
return -1; return -1;
rtc::Optional<uint32_t> volume = output_->SpeakerVolume(); absl::optional<uint32_t> volume = output_->SpeakerVolume();
if (!volume) if (!volume)
return -1; return -1;
*output_volume = *volume; *output_volume = *volume;
@ -353,7 +353,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
RTC_LOG(INFO) << __FUNCTION__; RTC_LOG(INFO) << __FUNCTION__;
if (!initialized_) if (!initialized_)
return -1; return -1;
rtc::Optional<uint32_t> max_volume = output_->MaxSpeakerVolume(); absl::optional<uint32_t> max_volume = output_->MaxSpeakerVolume();
if (!max_volume) if (!max_volume)
return -1; return -1;
*output_max_volume = *max_volume; *output_max_volume = *max_volume;
@ -364,7 +364,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
RTC_LOG(INFO) << __FUNCTION__; RTC_LOG(INFO) << __FUNCTION__;
if (!initialized_) if (!initialized_)
return -1; return -1;
rtc::Optional<uint32_t> min_volume = output_->MinSpeakerVolume(); absl::optional<uint32_t> min_volume = output_->MinSpeakerVolume();
if (!min_volume) if (!min_volume)
return -1; return -1;
*output_min_volume = *min_volume; *output_min_volume = *min_volume;

View File

@ -13,7 +13,7 @@
#include <memory> #include <memory>
#include "api/optional.h" #include "absl/types/optional.h"
#include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/audio_device_buffer.h"
#include "sdk/android/native_api/jni/scoped_java_ref.h" #include "sdk/android/native_api/jni/scoped_java_ref.h"
@ -58,9 +58,9 @@ class AudioOutput {
virtual bool Playing() const = 0; virtual bool Playing() const = 0;
virtual bool SpeakerVolumeIsAvailable() = 0; virtual bool SpeakerVolumeIsAvailable() = 0;
virtual int SetSpeakerVolume(uint32_t volume) = 0; virtual int SetSpeakerVolume(uint32_t volume) = 0;
virtual rtc::Optional<uint32_t> SpeakerVolume() const = 0; virtual absl::optional<uint32_t> SpeakerVolume() const = 0;
virtual rtc::Optional<uint32_t> MaxSpeakerVolume() const = 0; virtual absl::optional<uint32_t> MaxSpeakerVolume() const = 0;
virtual rtc::Optional<uint32_t> MinSpeakerVolume() const = 0; virtual absl::optional<uint32_t> MinSpeakerVolume() const = 0;
virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0; virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0;
}; };

View File

@ -144,17 +144,17 @@ int AudioTrackJni::SetSpeakerVolume(uint32_t volume) {
: -1; : -1;
} }
rtc::Optional<uint32_t> AudioTrackJni::MaxSpeakerVolume() const { absl::optional<uint32_t> AudioTrackJni::MaxSpeakerVolume() const {
RTC_DCHECK(thread_checker_.CalledOnValidThread()); RTC_DCHECK(thread_checker_.CalledOnValidThread());
return Java_WebRtcAudioTrack_getStreamMaxVolume(env_, j_audio_track_); return Java_WebRtcAudioTrack_getStreamMaxVolume(env_, j_audio_track_);
} }
rtc::Optional<uint32_t> AudioTrackJni::MinSpeakerVolume() const { absl::optional<uint32_t> AudioTrackJni::MinSpeakerVolume() const {
RTC_DCHECK(thread_checker_.CalledOnValidThread()); RTC_DCHECK(thread_checker_.CalledOnValidThread());
return 0; return 0;
} }
rtc::Optional<uint32_t> AudioTrackJni::SpeakerVolume() const { absl::optional<uint32_t> AudioTrackJni::SpeakerVolume() const {
RTC_DCHECK(thread_checker_.CalledOnValidThread()); RTC_DCHECK(thread_checker_.CalledOnValidThread());
const uint32_t volume = const uint32_t volume =
Java_WebRtcAudioTrack_getStreamVolume(env_, j_audio_track_); Java_WebRtcAudioTrack_getStreamVolume(env_, j_audio_track_);

View File

@ -14,7 +14,7 @@
#include <jni.h> #include <jni.h>
#include <memory> #include <memory>
#include "api/optional.h" #include "absl/types/optional.h"
#include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/include/audio_device_defines.h" #include "modules/audio_device/include/audio_device_defines.h"
#include "rtc_base/thread_checker.h" #include "rtc_base/thread_checker.h"
@ -62,9 +62,9 @@ class AudioTrackJni : public AudioOutput {
bool SpeakerVolumeIsAvailable() override; bool SpeakerVolumeIsAvailable() override;
int SetSpeakerVolume(uint32_t volume) override; int SetSpeakerVolume(uint32_t volume) override;
rtc::Optional<uint32_t> SpeakerVolume() const override; absl::optional<uint32_t> SpeakerVolume() const override;
rtc::Optional<uint32_t> MaxSpeakerVolume() const override; absl::optional<uint32_t> MaxSpeakerVolume() const override;
rtc::Optional<uint32_t> MinSpeakerVolume() const override; absl::optional<uint32_t> MinSpeakerVolume() const override;
void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override; void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;

View File

@ -182,16 +182,16 @@ int OpenSLESPlayer::SetSpeakerVolume(uint32_t volume) {
return -1; return -1;
} }
rtc::Optional<uint32_t> OpenSLESPlayer::SpeakerVolume() const { absl::optional<uint32_t> OpenSLESPlayer::SpeakerVolume() const {
return rtc::nullopt; return absl::nullopt;
} }
rtc::Optional<uint32_t> OpenSLESPlayer::MaxSpeakerVolume() const { absl::optional<uint32_t> OpenSLESPlayer::MaxSpeakerVolume() const {
return rtc::nullopt; return absl::nullopt;
} }
rtc::Optional<uint32_t> OpenSLESPlayer::MinSpeakerVolume() const { absl::optional<uint32_t> OpenSLESPlayer::MinSpeakerVolume() const {
return rtc::nullopt; return absl::nullopt;
} }
void OpenSLESPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { void OpenSLESPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {

View File

@ -16,7 +16,7 @@
#include <SLES/OpenSLES_AndroidConfiguration.h> #include <SLES/OpenSLES_AndroidConfiguration.h>
#include <memory> #include <memory>
#include "api/optional.h" #include "absl/types/optional.h"
#include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/fine_audio_buffer.h" #include "modules/audio_device/fine_audio_buffer.h"
#include "modules/audio_device/include/audio_device_defines.h" #include "modules/audio_device/include/audio_device_defines.h"
@ -75,9 +75,9 @@ class OpenSLESPlayer : public AudioOutput {
bool SpeakerVolumeIsAvailable() override; bool SpeakerVolumeIsAvailable() override;
int SetSpeakerVolume(uint32_t volume) override; int SetSpeakerVolume(uint32_t volume) override;
rtc::Optional<uint32_t> SpeakerVolume() const override; absl::optional<uint32_t> SpeakerVolume() const override;
rtc::Optional<uint32_t> MaxSpeakerVolume() const override; absl::optional<uint32_t> MaxSpeakerVolume() const override;
rtc::Optional<uint32_t> MinSpeakerVolume() const override; absl::optional<uint32_t> MinSpeakerVolume() const override;
void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override; void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;

View File

@ -207,13 +207,13 @@ PeerConnectionInterface::TlsCertPolicy JavaToNativeTlsCertPolicy(
return PeerConnectionInterface::kTlsCertPolicySecure; return PeerConnectionInterface::kTlsCertPolicySecure;
} }
rtc::Optional<rtc::AdapterType> JavaToNativeNetworkPreference( absl::optional<rtc::AdapterType> JavaToNativeNetworkPreference(
JNIEnv* jni, JNIEnv* jni,
const JavaRef<jobject>& j_network_preference) { const JavaRef<jobject>& j_network_preference) {
std::string enum_name = GetJavaEnumName(jni, j_network_preference); std::string enum_name = GetJavaEnumName(jni, j_network_preference);
if (enum_name == "UNKNOWN") if (enum_name == "UNKNOWN")
return rtc::nullopt; return absl::nullopt;
if (enum_name == "ETHERNET") if (enum_name == "ETHERNET")
return rtc::ADAPTER_TYPE_ETHERNET; return rtc::ADAPTER_TYPE_ETHERNET;
@ -231,7 +231,7 @@ rtc::Optional<rtc::AdapterType> JavaToNativeNetworkPreference(
return rtc::ADAPTER_TYPE_LOOPBACK; return rtc::ADAPTER_TYPE_LOOPBACK;
RTC_CHECK(false) << "Unexpected NetworkPreference enum_name " << enum_name; RTC_CHECK(false) << "Unexpected NetworkPreference enum_name " << enum_name;
return rtc::nullopt; return absl::nullopt;
} }
} // namespace jni } // namespace jni

View File

@ -75,7 +75,7 @@ PeerConnectionInterface::TlsCertPolicy JavaToNativeTlsCertPolicy(
JNIEnv* jni, JNIEnv* jni,
const JavaRef<jobject>& j_ice_server_tls_cert_policy); const JavaRef<jobject>& j_ice_server_tls_cert_policy);
rtc::Optional<rtc::AdapterType> JavaToNativeNetworkPreference( absl::optional<rtc::AdapterType> JavaToNativeNetworkPreference(
JNIEnv* jni, JNIEnv* jni,
const JavaRef<jobject>& j_network_preference); const JavaRef<jobject>& j_network_preference);

View File

@ -428,7 +428,7 @@ static jlong JNI_PeerConnectionFactory_CreatePeerConnection(
if (key_type != rtc::KT_DEFAULT) { if (key_type != rtc::KT_DEFAULT) {
rtc::scoped_refptr<rtc::RTCCertificate> certificate = rtc::scoped_refptr<rtc::RTCCertificate> certificate =
rtc::RTCCertificateGenerator::GenerateCertificate( rtc::RTCCertificateGenerator::GenerateCertificate(
rtc::KeyParams(key_type), rtc::nullopt); rtc::KeyParams(key_type), absl::nullopt);
if (!certificate) { if (!certificate) {
RTC_LOG(LS_ERROR) << "Failed to generate certificate. KeyType: " RTC_LOG(LS_ERROR) << "Failed to generate certificate. KeyType: "
<< key_type; << key_type;

View File

@ -89,7 +89,7 @@ ScopedJavaLocalRef<jstring> JNI_RtpTransceiver_GetMid(
JNIEnv* jni, JNIEnv* jni,
const base::android::JavaParamRef<jclass>&, const base::android::JavaParamRef<jclass>&,
jlong j_rtp_transceiver_pointer) { jlong j_rtp_transceiver_pointer) {
rtc::Optional<std::string> mid = absl::optional<std::string> mid =
reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer) reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
->mid(); ->mid();
return NativeToJavaString(jni, mid); return NativeToJavaString(jni, mid);
@ -133,7 +133,7 @@ ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_CurrentDirection(
JNIEnv* jni, JNIEnv* jni,
const base::android::JavaParamRef<jclass>&, const base::android::JavaParamRef<jclass>&,
jlong j_rtp_transceiver_pointer) { jlong j_rtp_transceiver_pointer) {
rtc::Optional<RtpTransceiverDirection> direction = absl::optional<RtpTransceiverDirection> direction =
reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer) reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
->current_direction(); ->current_direction();
return direction ? NativeToJavaRtpTransceiverDirection(jni, *direction) return direction ? NativeToJavaRtpTransceiverDirection(jni, *direction)

View File

@ -27,7 +27,7 @@ std::unique_ptr<SessionDescriptionInterface> JavaToNativeSessionDescription(
jni, Java_SessionDescription_getTypeInCanonicalForm(jni, j_sdp)); jni, Java_SessionDescription_getTypeInCanonicalForm(jni, j_sdp));
std::string std_description = std::string std_description =
JavaToStdString(jni, Java_SessionDescription_getDescription(jni, j_sdp)); JavaToStdString(jni, Java_SessionDescription_getDescription(jni, j_sdp));
rtc::Optional<SdpType> sdp_type_maybe = SdpTypeFromString(std_type); absl::optional<SdpType> sdp_type_maybe = SdpTypeFromString(std_type);
if (!sdp_type_maybe) { if (!sdp_type_maybe) {
RTC_LOG(LS_ERROR) << "Unexpected SDP type: " << std_type; RTC_LOG(LS_ERROR) << "Unexpected SDP type: " << std_type;
return nullptr; return nullptr;

View File

@ -30,9 +30,9 @@ namespace {
const int64_t kNumRtpTicksPerMillisec = 90000 / rtc::kNumMillisecsPerSec; const int64_t kNumRtpTicksPerMillisec = 90000 / rtc::kNumMillisecsPerSec;
template <typename Dst, typename Src> template <typename Dst, typename Src>
inline rtc::Optional<Dst> cast_optional(const rtc::Optional<Src>& value) { inline absl::optional<Dst> cast_optional(const absl::optional<Src>& value) {
return value ? rtc::Optional<Dst>(rtc::dchecked_cast<Dst, Src>(*value)) return value ? absl::optional<Dst>(rtc::dchecked_cast<Dst, Src>(*value))
: rtc::nullopt; : absl::nullopt;
} }
} // namespace } // namespace
@ -106,7 +106,7 @@ int32_t VideoDecoderWrapper::Decode(
frame_extra_info.timestamp_rtp = input_image._timeStamp; frame_extra_info.timestamp_rtp = input_image._timeStamp;
frame_extra_info.timestamp_ntp = input_image.ntp_time_ms_; frame_extra_info.timestamp_ntp = input_image.ntp_time_ms_;
frame_extra_info.qp = frame_extra_info.qp =
qp_parsing_enabled_ ? ParseQP(input_image) : rtc::nullopt; qp_parsing_enabled_ ? ParseQP(input_image) : absl::nullopt;
{ {
rtc::CritScope cs(&frame_extra_infos_lock_); rtc::CritScope cs(&frame_extra_infos_lock_);
frame_extra_infos_.push_back(frame_extra_info); frame_extra_infos_.push_back(frame_extra_info);
@ -183,10 +183,10 @@ void VideoDecoderWrapper::OnDecodedFrame(
JavaToNativeFrame(env, j_frame, frame_extra_info.timestamp_rtp); JavaToNativeFrame(env, j_frame, frame_extra_info.timestamp_rtp);
frame.set_ntp_time_ms(frame_extra_info.timestamp_ntp); frame.set_ntp_time_ms(frame_extra_info.timestamp_ntp);
rtc::Optional<int32_t> decoding_time_ms = absl::optional<int32_t> decoding_time_ms =
JavaToNativeOptionalInt(env, j_decode_time_ms); JavaToNativeOptionalInt(env, j_decode_time_ms);
rtc::Optional<uint8_t> decoder_qp = absl::optional<uint8_t> decoder_qp =
cast_optional<uint8_t, int32_t>(JavaToNativeOptionalInt(env, j_qp)); cast_optional<uint8_t, int32_t>(JavaToNativeOptionalInt(env, j_qp));
// If the decoder provides QP values itself, no need to parse the bitstream. // If the decoder provides QP values itself, no need to parse the bitstream.
// Enable QP parsing if decoder does not provide QP values itself. // Enable QP parsing if decoder does not provide QP values itself.
@ -226,13 +226,13 @@ int32_t VideoDecoderWrapper::HandleReturnCode(JNIEnv* jni,
return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
} }
rtc::Optional<uint8_t> VideoDecoderWrapper::ParseQP( absl::optional<uint8_t> VideoDecoderWrapper::ParseQP(
const EncodedImage& input_image) { const EncodedImage& input_image) {
if (input_image.qp_ != -1) { if (input_image.qp_ != -1) {
return input_image.qp_; return input_image.qp_;
} }
rtc::Optional<uint8_t> qp; absl::optional<uint8_t> qp;
switch (codec_settings_.codecType) { switch (codec_settings_.codecType) {
case kVideoCodecVP8: { case kVideoCodecVP8: {
int qp_int; int qp_int;

View File

@ -66,7 +66,7 @@ class VideoDecoderWrapper : public VideoDecoder {
uint32_t timestamp_rtp; uint32_t timestamp_rtp;
int64_t timestamp_ntp; int64_t timestamp_ntp;
rtc::Optional<uint8_t> qp; absl::optional<uint8_t> qp;
FrameExtraInfo(); FrameExtraInfo();
FrameExtraInfo(const FrameExtraInfo&); FrameExtraInfo(const FrameExtraInfo&);
@ -82,7 +82,7 @@ class VideoDecoderWrapper : public VideoDecoder {
const char* method_name) const char* method_name)
RTC_RUN_ON(decoder_thread_checker_); RTC_RUN_ON(decoder_thread_checker_);
rtc::Optional<uint8_t> ParseQP(const EncodedImage& input_image) absl::optional<uint8_t> ParseQP(const EncodedImage& input_image)
RTC_RUN_ON(decoder_thread_checker_); RTC_RUN_ON(decoder_thread_checker_);
const ScopedJavaGlobalRef<jobject> decoder_; const ScopedJavaGlobalRef<jobject> decoder_;

View File

@ -165,10 +165,10 @@ VideoEncoderWrapper::ScalingSettings VideoEncoderWrapper::GetScalingSettings()
if (!isOn) if (!isOn)
return ScalingSettings::kOff; return ScalingSettings::kOff;
rtc::Optional<int> low = JavaToNativeOptionalInt( absl::optional<int> low = JavaToNativeOptionalInt(
jni, jni,
Java_VideoEncoderWrapper_getScalingSettingsLow(jni, j_scaling_settings)); Java_VideoEncoderWrapper_getScalingSettingsLow(jni, j_scaling_settings));
rtc::Optional<int> high = JavaToNativeOptionalInt( absl::optional<int> high = JavaToNativeOptionalInt(
jni, jni,
Java_VideoEncoderWrapper_getScalingSettingsHigh(jni, j_scaling_settings)); Java_VideoEncoderWrapper_getScalingSettingsHigh(jni, j_scaling_settings));

View File

@ -171,8 +171,8 @@
// Generate non-default certificate. // Generate non-default certificate.
if (keyType != rtc::KT_DEFAULT) { if (keyType != rtc::KT_DEFAULT) {
rtc::scoped_refptr<rtc::RTCCertificate> certificate = rtc::scoped_refptr<rtc::RTCCertificate> certificate =
rtc::RTCCertificateGenerator::GenerateCertificate( rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType),
rtc::KeyParams(keyType), rtc::Optional<uint64_t>()); absl::optional<uint64_t>());
if (!certificate) { if (!certificate) {
RTCLogError(@"Failed to generate certificate."); RTCLogError(@"Failed to generate certificate.");
return nullptr; return nullptr;
@ -184,14 +184,13 @@
nativeConfig->presume_writable_when_fully_relayed = nativeConfig->presume_writable_when_fully_relayed =
_shouldPresumeWritableWhenFullyRelayed ? true : false; _shouldPresumeWritableWhenFullyRelayed ? true : false;
if (_iceCheckMinInterval != nil) { if (_iceCheckMinInterval != nil) {
nativeConfig->ice_check_min_interval = nativeConfig->ice_check_min_interval = absl::optional<int>(_iceCheckMinInterval.intValue);
rtc::Optional<int>(_iceCheckMinInterval.intValue);
} }
if (_iceRegatherIntervalRange != nil) { if (_iceRegatherIntervalRange != nil) {
std::unique_ptr<rtc::IntervalRange> nativeIntervalRange( std::unique_ptr<rtc::IntervalRange> nativeIntervalRange(
_iceRegatherIntervalRange.nativeIntervalRange); _iceRegatherIntervalRange.nativeIntervalRange);
nativeConfig->ice_regather_interval_range = nativeConfig->ice_regather_interval_range =
rtc::Optional<rtc::IntervalRange>(*nativeIntervalRange); absl::optional<rtc::IntervalRange>(*nativeIntervalRange);
} }
nativeConfig->sdp_semantics = [[self class] nativeSdpSemanticsForSdpSemantics:_sdpSemantics]; nativeConfig->sdp_semantics = [[self class] nativeSdpSemanticsForSdpSemantics:_sdpSemantics];
if (_turnCustomizer) { if (_turnCustomizer) {

View File

@ -31,7 +31,7 @@
if (self = [super init]) { if (self = [super init]) {
self.hexString = hexString; self.hexString = hexString;
rtc::Optional<webrtc::H264::ProfileLevelId> profile_level_id = absl::optional<webrtc::H264::ProfileLevelId> profile_level_id =
webrtc::H264::ParseProfileLevelId([hexString cStringUsingEncoding:NSUTF8StringEncoding]); webrtc::H264::ParseProfileLevelId([hexString cStringUsingEncoding:NSUTF8StringEncoding]);
if (profile_level_id.has_value()) { if (profile_level_id.has_value()) {
self.profile = static_cast<RTCH264Profile>(profile_level_id->profile); self.profile = static_cast<RTCH264Profile>(profile_level_id->profile);
@ -46,7 +46,7 @@
self.profile = profile; self.profile = profile;
self.level = level; self.level = level;
rtc::Optional<std::string> hex_string = absl::optional<std::string> hex_string =
webrtc::H264::ProfileLevelIdToString(webrtc::H264::ProfileLevelId( webrtc::H264::ProfileLevelIdToString(webrtc::H264::ProfileLevelId(
static_cast<webrtc::H264::Profile>(profile), static_cast<webrtc::H264::Level>(level))); static_cast<webrtc::H264::Profile>(profile), static_cast<webrtc::H264::Level>(level)));
self.hexString = self.hexString =

View File

@ -470,13 +470,13 @@ void PeerConnectionDelegateAdapter::OnAddTrack(
maxBitrateBps:(nullable NSNumber *)maxBitrateBps { maxBitrateBps:(nullable NSNumber *)maxBitrateBps {
webrtc::PeerConnectionInterface::BitrateParameters params; webrtc::PeerConnectionInterface::BitrateParameters params;
if (minBitrateBps != nil) { if (minBitrateBps != nil) {
params.min_bitrate_bps = rtc::Optional<int>(minBitrateBps.intValue); params.min_bitrate_bps = absl::optional<int>(minBitrateBps.intValue);
} }
if (currentBitrateBps != nil) { if (currentBitrateBps != nil) {
params.current_bitrate_bps = rtc::Optional<int>(currentBitrateBps.intValue); params.current_bitrate_bps = absl::optional<int>(currentBitrateBps.intValue);
} }
if (maxBitrateBps != nil) { if (maxBitrateBps != nil) {
params.max_bitrate_bps = rtc::Optional<int>(maxBitrateBps.intValue); params.max_bitrate_bps = absl::optional<int>(maxBitrateBps.intValue);
} }
return _peerConnection->SetBitrate(params).ok(); return _peerConnection->SetBitrate(params).ok();
} }

View File

@ -93,10 +93,10 @@ const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName);
RTC_NOTREACHED(); RTC_NOTREACHED();
} }
if (_clockRate != nil) { if (_clockRate != nil) {
parameters.clock_rate = rtc::Optional<int>(_clockRate.intValue); parameters.clock_rate = absl::optional<int>(_clockRate.intValue);
} }
if (_numChannels != nil) { if (_numChannels != nil) {
parameters.num_channels = rtc::Optional<int>(_numChannels.intValue); parameters.num_channels = absl::optional<int>(_numChannels.intValue);
} }
for (NSString *paramKey in _parameters.allKeys) { for (NSString *paramKey in _parameters.allKeys) {
std::string key = [NSString stdStringForString:paramKey]; std::string key = [NSString stdStringForString:paramKey];

View File

@ -44,13 +44,13 @@
webrtc::RtpEncodingParameters parameters; webrtc::RtpEncodingParameters parameters;
parameters.active = _isActive; parameters.active = _isActive;
if (_maxBitrateBps != nil) { if (_maxBitrateBps != nil) {
parameters.max_bitrate_bps = rtc::Optional<int>(_maxBitrateBps.intValue); parameters.max_bitrate_bps = absl::optional<int>(_maxBitrateBps.intValue);
} }
if (_minBitrateBps != nil) { if (_minBitrateBps != nil) {
parameters.min_bitrate_bps = rtc::Optional<int>(_minBitrateBps.intValue); parameters.min_bitrate_bps = absl::optional<int>(_minBitrateBps.intValue);
} }
if (_ssrc != nil) { if (_ssrc != nil) {
parameters.ssrc = rtc::Optional<uint32_t>(_ssrc.unsignedLongValue); parameters.ssrc = absl::optional<uint32_t>(_ssrc.unsignedLongValue);
} }
return parameters; return parameters;
} }

View File

@ -43,9 +43,9 @@ namespace {
using namespace webrtc::H264; using namespace webrtc::H264;
NSString *MaxSupportedLevelForProfile(Profile profile) { NSString *MaxSupportedLevelForProfile(Profile profile) {
const rtc::Optional<ProfileLevelId> profileLevelId = [UIDevice maxSupportedH264Profile]; const absl::optional<ProfileLevelId> profileLevelId = [UIDevice maxSupportedH264Profile];
if (profileLevelId && profileLevelId->profile >= profile) { if (profileLevelId && profileLevelId->profile >= profile) {
const rtc::Optional<std::string> profileString = const absl::optional<std::string> profileString =
ProfileLevelIdToString(ProfileLevelId(profile, profileLevelId->level)); ProfileLevelIdToString(ProfileLevelId(profile, profileLevelId->level));
if (profileString) { if (profileString) {
return [NSString stringForStdString:*profileString]; return [NSString stringForStdString:*profileString];

View File

@ -20,7 +20,7 @@
#import "RTCShader.h" #import "RTCShader.h"
#import "WebRTC/RTCLogging.h" #import "WebRTC/RTCLogging.h"
#include "api/optional.h" #include "absl/types/optional.h"
static const int kYTextureUnit = 0; static const int kYTextureUnit = 0;
static const int kUTextureUnit = 1; static const int kUTextureUnit = 1;
@ -73,7 +73,7 @@ static const char kNV12FragmentShaderSource[] =
GLuint _vertexBuffer; GLuint _vertexBuffer;
GLuint _vertexArray; GLuint _vertexArray;
// Store current rotation and only upload new vertex data when rotation changes. // Store current rotation and only upload new vertex data when rotation changes.
rtc::Optional<RTCVideoRotation> _currentRotation; absl::optional<RTCVideoRotation> _currentRotation;
GLuint _i420Program; GLuint _i420Program;
GLuint _nv12Program; GLuint _nv12Program;
@ -144,7 +144,7 @@ static const char kNV12FragmentShaderSource[] =
#endif #endif
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
if (!_currentRotation || rotation != *_currentRotation) { if (!_currentRotation || rotation != *_currentRotation) {
_currentRotation = rtc::Optional<RTCVideoRotation>(rotation); _currentRotation = absl::optional<RTCVideoRotation>(rotation);
RTCSetVertexData(*_currentRotation); RTCSetVertexData(*_currentRotation);
} }
return YES; return YES;

View File

@ -14,6 +14,6 @@
@interface UIDevice (H264Profile) @interface UIDevice (H264Profile)
+ (rtc::Optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile; + (absl::optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile;
@end @end

View File

@ -85,7 +85,7 @@ constexpr SupportedH264Profile kH264MaxSupportedProfiles[] = {
{RTCDeviceTypeIPadPro10Inch, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP762 {RTCDeviceTypeIPadPro10Inch, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP762
}; };
rtc::Optional<ProfileLevelId> FindMaxSupportedProfileForDevice(RTCDeviceType deviceType) { absl::optional<ProfileLevelId> FindMaxSupportedProfileForDevice(RTCDeviceType deviceType) {
const auto* result = std::find_if(std::begin(kH264MaxSupportedProfiles), const auto* result = std::find_if(std::begin(kH264MaxSupportedProfiles),
std::end(kH264MaxSupportedProfiles), std::end(kH264MaxSupportedProfiles),
[deviceType](const SupportedH264Profile& supportedProfile) { [deviceType](const SupportedH264Profile& supportedProfile) {
@ -94,14 +94,14 @@ rtc::Optional<ProfileLevelId> FindMaxSupportedProfileForDevice(RTCDeviceType dev
if (result != std::end(kH264MaxSupportedProfiles)) { if (result != std::end(kH264MaxSupportedProfiles)) {
return result->profile; return result->profile;
} }
return rtc::nullopt; return absl::nullopt;
} }
} // namespace } // namespace
@implementation UIDevice (H264Profile) @implementation UIDevice (H264Profile)
+ (rtc::Optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile { + (absl::optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile {
return FindMaxSupportedProfileForDevice([self deviceType]); return FindMaxSupportedProfileForDevice([self deviceType]);
} }

View File

@ -172,7 +172,7 @@ void compressionOutputCallback(void *encoder,
// returned. The user must initialize the encoder with a resolution and // returned. The user must initialize the encoder with a resolution and
// framerate conforming to the selected H264 level regardless. // framerate conforming to the selected H264 level regardless.
CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) { CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
const rtc::Optional<webrtc::H264::ProfileLevelId> profile_level_id = const absl::optional<webrtc::H264::ProfileLevelId> profile_level_id =
webrtc::H264::ParseSdpProfileLevelId(videoFormat.parameters); webrtc::H264::ParseSdpProfileLevelId(videoFormat.parameters);
RTC_DCHECK(profile_level_id); RTC_DCHECK(profile_level_id);
switch (profile_level_id->profile) { switch (profile_level_id->profile) {

View File

@ -36,7 +36,7 @@ class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource {
// Indicates that the encoder should denoise video before encoding it. // Indicates that the encoder should denoise video before encoding it.
// If it is not set, the default configuration is used which is different // If it is not set, the default configuration is used which is different
// depending on video codec. // depending on video codec.
rtc::Optional<bool> needs_denoising() const override { return false; } absl::optional<bool> needs_denoising() const override { return false; }
SourceState state() const override { return SourceState::kLive; } SourceState state() const override { return SourceState::kLive; }

View File

@ -40,10 +40,10 @@ rtc_static_library("system_wrappers") {
":runtime_enabled_features_api", ":runtime_enabled_features_api",
"..:webrtc_common", "..:webrtc_common",
"../:typedefs", "../:typedefs",
"../api:optional",
"../modules:module_api_public", "../modules:module_api_public",
"../rtc_base:checks", "../rtc_base:checks",
"../rtc_base/synchronization:rw_lock_wrapper", "../rtc_base/synchronization:rw_lock_wrapper",
"//third_party/abseil-cpp/absl/types:optional",
] ]
if (is_posix || is_fuchsia) { if (is_posix || is_fuchsia) {

View File

@ -13,7 +13,7 @@
#include <list> #include <list>
#include "api/optional.h" #include "absl/types/optional.h"
#include "modules/include/module_common_types_public.h" #include "modules/include/module_common_types_public.h"
#include "rtc_base/numerics/moving_median_filter.h" #include "rtc_base/numerics/moving_median_filter.h"
#include "system_wrappers/include/ntp_time.h" #include "system_wrappers/include/ntp_time.h"
@ -72,7 +72,7 @@ class RtpToNtpEstimator {
bool Estimate(int64_t rtp_timestamp, int64_t* rtp_timestamp_ms) const; bool Estimate(int64_t rtp_timestamp, int64_t* rtp_timestamp_ms) const;
// Returns estimated rtp to ntp linear transform parameters. // Returns estimated rtp to ntp linear transform parameters.
const rtc::Optional<Parameters> params() const; const absl::optional<Parameters> params() const;
static const int kMaxInvalidSamples = 3; static const int kMaxInvalidSamples = 3;

View File

@ -193,9 +193,9 @@ bool RtpToNtpEstimator::Estimate(int64_t rtp_timestamp,
return true; return true;
} }
const rtc::Optional<RtpToNtpEstimator::Parameters> RtpToNtpEstimator::params() const absl::optional<RtpToNtpEstimator::Parameters> RtpToNtpEstimator::params()
const { const {
rtc::Optional<Parameters> res; absl::optional<Parameters> res;
if (params_calculated_) { if (params_calculated_) {
res.emplace(smoothing_filter_.GetFilteredValue()); res.emplace(smoothing_filter_.GetFilteredValue());
} }