Replace rtc::Optional with absl::optional
This is a no-op change because rtc::Optional is an alias for absl::optional.
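For reference, api/optional.h at this point reduces to a set of aliases into Abseil; a rough sketch of its effective contents (not the verbatim header):

#include "absl/types/optional.h"
namespace rtc {
using absl::nullopt;  // rtc::nullopt is the same object as absl::nullopt
template <typename T>
using Optional = absl::optional<T>;  // rtc::Optional<T> is the same type as absl::optional<T>
}  // namespace rtc

Because the alias and the underlying type are identical, spelling one out for the other changes no behavior and no ABI.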
This CL was generated by running the following script, passing every top-level directory except rtc_base and api as arguments:
find $@ -type f \( -name \*.h -o -name \*.cc -o -name \*.mm \) \
-exec sed -i 's|rtc::Optional|absl::optional|g' {} \+ \
-exec sed -i 's|rtc::nullopt|absl::nullopt|g' {} \+ \
-exec sed -i 's|#include "api/optional.h"|#include "absl/types/optional.h"|' {} \+
find $@ -type f -name BUILD.gn \
-exec sed -r -i 's|"[\./api]*:optional"|"//third_party/abseil-cpp/absl/types:optional"|' {} \+;
git cl format
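In effect, each matching source file is rewritten in place. A minimal before/after sketch of what the sed passes do (hypothetical file, not one of the files in this CL):

// before
#include "api/optional.h"
rtc::Optional<int> MaybeBitrateBps();  // returns rtc::nullopt when unset

// after
#include "absl/types/optional.h"
absl::optional<int> MaybeBitrateBps();  // returns absl::nullopt when unset

The BUILD.gn pass swaps the corresponding "../api:optional" (or "//api:optional") dep for "//third_party/abseil-cpp/absl/types:optional", as the diff below shows.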
Bug: webrtc:9078
Change-Id: I9465c172e65ba6e6ed4e4fdc35b0b265038d6f71
Reviewed-on: https://webrtc-review.googlesource.com/84584
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Commit-Queue: Danil Chapovalov <danilchap@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23697}
diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn
index 1e3bd62..37764b4 100644
--- a/sdk/BUILD.gn
+++ b/sdk/BUILD.gn
@@ -304,12 +304,12 @@
":videoframebuffer_objc",
":videosource_objc",
"../api:libjingle_peerconnection_api",
- "../api:optional",
"../api/video:video_frame",
"../common_video",
"../media:rtc_media_base",
"../rtc_base:checks",
"../rtc_base:rtc_base",
+ "//third_party/abseil-cpp/absl/types:optional",
]
configs += [
diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn
index 031a20f..498c7dc 100644
--- a/sdk/android/BUILD.gn
+++ b/sdk/android/BUILD.gn
@@ -87,11 +87,11 @@
":internal_jni",
":native_api_jni",
"../../api:libjingle_peerconnection_api",
- "../../api:optional",
"../../rtc_base:checks",
"../../rtc_base:rtc_base",
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:metrics_api",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -145,11 +145,11 @@
":base_jni",
":generated_audio_device_module_base_jni",
":native_api_jni",
- "../../api:optional",
"../../modules/audio_device:audio_device_buffer",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:metrics_api",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -170,13 +170,13 @@
":audio_device_module_base",
":base_jni",
"../../api:array_view",
- "../../api:optional",
"../../modules/audio_device:audio_device",
"../../modules/audio_device:audio_device_buffer",
"../../rtc_base:checks",
"../../rtc_base:rtc_base",
"../../rtc_base:rtc_base_approved",
"../../system_wrappers",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
}
@@ -196,11 +196,11 @@
":audio_device_module_base",
":base_jni",
"../../api:array_view",
- "../../api:optional",
"../../modules/audio_device:audio_device",
"../../modules/audio_device:audio_device_buffer",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -217,12 +217,12 @@
":audio_device_module_base",
":base_jni",
":generated_java_audio_device_module_native_jni",
- "../../api:optional",
"../../modules/audio_device:audio_device",
"../../modules/audio_device:audio_device_buffer",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:metrics_api",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -1148,9 +1148,9 @@
":generated_external_classes_jni",
":generated_native_api_jni",
":internal_jni",
- "//api:optional",
"//rtc_base:checks",
"//rtc_base:rtc_base_approved",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
diff --git a/sdk/android/native_api/jni/java_types.cc b/sdk/android/native_api/jni/java_types.cc
index fbc35aa..df0a229 100644
--- a/sdk/android/native_api/jni/java_types.cc
+++ b/sdk/android/native_api/jni/java_types.cc
@@ -125,18 +125,18 @@
return JNI_Long::Java_Long_longValue(env, j_long);
}
-rtc::Optional<bool> JavaToNativeOptionalBool(JNIEnv* jni,
- const JavaRef<jobject>& boolean) {
+absl::optional<bool> JavaToNativeOptionalBool(JNIEnv* jni,
+ const JavaRef<jobject>& boolean) {
if (IsNull(jni, boolean))
- return rtc::nullopt;
+ return absl::nullopt;
return JNI_Boolean::Java_Boolean_booleanValue(jni, boolean);
}
-rtc::Optional<int32_t> JavaToNativeOptionalInt(
+absl::optional<int32_t> JavaToNativeOptionalInt(
JNIEnv* jni,
const JavaRef<jobject>& integer) {
if (IsNull(jni, integer))
- return rtc::nullopt;
+ return absl::nullopt;
return JNI_Integer::Java_Integer_intValue(jni, integer);
}
@@ -196,13 +196,13 @@
ScopedJavaLocalRef<jobject> NativeToJavaInteger(
JNIEnv* jni,
- const rtc::Optional<int32_t>& optional_int) {
+ const absl::optional<int32_t>& optional_int) {
return optional_int ? NativeToJavaInteger(jni, *optional_int) : nullptr;
}
ScopedJavaLocalRef<jstring> NativeToJavaString(
JNIEnv* jni,
- const rtc::Optional<std::string>& str) {
+ const absl::optional<std::string>& str) {
return str ? NativeToJavaString(jni, *str) : nullptr;
}
diff --git a/sdk/android/native_api/jni/java_types.h b/sdk/android/native_api/jni/java_types.h
index a84b7d9..3b85704 100644
--- a/sdk/android/native_api/jni/java_types.h
+++ b/sdk/android/native_api/jni/java_types.h
@@ -22,7 +22,7 @@
#include <string>
#include <vector>
-#include "api/optional.h"
+#include "absl/types/optional.h"
#include "rtc_base/checks.h"
#include "rtc_base/thread_checker.h"
#include "sdk/android/native_api/jni/scoped_java_ref.h"
@@ -126,10 +126,11 @@
int64_t JavaToNativeLong(JNIEnv* env, const JavaRef<jobject>& j_long);
-rtc::Optional<bool> JavaToNativeOptionalBool(JNIEnv* jni,
- const JavaRef<jobject>& boolean);
-rtc::Optional<int32_t> JavaToNativeOptionalInt(JNIEnv* jni,
- const JavaRef<jobject>& integer);
+absl::optional<bool> JavaToNativeOptionalBool(JNIEnv* jni,
+ const JavaRef<jobject>& boolean);
+absl::optional<int32_t> JavaToNativeOptionalInt(
+ JNIEnv* jni,
+ const JavaRef<jobject>& integer);
// Given a (UTF-16) jstring return a new UTF-8 native string.
std::string JavaToNativeString(JNIEnv* jni, const JavaRef<jstring>& j_string);
@@ -196,10 +197,10 @@
ScopedJavaLocalRef<jobject> NativeToJavaInteger(
JNIEnv* jni,
- const rtc::Optional<int32_t>& optional_int);
+ const absl::optional<int32_t>& optional_int);
ScopedJavaLocalRef<jstring> NativeToJavaString(
JNIEnv* jni,
- const rtc::Optional<std::string>& str);
+ const absl::optional<std::string>& str);
// Helper function for converting std::vector<T> into a Java array.
template <typename T, typename Convert>
diff --git a/sdk/android/native_api/video/videosource.cc b/sdk/android/native_api/video/videosource.cc
index 9470feb..4c302da 100644
--- a/sdk/android/native_api/video/videosource.cc
+++ b/sdk/android/native_api/video/videosource.cc
@@ -74,7 +74,7 @@
return android_video_track_source_->is_screencast();
}
- rtc::Optional<bool> needs_denoising() const override {
+ absl::optional<bool> needs_denoising() const override {
return android_video_track_source_->needs_denoising();
}
diff --git a/sdk/android/src/jni/androidmediadecoder.cc b/sdk/android/src/jni/androidmediadecoder.cc
index a5f47da..1ca9e06 100644
--- a/sdk/android/src/jni/androidmediadecoder.cc
+++ b/sdk/android/src/jni/androidmediadecoder.cc
@@ -124,7 +124,7 @@
int current_delay_time_ms_; // Overall delay time in the current second.
int32_t max_pending_frames_; // Maximum number of pending input frames.
H264BitstreamParser h264_bitstream_parser_;
- std::deque<rtc::Optional<uint8_t>> pending_frame_qps_;
+ std::deque<absl::optional<uint8_t>> pending_frame_qps_;
// State that is constant for the lifetime of this object once the ctor
// returns.
@@ -506,7 +506,7 @@
// Save input image timestamps for later output.
frames_received_++;
current_bytes_ += inputImage._length;
- rtc::Optional<uint8_t> qp;
+ absl::optional<uint8_t> qp;
if (codecType_ == kVideoCodecVP8) {
int qp_int;
if (vp8::GetQp(inputImage._buffer, inputImage._length, &qp_int)) {
@@ -743,7 +743,7 @@
decoded_frame.set_timestamp(output_timestamps_ms);
decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms);
- rtc::Optional<uint8_t> qp = pending_frame_qps_.front();
+ absl::optional<uint8_t> qp = pending_frame_qps_.front();
pending_frame_qps_.pop_front();
callback_->Decoded(decoded_frame, decode_time_ms, qp);
}
diff --git a/sdk/android/src/jni/androidmediaencoder.cc b/sdk/android/src/jni/androidmediaencoder.cc
index de2743b..9edbf2d 100644
--- a/sdk/android/src/jni/androidmediaencoder.cc
+++ b/sdk/android/src/jni/androidmediaencoder.cc
@@ -347,7 +347,7 @@
// Check allowed H.264 profile
profile_ = H264::Profile::kProfileBaseline;
if (codec_type == kVideoCodecH264) {
- const rtc::Optional<H264::ProfileLevelId> profile_level_id =
+ const absl::optional<H264::ProfileLevelId> profile_level_id =
H264::ParseSdpProfileLevelId(codec_.params);
RTC_DCHECK(profile_level_id);
profile_ = profile_level_id->profile;
diff --git a/sdk/android/src/jni/androidvideotracksource.cc b/sdk/android/src/jni/androidvideotracksource.cc
index 1d75a4f..41d4278 100644
--- a/sdk/android/src/jni/androidvideotracksource.cc
+++ b/sdk/android/src/jni/androidvideotracksource.cc
@@ -38,7 +38,7 @@
return is_screencast_;
}
-rtc::Optional<bool> AndroidVideoTrackSource::needs_denoising() const {
+absl::optional<bool> AndroidVideoTrackSource::needs_denoising() const {
return false;
}
diff --git a/sdk/android/src/jni/androidvideotracksource.h b/sdk/android/src/jni/androidvideotracksource.h
index 3dbcb2a..3c4d1ef 100644
--- a/sdk/android/src/jni/androidvideotracksource.h
+++ b/sdk/android/src/jni/androidvideotracksource.h
@@ -37,7 +37,7 @@
// Indicates that the encoder should denoise video before encoding it.
// If it is not set, the default configuration is used which is different
// depending on video codec.
- rtc::Optional<bool> needs_denoising() const override;
+ absl::optional<bool> needs_denoising() const override;
// Called by the native capture observer
void SetState(SourceState state);
diff --git a/sdk/android/src/jni/audio_device/aaudio_player.cc b/sdk/android/src/jni/audio_device/aaudio_player.cc
index e6bcddd..f32c265 100644
--- a/sdk/android/src/jni/audio_device/aaudio_player.cc
+++ b/sdk/android/src/jni/audio_device/aaudio_player.cc
@@ -135,16 +135,16 @@
return -1;
}
-rtc::Optional<uint32_t> AAudioPlayer::SpeakerVolume() const {
- return rtc::nullopt;
+absl::optional<uint32_t> AAudioPlayer::SpeakerVolume() const {
+ return absl::nullopt;
}
-rtc::Optional<uint32_t> AAudioPlayer::MaxSpeakerVolume() const {
- return rtc::nullopt;
+absl::optional<uint32_t> AAudioPlayer::MaxSpeakerVolume() const {
+ return absl::nullopt;
}
-rtc::Optional<uint32_t> AAudioPlayer::MinSpeakerVolume() const {
- return rtc::nullopt;
+absl::optional<uint32_t> AAudioPlayer::MinSpeakerVolume() const {
+ return absl::nullopt;
}
void AAudioPlayer::OnErrorCallback(aaudio_result_t error) {
diff --git a/sdk/android/src/jni/audio_device/aaudio_player.h b/sdk/android/src/jni/audio_device/aaudio_player.h
index 92d1800..b43b5b3 100644
--- a/sdk/android/src/jni/audio_device/aaudio_player.h
+++ b/sdk/android/src/jni/audio_device/aaudio_player.h
@@ -14,7 +14,7 @@
#include <aaudio/AAudio.h>
#include <memory>
-#include "api/optional.h"
+#include "absl/types/optional.h"
#include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/include/audio_device_defines.h"
#include "rtc_base/messagehandler.h"
@@ -73,9 +73,9 @@
// Not implemented in AAudio.
bool SpeakerVolumeIsAvailable() override;
int SetSpeakerVolume(uint32_t volume) override;
- rtc::Optional<uint32_t> SpeakerVolume() const override;
- rtc::Optional<uint32_t> MaxSpeakerVolume() const override;
- rtc::Optional<uint32_t> MinSpeakerVolume() const override;
+ absl::optional<uint32_t> SpeakerVolume() const override;
+ absl::optional<uint32_t> MaxSpeakerVolume() const override;
+ absl::optional<uint32_t> MinSpeakerVolume() const override;
protected:
// AAudioObserverInterface implementation.
diff --git a/sdk/android/src/jni/audio_device/audio_device_module.cc b/sdk/android/src/jni/audio_device/audio_device_module.cc
index 0b18fe4..196c655 100644
--- a/sdk/android/src/jni/audio_device/audio_device_module.cc
+++ b/sdk/android/src/jni/audio_device/audio_device_module.cc
@@ -341,7 +341,7 @@
RTC_LOG(INFO) << __FUNCTION__;
if (!initialized_)
return -1;
- rtc::Optional<uint32_t> volume = output_->SpeakerVolume();
+ absl::optional<uint32_t> volume = output_->SpeakerVolume();
if (!volume)
return -1;
*output_volume = *volume;
@@ -353,7 +353,7 @@
RTC_LOG(INFO) << __FUNCTION__;
if (!initialized_)
return -1;
- rtc::Optional<uint32_t> max_volume = output_->MaxSpeakerVolume();
+ absl::optional<uint32_t> max_volume = output_->MaxSpeakerVolume();
if (!max_volume)
return -1;
*output_max_volume = *max_volume;
@@ -364,7 +364,7 @@
RTC_LOG(INFO) << __FUNCTION__;
if (!initialized_)
return -1;
- rtc::Optional<uint32_t> min_volume = output_->MinSpeakerVolume();
+ absl::optional<uint32_t> min_volume = output_->MinSpeakerVolume();
if (!min_volume)
return -1;
*output_min_volume = *min_volume;
diff --git a/sdk/android/src/jni/audio_device/audio_device_module.h b/sdk/android/src/jni/audio_device/audio_device_module.h
index c8fdfc3..cddd3e0 100644
--- a/sdk/android/src/jni/audio_device/audio_device_module.h
+++ b/sdk/android/src/jni/audio_device/audio_device_module.h
@@ -13,7 +13,7 @@
#include <memory>
-#include "api/optional.h"
+#include "absl/types/optional.h"
#include "modules/audio_device/audio_device_buffer.h"
#include "sdk/android/native_api/jni/scoped_java_ref.h"
@@ -58,9 +58,9 @@
virtual bool Playing() const = 0;
virtual bool SpeakerVolumeIsAvailable() = 0;
virtual int SetSpeakerVolume(uint32_t volume) = 0;
- virtual rtc::Optional<uint32_t> SpeakerVolume() const = 0;
- virtual rtc::Optional<uint32_t> MaxSpeakerVolume() const = 0;
- virtual rtc::Optional<uint32_t> MinSpeakerVolume() const = 0;
+ virtual absl::optional<uint32_t> SpeakerVolume() const = 0;
+ virtual absl::optional<uint32_t> MaxSpeakerVolume() const = 0;
+ virtual absl::optional<uint32_t> MinSpeakerVolume() const = 0;
virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0;
};
diff --git a/sdk/android/src/jni/audio_device/audio_track_jni.cc b/sdk/android/src/jni/audio_device/audio_track_jni.cc
index 1b49de6..03959d6 100644
--- a/sdk/android/src/jni/audio_device/audio_track_jni.cc
+++ b/sdk/android/src/jni/audio_device/audio_track_jni.cc
@@ -144,17 +144,17 @@
: -1;
}
-rtc::Optional<uint32_t> AudioTrackJni::MaxSpeakerVolume() const {
+absl::optional<uint32_t> AudioTrackJni::MaxSpeakerVolume() const {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
return Java_WebRtcAudioTrack_getStreamMaxVolume(env_, j_audio_track_);
}
-rtc::Optional<uint32_t> AudioTrackJni::MinSpeakerVolume() const {
+absl::optional<uint32_t> AudioTrackJni::MinSpeakerVolume() const {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
return 0;
}
-rtc::Optional<uint32_t> AudioTrackJni::SpeakerVolume() const {
+absl::optional<uint32_t> AudioTrackJni::SpeakerVolume() const {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
const uint32_t volume =
Java_WebRtcAudioTrack_getStreamVolume(env_, j_audio_track_);
diff --git a/sdk/android/src/jni/audio_device/audio_track_jni.h b/sdk/android/src/jni/audio_device/audio_track_jni.h
index 1225caf..25c6b6f 100644
--- a/sdk/android/src/jni/audio_device/audio_track_jni.h
+++ b/sdk/android/src/jni/audio_device/audio_track_jni.h
@@ -14,7 +14,7 @@
#include <jni.h>
#include <memory>
-#include "api/optional.h"
+#include "absl/types/optional.h"
#include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/include/audio_device_defines.h"
#include "rtc_base/thread_checker.h"
@@ -62,9 +62,9 @@
bool SpeakerVolumeIsAvailable() override;
int SetSpeakerVolume(uint32_t volume) override;
- rtc::Optional<uint32_t> SpeakerVolume() const override;
- rtc::Optional<uint32_t> MaxSpeakerVolume() const override;
- rtc::Optional<uint32_t> MinSpeakerVolume() const override;
+ absl::optional<uint32_t> SpeakerVolume() const override;
+ absl::optional<uint32_t> MaxSpeakerVolume() const override;
+ absl::optional<uint32_t> MinSpeakerVolume() const override;
void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
diff --git a/sdk/android/src/jni/audio_device/opensles_player.cc b/sdk/android/src/jni/audio_device/opensles_player.cc
index f5f56bf..3e831a9 100644
--- a/sdk/android/src/jni/audio_device/opensles_player.cc
+++ b/sdk/android/src/jni/audio_device/opensles_player.cc
@@ -182,16 +182,16 @@
return -1;
}
-rtc::Optional<uint32_t> OpenSLESPlayer::SpeakerVolume() const {
- return rtc::nullopt;
+absl::optional<uint32_t> OpenSLESPlayer::SpeakerVolume() const {
+ return absl::nullopt;
}
-rtc::Optional<uint32_t> OpenSLESPlayer::MaxSpeakerVolume() const {
- return rtc::nullopt;
+absl::optional<uint32_t> OpenSLESPlayer::MaxSpeakerVolume() const {
+ return absl::nullopt;
}
-rtc::Optional<uint32_t> OpenSLESPlayer::MinSpeakerVolume() const {
- return rtc::nullopt;
+absl::optional<uint32_t> OpenSLESPlayer::MinSpeakerVolume() const {
+ return absl::nullopt;
}
void OpenSLESPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
diff --git a/sdk/android/src/jni/audio_device/opensles_player.h b/sdk/android/src/jni/audio_device/opensles_player.h
index d5f4a68..d8befe5 100644
--- a/sdk/android/src/jni/audio_device/opensles_player.h
+++ b/sdk/android/src/jni/audio_device/opensles_player.h
@@ -16,7 +16,7 @@
#include <SLES/OpenSLES_AndroidConfiguration.h>
#include <memory>
-#include "api/optional.h"
+#include "absl/types/optional.h"
#include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/fine_audio_buffer.h"
#include "modules/audio_device/include/audio_device_defines.h"
@@ -75,9 +75,9 @@
bool SpeakerVolumeIsAvailable() override;
int SetSpeakerVolume(uint32_t volume) override;
- rtc::Optional<uint32_t> SpeakerVolume() const override;
- rtc::Optional<uint32_t> MaxSpeakerVolume() const override;
- rtc::Optional<uint32_t> MinSpeakerVolume() const override;
+ absl::optional<uint32_t> SpeakerVolume() const override;
+ absl::optional<uint32_t> MaxSpeakerVolume() const override;
+ absl::optional<uint32_t> MinSpeakerVolume() const override;
void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
diff --git a/sdk/android/src/jni/pc/icecandidate.cc b/sdk/android/src/jni/pc/icecandidate.cc
index 84e0f6e..b9d66a4 100644
--- a/sdk/android/src/jni/pc/icecandidate.cc
+++ b/sdk/android/src/jni/pc/icecandidate.cc
@@ -207,13 +207,13 @@
return PeerConnectionInterface::kTlsCertPolicySecure;
}
-rtc::Optional<rtc::AdapterType> JavaToNativeNetworkPreference(
+absl::optional<rtc::AdapterType> JavaToNativeNetworkPreference(
JNIEnv* jni,
const JavaRef<jobject>& j_network_preference) {
std::string enum_name = GetJavaEnumName(jni, j_network_preference);
if (enum_name == "UNKNOWN")
- return rtc::nullopt;
+ return absl::nullopt;
if (enum_name == "ETHERNET")
return rtc::ADAPTER_TYPE_ETHERNET;
@@ -231,7 +231,7 @@
return rtc::ADAPTER_TYPE_LOOPBACK;
RTC_CHECK(false) << "Unexpected NetworkPreference enum_name " << enum_name;
- return rtc::nullopt;
+ return absl::nullopt;
}
} // namespace jni
diff --git a/sdk/android/src/jni/pc/icecandidate.h b/sdk/android/src/jni/pc/icecandidate.h
index be4d27c..662b649 100644
--- a/sdk/android/src/jni/pc/icecandidate.h
+++ b/sdk/android/src/jni/pc/icecandidate.h
@@ -75,7 +75,7 @@
JNIEnv* jni,
const JavaRef<jobject>& j_ice_server_tls_cert_policy);
-rtc::Optional<rtc::AdapterType> JavaToNativeNetworkPreference(
+absl::optional<rtc::AdapterType> JavaToNativeNetworkPreference(
JNIEnv* jni,
const JavaRef<jobject>& j_network_preference);
diff --git a/sdk/android/src/jni/pc/peerconnectionfactory.cc b/sdk/android/src/jni/pc/peerconnectionfactory.cc
index ab5bf55..5730c20 100644
--- a/sdk/android/src/jni/pc/peerconnectionfactory.cc
+++ b/sdk/android/src/jni/pc/peerconnectionfactory.cc
@@ -428,7 +428,7 @@
if (key_type != rtc::KT_DEFAULT) {
rtc::scoped_refptr<rtc::RTCCertificate> certificate =
rtc::RTCCertificateGenerator::GenerateCertificate(
- rtc::KeyParams(key_type), rtc::nullopt);
+ rtc::KeyParams(key_type), absl::nullopt);
if (!certificate) {
RTC_LOG(LS_ERROR) << "Failed to generate certificate. KeyType: "
<< key_type;
diff --git a/sdk/android/src/jni/pc/rtptransceiver.cc b/sdk/android/src/jni/pc/rtptransceiver.cc
index 0a115c2..fa20d80 100644
--- a/sdk/android/src/jni/pc/rtptransceiver.cc
+++ b/sdk/android/src/jni/pc/rtptransceiver.cc
@@ -89,7 +89,7 @@
JNIEnv* jni,
const base::android::JavaParamRef<jclass>&,
jlong j_rtp_transceiver_pointer) {
- rtc::Optional<std::string> mid =
+ absl::optional<std::string> mid =
reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
->mid();
return NativeToJavaString(jni, mid);
@@ -133,7 +133,7 @@
JNIEnv* jni,
const base::android::JavaParamRef<jclass>&,
jlong j_rtp_transceiver_pointer) {
- rtc::Optional<RtpTransceiverDirection> direction =
+ absl::optional<RtpTransceiverDirection> direction =
reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
->current_direction();
return direction ? NativeToJavaRtpTransceiverDirection(jni, *direction)
diff --git a/sdk/android/src/jni/pc/sessiondescription.cc b/sdk/android/src/jni/pc/sessiondescription.cc
index 6bdbfa1..bd3806d 100644
--- a/sdk/android/src/jni/pc/sessiondescription.cc
+++ b/sdk/android/src/jni/pc/sessiondescription.cc
@@ -27,7 +27,7 @@
jni, Java_SessionDescription_getTypeInCanonicalForm(jni, j_sdp));
std::string std_description =
JavaToStdString(jni, Java_SessionDescription_getDescription(jni, j_sdp));
- rtc::Optional<SdpType> sdp_type_maybe = SdpTypeFromString(std_type);
+ absl::optional<SdpType> sdp_type_maybe = SdpTypeFromString(std_type);
if (!sdp_type_maybe) {
RTC_LOG(LS_ERROR) << "Unexpected SDP type: " << std_type;
return nullptr;
diff --git a/sdk/android/src/jni/videodecoderwrapper.cc b/sdk/android/src/jni/videodecoderwrapper.cc
index a7aee04..5fbd72f 100644
--- a/sdk/android/src/jni/videodecoderwrapper.cc
+++ b/sdk/android/src/jni/videodecoderwrapper.cc
@@ -30,9 +30,9 @@
const int64_t kNumRtpTicksPerMillisec = 90000 / rtc::kNumMillisecsPerSec;
template <typename Dst, typename Src>
-inline rtc::Optional<Dst> cast_optional(const rtc::Optional<Src>& value) {
- return value ? rtc::Optional<Dst>(rtc::dchecked_cast<Dst, Src>(*value))
- : rtc::nullopt;
+inline absl::optional<Dst> cast_optional(const absl::optional<Src>& value) {
+ return value ? absl::optional<Dst>(rtc::dchecked_cast<Dst, Src>(*value))
+ : absl::nullopt;
}
} // namespace
@@ -106,7 +106,7 @@
frame_extra_info.timestamp_rtp = input_image._timeStamp;
frame_extra_info.timestamp_ntp = input_image.ntp_time_ms_;
frame_extra_info.qp =
- qp_parsing_enabled_ ? ParseQP(input_image) : rtc::nullopt;
+ qp_parsing_enabled_ ? ParseQP(input_image) : absl::nullopt;
{
rtc::CritScope cs(&frame_extra_infos_lock_);
frame_extra_infos_.push_back(frame_extra_info);
@@ -183,10 +183,10 @@
JavaToNativeFrame(env, j_frame, frame_extra_info.timestamp_rtp);
frame.set_ntp_time_ms(frame_extra_info.timestamp_ntp);
- rtc::Optional<int32_t> decoding_time_ms =
+ absl::optional<int32_t> decoding_time_ms =
JavaToNativeOptionalInt(env, j_decode_time_ms);
- rtc::Optional<uint8_t> decoder_qp =
+ absl::optional<uint8_t> decoder_qp =
cast_optional<uint8_t, int32_t>(JavaToNativeOptionalInt(env, j_qp));
// If the decoder provides QP values itself, no need to parse the bitstream.
// Enable QP parsing if decoder does not provide QP values itself.
@@ -226,13 +226,13 @@
return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
}
-rtc::Optional<uint8_t> VideoDecoderWrapper::ParseQP(
+absl::optional<uint8_t> VideoDecoderWrapper::ParseQP(
const EncodedImage& input_image) {
if (input_image.qp_ != -1) {
return input_image.qp_;
}
- rtc::Optional<uint8_t> qp;
+ absl::optional<uint8_t> qp;
switch (codec_settings_.codecType) {
case kVideoCodecVP8: {
int qp_int;
diff --git a/sdk/android/src/jni/videodecoderwrapper.h b/sdk/android/src/jni/videodecoderwrapper.h
index b56a3a7..c719aa4 100644
--- a/sdk/android/src/jni/videodecoderwrapper.h
+++ b/sdk/android/src/jni/videodecoderwrapper.h
@@ -66,7 +66,7 @@
uint32_t timestamp_rtp;
int64_t timestamp_ntp;
- rtc::Optional<uint8_t> qp;
+ absl::optional<uint8_t> qp;
FrameExtraInfo();
FrameExtraInfo(const FrameExtraInfo&);
@@ -82,7 +82,7 @@
const char* method_name)
RTC_RUN_ON(decoder_thread_checker_);
- rtc::Optional<uint8_t> ParseQP(const EncodedImage& input_image)
+ absl::optional<uint8_t> ParseQP(const EncodedImage& input_image)
RTC_RUN_ON(decoder_thread_checker_);
const ScopedJavaGlobalRef<jobject> decoder_;
diff --git a/sdk/android/src/jni/videoencoderwrapper.cc b/sdk/android/src/jni/videoencoderwrapper.cc
index ce48f56..94719ea 100644
--- a/sdk/android/src/jni/videoencoderwrapper.cc
+++ b/sdk/android/src/jni/videoencoderwrapper.cc
@@ -165,10 +165,10 @@
if (!isOn)
return ScalingSettings::kOff;
- rtc::Optional<int> low = JavaToNativeOptionalInt(
+ absl::optional<int> low = JavaToNativeOptionalInt(
jni,
Java_VideoEncoderWrapper_getScalingSettingsLow(jni, j_scaling_settings));
- rtc::Optional<int> high = JavaToNativeOptionalInt(
+ absl::optional<int> high = JavaToNativeOptionalInt(
jni,
Java_VideoEncoderWrapper_getScalingSettingsHigh(jni, j_scaling_settings));
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm
index 4bd0450..a357085 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm
@@ -171,8 +171,8 @@
// Generate non-default certificate.
if (keyType != rtc::KT_DEFAULT) {
rtc::scoped_refptr<rtc::RTCCertificate> certificate =
- rtc::RTCCertificateGenerator::GenerateCertificate(
- rtc::KeyParams(keyType), rtc::Optional<uint64_t>());
+ rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType),
+ absl::optional<uint64_t>());
if (!certificate) {
RTCLogError(@"Failed to generate certificate.");
return nullptr;
@@ -184,14 +184,13 @@
nativeConfig->presume_writable_when_fully_relayed =
_shouldPresumeWritableWhenFullyRelayed ? true : false;
if (_iceCheckMinInterval != nil) {
- nativeConfig->ice_check_min_interval =
- rtc::Optional<int>(_iceCheckMinInterval.intValue);
+ nativeConfig->ice_check_min_interval = absl::optional<int>(_iceCheckMinInterval.intValue);
}
if (_iceRegatherIntervalRange != nil) {
std::unique_ptr<rtc::IntervalRange> nativeIntervalRange(
_iceRegatherIntervalRange.nativeIntervalRange);
nativeConfig->ice_regather_interval_range =
- rtc::Optional<rtc::IntervalRange>(*nativeIntervalRange);
+ absl::optional<rtc::IntervalRange>(*nativeIntervalRange);
}
nativeConfig->sdp_semantics = [[self class] nativeSdpSemanticsForSdpSemantics:_sdpSemantics];
if (_turnCustomizer) {
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCH264ProfileLevelId.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCH264ProfileLevelId.mm
index 33f9ae9..04a5689 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCH264ProfileLevelId.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCH264ProfileLevelId.mm
@@ -31,7 +31,7 @@
if (self = [super init]) {
self.hexString = hexString;
- rtc::Optional<webrtc::H264::ProfileLevelId> profile_level_id =
+ absl::optional<webrtc::H264::ProfileLevelId> profile_level_id =
webrtc::H264::ParseProfileLevelId([hexString cStringUsingEncoding:NSUTF8StringEncoding]);
if (profile_level_id.has_value()) {
self.profile = static_cast<RTCH264Profile>(profile_level_id->profile);
@@ -46,7 +46,7 @@
self.profile = profile;
self.level = level;
- rtc::Optional<std::string> hex_string =
+ absl::optional<std::string> hex_string =
webrtc::H264::ProfileLevelIdToString(webrtc::H264::ProfileLevelId(
static_cast<webrtc::H264::Profile>(profile), static_cast<webrtc::H264::Level>(level)));
self.hexString =
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm
index c169422..bea0ede 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm
@@ -470,13 +470,13 @@
maxBitrateBps:(nullable NSNumber *)maxBitrateBps {
webrtc::PeerConnectionInterface::BitrateParameters params;
if (minBitrateBps != nil) {
- params.min_bitrate_bps = rtc::Optional<int>(minBitrateBps.intValue);
+ params.min_bitrate_bps = absl::optional<int>(minBitrateBps.intValue);
}
if (currentBitrateBps != nil) {
- params.current_bitrate_bps = rtc::Optional<int>(currentBitrateBps.intValue);
+ params.current_bitrate_bps = absl::optional<int>(currentBitrateBps.intValue);
}
if (maxBitrateBps != nil) {
- params.max_bitrate_bps = rtc::Optional<int>(maxBitrateBps.intValue);
+ params.max_bitrate_bps = absl::optional<int>(maxBitrateBps.intValue);
}
return _peerConnection->SetBitrate(params).ok();
}
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm
index b6baee6..7951cee 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm
@@ -93,10 +93,10 @@
RTC_NOTREACHED();
}
if (_clockRate != nil) {
- parameters.clock_rate = rtc::Optional<int>(_clockRate.intValue);
+ parameters.clock_rate = absl::optional<int>(_clockRate.intValue);
}
if (_numChannels != nil) {
- parameters.num_channels = rtc::Optional<int>(_numChannels.intValue);
+ parameters.num_channels = absl::optional<int>(_numChannels.intValue);
}
for (NSString *paramKey in _parameters.allKeys) {
std::string key = [NSString stdStringForString:paramKey];
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm
index 8521862..299e318 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm
@@ -44,13 +44,13 @@
webrtc::RtpEncodingParameters parameters;
parameters.active = _isActive;
if (_maxBitrateBps != nil) {
- parameters.max_bitrate_bps = rtc::Optional<int>(_maxBitrateBps.intValue);
+ parameters.max_bitrate_bps = absl::optional<int>(_maxBitrateBps.intValue);
}
if (_minBitrateBps != nil) {
- parameters.min_bitrate_bps = rtc::Optional<int>(_minBitrateBps.intValue);
+ parameters.min_bitrate_bps = absl::optional<int>(_minBitrateBps.intValue);
}
if (_ssrc != nil) {
- parameters.ssrc = rtc::Optional<uint32_t>(_ssrc.unsignedLongValue);
+ parameters.ssrc = absl::optional<uint32_t>(_ssrc.unsignedLongValue);
}
return parameters;
}
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm
index 63be2dc..d752126 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm
@@ -43,9 +43,9 @@
using namespace webrtc::H264;
NSString *MaxSupportedLevelForProfile(Profile profile) {
- const rtc::Optional<ProfileLevelId> profileLevelId = [UIDevice maxSupportedH264Profile];
+ const absl::optional<ProfileLevelId> profileLevelId = [UIDevice maxSupportedH264Profile];
if (profileLevelId && profileLevelId->profile >= profile) {
- const rtc::Optional<std::string> profileString =
+ const absl::optional<std::string> profileString =
ProfileLevelIdToString(ProfileLevelId(profile, profileLevelId->level));
if (profileString) {
return [NSString stringForStdString:*profileString];
diff --git a/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm b/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm
index 3caf144..c5fbde1 100644
--- a/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm
+++ b/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm
@@ -20,7 +20,7 @@
#import "RTCShader.h"
#import "WebRTC/RTCLogging.h"
-#include "api/optional.h"
+#include "absl/types/optional.h"
static const int kYTextureUnit = 0;
static const int kUTextureUnit = 1;
@@ -73,7 +73,7 @@
GLuint _vertexBuffer;
GLuint _vertexArray;
// Store current rotation and only upload new vertex data when rotation changes.
- rtc::Optional<RTCVideoRotation> _currentRotation;
+ absl::optional<RTCVideoRotation> _currentRotation;
GLuint _i420Program;
GLuint _nv12Program;
@@ -144,7 +144,7 @@
#endif
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
if (!_currentRotation || rotation != *_currentRotation) {
- _currentRotation = rtc::Optional<RTCVideoRotation>(rotation);
+ _currentRotation = absl::optional<RTCVideoRotation>(rotation);
RTCSetVertexData(*_currentRotation);
}
return YES;
diff --git a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.h b/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.h
index 03ea780..bb6f6ce 100644
--- a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.h
+++ b/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.h
@@ -14,6 +14,6 @@
@interface UIDevice (H264Profile)
-+ (rtc::Optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile;
++ (absl::optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile;
@end
diff --git a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.mm b/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.mm
index ef94c14..196e34e 100644
--- a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.mm
+++ b/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.mm
@@ -85,7 +85,7 @@
{RTCDeviceTypeIPadPro10Inch, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP762
};
-rtc::Optional<ProfileLevelId> FindMaxSupportedProfileForDevice(RTCDeviceType deviceType) {
+absl::optional<ProfileLevelId> FindMaxSupportedProfileForDevice(RTCDeviceType deviceType) {
const auto* result = std::find_if(std::begin(kH264MaxSupportedProfiles),
std::end(kH264MaxSupportedProfiles),
[deviceType](const SupportedH264Profile& supportedProfile) {
@@ -94,14 +94,14 @@
if (result != std::end(kH264MaxSupportedProfiles)) {
return result->profile;
}
- return rtc::nullopt;
+ return absl::nullopt;
}
} // namespace
@implementation UIDevice (H264Profile)
-+ (rtc::Optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile {
++ (absl::optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile {
return FindMaxSupportedProfileForDevice([self deviceType]);
}
diff --git a/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm b/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
index 66e9b61..27dcdee 100644
--- a/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
+++ b/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
@@ -172,7 +172,7 @@
// returned. The user must initialize the encoder with a resolution and
// framerate conforming to the selected H264 level regardless.
CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
- const rtc::Optional<webrtc::H264::ProfileLevelId> profile_level_id =
+ const absl::optional<webrtc::H264::ProfileLevelId> profile_level_id =
webrtc::H264::ParseSdpProfileLevelId(videoFormat.parameters);
RTC_DCHECK(profile_level_id);
switch (profile_level_id->profile) {
diff --git a/sdk/objc/Framework/Native/src/objc_video_track_source.h b/sdk/objc/Framework/Native/src/objc_video_track_source.h
index 1062e96..d237980 100644
--- a/sdk/objc/Framework/Native/src/objc_video_track_source.h
+++ b/sdk/objc/Framework/Native/src/objc_video_track_source.h
@@ -36,7 +36,7 @@
// Indicates that the encoder should denoise video before encoding it.
// If it is not set, the default configuration is used which is different
// depending on video codec.
- rtc::Optional<bool> needs_denoising() const override { return false; }
+ absl::optional<bool> needs_denoising() const override { return false; }
SourceState state() const override { return SourceState::kLive; }