Prepare for splitting FrameType into AudioFrameType and VideoFrameType
This CL deprecates the FrameType enum and adds the aliases AudioFrameType
and VideoFrameType.
Once downstream usage has been updated, the enums will be separated
and moved out of common_types.h.
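
Roughly, the transitional aliases could look like the sketch below. This is
illustrative only; the exact enumerators and the alias form are assumptions,
not copied from this CL:

    // common_types.h (sketch)
    enum FrameType {
      kEmptyFrame,
      kAudioFrameSpeech,
      kAudioFrameCN,
      kVideoFrameKey,
      kVideoFrameDelta,
    };
    // Transitional aliases: call sites can switch to the new names now,
    // e.g. std::vector<VideoFrameType>, while the underlying type stays
    // the same until the enums are actually split.
    using AudioFrameType = FrameType;
    using VideoFrameType = FrameType;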
Bug: webrtc:6883
Change-Id: I2aaf660169da45f22574b4cbb16aea8522cc07a6
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/123184
Commit-Queue: Niels Moller <nisse@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#27011}
diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
index 09a5d02..1800b9e 100644
--- a/modules/video_coding/codecs/h264/h264_encoder_impl.cc
+++ b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
@@ -64,7 +64,7 @@
return 1;
}
-FrameType ConvertToVideoFrameType(EVideoFrameType type) {
+VideoFrameType ConvertToVideoFrameType(EVideoFrameType type) {
switch (type) {
case videoFrameTypeIDR:
return kVideoFrameKey;
@@ -381,9 +381,10 @@
return WEBRTC_VIDEO_CODEC_OK;
}
-int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame,
- const CodecSpecificInfo* codec_specific_info,
- const std::vector<FrameType>* frame_types) {
+int32_t H264EncoderImpl::Encode(
+ const VideoFrame& input_frame,
+ const CodecSpecificInfo* codec_specific_info,
+ const std::vector<VideoFrameType>* frame_types) {
if (encoders_.empty()) {
ReportError();
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.h b/modules/video_coding/codecs/h264/h264_encoder_impl.h
index 75a8758..36a7f02 100644
--- a/modules/video_coding/codecs/h264/h264_encoder_impl.h
+++ b/modules/video_coding/codecs/h264/h264_encoder_impl.h
@@ -68,7 +68,7 @@
// passed to the encode complete callback.
int32_t Encode(const VideoFrame& frame,
const CodecSpecificInfo* codec_specific_info,
- const std::vector<FrameType>* frame_types) override;
+ const std::vector<VideoFrameType>* frame_types) override;
EncoderInfo GetEncoderInfo() const override;
diff --git a/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h b/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h
index 0dd1930..de010c9 100644
--- a/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h
+++ b/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h
@@ -43,7 +43,7 @@
int number_of_cores,
size_t max_payload_size) override;
int Encode(const VideoFrame& input_image,
- const std::vector<FrameType>* frame_types) override;
+ const std::vector<VideoFrameType>* frame_types) override;
int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;
int SetRateAllocation(const VideoBitrateAllocation& bitrate,
uint32_t new_framerate) override;
diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc
index dcba67e..e3eceac 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc
+++ b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc
@@ -115,11 +115,13 @@
ByteReader<uint32_t>::ReadBigEndian(buffer + offset);
offset += sizeof(uint32_t);
+ // TODO(nisse): This makes the wire format depend on the numeric values of the
+ // VideoCodecType and VideoFrameType enum constants.
frame_header.codec_type = static_cast<VideoCodecType>(
ByteReader<uint8_t>::ReadBigEndian(buffer + offset));
offset += sizeof(uint8_t);
- frame_header.frame_type = static_cast<FrameType>(
+ frame_header.frame_type = static_cast<VideoFrameType>(
ByteReader<uint8_t>::ReadBigEndian(buffer + offset));
offset += sizeof(uint8_t);
@@ -181,8 +183,8 @@
// key frame so as to decode the whole image without previous frame data.
// Thus only when all components are key frames, we can mark the combined
// frame as key frame.
- if (frame_header.frame_type == FrameType::kVideoFrameDelta) {
- combined_image._frameType = FrameType::kVideoFrameDelta;
+ if (frame_header.frame_type == VideoFrameType::kVideoFrameDelta) {
+ combined_image._frameType = VideoFrameType::kVideoFrameDelta;
}
frame_headers.push_back(frame_header);
diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h
index 9d9be26..d3505e4 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h
+++ b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h
@@ -67,7 +67,7 @@
VideoCodecType codec_type;
// Indicates whether the underlying frame is a key frame or a delta frame.
- FrameType frame_type;
+ VideoFrameType frame_type;
};
const int kMultiplexImageComponentHeaderSize =
sizeof(uint32_t) + sizeof(uint8_t) + sizeof(uint32_t) + sizeof(uint32_t) +
diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc b/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
index 4b27b18..6e3c5e2 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
+++ b/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
@@ -138,12 +138,12 @@
int MultiplexEncoderAdapter::Encode(
const VideoFrame& input_image,
- const std::vector<FrameType>* frame_types) {
+ const std::vector<VideoFrameType>* frame_types) {
if (!encoded_complete_callback_) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
- std::vector<FrameType> adjusted_frame_types;
+ std::vector<VideoFrameType> adjusted_frame_types;
if (key_frame_interval_ > 0 && picture_index_ % key_frame_interval_ == 0) {
adjusted_frame_types.push_back(kVideoFrameKey);
} else {
diff --git a/modules/video_coding/codecs/test/videoprocessor.cc b/modules/video_coding/codecs/test/videoprocessor.cc
index ac63738..7458006 100644
--- a/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/modules/video_coding/codecs/test/videoprocessor.cc
@@ -285,9 +285,9 @@
}
// Encode.
- const std::vector<FrameType> frame_types =
- (frame_number == 0) ? std::vector<FrameType>{kVideoFrameKey}
- : std::vector<FrameType>{kVideoFrameDelta};
+ const std::vector<VideoFrameType> frame_types =
+ (frame_number == 0) ? std::vector<VideoFrameType>{kVideoFrameKey}
+ : std::vector<VideoFrameType>{kVideoFrameDelta};
const int encode_return_code = encoder_->Encode(input_frame, &frame_types);
for (size_t i = 0; i < num_simulcast_or_spatial_layers_; ++i) {
FrameStatistics* frame_stat = stats_->GetFrame(frame_number, i);
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
index 1bf42ee..94b079f 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
@@ -737,7 +737,7 @@
int LibvpxVp8Encoder::Encode(const VideoFrame& frame,
const CodecSpecificInfo* codec_specific_info,
- const std::vector<FrameType>* frame_types) {
+ const std::vector<VideoFrameType>* frame_types) {
RTC_DCHECK_EQ(frame.width(), codec_.width);
RTC_DCHECK_EQ(frame.height(), codec_.height);
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h
index 5a2205b..2710559 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h
@@ -47,7 +47,7 @@
int Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
- const std::vector<FrameType>* frame_types) override;
+ const std::vector<VideoFrameType>* frame_types) override;
int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;
diff --git a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
index ec687df..771471f 100644
--- a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
+++ b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
@@ -74,11 +74,11 @@
EncodedImage* encoded_frame,
CodecSpecificInfo* codec_specific_info,
bool keyframe = false) {
- std::vector<FrameType> frame_types;
+ std::vector<VideoFrameType> frame_types;
if (keyframe) {
- frame_types.emplace_back(FrameType::kVideoFrameKey);
+ frame_types.emplace_back(VideoFrameType::kVideoFrameKey);
} else {
- frame_types.emplace_back(FrameType::kVideoFrameDelta);
+ frame_types.emplace_back(VideoFrameType::kVideoFrameDelta);
}
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(input_frame, &frame_types));
@@ -484,7 +484,7 @@
.Times(2)
.WillRepeatedly(Return(vpx_codec_err_t::VPX_CODEC_OK));
- auto delta_frame = std::vector<FrameType>{kVideoFrameDelta};
+ auto delta_frame = std::vector<VideoFrameType>{kVideoFrameDelta};
encoder.Encode(*NextInputFrame(), nullptr, &delta_frame);
}
diff --git a/modules/video_coding/codecs/vp9/vp9_impl.cc b/modules/video_coding/codecs/vp9/vp9_impl.cc
index a342b39..177c839 100644
--- a/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -714,7 +714,7 @@
int VP9EncoderImpl::Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
- const std::vector<FrameType>* frame_types) {
+ const std::vector<VideoFrameType>* frame_types) {
if (!inited_) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
diff --git a/modules/video_coding/codecs/vp9/vp9_impl.h b/modules/video_coding/codecs/vp9/vp9_impl.h
index 1e9979f..62aeeb5 100644
--- a/modules/video_coding/codecs/vp9/vp9_impl.h
+++ b/modules/video_coding/codecs/vp9/vp9_impl.h
@@ -45,7 +45,7 @@
int Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
- const std::vector<FrameType>* frame_types) override;
+ const std::vector<VideoFrameType>* frame_types) override;
int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;