/*
 *  Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "call/rtp_payload_params.h"

#include <stddef.h>

#include <algorithm>

#include "absl/container/inlined_vector.h"
#include "absl/strings/match.h"
#include "absl/types/variant.h"
#include "api/video/video_timing.h"
#include "modules/video_coding/codecs/h264/include/h264_globals.h"
#include "modules/video_coding/codecs/interface/common_constants.h"
#include "modules/video_coding/codecs/vp8/include/vp8_globals.h"
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
#include "modules/video_coding/frame_dependencies_calculator.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/random.h"
#include "rtc_base/time_utils.h"

namespace webrtc {

namespace {
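// Copies the codec-specific fields from the encoder's CodecSpecificInfo into
// the codec-dependent part of the RTP video header (VP8, VP9, H264, or the
// generic fallback).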
void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info,
                                   absl::optional<int> spatial_index,
                                   RTPVideoHeader* rtp) {
  rtp->codec = info.codecType;
  rtp->is_last_frame_in_picture = info.end_of_picture;
  switch (info.codecType) {
    case kVideoCodecVP8: {
      auto& vp8_header = rtp->video_type_header.emplace<RTPVideoHeaderVP8>();
      vp8_header.InitRTPVideoHeaderVP8();
      vp8_header.nonReference = info.codecSpecific.VP8.nonReference;
      vp8_header.temporalIdx = info.codecSpecific.VP8.temporalIdx;
      vp8_header.layerSync = info.codecSpecific.VP8.layerSync;
      vp8_header.keyIdx = info.codecSpecific.VP8.keyIdx;
      rtp->simulcastIdx = spatial_index.value_or(0);
      return;
    }
    case kVideoCodecVP9: {
      auto& vp9_header = rtp->video_type_header.emplace<RTPVideoHeaderVP9>();
      vp9_header.InitRTPVideoHeaderVP9();
      vp9_header.inter_pic_predicted =
          info.codecSpecific.VP9.inter_pic_predicted;
      vp9_header.flexible_mode = info.codecSpecific.VP9.flexible_mode;
      vp9_header.ss_data_available = info.codecSpecific.VP9.ss_data_available;
      vp9_header.non_ref_for_inter_layer_pred =
          info.codecSpecific.VP9.non_ref_for_inter_layer_pred;
      vp9_header.temporal_idx = info.codecSpecific.VP9.temporal_idx;
      vp9_header.temporal_up_switch = info.codecSpecific.VP9.temporal_up_switch;
      vp9_header.inter_layer_predicted =
          info.codecSpecific.VP9.inter_layer_predicted;
      vp9_header.gof_idx = info.codecSpecific.VP9.gof_idx;
      vp9_header.num_spatial_layers = info.codecSpecific.VP9.num_spatial_layers;
      vp9_header.first_active_layer = info.codecSpecific.VP9.first_active_layer;
      if (vp9_header.num_spatial_layers > 1) {
        vp9_header.spatial_idx = spatial_index.value_or(kNoSpatialIdx);
      } else {
        vp9_header.spatial_idx = kNoSpatialIdx;
      }
      if (info.codecSpecific.VP9.ss_data_available) {
        vp9_header.spatial_layer_resolution_present =
            info.codecSpecific.VP9.spatial_layer_resolution_present;
        if (info.codecSpecific.VP9.spatial_layer_resolution_present) {
          for (size_t i = 0; i < info.codecSpecific.VP9.num_spatial_layers;
               ++i) {
            vp9_header.width[i] = info.codecSpecific.VP9.width[i];
            vp9_header.height[i] = info.codecSpecific.VP9.height[i];
          }
        }
        vp9_header.gof.CopyGofInfoVP9(info.codecSpecific.VP9.gof);
      }

      vp9_header.num_ref_pics = info.codecSpecific.VP9.num_ref_pics;
      for (int i = 0; i < info.codecSpecific.VP9.num_ref_pics; ++i) {
        vp9_header.pid_diff[i] = info.codecSpecific.VP9.p_diff[i];
      }
      vp9_header.end_of_picture = info.end_of_picture;
      return;
    }
    case kVideoCodecH264: {
      auto& h264_header = rtp->video_type_header.emplace<RTPVideoHeaderH264>();
      h264_header.packetization_mode =
          info.codecSpecific.H264.packetization_mode;
      rtp->simulcastIdx = spatial_index.value_or(0);
      return;
    }
    case kVideoCodecMultiplex:
    case kVideoCodecGeneric:
      rtp->codec = kVideoCodecGeneric;
      rtp->simulcastIdx = spatial_index.value_or(0);
      return;
    default:
      return;
  }
}

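// Converts the absolute encode timestamps in `image.timing_` into
// capture-time-relative deltas for the video-timing header extension. The
// remaining deltas (packetization, pacer, network) are initialized to zero
// here.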
void SetVideoTiming(const EncodedImage& image, VideoSendTiming* timing) {
  if (image.timing_.flags == VideoSendTiming::TimingFrameFlags::kInvalid ||
      image.timing_.flags == VideoSendTiming::TimingFrameFlags::kNotTriggered) {
    timing->flags = VideoSendTiming::TimingFrameFlags::kInvalid;
    return;
  }

  timing->encode_start_delta_ms = VideoSendTiming::GetDeltaCappedMs(
      image.capture_time_ms_, image.timing_.encode_start_ms);
  timing->encode_finish_delta_ms = VideoSendTiming::GetDeltaCappedMs(
      image.capture_time_ms_, image.timing_.encode_finish_ms);
  timing->packetization_finish_delta_ms = 0;
  timing->pacer_exit_delta_ms = 0;
  timing->network_timestamp_delta_ms = 0;
  timing->network2_timestamp_delta_ms = 0;
  timing->flags = image.timing_.flags;
}
}  // namespace

RtpPayloadParams::RtpPayloadParams(const uint32_t ssrc,
                                   const RtpPayloadState* state,
                                   const WebRtcKeyValueConfig& trials)
    : ssrc_(ssrc),
      generic_picture_id_experiment_(
          absl::StartsWith(trials.Lookup("WebRTC-GenericPictureId"),
                           "Enabled")),
      simulate_generic_vp9_(
          !absl::StartsWith(trials.Lookup("WebRTC-Vp9DependencyDescriptor"),
                            "Disabled")) {
  for (auto& spatial_layer : last_shared_frame_id_)
    spatial_layer.fill(-1);

  chain_last_frame_id_.fill(-1);
  buffer_id_to_frame_id_.fill(-1);

  Random random(rtc::TimeMicros());
  state_.picture_id =
      state ? state->picture_id : (random.Rand<int16_t>() & 0x7FFF);
  state_.tl0_pic_idx = state ? state->tl0_pic_idx : (random.Rand<uint8_t>());
}

RtpPayloadParams::RtpPayloadParams(const RtpPayloadParams& other) = default;

RtpPayloadParams::~RtpPayloadParams() {}

RTPVideoHeader RtpPayloadParams::GetRtpVideoHeader(
    const EncodedImage& image,
    const CodecSpecificInfo* codec_specific_info,
    int64_t shared_frame_id) {
  RTPVideoHeader rtp_video_header;
  if (codec_specific_info) {
    PopulateRtpWithCodecSpecifics(*codec_specific_info, image.SpatialIndex(),
                                  &rtp_video_header);
  }
  rtp_video_header.frame_type = image._frameType;
  rtp_video_header.rotation = image.rotation_;
  rtp_video_header.content_type = image.content_type_;
  rtp_video_header.playout_delay = image.playout_delay_;
  rtp_video_header.width = image._encodedWidth;
  rtp_video_header.height = image._encodedHeight;
  rtp_video_header.color_space = image.ColorSpace()
                                     ? absl::make_optional(*image.ColorSpace())
                                     : absl::nullopt;
  rtp_video_header.video_frame_tracking_id = image.VideoFrameTrackingId();
  SetVideoTiming(image, &rtp_video_header.video_timing);

  const bool is_keyframe = image._frameType == VideoFrameType::kVideoFrameKey;
  const bool first_frame_in_picture =
      (codec_specific_info && codec_specific_info->codecType == kVideoCodecVP9)
          ? codec_specific_info->codecSpecific.VP9.first_frame_in_picture
          : true;

  SetCodecSpecific(&rtp_video_header, first_frame_in_picture);

  SetGeneric(codec_specific_info, shared_frame_id, is_keyframe,
             &rtp_video_header);

  return rtp_video_header;
}

uint32_t RtpPayloadParams::ssrc() const {
  return ssrc_;
}

RtpPayloadState RtpPayloadParams::state() const {
  return state_;
}

void RtpPayloadParams::SetCodecSpecific(RTPVideoHeader* rtp_video_header,
                                        bool first_frame_in_picture) {
  // Always set picture id. Set tl0_pic_idx iff temporal index is set.
  if (first_frame_in_picture) {
    state_.picture_id = (static_cast<uint16_t>(state_.picture_id) + 1) & 0x7FFF;
  }
  if (rtp_video_header->codec == kVideoCodecVP8) {
    auto& vp8_header =
        absl::get<RTPVideoHeaderVP8>(rtp_video_header->video_type_header);
    vp8_header.pictureId = state_.picture_id;

    if (vp8_header.temporalIdx != kNoTemporalIdx) {
      if (vp8_header.temporalIdx == 0) {
        ++state_.tl0_pic_idx;
      }
      vp8_header.tl0PicIdx = state_.tl0_pic_idx;
    }
  }
  if (rtp_video_header->codec == kVideoCodecVP9) {
    auto& vp9_header =
        absl::get<RTPVideoHeaderVP9>(rtp_video_header->video_type_header);
    vp9_header.picture_id = state_.picture_id;

    // Note that in the case that we have no temporal layers but we do have
    // spatial layers, packets will carry layering info with a temporal_idx of
    // zero, and we then have to set and increment tl0_pic_idx.
    if (vp9_header.temporal_idx != kNoTemporalIdx ||
        vp9_header.spatial_idx != kNoSpatialIdx) {
      if (first_frame_in_picture &&
          (vp9_header.temporal_idx == 0 ||
           vp9_header.temporal_idx == kNoTemporalIdx)) {
        ++state_.tl0_pic_idx;
      }
      vp9_header.tl0_pic_idx = state_.tl0_pic_idx;
    }
  }
  if (generic_picture_id_experiment_ &&
      rtp_video_header->codec == kVideoCodecGeneric) {
    rtp_video_header->video_type_header.emplace<RTPVideoHeaderLegacyGeneric>()
        .picture_id = state_.picture_id;
  }
}

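// Builds the generic descriptor directly from the encoder-provided
// GenericFrameInfo: frame dependencies and chain diffs are derived by the
// dependencies/chains calculators, while layer indices and decode target
// indications are copied as-is.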
RTPVideoHeader::GenericDescriptorInfo
RtpPayloadParams::GenericDescriptorFromFrameInfo(
    const GenericFrameInfo& frame_info,
    int64_t frame_id) {
  RTPVideoHeader::GenericDescriptorInfo generic;
  generic.frame_id = frame_id;
  generic.dependencies = dependencies_calculator_.FromBuffersUsage(
      frame_id, frame_info.encoder_buffers);
  generic.chain_diffs =
      chains_calculator_.From(frame_id, frame_info.part_of_chain);
  generic.spatial_index = frame_info.spatial_id;
  generic.temporal_index = frame_info.temporal_id;
  generic.decode_target_indications = frame_info.decode_target_indications;
  generic.active_decode_targets = frame_info.active_decode_targets;
  return generic;
}

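// Fills in the generic frame descriptor. When the encoder supplied
// generic_frame_info it is used directly; otherwise the descriptor is derived
// from the codec-specific information per codec type.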
void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info,
                                  int64_t frame_id,
                                  bool is_keyframe,
                                  RTPVideoHeader* rtp_video_header) {
  if (codec_specific_info && codec_specific_info->generic_frame_info &&
      !codec_specific_info->generic_frame_info->encoder_buffers.empty()) {
    if (is_keyframe) {
      // Key frame resets all chains it is in.
      chains_calculator_.Reset(
          codec_specific_info->generic_frame_info->part_of_chain);
    }
    rtp_video_header->generic = GenericDescriptorFromFrameInfo(
        *codec_specific_info->generic_frame_info, frame_id);
    return;
  }

  switch (rtp_video_header->codec) {
    case VideoCodecType::kVideoCodecGeneric:
      GenericToGeneric(frame_id, is_keyframe, rtp_video_header);
      return;
    case VideoCodecType::kVideoCodecVP8:
      if (codec_specific_info) {
        Vp8ToGeneric(codec_specific_info->codecSpecific.VP8, frame_id,
                     is_keyframe, rtp_video_header);
      }
      return;
    case VideoCodecType::kVideoCodecVP9:
      if (simulate_generic_vp9_ && codec_specific_info != nullptr) {
        Vp9ToGeneric(codec_specific_info->codecSpecific.VP9, frame_id,
                     *rtp_video_header);
      }
      return;
    case VideoCodecType::kVideoCodecAV1:
      // TODO(philipel): Implement AV1 to generic descriptor.
      return;
    case VideoCodecType::kVideoCodecH264:
      if (codec_specific_info) {
        H264ToGeneric(codec_specific_info->codecSpecific.H264, frame_id,
                      is_keyframe, rtp_video_header);
      }
      return;
    case VideoCodecType::kVideoCodecMultiplex:
      return;
  }
  RTC_NOTREACHED() << "Unsupported codec.";
}

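// For the generic codec: every delta frame depends only on the previous frame,
// and a key frame clears the tracked frame-id state.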
void RtpPayloadParams::GenericToGeneric(int64_t shared_frame_id,
                                        bool is_keyframe,
                                        RTPVideoHeader* rtp_video_header) {
  RTPVideoHeader::GenericDescriptorInfo& generic =
      rtp_video_header->generic.emplace();

  generic.frame_id = shared_frame_id;

  if (is_keyframe) {
    last_shared_frame_id_[0].fill(-1);
  } else {
    int64_t frame_id = last_shared_frame_id_[0][0];
    RTC_DCHECK_NE(frame_id, -1);
    RTC_DCHECK_LT(frame_id, shared_frame_id);
    generic.dependencies.push_back(frame_id);
  }

  last_shared_frame_id_[0][0] = shared_frame_id;
}

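// Infers generic frame dependencies for H.264 temporal layers from the
// base-layer-sync flag and the last frame id seen on each temporal layer
// (the spatial index is always 0 here).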
void RtpPayloadParams::H264ToGeneric(const CodecSpecificInfoH264& h264_info,
                                     int64_t shared_frame_id,
                                     bool is_keyframe,
                                     RTPVideoHeader* rtp_video_header) {
  const int temporal_index =
      h264_info.temporal_idx != kNoTemporalIdx ? h264_info.temporal_idx : 0;

  if (temporal_index >= RtpGenericFrameDescriptor::kMaxTemporalLayers) {
    RTC_LOG(LS_WARNING) << "Temporal and/or spatial index is too high to be "
                           "used with generic frame descriptor.";
    return;
  }

  RTPVideoHeader::GenericDescriptorInfo& generic =
      rtp_video_header->generic.emplace();

  generic.frame_id = shared_frame_id;
  generic.temporal_index = temporal_index;

  if (is_keyframe) {
    RTC_DCHECK_EQ(temporal_index, 0);
    last_shared_frame_id_[/*spatial index*/ 0].fill(-1);
    last_shared_frame_id_[/*spatial index*/ 0][temporal_index] =
        shared_frame_id;
    return;
  }

  if (h264_info.base_layer_sync) {
    int64_t tl0_frame_id = last_shared_frame_id_[/*spatial index*/ 0][0];

    for (int i = 1; i < RtpGenericFrameDescriptor::kMaxTemporalLayers; ++i) {
      if (last_shared_frame_id_[/*spatial index*/ 0][i] < tl0_frame_id) {
        last_shared_frame_id_[/*spatial index*/ 0][i] = -1;
      }
    }

    RTC_DCHECK_GE(tl0_frame_id, 0);
    RTC_DCHECK_LT(tl0_frame_id, shared_frame_id);
    generic.dependencies.push_back(tl0_frame_id);
  } else {
    for (int i = 0; i <= temporal_index; ++i) {
      int64_t frame_id = last_shared_frame_id_[/*spatial index*/ 0][i];

      if (frame_id != -1) {
        RTC_DCHECK_LT(frame_id, shared_frame_id);
        generic.dependencies.push_back(frame_id);
      }
    }
  }

  last_shared_frame_id_[/*spatial_index*/ 0][temporal_index] = shared_frame_id;
}

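// Maps VP8 codec-specific info to the generic descriptor. Dependencies come
// either from the explicit referenced/updated buffers (new encoder interface)
// or are inferred from layer sync and temporal indices (deprecated path).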
void RtpPayloadParams::Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info,
                                    int64_t shared_frame_id,
                                    bool is_keyframe,
                                    RTPVideoHeader* rtp_video_header) {
  const auto& vp8_header =
      absl::get<RTPVideoHeaderVP8>(rtp_video_header->video_type_header);
  const int spatial_index = 0;
  const int temporal_index =
      vp8_header.temporalIdx != kNoTemporalIdx ? vp8_header.temporalIdx : 0;

  if (temporal_index >= RtpGenericFrameDescriptor::kMaxTemporalLayers ||
      spatial_index >= RtpGenericFrameDescriptor::kMaxSpatialLayers) {
    RTC_LOG(LS_WARNING) << "Temporal and/or spatial index is too high to be "
                           "used with generic frame descriptor.";
    return;
  }

  RTPVideoHeader::GenericDescriptorInfo& generic =
      rtp_video_header->generic.emplace();

  generic.frame_id = shared_frame_id;
  generic.spatial_index = spatial_index;
  generic.temporal_index = temporal_index;

  if (vp8_info.useExplicitDependencies) {
    SetDependenciesVp8New(vp8_info, shared_frame_id, is_keyframe,
                          vp8_header.layerSync, &generic);
  } else {
    SetDependenciesVp8Deprecated(vp8_info, shared_frame_id, is_keyframe,
                                 spatial_index, temporal_index,
                                 vp8_header.layerSync, &generic);
  }
}

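// Builds a generic frame dependency structure for VP9 with one decode target
// per (spatial, temporal) layer combination and one chain per spatial layer.
// Templates prefer kSwitch indications (see the comment in the loop below)
// and carry per-layer resolutions when SS data is available.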
FrameDependencyStructure RtpPayloadParams::MinimalisticVp9Structure(
    const CodecSpecificInfoVP9& vp9) {
  const int num_spatial_layers = vp9.num_spatial_layers;
  const int num_temporal_layers = kMaxTemporalStreams;
  FrameDependencyStructure structure;
  structure.num_decode_targets = num_spatial_layers * num_temporal_layers;
  structure.num_chains = num_spatial_layers;
  structure.templates.reserve(num_spatial_layers * num_temporal_layers);
  for (int sid = 0; sid < num_spatial_layers; ++sid) {
    for (int tid = 0; tid < num_temporal_layers; ++tid) {
      FrameDependencyTemplate a_template;
      a_template.spatial_id = sid;
      a_template.temporal_id = tid;
      for (int s = 0; s < num_spatial_layers; ++s) {
        for (int t = 0; t < num_temporal_layers; ++t) {
          // Prefer kSwitch to indicate the frame is part of the decode target
          // because RtpPayloadParams::Vp9ToGeneric uses that indication more
          // often than kRequired, increasing the chance that a custom dti does
          // not need extra bits in the dependency descriptor on the wire.
          a_template.decode_target_indications.push_back(
              sid <= s && tid <= t ? DecodeTargetIndication::kSwitch
                                   : DecodeTargetIndication::kNotPresent);
        }
      }
      a_template.frame_diffs.push_back(
          tid == 0 ? num_spatial_layers * num_temporal_layers
                   : num_spatial_layers);
      a_template.chain_diffs.assign(structure.num_chains, 1);
      structure.templates.push_back(a_template);

      structure.decode_target_protected_by_chain.push_back(sid);
    }
    if (vp9.ss_data_available && vp9.spatial_layer_resolution_present) {
      structure.resolutions.emplace_back(vp9.width[sid], vp9.height[sid]);
    }
  }
  return structure;
}

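// Derives the generic descriptor for VP9 from the RTP VP9 header: decode
// target indications for every (spatial, temporal) layer, frame dependencies
// tracked via recent picture ids, and per-spatial-layer chains anchored on
// temporal-layer-0 frames.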
void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info,
                                    int64_t shared_frame_id,
                                    RTPVideoHeader& rtp_video_header) {
  const auto& vp9_header =
      absl::get<RTPVideoHeaderVP9>(rtp_video_header.video_type_header);
  const int num_spatial_layers = vp9_header.num_spatial_layers;
  const int num_temporal_layers = kMaxTemporalStreams;

  int spatial_index =
      vp9_header.spatial_idx != kNoSpatialIdx ? vp9_header.spatial_idx : 0;
  int temporal_index =
      vp9_header.temporal_idx != kNoTemporalIdx ? vp9_header.temporal_idx : 0;

  if (spatial_index >= num_spatial_layers ||
      temporal_index >= num_temporal_layers ||
      num_spatial_layers > RtpGenericFrameDescriptor::kMaxSpatialLayers) {
    // Prefer to generate no generic layering than an inconsistent one.
    return;
  }

  RTPVideoHeader::GenericDescriptorInfo& result =
      rtp_video_header.generic.emplace();

  result.frame_id = shared_frame_id;
  result.spatial_index = spatial_index;
  result.temporal_index = temporal_index;

  result.decode_target_indications.reserve(num_spatial_layers *
                                           num_temporal_layers);
  for (int sid = 0; sid < num_spatial_layers; ++sid) {
    for (int tid = 0; tid < num_temporal_layers; ++tid) {
      DecodeTargetIndication dti;
      if (sid < spatial_index || tid < temporal_index) {
        dti = DecodeTargetIndication::kNotPresent;
      } else if (spatial_index != sid &&
                 vp9_header.non_ref_for_inter_layer_pred) {
        dti = DecodeTargetIndication::kNotPresent;
      } else if (sid == spatial_index && tid == temporal_index) {
        // Assume that if a frame is decodable, all of its own layer is
        // decodable.
        dti = DecodeTargetIndication::kSwitch;
      } else if (sid == spatial_index && vp9_header.temporal_up_switch) {
        dti = DecodeTargetIndication::kSwitch;
      } else if (!vp9_header.inter_pic_predicted) {
        // Key frame or spatial upswitch.
        dti = DecodeTargetIndication::kSwitch;
      } else {
        // Make no other assumptions. That should be safe, though suboptimal.
        // To provide a more accurate dti, the encoder wrapper should fill in
        // CodecSpecificInfo::generic_frame_info.
        dti = DecodeTargetIndication::kRequired;
      }
      result.decode_target_indications.push_back(dti);
    }
  }

  // Calculate frame dependencies.
  static constexpr int kPictureDiffLimit = 128;
  if (last_vp9_frame_id_.empty()) {
    // Create the array only if it is ever used.
    last_vp9_frame_id_.resize(kPictureDiffLimit);
  }
  if (vp9_header.inter_layer_predicted && spatial_index > 0) {
    result.dependencies.push_back(
        last_vp9_frame_id_[vp9_header.picture_id % kPictureDiffLimit]
                          [spatial_index - 1]);
  }
  if (vp9_header.inter_pic_predicted) {
    for (size_t i = 0; i < vp9_header.num_ref_pics; ++i) {
      // picture_id is a 15-bit number that wraps around. Though underflow may
      // produce a value that exceeds 2^15, that is ok because in this code
      // block only the last 7 bits of the picture_id are used.
      uint16_t depend_on = vp9_header.picture_id - vp9_header.pid_diff[i];
      result.dependencies.push_back(
          last_vp9_frame_id_[depend_on % kPictureDiffLimit][spatial_index]);
    }
  }
  last_vp9_frame_id_[vp9_header.picture_id % kPictureDiffLimit][spatial_index] =
      shared_frame_id;

  // Calculate chains, assuming each chain includes all frames with
  // temporal_id = 0.
  if (!vp9_header.inter_pic_predicted && !vp9_header.inter_layer_predicted) {
    // Assume frames without dependencies also reset chains.
    for (int sid = spatial_index; sid < num_spatial_layers; ++sid) {
      chain_last_frame_id_[sid] = -1;
    }
  }
  result.chain_diffs.resize(num_spatial_layers);
  for (int sid = 0; sid < num_spatial_layers; ++sid) {
    if (chain_last_frame_id_[sid] == -1) {
      result.chain_diffs[sid] = 0;
      continue;
    }
    result.chain_diffs[sid] = shared_frame_id - chain_last_frame_id_[sid];
  }

  if (temporal_index == 0) {
    chain_last_frame_id_[spatial_index] = shared_frame_id;
    if (!vp9_header.non_ref_for_inter_layer_pred) {
      for (int sid = spatial_index + 1; sid < num_spatial_layers; ++sid) {
        chain_last_frame_id_[sid] = shared_frame_id;
      }
    }
  }
}

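// Legacy VP8 dependency inference: when the encoder does not report explicit
// buffer usage, dependencies are derived from the layer sync flag and the last
// frame id stored per (spatial, temporal) layer.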
void RtpPayloadParams::SetDependenciesVp8Deprecated(
    const CodecSpecificInfoVP8& vp8_info,
    int64_t shared_frame_id,
    bool is_keyframe,
    int spatial_index,
    int temporal_index,
    bool layer_sync,
    RTPVideoHeader::GenericDescriptorInfo* generic) {
  RTC_DCHECK(!vp8_info.useExplicitDependencies);
  RTC_DCHECK(!new_version_used_.has_value() || !new_version_used_.value());
  new_version_used_ = false;

  if (is_keyframe) {
    RTC_DCHECK_EQ(temporal_index, 0);
    last_shared_frame_id_[spatial_index].fill(-1);
    last_shared_frame_id_[spatial_index][temporal_index] = shared_frame_id;
    return;
  }

  if (layer_sync) {
    int64_t tl0_frame_id = last_shared_frame_id_[spatial_index][0];

    for (int i = 1; i < RtpGenericFrameDescriptor::kMaxTemporalLayers; ++i) {
      if (last_shared_frame_id_[spatial_index][i] < tl0_frame_id) {
        last_shared_frame_id_[spatial_index][i] = -1;
      }
    }

    RTC_DCHECK_GE(tl0_frame_id, 0);
    RTC_DCHECK_LT(tl0_frame_id, shared_frame_id);
    generic->dependencies.push_back(tl0_frame_id);
  } else {
    for (int i = 0; i <= temporal_index; ++i) {
      int64_t frame_id = last_shared_frame_id_[spatial_index][i];

      if (frame_id != -1) {
        RTC_DCHECK_LT(frame_id, shared_frame_id);
        generic->dependencies.push_back(frame_id);
      }
    }
  }

  last_shared_frame_id_[spatial_index][temporal_index] = shared_frame_id;
}

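// New VP8 dependency path: the encoder reports which buffers the frame
// references and updates, and dependencies are the last frame ids stored for
// the referenced buffers.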
void RtpPayloadParams::SetDependenciesVp8New(
    const CodecSpecificInfoVP8& vp8_info,
    int64_t shared_frame_id,
    bool is_keyframe,
    bool layer_sync,
    RTPVideoHeader::GenericDescriptorInfo* generic) {
  RTC_DCHECK(vp8_info.useExplicitDependencies);
  RTC_DCHECK(!new_version_used_.has_value() || new_version_used_.value());
  new_version_used_ = true;

  if (is_keyframe) {
    RTC_DCHECK_EQ(vp8_info.referencedBuffersCount, 0u);
    buffer_id_to_frame_id_.fill(shared_frame_id);
    return;
  }

  constexpr size_t kBuffersCountVp8 = CodecSpecificInfoVP8::kBuffersCount;

  RTC_DCHECK_GT(vp8_info.referencedBuffersCount, 0u);
  RTC_DCHECK_LE(vp8_info.referencedBuffersCount,
                arraysize(vp8_info.referencedBuffers));

  for (size_t i = 0; i < vp8_info.referencedBuffersCount; ++i) {
    const size_t referenced_buffer = vp8_info.referencedBuffers[i];
    RTC_DCHECK_LT(referenced_buffer, kBuffersCountVp8);
    RTC_DCHECK_LT(referenced_buffer, buffer_id_to_frame_id_.size());

    const int64_t dependency_frame_id =
        buffer_id_to_frame_id_[referenced_buffer];
    RTC_DCHECK_GE(dependency_frame_id, 0);
    RTC_DCHECK_LT(dependency_frame_id, shared_frame_id);

    const bool is_new_dependency =
        std::find(generic->dependencies.begin(), generic->dependencies.end(),
                  dependency_frame_id) == generic->dependencies.end();
    if (is_new_dependency) {
      generic->dependencies.push_back(dependency_frame_id);
    }
  }

  RTC_DCHECK_LE(vp8_info.updatedBuffersCount, kBuffersCountVp8);
  for (size_t i = 0; i < vp8_info.updatedBuffersCount; ++i) {
    const size_t updated_id = vp8_info.updatedBuffers[i];
    buffer_id_to_frame_id_[updated_id] = shared_frame_id;
  }

  RTC_DCHECK_LE(buffer_id_to_frame_id_.size(), kBuffersCountVp8);
}

}  // namespace webrtc