/*
 *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/video_coding/rtp_frame_reference_finder.h"

#include <algorithm>
#include <limits>

#include "absl/types/variant.h"
#include "modules/video_coding/frame_object.h"
#include "modules/video_coding/packet_buffer.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/system/fallthrough.h"

namespace webrtc {
namespace video_coding {

RtpFrameReferenceFinder::RtpFrameReferenceFinder(
    OnCompleteFrameCallback* frame_callback)
    : last_picture_id_(-1),
      current_ss_idx_(0),
      cleared_to_seq_num_(-1),
      frame_callback_(frame_callback) {}

RtpFrameReferenceFinder::~RtpFrameReferenceFinder() = default;

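// Entry point for newly assembled frames. Frames that start before the
// cleared-to sequence number are dropped; otherwise the codec-specific logic
// either hands the frame off with its references filled in, stashes it until
// its references can be determined, or drops it.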
void RtpFrameReferenceFinder::ManageFrame(
    std::unique_ptr<RtpFrameObject> frame) {
  rtc::CritScope lock(&crit_);

  // If we have cleared past this frame, drop it.
  if (cleared_to_seq_num_ != -1 &&
      AheadOf<uint16_t>(cleared_to_seq_num_, frame->first_seq_num())) {
    return;
  }

  FrameDecision decision = ManageFrameInternal(frame.get());

  switch (decision) {
    case kStash:
      if (stashed_frames_.size() > kMaxStashedFrames)
        stashed_frames_.pop_back();
      stashed_frames_.push_front(std::move(frame));
      break;
    case kHandOff:
      frame_callback_->OnCompleteFrame(std::move(frame));
      RetryStashedFrames();
      break;
    case kDrop:
      break;
  }
}

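// Re-runs reference finding on all stashed frames. Handing one frame off can
// unblock others, so the loop repeats until a pass hands off no frame.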
void RtpFrameReferenceFinder::RetryStashedFrames() {
  bool complete_frame = false;
  do {
    complete_frame = false;
    for (auto frame_it = stashed_frames_.begin();
         frame_it != stashed_frames_.end();) {
      FrameDecision decision = ManageFrameInternal(frame_it->get());

      switch (decision) {
        case kStash:
          ++frame_it;
          break;
        case kHandOff:
          complete_frame = true;
          frame_callback_->OnCompleteFrame(std::move(*frame_it));
          RTC_FALLTHROUGH();
        case kDrop:
          frame_it = stashed_frames_.erase(frame_it);
      }
    }
  } while (complete_frame);
}

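// Picks the reference-finding strategy: the generic frame descriptor when
// present, otherwise the codec-specific path, with a plain picture-id or
// sequence-number fallback for other payloads.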
RtpFrameReferenceFinder::FrameDecision
RtpFrameReferenceFinder::ManageFrameInternal(RtpFrameObject* frame) {
  absl::optional<RtpGenericFrameDescriptor> generic_descriptor =
      frame->GetGenericFrameDescriptor();
  if (generic_descriptor) {
    return ManageFrameGeneric(frame, *generic_descriptor);
  }

  switch (frame->codec_type()) {
    case kVideoCodecVP8:
      return ManageFrameVp8(frame);
    case kVideoCodecVP9:
      return ManageFrameVp9(frame);
    case kVideoCodecH264:
      return ManageFrameH264(frame);
    default: {
      // Use the first 15 bits of the frame ID as picture ID, if available.
      absl::optional<RTPVideoHeader> video_header = frame->GetRtpVideoHeader();
      int picture_id = kNoPictureId;
      if (video_header && video_header->generic)
        picture_id = video_header->generic->frame_id & 0x7fff;

      return ManageFramePidOrSeqNum(frame, picture_id);
    }
  }
}

void RtpFrameReferenceFinder::PaddingReceived(uint16_t seq_num) {
  rtc::CritScope lock(&crit_);
  auto clean_padding_to =
      stashed_padding_.lower_bound(seq_num - kMaxPaddingAge);
  stashed_padding_.erase(stashed_padding_.begin(), clean_padding_to);
  stashed_padding_.insert(seq_num);
  UpdateLastPictureIdWithPadding(seq_num);
  RetryStashedFrames();
}

void RtpFrameReferenceFinder::ClearTo(uint16_t seq_num) {
  rtc::CritScope lock(&crit_);
  cleared_to_seq_num_ = seq_num;

  auto it = stashed_frames_.begin();
  while (it != stashed_frames_.end()) {
    if (AheadOf<uint16_t>(cleared_to_seq_num_, (*it)->first_seq_num())) {
      it = stashed_frames_.erase(it);
    } else {
      ++it;
    }
  }
}

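// Extends the padded end of the tracked GOP with any stashed padding packets
// that are contiguous with it, so padding-only packets do not leave apparent
// gaps in the sequence number continuity check.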
void RtpFrameReferenceFinder::UpdateLastPictureIdWithPadding(uint16_t seq_num) {
  auto gop_seq_num_it = last_seq_num_gop_.upper_bound(seq_num);

  // If this padding packet "belongs" to a group of pictures that we don't
  // track anymore, do nothing.
  if (gop_seq_num_it == last_seq_num_gop_.begin())
    return;
  --gop_seq_num_it;

  // Calculate the next continuous sequence number and search for it in
  // the padding packets we have stashed.
  uint16_t next_seq_num_with_padding = gop_seq_num_it->second.second + 1;
  auto padding_seq_num_it =
      stashed_padding_.lower_bound(next_seq_num_with_padding);

  // While there are still padding packets and those padding packets are
  // continuous, advance the "last-picture-id-with-padding" and remove
  // the stashed padding packet.
  while (padding_seq_num_it != stashed_padding_.end() &&
         *padding_seq_num_it == next_seq_num_with_padding) {
    gop_seq_num_it->second.second = next_seq_num_with_padding;
    ++next_seq_num_with_padding;
    padding_seq_num_it = stashed_padding_.erase(padding_seq_num_it);
  }

  // In the case where the stream has been continuous without any new keyframes
  // for a while, there is a risk that new frames will appear to be older than
  // the keyframe they belong to due to the wrapping sequence number. In order
  // to prevent this we advance the picture id of the keyframe every so often.
  if (ForwardDiff(gop_seq_num_it->first, seq_num) > 10000) {
    RTC_DCHECK_EQ(1ul, last_seq_num_gop_.size());
    last_seq_num_gop_[seq_num] = gop_seq_num_it->second;
    last_seq_num_gop_.erase(gop_seq_num_it);
  }
}

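// References come directly from the generic frame descriptor: each dependency
// diff is subtracted from the unwrapped frame id. For example, frame id 100
// with diffs {1, 4} yields references {99, 96}.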
RtpFrameReferenceFinder::FrameDecision
RtpFrameReferenceFinder::ManageFrameGeneric(
    RtpFrameObject* frame,
    const RtpGenericFrameDescriptor& descriptor) {
  int64_t frame_id = generic_frame_id_unwrapper_.Unwrap(descriptor.FrameId());
  frame->id.picture_id = frame_id;
  frame->id.spatial_layer = descriptor.SpatialLayer();

  rtc::ArrayView<const uint16_t> diffs = descriptor.FrameDependenciesDiffs();
  if (EncodedFrame::kMaxFrameReferences < diffs.size()) {
    RTC_LOG(LS_WARNING) << "Too many dependencies in generic descriptor.";
    return kDrop;
  }

  frame->num_references = diffs.size();
  for (size_t i = 0; i < diffs.size(); ++i)
    frame->references[i] = frame_id - diffs[i];

  return kHandOff;
}

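// Fallback used when no layer information is available. With a picture id the
// frame simply references the previous picture; without one, references are
// derived from packet sequence numbers, keyed on the last keyframe's GOP and
// requiring a continuous sequence number range (padding included) back to it.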
RtpFrameReferenceFinder::FrameDecision
RtpFrameReferenceFinder::ManageFramePidOrSeqNum(RtpFrameObject* frame,
                                                int picture_id) {
  // If |picture_id| is specified then we use that to set the frame references,
  // otherwise we use sequence number.
  if (picture_id != kNoPictureId) {
    frame->id.picture_id = unwrapper_.Unwrap(picture_id);
    frame->num_references =
        frame->frame_type() == VideoFrameType::kVideoFrameKey ? 0 : 1;
    frame->references[0] = frame->id.picture_id - 1;
    return kHandOff;
  }

  if (frame->frame_type() == VideoFrameType::kVideoFrameKey) {
    last_seq_num_gop_.insert(std::make_pair(
        frame->last_seq_num(),
        std::make_pair(frame->last_seq_num(), frame->last_seq_num())));
  }

  // We have received a frame but not yet a keyframe, stash this frame.
  if (last_seq_num_gop_.empty())
    return kStash;

  // Clean up info for old keyframes but make sure to keep info
  // for the last keyframe.
  auto clean_to = last_seq_num_gop_.lower_bound(frame->last_seq_num() - 100);
  for (auto it = last_seq_num_gop_.begin();
       it != clean_to && last_seq_num_gop_.size() > 1;) {
    it = last_seq_num_gop_.erase(it);
  }

  // Find the last sequence number of the last frame for the keyframe
  // that this frame indirectly references.
  auto seq_num_it = last_seq_num_gop_.upper_bound(frame->last_seq_num());
  if (seq_num_it == last_seq_num_gop_.begin()) {
    RTC_LOG(LS_WARNING) << "Generic frame with packet range ["
                        << frame->first_seq_num() << ", "
                        << frame->last_seq_num()
                        << "] has no GoP, dropping frame.";
    return kDrop;
  }
  seq_num_it--;

  // Make sure the packet sequence numbers are continuous, otherwise stash
  // this frame.
  uint16_t last_picture_id_gop = seq_num_it->second.first;
  uint16_t last_picture_id_with_padding_gop = seq_num_it->second.second;
  if (frame->frame_type() == VideoFrameType::kVideoFrameDelta) {
    uint16_t prev_seq_num = frame->first_seq_num() - 1;

    if (prev_seq_num != last_picture_id_with_padding_gop)
      return kStash;
  }

  RTC_DCHECK(AheadOrAt(frame->last_seq_num(), seq_num_it->first));

  // Since keyframes can cause reordering we can't simply assign the
  // picture id according to some incrementing counter.
  frame->id.picture_id = frame->last_seq_num();
  frame->num_references =
      frame->frame_type() == VideoFrameType::kVideoFrameDelta;
  frame->references[0] = rtp_seq_num_unwrapper_.Unwrap(last_picture_id_gop);
  if (AheadOf<uint16_t>(frame->id.picture_id, last_picture_id_gop)) {
    seq_num_it->second.first = frame->id.picture_id;
    seq_num_it->second.second = frame->id.picture_id;
  }

  last_picture_id_ = frame->id.picture_id;
  UpdateLastPictureIdWithPadding(frame->id.picture_id);
  frame->id.picture_id = rtp_seq_num_unwrapper_.Unwrap(frame->id.picture_id);
  return kHandOff;
}

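// VP8 references are derived from the picture id, TL0PICIDX and temporal
// index in the payload descriptor: base layer frames reference the previous
// base layer frame, layer sync frames reference only their base layer frame,
// and other frames reference the last frame on each lower or equal temporal
// layer. If any of these fields is missing, the picture-id/sequence-number
// fallback is used instead.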
RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameVp8(
    RtpFrameObject* frame) {
  absl::optional<RTPVideoHeader> video_header = frame->GetRtpVideoHeader();
  if (!video_header) {
    RTC_LOG(LS_WARNING)
        << "Failed to get codec header from frame, dropping frame.";
    return kDrop;
  }
  RTPVideoTypeHeader rtp_codec_header = video_header->video_type_header;

  const RTPVideoHeaderVP8& codec_header =
      absl::get<RTPVideoHeaderVP8>(rtp_codec_header);

  if (codec_header.pictureId == kNoPictureId ||
      codec_header.temporalIdx == kNoTemporalIdx ||
      codec_header.tl0PicIdx == kNoTl0PicIdx) {
    return ManageFramePidOrSeqNum(frame, codec_header.pictureId);
  }

  frame->id.picture_id = codec_header.pictureId % kPicIdLength;

  if (last_picture_id_ == -1)
    last_picture_id_ = frame->id.picture_id;

  // Find out if there has been a gap in fully received frames and save the
  // picture ids of those frames in |not_yet_received_frames_|.
  if (AheadOf<uint16_t, kPicIdLength>(frame->id.picture_id, last_picture_id_)) {
    do {
      last_picture_id_ = Add<kPicIdLength>(last_picture_id_, 1);
      not_yet_received_frames_.insert(last_picture_id_);
    } while (last_picture_id_ != frame->id.picture_id);
  }

  int64_t unwrapped_tl0 = tl0_unwrapper_.Unwrap(codec_header.tl0PicIdx);

  // Clean up info for base layers that are too old.
  int64_t old_tl0_pic_idx = unwrapped_tl0 - kMaxLayerInfo;
  auto clean_layer_info_to = layer_info_.lower_bound(old_tl0_pic_idx);
  layer_info_.erase(layer_info_.begin(), clean_layer_info_to);

  // Clean up info about not yet received frames that are too old.
  uint16_t old_picture_id =
      Subtract<kPicIdLength>(frame->id.picture_id, kMaxNotYetReceivedFrames);
  auto clean_frames_to = not_yet_received_frames_.lower_bound(old_picture_id);
  not_yet_received_frames_.erase(not_yet_received_frames_.begin(),
                                 clean_frames_to);

  if (frame->frame_type() == VideoFrameType::kVideoFrameKey) {
    frame->num_references = 0;
    layer_info_[unwrapped_tl0].fill(-1);
    UpdateLayerInfoVp8(frame, unwrapped_tl0, codec_header.temporalIdx);
    return kHandOff;
  }

  auto layer_info_it = layer_info_.find(
      codec_header.temporalIdx == 0 ? unwrapped_tl0 - 1 : unwrapped_tl0);

  // If we don't have the base layer frame yet, stash this frame.
  if (layer_info_it == layer_info_.end())
    return kStash;

  // A non-keyframe base layer frame has been received; copy the layer info
  // from the previous base layer frame and set a reference to the previous
  // base layer frame.
  if (codec_header.temporalIdx == 0) {
    layer_info_it =
        layer_info_.emplace(unwrapped_tl0, layer_info_it->second).first;
    frame->num_references = 1;
    frame->references[0] = layer_info_it->second[0];
    UpdateLayerInfoVp8(frame, unwrapped_tl0, codec_header.temporalIdx);
    return kHandOff;
  }

  // Layer sync frame, this frame only references its base layer frame.
  if (codec_header.layerSync) {
    frame->num_references = 1;
    frame->references[0] = layer_info_it->second[0];

    UpdateLayerInfoVp8(frame, unwrapped_tl0, codec_header.temporalIdx);
    return kHandOff;
  }

  // Find all references for this frame.
  frame->num_references = 0;
  for (uint8_t layer = 0; layer <= codec_header.temporalIdx; ++layer) {
    // If we have not yet received a previous frame on this temporal layer,
    // stash this frame.
    if (layer_info_it->second[layer] == -1)
      return kStash;

    // If the last frame on this layer is ahead of this frame it means that
    // a layer sync frame has been received after this frame for the same
    // base layer frame, drop this frame.
    if (AheadOf<uint16_t, kPicIdLength>(layer_info_it->second[layer],
                                        frame->id.picture_id)) {
      return kDrop;
    }

    // If we have not yet received a frame between this frame and the
    // referenced frame then we have to wait for that frame to be completed
    // first.
    auto not_received_frame_it =
        not_yet_received_frames_.upper_bound(layer_info_it->second[layer]);
    if (not_received_frame_it != not_yet_received_frames_.end() &&
        AheadOf<uint16_t, kPicIdLength>(frame->id.picture_id,
                                        *not_received_frame_it)) {
      return kStash;
    }

    if (!(AheadOf<uint16_t, kPicIdLength>(frame->id.picture_id,
                                          layer_info_it->second[layer]))) {
      RTC_LOG(LS_WARNING) << "Frame with picture id " << frame->id.picture_id
                          << " and packet range [" << frame->first_seq_num()
                          << ", " << frame->last_seq_num()
                          << "] already received, dropping frame.";
      return kDrop;
    }

    ++frame->num_references;
    frame->references[layer] = layer_info_it->second[layer];
  }

  UpdateLayerInfoVp8(frame, unwrapped_tl0, codec_header.temporalIdx);
  return kHandOff;
}

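// Records this frame as the newest frame on its temporal layer for this and
// any newer tracked TL0 groups, and converts the frame's picture ids to the
// unwrapped domain.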
void RtpFrameReferenceFinder::UpdateLayerInfoVp8(RtpFrameObject* frame,
                                                 int64_t unwrapped_tl0,
                                                 uint8_t temporal_idx) {
  auto layer_info_it = layer_info_.find(unwrapped_tl0);

  // Update this layer info and newer.
  while (layer_info_it != layer_info_.end()) {
    if (layer_info_it->second[temporal_idx] != -1 &&
        AheadOf<uint16_t, kPicIdLength>(layer_info_it->second[temporal_idx],
                                        frame->id.picture_id)) {
      // The frame was not newer; no subsequent layer info has to be updated.
      break;
    }

    layer_info_it->second[temporal_idx] = frame->id.picture_id;
    ++unwrapped_tl0;
    layer_info_it = layer_info_.find(unwrapped_tl0);
  }
  not_yet_received_frames_.erase(frame->id.picture_id);

  UnwrapPictureIds(frame);
}

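// VP9 references either come explicitly from the payload in flexible mode
// (pid_diff entries) or, in non-flexible mode, are derived from the
// group-of-frames (GOF) structure signaled in the scalability structure,
// indexed by the frame's distance from the GOF's starting picture id.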
RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameVp9(
    RtpFrameObject* frame) {
  absl::optional<RTPVideoHeader> video_header = frame->GetRtpVideoHeader();
  if (!video_header) {
    RTC_LOG(LS_WARNING)
        << "Failed to get codec header from frame, dropping frame.";
    return kDrop;
  }
  RTPVideoTypeHeader rtp_codec_header = video_header->video_type_header;

  const RTPVideoHeaderVP9& codec_header =
      absl::get<RTPVideoHeaderVP9>(rtp_codec_header);

  if (codec_header.picture_id == kNoPictureId ||
      codec_header.temporal_idx == kNoTemporalIdx) {
    return ManageFramePidOrSeqNum(frame, codec_header.picture_id);
  }

  frame->id.spatial_layer = codec_header.spatial_idx;
  frame->inter_layer_predicted = codec_header.inter_layer_predicted;
  frame->id.picture_id = codec_header.picture_id % kPicIdLength;

  if (last_picture_id_ == -1)
    last_picture_id_ = frame->id.picture_id;

  if (codec_header.flexible_mode) {
    frame->num_references = codec_header.num_ref_pics;
    for (size_t i = 0; i < frame->num_references; ++i) {
      frame->references[i] = Subtract<kPicIdLength>(frame->id.picture_id,
                                                    codec_header.pid_diff[i]);
    }

    UnwrapPictureIds(frame);
    return kHandOff;
  }

  if (codec_header.tl0_pic_idx == kNoTl0PicIdx) {
    RTC_LOG(LS_WARNING) << "TL0PICIDX is expected to be present in "
                           "non-flexible mode.";
    return kDrop;
  }

  GofInfo* info;
  int64_t unwrapped_tl0 = tl0_unwrapper_.Unwrap(codec_header.tl0_pic_idx);
  if (codec_header.ss_data_available) {
    if (codec_header.temporal_idx != 0) {
      RTC_LOG(LS_WARNING) << "Received scalability structure on a non base "
                             "layer frame. Scalability structure ignored.";
    } else {
      if (codec_header.gof.num_frames_in_gof > kMaxVp9FramesInGof) {
        return kDrop;
      }

      GofInfoVP9 gof = codec_header.gof;
      if (gof.num_frames_in_gof == 0) {
        RTC_LOG(LS_WARNING) << "Number of frames in GOF is zero. Assume "
                               "that stream has only one temporal layer.";
        gof.SetGofInfoVP9(kTemporalStructureMode1);
      }

      current_ss_idx_ = Add<kMaxGofSaved>(current_ss_idx_, 1);
      scalability_structures_[current_ss_idx_] = gof;
      scalability_structures_[current_ss_idx_].pid_start = frame->id.picture_id;
      gof_info_.emplace(unwrapped_tl0,
                        GofInfo(&scalability_structures_[current_ss_idx_],
                                frame->id.picture_id));
    }

    const auto gof_info_it = gof_info_.find(unwrapped_tl0);
    if (gof_info_it == gof_info_.end())
      return kStash;

    info = &gof_info_it->second;

    if (frame->frame_type() == VideoFrameType::kVideoFrameKey) {
      frame->num_references = 0;
      FrameReceivedVp9(frame->id.picture_id, info);
      UnwrapPictureIds(frame);
      return kHandOff;
    }
  } else if (frame->frame_type() == VideoFrameType::kVideoFrameKey) {
    if (frame->id.spatial_layer == 0) {
      RTC_LOG(LS_WARNING) << "Received keyframe without scalability structure";
      return kDrop;
    }
    const auto gof_info_it = gof_info_.find(unwrapped_tl0);
    if (gof_info_it == gof_info_.end())
      return kStash;

    info = &gof_info_it->second;

    if (frame->frame_type() == VideoFrameType::kVideoFrameKey) {
      frame->num_references = 0;
      FrameReceivedVp9(frame->id.picture_id, info);
      UnwrapPictureIds(frame);
      return kHandOff;
    }
  } else {
    auto gof_info_it = gof_info_.find(
        (codec_header.temporal_idx == 0) ? unwrapped_tl0 - 1 : unwrapped_tl0);

    // Gof info for this frame is not available yet, stash this frame.
    if (gof_info_it == gof_info_.end())
      return kStash;

    if (codec_header.temporal_idx == 0) {
      gof_info_it = gof_info_
                        .emplace(unwrapped_tl0, GofInfo(gof_info_it->second.gof,
                                                        frame->id.picture_id))
                        .first;
    }

    info = &gof_info_it->second;
  }

  // Clean up info for base layers that are too old.
  int64_t old_tl0_pic_idx = unwrapped_tl0 - kMaxGofSaved;
  auto clean_gof_info_to = gof_info_.lower_bound(old_tl0_pic_idx);
  gof_info_.erase(gof_info_.begin(), clean_gof_info_to);

  FrameReceivedVp9(frame->id.picture_id, info);

  // Make sure we don't miss any frame that could potentially have the
  // up switch flag set.
  if (MissingRequiredFrameVp9(frame->id.picture_id, *info))
    return kStash;

  if (codec_header.temporal_up_switch)
    up_switch_.emplace(frame->id.picture_id, codec_header.temporal_idx);

  // Clean out old info about up switch frames.
  uint16_t old_picture_id = Subtract<kPicIdLength>(frame->id.picture_id, 50);
  auto up_switch_erase_to = up_switch_.lower_bound(old_picture_id);
  up_switch_.erase(up_switch_.begin(), up_switch_erase_to);

  size_t diff = ForwardDiff<uint16_t, kPicIdLength>(info->gof->pid_start,
                                                    frame->id.picture_id);
  size_t gof_idx = diff % info->gof->num_frames_in_gof;

  // Populate references according to the scalability structure.
  frame->num_references = info->gof->num_ref_pics[gof_idx];
  for (size_t i = 0; i < frame->num_references; ++i) {
    frame->references[i] = Subtract<kPicIdLength>(
        frame->id.picture_id, info->gof->pid_diff[gof_idx][i]);

    // If this is a reference to a frame earlier than the last up switch point,
    // then ignore this reference.
    if (UpSwitchInIntervalVp9(frame->id.picture_id, codec_header.temporal_idx,
                              frame->references[i])) {
      --frame->num_references;
    }
  }

  // Override GOF references.
  if (!codec_header.inter_pic_predicted) {
    frame->num_references = 0;
  }

  UnwrapPictureIds(frame);
  return kHandOff;
}

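// Returns true if, for any reference of |picture_id|, a frame on a lower
// temporal layer in the interval (|ref_pid|, |picture_id|) is still missing,
// since such a frame could carry a temporal up-switch that must not be
// skipped.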
bool RtpFrameReferenceFinder::MissingRequiredFrameVp9(uint16_t picture_id,
                                                      const GofInfo& info) {
  size_t diff =
      ForwardDiff<uint16_t, kPicIdLength>(info.gof->pid_start, picture_id);
  size_t gof_idx = diff % info.gof->num_frames_in_gof;
  size_t temporal_idx = info.gof->temporal_idx[gof_idx];

  if (temporal_idx >= kMaxTemporalLayers) {
    RTC_LOG(LS_WARNING) << "At most " << kMaxTemporalLayers << " temporal "
                        << "layers are supported.";
    return true;
  }

  // For every reference this frame has, check if there is a frame missing in
  // the interval (|ref_pid|, |picture_id|) in any of the lower temporal
  // layers. If so, we are missing a required frame.
  uint8_t num_references = info.gof->num_ref_pics[gof_idx];
  for (size_t i = 0; i < num_references; ++i) {
    uint16_t ref_pid =
        Subtract<kPicIdLength>(picture_id, info.gof->pid_diff[gof_idx][i]);
    for (size_t l = 0; l < temporal_idx; ++l) {
      auto missing_frame_it = missing_frames_for_layer_[l].lower_bound(ref_pid);
      if (missing_frame_it != missing_frames_for_layer_[l].end() &&
          AheadOf<uint16_t, kPicIdLength>(picture_id, *missing_frame_it)) {
        return true;
      }
    }
  }
  return false;
}

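// Bookkeeping for missing frames: picture ids skipped since the last frame
// seen for this GOF are marked missing on their temporal layer, while a frame
// that fills a gap is removed from the missing set.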
void RtpFrameReferenceFinder::FrameReceivedVp9(uint16_t picture_id,
                                               GofInfo* info) {
  int last_picture_id = info->last_picture_id;
  size_t gof_size = std::min(info->gof->num_frames_in_gof, kMaxVp9FramesInGof);

  // If there is a gap, find which temporal layer the missing frames
  // belong to and add the frame as missing for that temporal layer.
  // Otherwise, remove this frame from the set of missing frames.
  if (AheadOf<uint16_t, kPicIdLength>(picture_id, last_picture_id)) {
    size_t diff = ForwardDiff<uint16_t, kPicIdLength>(info->gof->pid_start,
                                                      last_picture_id);
    size_t gof_idx = diff % gof_size;

    last_picture_id = Add<kPicIdLength>(last_picture_id, 1);
    while (last_picture_id != picture_id) {
      gof_idx = (gof_idx + 1) % gof_size;
      RTC_CHECK(gof_idx < kMaxVp9FramesInGof);

      size_t temporal_idx = info->gof->temporal_idx[gof_idx];
      if (temporal_idx >= kMaxTemporalLayers) {
        RTC_LOG(LS_WARNING) << "At most " << kMaxTemporalLayers << " temporal "
                            << "layers are supported.";
        return;
      }

      missing_frames_for_layer_[temporal_idx].insert(last_picture_id);
      last_picture_id = Add<kPicIdLength>(last_picture_id, 1);
    }

    info->last_picture_id = last_picture_id;
  } else {
    size_t diff =
        ForwardDiff<uint16_t, kPicIdLength>(info->gof->pid_start, picture_id);
    size_t gof_idx = diff % gof_size;
    RTC_CHECK(gof_idx < kMaxVp9FramesInGof);

    size_t temporal_idx = info->gof->temporal_idx[gof_idx];
    if (temporal_idx >= kMaxTemporalLayers) {
      RTC_LOG(LS_WARNING) << "At most " << kMaxTemporalLayers << " temporal "
                          << "layers are supported.";
      return;
    }

    missing_frames_for_layer_[temporal_idx].erase(picture_id);
  }
}

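// Returns true if a frame marked as a temporal up-switch on a layer below
// |temporal_idx| lies in the interval (|pid_ref|, |picture_id|), in which
// case the reference to |pid_ref| can be ignored.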
bool RtpFrameReferenceFinder::UpSwitchInIntervalVp9(uint16_t picture_id,
                                                    uint8_t temporal_idx,
                                                    uint16_t pid_ref) {
  for (auto up_switch_it = up_switch_.upper_bound(pid_ref);
       up_switch_it != up_switch_.end() &&
       AheadOf<uint16_t, kPicIdLength>(picture_id, up_switch_it->first);
       ++up_switch_it) {
    if (up_switch_it->second < temporal_idx)
      return true;
  }

  return false;
}

void RtpFrameReferenceFinder::UnwrapPictureIds(RtpFrameObject* frame) {
  for (size_t i = 0; i < frame->num_references; ++i)
    frame->references[i] = unwrapper_.Unwrap(frame->references[i]);
  frame->id.picture_id = unwrapper_.Unwrap(frame->id.picture_id);
}

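// H264 references are derived from the frame marking data (temporal id,
// base-layer sync flag and TL0PICIDX), mirroring the VP8 logic but using
// sequence numbers as picture ids. Without frame marking, or without a
// temporal id, the frame falls back to the plain sequence-number path.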
RtpFrameReferenceFinder::FrameDecision
RtpFrameReferenceFinder::ManageFrameH264(RtpFrameObject* frame) {
  absl::optional<FrameMarking> rtp_frame_marking = frame->GetFrameMarking();
  if (!rtp_frame_marking) {
    return ManageFramePidOrSeqNum(frame, kNoPictureId);
  }

  uint8_t tid = rtp_frame_marking->temporal_id;
  bool blSync = rtp_frame_marking->base_layer_sync;

  if (tid == kNoTemporalIdx)
    return ManageFramePidOrSeqNum(frame, kNoPictureId);

  frame->id.picture_id = frame->last_seq_num();

  if (frame->frame_type() == VideoFrameType::kVideoFrameKey) {
    // For H264, use last_seq_num_gop_ to simply store the last picture id
    // as a pair of unpadded and padded sequence numbers.
    if (last_seq_num_gop_.empty()) {
      last_seq_num_gop_.insert(std::make_pair(
          0, std::make_pair(frame->id.picture_id, frame->id.picture_id)));
    }
  }

  // Stash if we have no keyframe yet.
  if (last_seq_num_gop_.empty())
    return kStash;

  // Check for a gap in sequence numbers and store the missing sequence
  // numbers in |not_yet_received_seq_num_|.
  if (frame->frame_type() == VideoFrameType::kVideoFrameDelta) {
    uint16_t last_pic_id_padded = last_seq_num_gop_.begin()->second.second;
    if (AheadOf<uint16_t>(frame->id.picture_id, last_pic_id_padded)) {
      do {
        last_pic_id_padded = last_pic_id_padded + 1;
        not_yet_received_seq_num_.insert(last_pic_id_padded);
      } while (last_pic_id_padded != frame->id.picture_id);
    }
  }

  int64_t unwrapped_tl0 = tl0_unwrapper_.Unwrap(rtp_frame_marking->tl0_pic_idx);

  // Clean up info for base layers that are too old.
  int64_t old_tl0_pic_idx = unwrapped_tl0 - kMaxLayerInfo;
  auto clean_layer_info_to = layer_info_.lower_bound(old_tl0_pic_idx);
  layer_info_.erase(layer_info_.begin(), clean_layer_info_to);

  // Clean up info about not yet received frames that are too old.
  uint16_t old_picture_id = frame->id.picture_id - kMaxNotYetReceivedFrames * 2;
  auto clean_frames_to = not_yet_received_seq_num_.lower_bound(old_picture_id);
  not_yet_received_seq_num_.erase(not_yet_received_seq_num_.begin(),
                                  clean_frames_to);

  if (frame->frame_type() == VideoFrameType::kVideoFrameKey) {
    frame->num_references = 0;
    layer_info_[unwrapped_tl0].fill(-1);
    UpdateDataH264(frame, unwrapped_tl0, tid);
    return kHandOff;
  }

  auto layer_info_it = layer_info_.find(
      tid == 0 ? unwrapped_tl0 - 1 : unwrapped_tl0);

  // Stash if we have no base layer frame yet.
  if (layer_info_it == layer_info_.end())
    return kStash;

  // Base layer frame. Copy layer info from the previous base layer frame.
  if (tid == 0) {
    layer_info_it = layer_info_.insert(
        std::make_pair(unwrapped_tl0, layer_info_it->second)).first;
    frame->num_references = 1;
    frame->references[0] = layer_info_it->second[0];
    UpdateDataH264(frame, unwrapped_tl0, tid);
    return kHandOff;
  }

  // This frame only references its base layer frame.
  if (blSync) {
    frame->num_references = 1;
    frame->references[0] = layer_info_it->second[0];
    UpdateDataH264(frame, unwrapped_tl0, tid);
    return kHandOff;
  }

  // Find all references for a general frame.
  frame->num_references = 0;
  for (uint8_t layer = 0; layer <= tid; ++layer) {
    // Stash if we have not yet received frames on this temporal layer.
    if (layer_info_it->second[layer] == -1)
      return kStash;

    // Drop if the last frame on this layer is ahead of this frame. A layer
    // sync frame was received after this frame for the same base layer frame.
    uint16_t last_frame_in_layer = layer_info_it->second[layer];
    if (AheadOf<uint16_t>(last_frame_in_layer, frame->id.picture_id))
      return kDrop;

    // Stash and wait for a missing frame between this frame and the
    // reference.
    auto not_received_seq_num_it =
        not_yet_received_seq_num_.upper_bound(last_frame_in_layer);
    if (not_received_seq_num_it != not_yet_received_seq_num_.end() &&
        AheadOf<uint16_t>(frame->id.picture_id, *not_received_seq_num_it)) {
      return kStash;
    }

    if (!(AheadOf<uint16_t>(frame->id.picture_id, last_frame_in_layer))) {
      RTC_LOG(LS_WARNING) << "Frame with picture id " << frame->id.picture_id
                          << " and packet range [" << frame->first_seq_num()
                          << ", " << frame->last_seq_num()
                          << "] already received, dropping frame.";
      return kDrop;
    }

    ++frame->num_references;
    frame->references[layer] = last_frame_in_layer;
  }

  UpdateDataH264(frame, unwrapped_tl0, tid);
  return kHandOff;
}

void RtpFrameReferenceFinder::UpdateLastPictureIdWithPaddingH264() {
  auto seq_num_it = last_seq_num_gop_.begin();

  // Check if next sequence number is in a stashed padding packet.
  uint16_t next_padded_seq_num = seq_num_it->second.second + 1;
  auto padding_seq_num_it = stashed_padding_.lower_bound(next_padded_seq_num);

  // Check for more consecutive padding packets to increment
  // the "last-picture-id-with-padding" and remove the stashed packets.
  while (padding_seq_num_it != stashed_padding_.end() &&
         *padding_seq_num_it == next_padded_seq_num) {
    seq_num_it->second.second = next_padded_seq_num;
    ++next_padded_seq_num;
    padding_seq_num_it = stashed_padding_.erase(padding_seq_num_it);
  }
}

void RtpFrameReferenceFinder::UpdateLayerInfoH264(RtpFrameObject* frame,
                                                  int64_t unwrapped_tl0,
                                                  uint8_t temporal_idx) {
  auto layer_info_it = layer_info_.find(unwrapped_tl0);

  // Update this layer info and newer.
  while (layer_info_it != layer_info_.end()) {
    if (layer_info_it->second[temporal_idx] != -1 &&
        AheadOf<uint16_t>(layer_info_it->second[temporal_idx],
                          frame->id.picture_id)) {
      // Not a newer frame. No subsequent layer info needs updating.
      break;
    }

    layer_info_it->second[temporal_idx] = frame->id.picture_id;
    ++unwrapped_tl0;
    layer_info_it = layer_info_.find(unwrapped_tl0);
  }

  for (size_t i = 0; i < frame->num_references; ++i)
    frame->references[i] = rtp_seq_num_unwrapper_.Unwrap(frame->references[i]);
  frame->id.picture_id = rtp_seq_num_unwrapper_.Unwrap(frame->id.picture_id);
}

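// Bookkeeping done when an H264 frame is handed off: advances the stored last
// picture id (including padding), updates the per-temporal-layer info, and
// clears this frame's packets from |not_yet_received_seq_num_|.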
void RtpFrameReferenceFinder::UpdateDataH264(RtpFrameObject* frame,
                                             int64_t unwrapped_tl0,
                                             uint8_t temporal_idx) {
  // Update last_seq_num_gop_ entry for last picture id.
  auto seq_num_it = last_seq_num_gop_.begin();
  uint16_t last_pic_id = seq_num_it->second.first;
  if (AheadOf<uint16_t>(frame->id.picture_id, last_pic_id)) {
    seq_num_it->second.first = frame->id.picture_id;
    seq_num_it->second.second = frame->id.picture_id;
  }
  UpdateLastPictureIdWithPaddingH264();

  UpdateLayerInfoH264(frame, unwrapped_tl0, temporal_idx);

  // Remove any current packets from |not_yet_received_seq_num_|.
  uint16_t last_seq_num_padded = seq_num_it->second.second;
  for (uint16_t n = frame->first_seq_num(); AheadOrAt(last_seq_num_padded, n);
       ++n) {
    not_yet_received_seq_num_.erase(n);
  }
}

}  // namespace video_coding
}  // namespace webrtc