/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/video_coding/codecs/test/videoprocessor.h"

#include <string.h>

#include <algorithm>
#include <cstddef>
#include <limits>
#include <memory>
#include <utility>

#include "api/scoped_refptr.h"
#include "api/video/builtin_video_bitrate_allocator_factory.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_bitrate_allocator_factory.h"
#include "api/video/video_frame_buffer.h"
#include "api/video/video_rotation.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
#include "common_video/h264/h264_common.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/video_coding/codecs/interface/common_constants.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "rtc_base/checks.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/time_utils.h"
#include "test/gtest.h"
#include "third_party/libyuv/include/libyuv/compare.h"
#include "third_party/libyuv/include/libyuv/scale.h"

namespace webrtc {
namespace test {

namespace {
const int kMsToRtpTimestamp = kVideoPayloadTypeFrequency / 1000;
const int kMaxBufferedInputFrames = 20;

const VideoEncoder::Capabilities kCapabilities(false);

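// Returns the payload size of the largest NAL unit in an encoded H.264 frame,
// or 0 for other codecs. The result is stored per frame in
// FrameStatistics::max_nalu_size_bytes.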
size_t GetMaxNaluSizeBytes(const EncodedImage& encoded_frame,
                           const VideoCodecTestFixture::Config& config) {
  if (config.codec_settings.codecType != kVideoCodecH264)
    return 0;

  std::vector<webrtc::H264::NaluIndex> nalu_indices =
      webrtc::H264::FindNaluIndices(encoded_frame.data(), encoded_frame.size());

  RTC_CHECK(!nalu_indices.empty());

  size_t max_size = 0;
  for (const webrtc::H264::NaluIndex& index : nalu_indices)
    max_size = std::max(max_size, index.payload_size);

  return max_size;
}

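// Extracts the temporal layer index from codec-specific info (VP8/VP9 only).
// Frames without temporal layering are mapped to temporal layer 0.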
size_t GetTemporalLayerIndex(const CodecSpecificInfo& codec_specific) {
  size_t temporal_idx = 0;
  if (codec_specific.codecType == kVideoCodecVP8) {
    temporal_idx = codec_specific.codecSpecific.VP8.temporalIdx;
  } else if (codec_specific.codecType == kVideoCodecVP9) {
    temporal_idx = codec_specific.codecSpecific.VP9.temporal_idx;
  }
  if (temporal_idx == kNoTemporalIdx) {
    temporal_idx = 0;
  }
  return temporal_idx;
}

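// Converts a start/stop pair of rtc::TimeNanos() readings into an elapsed
// time in microseconds, checked to fit in an int.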
int GetElapsedTimeMicroseconds(int64_t start_ns, int64_t stop_ns) {
  int64_t diff_us = (stop_ns - start_ns) / rtc::kNumNanosecsPerMicrosec;
  RTC_DCHECK_GE(diff_us, std::numeric_limits<int>::min());
  RTC_DCHECK_LE(diff_us, std::numeric_limits<int>::max());
  return static_cast<int>(diff_us);
}

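// Computes PSNR (per plane and combined) and optionally SSIM between a
// reference frame and a decoded frame. If the decoded frame is smaller, the
// reference is downscaled to the decoded resolution before comparison.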
void CalculateFrameQuality(const I420BufferInterface& ref_buffer,
                           const I420BufferInterface& dec_buffer,
                           VideoCodecTestStats::FrameStatistics* frame_stat,
                           bool calc_ssim) {
  if (ref_buffer.width() != dec_buffer.width() ||
      ref_buffer.height() != dec_buffer.height()) {
    RTC_CHECK_GE(ref_buffer.width(), dec_buffer.width());
    RTC_CHECK_GE(ref_buffer.height(), dec_buffer.height());
    // Downscale reference frame.
    rtc::scoped_refptr<I420Buffer> scaled_buffer =
        I420Buffer::Create(dec_buffer.width(), dec_buffer.height());
    I420Scale(ref_buffer.DataY(), ref_buffer.StrideY(), ref_buffer.DataU(),
              ref_buffer.StrideU(), ref_buffer.DataV(), ref_buffer.StrideV(),
              ref_buffer.width(), ref_buffer.height(),
              scaled_buffer->MutableDataY(), scaled_buffer->StrideY(),
              scaled_buffer->MutableDataU(), scaled_buffer->StrideU(),
              scaled_buffer->MutableDataV(), scaled_buffer->StrideV(),
              scaled_buffer->width(), scaled_buffer->height(),
              libyuv::kFilterBox);

    CalculateFrameQuality(*scaled_buffer, dec_buffer, frame_stat, calc_ssim);
  } else {
    const uint64_t sse_y = libyuv::ComputeSumSquareErrorPlane(
        dec_buffer.DataY(), dec_buffer.StrideY(), ref_buffer.DataY(),
        ref_buffer.StrideY(), dec_buffer.width(), dec_buffer.height());

    const uint64_t sse_u = libyuv::ComputeSumSquareErrorPlane(
        dec_buffer.DataU(), dec_buffer.StrideU(), ref_buffer.DataU(),
        ref_buffer.StrideU(), dec_buffer.width() / 2, dec_buffer.height() / 2);

    const uint64_t sse_v = libyuv::ComputeSumSquareErrorPlane(
        dec_buffer.DataV(), dec_buffer.StrideV(), ref_buffer.DataV(),
        ref_buffer.StrideV(), dec_buffer.width() / 2, dec_buffer.height() / 2);

    const size_t num_y_samples = dec_buffer.width() * dec_buffer.height();
    const size_t num_u_samples =
        dec_buffer.width() / 2 * dec_buffer.height() / 2;

    frame_stat->psnr_y = libyuv::SumSquareErrorToPsnr(sse_y, num_y_samples);
    frame_stat->psnr_u = libyuv::SumSquareErrorToPsnr(sse_u, num_u_samples);
    frame_stat->psnr_v = libyuv::SumSquareErrorToPsnr(sse_v, num_u_samples);
    frame_stat->psnr = libyuv::SumSquareErrorToPsnr(
        sse_y + sse_u + sse_v, num_y_samples + 2 * num_u_samples);

    if (calc_ssim) {
      frame_stat->ssim = I420SSIM(ref_buffer, dec_buffer);
    }
  }
}

}  // namespace

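// Typical driving pattern, as a rough sketch (the exact call sequence is
// owned by the test fixture and may differ):
//   1. Construct the VideoProcessor on a task queue.
//   2. Call SetRates() with the initial target bitrate and framerate.
//   3. Call ProcessFrame() once per input frame, optionally interleaved with
//      further SetRates() calls to simulate rate changes.
//   4. Destroy the processor (which runs Finalize()) to flush statistics for
//      trailing dropped frames.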
VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder,
                               VideoDecoderList* decoders,
                               FrameReader* input_frame_reader,
                               const VideoCodecTestFixture::Config& config,
                               VideoCodecTestStatsImpl* stats,
                               IvfFileWriterMap* encoded_frame_writers,
                               FrameWriterList* decoded_frame_writers)
    : config_(config),
      num_simulcast_or_spatial_layers_(
          std::max(config_.NumberOfSimulcastStreams(),
                   config_.NumberOfSpatialLayers())),
      analyze_frame_quality_(!config_.measure_cpu),
      stats_(stats),
      encoder_(encoder),
      decoders_(decoders),
      bitrate_allocator_(
          CreateBuiltinVideoBitrateAllocatorFactory()
              ->CreateVideoBitrateAllocator(config_.codec_settings)),
      framerate_fps_(0),
      encode_callback_(this),
      input_frame_reader_(input_frame_reader),
      merged_encoded_frames_(num_simulcast_or_spatial_layers_),
      encoded_frame_writers_(encoded_frame_writers),
      decoded_frame_writers_(decoded_frame_writers),
      last_inputed_frame_num_(0),
      last_inputed_timestamp_(0),
      first_encoded_frame_(num_simulcast_or_spatial_layers_, true),
      last_encoded_frame_num_(num_simulcast_or_spatial_layers_),
      first_decoded_frame_(num_simulcast_or_spatial_layers_, true),
      last_decoded_frame_num_(num_simulcast_or_spatial_layers_),
      last_decoded_frame_buffer_(num_simulcast_or_spatial_layers_),
      post_encode_time_ns_(0),
      is_finalized_(false) {
  // Sanity checks.
  RTC_CHECK(TaskQueueBase::Current())
      << "VideoProcessor must be run on a task queue.";
  RTC_CHECK(stats_);
  RTC_CHECK(encoder_);
  RTC_CHECK(decoders_);
  RTC_CHECK_EQ(decoders_->size(), num_simulcast_or_spatial_layers_);
  RTC_CHECK(input_frame_reader_);
  RTC_CHECK(encoded_frame_writers_);
  RTC_CHECK(!decoded_frame_writers ||
            decoded_frame_writers->size() == num_simulcast_or_spatial_layers_);

  // Set up the required callbacks for the encoder and decoders and
  // initialize them.
  RTC_CHECK_EQ(encoder_->RegisterEncodeCompleteCallback(&encode_callback_),
               WEBRTC_VIDEO_CODEC_OK);

  // Initialize codecs so that they are ready to receive frames.
  RTC_CHECK_EQ(encoder_->InitEncode(
                   &config_.codec_settings,
                   VideoEncoder::Settings(
                       kCapabilities, static_cast<int>(config_.NumberOfCores()),
                       config_.max_payload_size_bytes)),
               WEBRTC_VIDEO_CODEC_OK);

  for (size_t i = 0; i < num_simulcast_or_spatial_layers_; ++i) {
    decode_callback_.push_back(
        std::make_unique<VideoProcessorDecodeCompleteCallback>(this, i));
    RTC_CHECK_EQ(
        decoders_->at(i)->InitDecode(&config_.codec_settings,
                                     static_cast<int>(config_.NumberOfCores())),
        WEBRTC_VIDEO_CODEC_OK);
    RTC_CHECK_EQ(decoders_->at(i)->RegisterDecodeCompleteCallback(
                     decode_callback_.at(i).get()),
                 WEBRTC_VIDEO_CODEC_OK);
  }
}

VideoProcessor::~VideoProcessor() {
  RTC_DCHECK_RUN_ON(&sequence_checker_);

  if (!is_finalized_) {
    Finalize();
  }

  // Explicitly reset codecs, in case they don't do that themselves when they
  // go out of scope.
  RTC_CHECK_EQ(encoder_->Release(), WEBRTC_VIDEO_CODEC_OK);
  encoder_->RegisterEncodeCompleteCallback(nullptr);
  for (auto& decoder : *decoders_) {
    RTC_CHECK_EQ(decoder->Release(), WEBRTC_VIDEO_CODEC_OK);
    decoder->RegisterDecodeCompleteCallback(nullptr);
  }

  // Sanity check.
  RTC_CHECK_LE(input_frames_.size(), kMaxBufferedInputFrames);
}

void VideoProcessor::ProcessFrame() {
  RTC_DCHECK_RUN_ON(&sequence_checker_);
  RTC_DCHECK(!is_finalized_);

  const size_t frame_number = last_inputed_frame_num_++;

  // Get input frame and store it for future quality calculation.
  rtc::scoped_refptr<I420BufferInterface> buffer =
      input_frame_reader_->ReadFrame();
  RTC_CHECK(buffer) << "Tried to read too many frames from the file.";
  const size_t timestamp =
      last_inputed_timestamp_ +
      static_cast<size_t>(kVideoPayloadTypeFrequency / framerate_fps_);
  VideoFrame input_frame =
      VideoFrame::Builder()
          .set_video_frame_buffer(buffer)
          .set_timestamp_rtp(static_cast<uint32_t>(timestamp))
          .set_timestamp_ms(static_cast<int64_t>(timestamp / kMsToRtpTimestamp))
          .set_rotation(webrtc::kVideoRotation_0)
          .build();
  // Store input frame as a reference for quality calculations.
  if (config_.decode && !config_.measure_cpu) {
    if (input_frames_.size() == kMaxBufferedInputFrames) {
      input_frames_.erase(input_frames_.begin());
    }

    if (config_.reference_width != -1 && config_.reference_height != -1 &&
        (input_frame.width() != config_.reference_width ||
         input_frame.height() != config_.reference_height)) {
      rtc::scoped_refptr<I420Buffer> scaled_buffer = I420Buffer::Create(
          config_.codec_settings.width, config_.codec_settings.height);
      scaled_buffer->ScaleFrom(*input_frame.video_frame_buffer()->ToI420());

      VideoFrame scaled_reference_frame = input_frame;
      scaled_reference_frame.set_video_frame_buffer(scaled_buffer);
      input_frames_.emplace(frame_number, scaled_reference_frame);

      if (config_.reference_width == config_.codec_settings.width &&
          config_.reference_height == config_.codec_settings.height) {
        // Both encoding and comparison use the same down-scale factor; reuse
        // it for the encoder below.
        input_frame = scaled_reference_frame;
      }
    } else {
      input_frames_.emplace(frame_number, input_frame);
    }
  }
  last_inputed_timestamp_ = timestamp;

  post_encode_time_ns_ = 0;

  // Create frame statistics objects for all simulcast/spatial layers.
  for (size_t i = 0; i < num_simulcast_or_spatial_layers_; ++i) {
    FrameStatistics frame_stat(frame_number, timestamp, i);
    stats_->AddFrame(frame_stat);
  }

  // For the highest measurement accuracy of the encode time, the start/stop
  // time recordings should wrap the Encode call as tightly as possible.
  const int64_t encode_start_ns = rtc::TimeNanos();
  for (size_t i = 0; i < num_simulcast_or_spatial_layers_; ++i) {
    FrameStatistics* frame_stat = stats_->GetFrame(frame_number, i);
    frame_stat->encode_start_ns = encode_start_ns;
  }

  if (input_frame.width() != config_.codec_settings.width ||
      input_frame.height() != config_.codec_settings.height) {
    rtc::scoped_refptr<I420Buffer> scaled_buffer = I420Buffer::Create(
        config_.codec_settings.width, config_.codec_settings.height);
    scaled_buffer->ScaleFrom(*input_frame.video_frame_buffer()->ToI420());
    input_frame.set_video_frame_buffer(scaled_buffer);
  }

  // Encode.
  const std::vector<VideoFrameType> frame_types =
      (frame_number == 0)
          ? std::vector<VideoFrameType>{VideoFrameType::kVideoFrameKey}
          : std::vector<VideoFrameType>{VideoFrameType::kVideoFrameDelta};
  const int encode_return_code = encoder_->Encode(input_frame, &frame_types);
  for (size_t i = 0; i < num_simulcast_or_spatial_layers_; ++i) {
    FrameStatistics* frame_stat = stats_->GetFrame(frame_number, i);
    frame_stat->encode_return_code = encode_return_code;
  }
}

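// Converts the target bitrate (kbps) and framerate into a layer-aware
// VideoBitrateAllocation and forwards both to the encoder. The allocation is
// cached so that per-layer target bitrates can be attached to frame
// statistics in FrameEncoded().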
void VideoProcessor::SetRates(size_t bitrate_kbps, double framerate_fps) {
  RTC_DCHECK_RUN_ON(&sequence_checker_);
  RTC_DCHECK(!is_finalized_);

  framerate_fps_ = framerate_fps;
  bitrate_allocation_ =
      bitrate_allocator_->Allocate(VideoBitrateAllocationParameters(
          static_cast<uint32_t>(bitrate_kbps * 1000), framerate_fps_));
  encoder_->SetRates(
      VideoEncoder::RateControlParameters(bitrate_allocation_, framerate_fps_));
}

int32_t VideoProcessor::VideoProcessorDecodeCompleteCallback::Decoded(
    VideoFrame& image) {
  // Post the callback to the right task queue, if needed.
  if (!task_queue_->IsCurrent()) {
    // There might be a limited number of output buffers; make a copy to make
    // sure we don't block the decoder.
    VideoFrame copy = VideoFrame::Builder()
                          .set_video_frame_buffer(I420Buffer::Copy(
                              *image.video_frame_buffer()->ToI420()))
                          .set_rotation(image.rotation())
                          .set_timestamp_us(image.timestamp_us())
                          .set_id(image.id())
                          .build();
    copy.set_timestamp(image.timestamp());

    task_queue_->PostTask(ToQueuedTask([this, copy]() {
      video_processor_->FrameDecoded(copy, simulcast_svc_idx_);
    }));
    return 0;
  }
  video_processor_->FrameDecoded(image, simulcast_svc_idx_);
  return 0;
}

void VideoProcessor::FrameEncoded(
    const webrtc::EncodedImage& encoded_image,
    const webrtc::CodecSpecificInfo& codec_specific) {
  RTC_DCHECK_RUN_ON(&sequence_checker_);

  // For the highest measurement accuracy of the encode time, the start/stop
  // time recordings should wrap the Encode call as tightly as possible.
  const int64_t encode_stop_ns = rtc::TimeNanos();

  const VideoCodecType codec_type = codec_specific.codecType;
  if (config_.encoded_frame_checker) {
    config_.encoded_frame_checker->CheckEncodedFrame(codec_type, encoded_image);
  }

  // Layer metadata.
  size_t spatial_idx = encoded_image.SpatialIndex().value_or(0);
  size_t temporal_idx = GetTemporalLayerIndex(codec_specific);

  FrameStatistics* frame_stat =
      stats_->GetFrameWithTimestamp(encoded_image.Timestamp(), spatial_idx);
  const size_t frame_number = frame_stat->frame_number;

  // Ensure that the encode order is monotonically increasing, within this
  // simulcast/spatial layer.
  RTC_CHECK(first_encoded_frame_[spatial_idx] ||
            last_encoded_frame_num_[spatial_idx] < frame_number);

  // Ensure SVC spatial layers are delivered in ascending order.
  const size_t num_spatial_layers = config_.NumberOfSpatialLayers();
  if (!first_encoded_frame_[spatial_idx] && num_spatial_layers > 1) {
    for (size_t i = 0; i < spatial_idx; ++i) {
      RTC_CHECK_LE(last_encoded_frame_num_[i], frame_number);
    }
    for (size_t i = spatial_idx + 1; i < num_simulcast_or_spatial_layers_;
         ++i) {
      RTC_CHECK_GT(frame_number, last_encoded_frame_num_[i]);
    }
  }
  first_encoded_frame_[spatial_idx] = false;
  last_encoded_frame_num_[spatial_idx] = frame_number;

  // Update frame statistics.
  frame_stat->encoding_successful = true;
  frame_stat->encode_time_us = GetElapsedTimeMicroseconds(
      frame_stat->encode_start_ns, encode_stop_ns - post_encode_time_ns_);
  frame_stat->target_bitrate_kbps =
      bitrate_allocation_.GetTemporalLayerSum(spatial_idx, temporal_idx) / 1000;
  frame_stat->target_framerate_fps = framerate_fps_;
  frame_stat->length_bytes = encoded_image.size();
  frame_stat->frame_type = encoded_image._frameType;
  frame_stat->temporal_idx = temporal_idx;
  frame_stat->max_nalu_size_bytes = GetMaxNaluSizeBytes(encoded_image, config_);
  frame_stat->qp = encoded_image.qp_;

  if (codec_type == kVideoCodecVP9) {
    const CodecSpecificInfoVP9& vp9_info = codec_specific.codecSpecific.VP9;
    frame_stat->inter_layer_predicted = vp9_info.inter_layer_predicted;
    frame_stat->non_ref_for_inter_layer_pred =
        vp9_info.non_ref_for_inter_layer_pred;
  } else {
    frame_stat->inter_layer_predicted = false;
    frame_stat->non_ref_for_inter_layer_pred = true;
  }

  const webrtc::EncodedImage* encoded_image_for_decode = &encoded_image;
  if (config_.decode || !encoded_frame_writers_->empty()) {
    if (num_spatial_layers > 1) {
      encoded_image_for_decode = BuildAndStoreSuperframe(
          encoded_image, codec_type, frame_number, spatial_idx,
          frame_stat->inter_layer_predicted);
    }
  }

  if (config_.decode) {
    DecodeFrame(*encoded_image_for_decode, spatial_idx);

    if (codec_specific.end_of_picture && num_spatial_layers > 1) {
      // If inter-layer prediction is enabled and an upper layer was dropped,
      // then the base layer should be passed to the upper-layer decoder.
      // Otherwise that decoder won't be able to decode the next superframe.
      const EncodedImage* base_image = nullptr;
      const FrameStatistics* base_stat = nullptr;
      for (size_t i = 0; i < num_spatial_layers; ++i) {
        const bool layer_dropped = (first_decoded_frame_[i] ||
                                    last_decoded_frame_num_[i] < frame_number);

        // Ensure current layer was decoded.
        RTC_CHECK(layer_dropped == false || i != spatial_idx);

        if (!layer_dropped) {
          base_image = &merged_encoded_frames_[i];
          base_stat =
              stats_->GetFrameWithTimestamp(encoded_image.Timestamp(), i);
        } else if (base_image && !base_stat->non_ref_for_inter_layer_pred) {
          DecodeFrame(*base_image, i);
        }
      }
    }
  } else {
    frame_stat->decode_return_code = WEBRTC_VIDEO_CODEC_NO_OUTPUT;
  }

  // Since frames in higher TLs typically depend on frames in lower TLs,
  // write out frames in lower TLs to bitstream dumps of higher TLs.
  for (size_t write_temporal_idx = temporal_idx;
       write_temporal_idx < config_.NumberOfTemporalLayers();
       ++write_temporal_idx) {
    const VideoProcessor::LayerKey layer_key(spatial_idx, write_temporal_idx);
    auto it = encoded_frame_writers_->find(layer_key);
    if (it != encoded_frame_writers_->cend()) {
      RTC_CHECK(it->second->WriteFrame(*encoded_image_for_decode,
                                       config_.codec_settings.codecType));
    }
  }

  if (!config_.encode_in_real_time) {
    // To get the pure encode time for the next layers, measure the time spent
    // in the encode callback and subtract it from their encode time.
    post_encode_time_ns_ += rtc::TimeNanos() - encode_stop_ns;
  }
}

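// Compares a decoded frame against the buffered input frame with the same
// frame number and records PSNR (and, outside real-time mode, SSIM) in the
// frame statistics.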
void VideoProcessor::CalcFrameQuality(const I420BufferInterface& decoded_frame,
                                      FrameStatistics* frame_stat) {
  RTC_DCHECK_RUN_ON(&sequence_checker_);

  const auto reference_frame = input_frames_.find(frame_stat->frame_number);
  RTC_CHECK(reference_frame != input_frames_.cend())
      << "The codecs are either buffering too much, dropping too much, or "
         "being too slow relative to the input frame rate.";

  // SSIM calculation is not optimized. Skip it in real-time mode.
  const bool calc_ssim = !config_.encode_in_real_time;
  CalculateFrameQuality(*reference_frame->second.video_frame_buffer()->ToI420(),
                        decoded_frame, frame_stat, calc_ssim);

  frame_stat->quality_analysis_successful = true;
}

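// Writes a decoded frame to the given YUV dump. Frames whose resolution
// differs from the configured codec resolution are scaled (preserving the
// aspect ratio) so that every dumped frame has the same size.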
void VideoProcessor::WriteDecodedFrame(const I420BufferInterface& decoded_frame,
                                       FrameWriter& frame_writer) {
  int input_video_width = config_.codec_settings.width;
  int input_video_height = config_.codec_settings.height;

  rtc::scoped_refptr<I420Buffer> scaled_buffer;
  const I420BufferInterface* scaled_frame;

  if (decoded_frame.width() == input_video_width &&
      decoded_frame.height() == input_video_height) {
    scaled_frame = &decoded_frame;
  } else {
    EXPECT_DOUBLE_EQ(
        static_cast<double>(input_video_width) / input_video_height,
        static_cast<double>(decoded_frame.width()) / decoded_frame.height());

    scaled_buffer = I420Buffer::Create(input_video_width, input_video_height);
    scaled_buffer->ScaleFrom(decoded_frame);

    scaled_frame = scaled_buffer;
  }

  // Ensure there is no padding.
  RTC_CHECK_EQ(scaled_frame->StrideY(), input_video_width);
  RTC_CHECK_EQ(scaled_frame->StrideU(), input_video_width / 2);
  RTC_CHECK_EQ(scaled_frame->StrideV(), input_video_width / 2);

  RTC_CHECK_EQ(3 * input_video_width * input_video_height / 2,
               frame_writer.FrameLength());

  RTC_CHECK(frame_writer.WriteFrame(scaled_frame->DataY()));
}

void VideoProcessor::FrameDecoded(const VideoFrame& decoded_frame,
                                  size_t spatial_idx) {
  RTC_DCHECK_RUN_ON(&sequence_checker_);

  // For the highest measurement accuracy of the decode time, the start/stop
  // time recordings should wrap the Decode call as tightly as possible.
  const int64_t decode_stop_ns = rtc::TimeNanos();

  FrameStatistics* frame_stat =
      stats_->GetFrameWithTimestamp(decoded_frame.timestamp(), spatial_idx);
  const size_t frame_number = frame_stat->frame_number;

  if (!first_decoded_frame_[spatial_idx]) {
    for (size_t dropped_frame_number = last_decoded_frame_num_[spatial_idx] + 1;
         dropped_frame_number < frame_number; ++dropped_frame_number) {
      FrameStatistics* dropped_frame_stat =
          stats_->GetFrame(dropped_frame_number, spatial_idx);

      if (analyze_frame_quality_ && config_.analyze_quality_of_dropped_frames) {
        // Calculate frame quality by comparing the input frame with the last
        // decoded one.
        CalcFrameQuality(*last_decoded_frame_buffer_[spatial_idx],
                         dropped_frame_stat);
      }

      if (decoded_frame_writers_ != nullptr) {
        // Fill drops with the last decoded frame to make them look like a
        // freeze at playback and to keep the decoded layers in sync.
        WriteDecodedFrame(*last_decoded_frame_buffer_[spatial_idx],
                          *decoded_frame_writers_->at(spatial_idx));
      }
    }
  }

  // Ensure that the decode order is monotonically increasing, within this
  // simulcast/spatial layer.
  RTC_CHECK(first_decoded_frame_[spatial_idx] ||
            last_decoded_frame_num_[spatial_idx] < frame_number);
  first_decoded_frame_[spatial_idx] = false;
  last_decoded_frame_num_[spatial_idx] = frame_number;

  // Update frame statistics.
  frame_stat->decoding_successful = true;
  frame_stat->decode_time_us =
      GetElapsedTimeMicroseconds(frame_stat->decode_start_ns, decode_stop_ns);
  frame_stat->decoded_width = decoded_frame.width();
  frame_stat->decoded_height = decoded_frame.height();

  // Quality metrics calculation is skipped in CPU-measurement mode so as not
  // to affect the CPU usage numbers.
  if (analyze_frame_quality_ || decoded_frame_writers_) {
    // Save the last decoded frame to handle possible future drops.
    rtc::scoped_refptr<I420BufferInterface> i420buffer =
        decoded_frame.video_frame_buffer()->ToI420();

    // Copy the decoded frame to a buffer without padding/stride such that we
    // can dump the Y, U and V planes into a file in one shot.
    last_decoded_frame_buffer_[spatial_idx] = I420Buffer::Copy(
        i420buffer->width(), i420buffer->height(), i420buffer->DataY(),
        i420buffer->StrideY(), i420buffer->DataU(), i420buffer->StrideU(),
        i420buffer->DataV(), i420buffer->StrideV());
  }

  if (analyze_frame_quality_) {
    CalcFrameQuality(*decoded_frame.video_frame_buffer()->ToI420(), frame_stat);
  }

  if (decoded_frame_writers_ != nullptr) {
    WriteDecodedFrame(*last_decoded_frame_buffer_[spatial_idx],
                      *decoded_frame_writers_->at(spatial_idx));
  }

  // Erase all buffered input frames that we have moved past for all
  // simulcast/spatial layers. Never buffer more than
  // |kMaxBufferedInputFrames| frames, to protect against long runs of
  // consecutive frame drops for a particular layer.
  const auto min_last_decoded_frame_num = std::min_element(
      last_decoded_frame_num_.cbegin(), last_decoded_frame_num_.cend());
  const size_t min_buffered_frame_num =
      std::max(0, static_cast<int>(frame_number) - kMaxBufferedInputFrames + 1);
  RTC_CHECK(min_last_decoded_frame_num != last_decoded_frame_num_.cend());
  const auto input_frames_erase_before = input_frames_.lower_bound(
      std::max(*min_last_decoded_frame_num, min_buffered_frame_num));
  input_frames_.erase(input_frames_.cbegin(), input_frames_erase_before);
}

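// Feeds an encoded frame (or merged superframe) to the decoder belonging to
// the given spatial layer, recording the decode start time and return code
// in the frame statistics.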
void VideoProcessor::DecodeFrame(const EncodedImage& encoded_image,
                                 size_t spatial_idx) {
  RTC_DCHECK_RUN_ON(&sequence_checker_);
  FrameStatistics* frame_stat =
      stats_->GetFrameWithTimestamp(encoded_image.Timestamp(), spatial_idx);

  frame_stat->decode_start_ns = rtc::TimeNanos();
  frame_stat->decode_return_code =
      decoders_->at(spatial_idx)->Decode(encoded_image, false, 0);
}

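// With SVC, each spatial layer is decoded by its own decoder, so inter-layer
// predicted frames must be merged with the lower-layer data sharing their
// timestamp before decoding. The merged superframe is cached per layer so it
// can be re-fed to an upper-layer decoder if that layer's frame is dropped.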
const webrtc::EncodedImage* VideoProcessor::BuildAndStoreSuperframe(
    const EncodedImage& encoded_image,
    const VideoCodecType codec,
    size_t frame_number,
    size_t spatial_idx,
    bool inter_layer_predicted) {
  // Should only be called for SVC.
  RTC_CHECK_GT(config_.NumberOfSpatialLayers(), 1);

  EncodedImage base_image;
  RTC_CHECK_EQ(base_image.size(), 0);

  // Each SVC layer is decoded with a dedicated decoder. Find the nearest
  // non-dropped base frame and merge it and the current frame into a
  // superframe.
  if (inter_layer_predicted) {
    for (int base_idx = static_cast<int>(spatial_idx) - 1; base_idx >= 0;
         --base_idx) {
      EncodedImage lower_layer = merged_encoded_frames_.at(base_idx);
      if (lower_layer.Timestamp() == encoded_image.Timestamp()) {
        base_image = lower_layer;
        break;
      }
    }
  }
  const size_t payload_size_bytes = base_image.size() + encoded_image.size();

  auto buffer = EncodedImageBuffer::Create(payload_size_bytes);
  if (base_image.size()) {
    RTC_CHECK(base_image.data());
    memcpy(buffer->data(), base_image.data(), base_image.size());
  }
  memcpy(buffer->data() + base_image.size(), encoded_image.data(),
         encoded_image.size());

  EncodedImage copied_image = encoded_image;
  copied_image.SetEncodedData(buffer);
  if (base_image.size())
    copied_image._frameType = base_image._frameType;

  // Replace the previous EncodedImage for this spatial layer.
  merged_encoded_frames_.at(spatial_idx) = std::move(copied_image);

  return &merged_encoded_frames_.at(spatial_idx);
}

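// Invoked from the destructor if it has not been called already. Back-fills
// statistics (and, if enabled, dumped frames) for frames that were still
// undecoded at the end of the run, using the last decoded frame of each
// spatial layer.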
void VideoProcessor::Finalize() {
  RTC_DCHECK_RUN_ON(&sequence_checker_);
  RTC_DCHECK(!is_finalized_);
  is_finalized_ = true;

  if (!(analyze_frame_quality_ && config_.analyze_quality_of_dropped_frames) &&
      decoded_frame_writers_ == nullptr) {
    return;
  }

  for (size_t spatial_idx = 0; spatial_idx < num_simulcast_or_spatial_layers_;
       ++spatial_idx) {
    if (first_decoded_frame_[spatial_idx]) {
      continue;  // No decoded frames on this spatial layer.
    }

    for (size_t dropped_frame_number = last_decoded_frame_num_[spatial_idx] + 1;
         dropped_frame_number < last_inputed_frame_num_;
         ++dropped_frame_number) {
      FrameStatistics* frame_stat =
          stats_->GetFrame(dropped_frame_number, spatial_idx);

      RTC_DCHECK(!frame_stat->decoding_successful);

      if (analyze_frame_quality_ && config_.analyze_quality_of_dropped_frames) {
        CalcFrameQuality(*last_decoded_frame_buffer_[spatial_idx], frame_stat);
      }

      if (decoded_frame_writers_ != nullptr) {
        WriteDecodedFrame(*last_decoded_frame_buffer_[spatial_idx],
                          *decoded_frame_writers_->at(spatial_idx));
      }
    }
  }
}

}  // namespace test
}  // namespace webrtc