/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_coding/codecs/vp8/vp8_impl.h"

#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <algorithm>
#include <string>

// NOTE(ajm): Path provided by gyp.
#include "libyuv/scale.h"  // NOLINT
#include "libyuv/convert.h"  // NOLINT

#include "webrtc/common_types.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h"
#include "webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h"
#include "webrtc/modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/numerics/exp_filter.h"
#include "webrtc/rtc_base/random.h"
#include "webrtc/rtc_base/timeutils.h"
#include "webrtc/rtc_base/trace_event.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/field_trial.h"
#include "webrtc/system_wrappers/include/metrics.h"

namespace webrtc {
namespace {

const char kVp8PostProcArmFieldTrial[] = "WebRTC-VP8-Postproc-Config-Arm";
const char kVp8GfBoostFieldTrial[] = "WebRTC-VP8-GfBoost";
const char kVp8ForceFallbackEncoderFieldTrial[] =
    "WebRTC-VP8-Forced-Fallback-Encoder";

const int kTokenPartitions = VP8_ONE_TOKENPARTITION;
enum { kVp8ErrorPropagationTh = 30 };
enum { kVp832ByteAlign = 32 };

// VP8 denoiser states.
enum denoiserState {
  kDenoiserOff,
  kDenoiserOnYOnly,
  kDenoiserOnYUV,
  kDenoiserOnYUVAggressive,
  // Adaptive mode defaults to kDenoiserOnYUV on key frame, but may switch
  // to kDenoiserOnYUVAggressive based on a computed noise metric.
  kDenoiserOnAdaptive
};

// Greatest common divisor.
int GCD(int a, int b) {
  int c = a % b;
  while (c != 0) {
    a = b;
    b = c;
    c = a % b;
  }
  return b;
}

uint32_t SumStreamMaxBitrate(int streams, const VideoCodec& codec) {
  uint32_t bitrate_sum = 0;
  for (int i = 0; i < streams; ++i) {
    bitrate_sum += codec.simulcastStream[i].maxBitrate;
  }
  return bitrate_sum;
}

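// Returns the number of simulcast streams to use; falls back to a single
// stream when the configured per-stream max bitrates sum to zero.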
int NumberOfStreams(const VideoCodec& codec) {
  int streams =
      codec.numberOfSimulcastStreams < 1 ? 1 : codec.numberOfSimulcastStreams;
  uint32_t simulcast_max_bitrate = SumStreamMaxBitrate(streams, codec);
  if (simulcast_max_bitrate == 0) {
    streams = 1;
  }
  return streams;
}

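// All simulcast streams must share the codec's aspect ratio, and the top
// stream must match the codec's configured width and height.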
bool ValidSimulcastResolutions(const VideoCodec& codec, int num_streams) {
  if (codec.width != codec.simulcastStream[num_streams - 1].width ||
      codec.height != codec.simulcastStream[num_streams - 1].height) {
    return false;
  }
  for (int i = 0; i < num_streams; ++i) {
    if (codec.width * codec.simulcastStream[i].height !=
        codec.height * codec.simulcastStream[i].width) {
      return false;
    }
  }
  return true;
}

int NumStreamsDisabled(const std::vector<bool>& streams) {
  int num_disabled = 0;
  for (bool stream : streams) {
    if (!stream)
      ++num_disabled;
  }
  return num_disabled;
}

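// Parses the forced-fallback field trial string, expected on the form
// "Enabled-<low_kbps>,<high_kbps>,<min_low_ms>,<min_pixels>", and returns the
// minimum pixel count when all four values are present and sane.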
rtc::Optional<int> GetForcedFallbackMinPixelsFromFieldTrialGroup() {
  if (!webrtc::field_trial::IsEnabled(kVp8ForceFallbackEncoderFieldTrial))
    return rtc::Optional<int>();

  std::string group =
      webrtc::field_trial::FindFullName(kVp8ForceFallbackEncoderFieldTrial);
  if (group.empty())
    return rtc::Optional<int>();

  int low_kbps;
  int high_kbps;
  int min_low_ms;
  int min_pixels;
  if (sscanf(group.c_str(), "Enabled-%d,%d,%d,%d", &low_kbps, &high_kbps,
             &min_low_ms, &min_pixels) != 4) {
    return rtc::Optional<int>();
  }

  if (min_low_ms <= 0 || min_pixels <= 0 || low_kbps <= 0 ||
      high_kbps <= low_kbps) {
    return rtc::Optional<int>();
  }
  return rtc::Optional<int>(min_pixels);
}

bool GetGfBoostPercentageFromFieldTrialGroup(int* boost_percentage) {
  std::string group = webrtc::field_trial::FindFullName(kVp8GfBoostFieldTrial);
  if (group.empty())
    return false;

  if (sscanf(group.c_str(), "Enabled-%d", boost_percentage) != 1)
    return false;

  if (*boost_percentage < 0 || *boost_percentage > 100)
    return false;

  return true;
}

void GetPostProcParamsFromFieldTrialGroup(
    VP8DecoderImpl::DeblockParams* deblock_params) {
  std::string group =
      webrtc::field_trial::FindFullName(kVp8PostProcArmFieldTrial);
  if (group.empty())
    return;

  VP8DecoderImpl::DeblockParams params;
  if (sscanf(group.c_str(), "Enabled-%d,%d,%d", &params.max_level,
             &params.min_qp, &params.degrade_qp) != 3)
    return;

  if (params.max_level < 0 || params.max_level > 16)
    return;

  if (params.min_qp < 0 || params.degrade_qp <= params.min_qp)
    return;

  *deblock_params = params;
}

}  // namespace

VP8Encoder* VP8Encoder::Create() {
  return new VP8EncoderImpl();
}

VP8Decoder* VP8Decoder::Create() {
  return new VP8DecoderImpl();
}

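// Maps a temporal-layer frame configuration onto libvpx encode flags, i.e.
// which of the last/golden/arf buffers this frame may reference or update.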
vpx_enc_frame_flags_t VP8EncoderImpl::EncodeFlags(
    const TemporalLayers::FrameConfig& references) {
  RTC_DCHECK(!references.drop_frame);

  vpx_enc_frame_flags_t flags = 0;

  if ((references.last_buffer_flags & TemporalLayers::kReference) == 0)
    flags |= VP8_EFLAG_NO_REF_LAST;
  if ((references.last_buffer_flags & TemporalLayers::kUpdate) == 0)
    flags |= VP8_EFLAG_NO_UPD_LAST;
  if ((references.golden_buffer_flags & TemporalLayers::kReference) == 0)
    flags |= VP8_EFLAG_NO_REF_GF;
  if ((references.golden_buffer_flags & TemporalLayers::kUpdate) == 0)
    flags |= VP8_EFLAG_NO_UPD_GF;
  if ((references.arf_buffer_flags & TemporalLayers::kReference) == 0)
    flags |= VP8_EFLAG_NO_REF_ARF;
  if ((references.arf_buffer_flags & TemporalLayers::kUpdate) == 0)
    flags |= VP8_EFLAG_NO_UPD_ARF;
  if (references.freeze_entropy)
    flags |= VP8_EFLAG_NO_UPD_ENTROPY;

  return flags;
}

VP8EncoderImpl::VP8EncoderImpl()
    : use_gf_boost_(webrtc::field_trial::IsEnabled(kVp8GfBoostFieldTrial)),
      min_pixels_per_frame_(GetForcedFallbackMinPixelsFromFieldTrialGroup()),
      encoded_complete_callback_(nullptr),
      inited_(false),
      timestamp_(0),
      qp_max_(56),  // Setting for max quantizer.
      cpu_speed_default_(-6),
      number_of_cores_(0),
      rc_max_intra_target_(0),
      key_frame_request_(kMaxSimulcastStreams, false) {
  Random random(rtc::TimeMicros());
  picture_id_.reserve(kMaxSimulcastStreams);
  for (int i = 0; i < kMaxSimulcastStreams; ++i) {
    picture_id_.push_back(random.Rand<uint16_t>() & 0x7FFF);
    tl0_pic_idx_.push_back(random.Rand<uint8_t>());
  }
  temporal_layers_.reserve(kMaxSimulcastStreams);
  raw_images_.reserve(kMaxSimulcastStreams);
  encoded_images_.reserve(kMaxSimulcastStreams);
  send_stream_.reserve(kMaxSimulcastStreams);
  cpu_speed_.assign(kMaxSimulcastStreams, cpu_speed_default_);
  encoders_.reserve(kMaxSimulcastStreams);
  configurations_.reserve(kMaxSimulcastStreams);
  downsampling_factors_.reserve(kMaxSimulcastStreams);
}

VP8EncoderImpl::~VP8EncoderImpl() {
  Release();
}

int VP8EncoderImpl::Release() {
  int ret_val = WEBRTC_VIDEO_CODEC_OK;

  while (!encoded_images_.empty()) {
    EncodedImage& image = encoded_images_.back();
    delete[] image._buffer;
    encoded_images_.pop_back();
  }
  while (!encoders_.empty()) {
    vpx_codec_ctx_t& encoder = encoders_.back();
    if (vpx_codec_destroy(&encoder)) {
      ret_val = WEBRTC_VIDEO_CODEC_MEMORY;
    }
    encoders_.pop_back();
  }
  configurations_.clear();
  send_stream_.clear();
  cpu_speed_.clear();
  while (!raw_images_.empty()) {
    vpx_img_free(&raw_images_.back());
    raw_images_.pop_back();
  }
  for (size_t i = 0; i < temporal_layers_.size(); ++i) {
    tl0_pic_idx_[i] = temporal_layers_[i]->Tl0PicIdx();
  }
  temporal_layers_.clear();
  inited_ = false;
  return ret_val;
}

int VP8EncoderImpl::SetRateAllocation(const BitrateAllocation& bitrate,
                                      uint32_t new_framerate) {
  if (!inited_)
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;

  if (encoders_[0].err)
    return WEBRTC_VIDEO_CODEC_ERROR;

  if (new_framerate < 1)
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;

  if (bitrate.get_sum_bps() == 0) {
    // Encoder paused, turn off all encoding.
    const int num_streams = static_cast<int>(encoders_.size());
    for (int i = 0; i < num_streams; ++i)
      SetStreamState(false, i);
    return WEBRTC_VIDEO_CODEC_OK;
  }

  // At this point, bitrate allocation should already match codec settings.
  if (codec_.maxBitrate > 0)
    RTC_DCHECK_LE(bitrate.get_sum_kbps(), codec_.maxBitrate);
  RTC_DCHECK_GE(bitrate.get_sum_kbps(), codec_.minBitrate);
  if (codec_.numberOfSimulcastStreams > 0)
    RTC_DCHECK_GE(bitrate.get_sum_kbps(), codec_.simulcastStream[0].minBitrate);

  codec_.maxFramerate = new_framerate;

  if (encoders_.size() > 1) {
    // If we have more than 1 stream, reduce the qp_max for the low resolution
    // stream if frame rate is not too low. The trade-off with lower qp_max is
    // possibly more dropped frames, so we only do this if the frame rate is
    // above some threshold (base temporal layer is down to 1/4 for 3 layers).
    // We may want to condition this on bitrate later.
    if (new_framerate > 20) {
      configurations_[encoders_.size() - 1].rc_max_quantizer = 45;
    } else {
      // Go back to default value set in InitEncode.
      configurations_[encoders_.size() - 1].rc_max_quantizer = qp_max_;
    }
  }

  size_t stream_idx = encoders_.size() - 1;
  for (size_t i = 0; i < encoders_.size(); ++i, --stream_idx) {
    unsigned int target_bitrate_kbps =
        bitrate.GetSpatialLayerSum(stream_idx) / 1000;

    bool send_stream = target_bitrate_kbps > 0;
    if (send_stream || encoders_.size() > 1)
      SetStreamState(send_stream, stream_idx);

    configurations_[i].rc_target_bitrate = target_bitrate_kbps;
    temporal_layers_[stream_idx]->UpdateConfiguration(&configurations_[i]);

    if (vpx_codec_enc_config_set(&encoders_[i], &configurations_[i])) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

const char* VP8EncoderImpl::ImplementationName() const {
  return "libvpx";
}

void VP8EncoderImpl::SetStreamState(bool send_stream,
                                    int stream_idx) {
  if (send_stream && !send_stream_[stream_idx]) {
    // Need a key frame if we have not sent this stream before.
    key_frame_request_[stream_idx] = true;
  }
  send_stream_[stream_idx] = send_stream;
}

void VP8EncoderImpl::SetupTemporalLayers(int num_streams,
                                         int num_temporal_layers,
                                         const VideoCodec& codec) {
  RTC_DCHECK(codec.VP8().tl_factory != nullptr);
  const TemporalLayersFactory* tl_factory = codec.VP8().tl_factory;
  if (num_streams == 1) {
    temporal_layers_.emplace_back(
        tl_factory->Create(0, num_temporal_layers, tl0_pic_idx_[0]));
  } else {
    for (int i = 0; i < num_streams; ++i) {
      RTC_CHECK_GT(num_temporal_layers, 0);
      int layers = std::max(static_cast<uint8_t>(1),
                            codec.simulcastStream[i].numberOfTemporalLayers);
      temporal_layers_.emplace_back(
          tl_factory->Create(i, layers, tl0_pic_idx_[i]));
    }
  }
}

int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
                               int number_of_cores,
                               size_t /*maxPayloadSize */) {
  if (inst == NULL) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->maxFramerate < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // allow zero to represent an unspecified maxBitRate
  if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->width <= 1 || inst->height <= 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (number_of_cores < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->VP8().automaticResizeOn && inst->numberOfSimulcastStreams > 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  int retVal = Release();
  if (retVal < 0) {
    return retVal;
  }

  int number_of_streams = NumberOfStreams(*inst);
  bool doing_simulcast = (number_of_streams > 1);

  if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  int num_temporal_layers =
      doing_simulcast ? inst->simulcastStream[0].numberOfTemporalLayers
                      : inst->VP8().numberOfTemporalLayers;
  RTC_DCHECK_GT(num_temporal_layers, 0);

  SetupTemporalLayers(number_of_streams, num_temporal_layers, *inst);

  number_of_cores_ = number_of_cores;
  timestamp_ = 0;
  codec_ = *inst;

  // Code expects simulcastStream resolutions to be correct, make sure they are
  // filled even when there are no simulcast layers.
  if (codec_.numberOfSimulcastStreams == 0) {
    codec_.simulcastStream[0].width = codec_.width;
    codec_.simulcastStream[0].height = codec_.height;
  }

  encoded_images_.resize(number_of_streams);
  encoders_.resize(number_of_streams);
  configurations_.resize(number_of_streams);
  downsampling_factors_.resize(number_of_streams);
  raw_images_.resize(number_of_streams);
  send_stream_.resize(number_of_streams);
  send_stream_[0] = true;  // For non-simulcast case.
  cpu_speed_.resize(number_of_streams);
  std::fill(key_frame_request_.begin(), key_frame_request_.end(), false);

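  // Compute the downsampling ratio (num/den) between each stream and the next
  // higher-resolution stream; passed to libvpx for multi-resolution encoding.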
  int idx = number_of_streams - 1;
  for (int i = 0; i < (number_of_streams - 1); ++i, --idx) {
    int gcd = GCD(inst->simulcastStream[idx].width,
                  inst->simulcastStream[idx - 1].width);
    downsampling_factors_[i].num = inst->simulcastStream[idx].width / gcd;
    downsampling_factors_[i].den = inst->simulcastStream[idx - 1].width / gcd;
    send_stream_[i] = false;
  }
  if (number_of_streams > 1) {
    send_stream_[number_of_streams - 1] = false;
    downsampling_factors_[number_of_streams - 1].num = 1;
    downsampling_factors_[number_of_streams - 1].den = 1;
  }
  for (int i = 0; i < number_of_streams; ++i) {
    // allocate memory for encoded image
    if (encoded_images_[i]._buffer != NULL) {
      delete[] encoded_images_[i]._buffer;
    }
    encoded_images_[i]._size =
        CalcBufferSize(VideoType::kI420, codec_.width, codec_.height);
    encoded_images_[i]._buffer = new uint8_t[encoded_images_[i]._size];
    encoded_images_[i]._completeFrame = true;
  }
  // populate encoder configuration with default values
  if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &configurations_[0],
                                   0)) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  // setting the time base of the codec
  configurations_[0].g_timebase.num = 1;
  configurations_[0].g_timebase.den = 90000;
  configurations_[0].g_lag_in_frames = 0;  // 0- no frame lagging

  // Set the error resilience mode according to user settings.
  switch (inst->VP8().resilience) {
    case kResilienceOff:
      configurations_[0].g_error_resilient = 0;
      break;
    case kResilientStream:
      configurations_[0].g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT;
      break;
    case kResilientFrames:
      return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;  // Not supported
  }

  // rate control settings
  configurations_[0].rc_dropframe_thresh = inst->VP8().frameDroppingOn ? 30 : 0;
  configurations_[0].rc_end_usage = VPX_CBR;
  configurations_[0].g_pass = VPX_RC_ONE_PASS;
  // Handle resizing outside of libvpx.
  configurations_[0].rc_resize_allowed = 0;
  configurations_[0].rc_min_quantizer = 2;
  if (inst->qpMax >= configurations_[0].rc_min_quantizer) {
    qp_max_ = inst->qpMax;
  }
  configurations_[0].rc_max_quantizer = qp_max_;
  configurations_[0].rc_undershoot_pct = 100;
  configurations_[0].rc_overshoot_pct = 15;
  configurations_[0].rc_buf_initial_sz = 500;
  configurations_[0].rc_buf_optimal_sz = 600;
  configurations_[0].rc_buf_sz = 1000;

  // Set the maximum target size of any key-frame.
  rc_max_intra_target_ = MaxIntraTarget(configurations_[0].rc_buf_optimal_sz);

  if (inst->VP8().keyFrameInterval > 0) {
    configurations_[0].kf_mode = VPX_KF_AUTO;
    configurations_[0].kf_max_dist = inst->VP8().keyFrameInterval;
  } else {
    configurations_[0].kf_mode = VPX_KF_DISABLED;
  }

  // Allow the user to set the complexity for the base stream.
  switch (inst->VP8().complexity) {
    case kComplexityHigh:
      cpu_speed_[0] = -5;
      break;
    case kComplexityHigher:
      cpu_speed_[0] = -4;
      break;
    case kComplexityMax:
      cpu_speed_[0] = -3;
      break;
    default:
      cpu_speed_[0] = -6;
      break;
  }
  cpu_speed_default_ = cpu_speed_[0];
  // Set encoding complexity (cpu_speed) based on resolution and/or platform.
  cpu_speed_[0] = SetCpuSpeed(inst->width, inst->height);
  for (int i = 1; i < number_of_streams; ++i) {
    cpu_speed_[i] =
        SetCpuSpeed(inst->simulcastStream[number_of_streams - 1 - i].width,
                    inst->simulcastStream[number_of_streams - 1 - i].height);
  }
  configurations_[0].g_w = inst->width;
  configurations_[0].g_h = inst->height;

  // Determine number of threads based on the image size and #cores.
  // TODO(fbarchard): Consider number of Simulcast layers.
  configurations_[0].g_threads = NumberOfThreads(
      configurations_[0].g_w, configurations_[0].g_h, number_of_cores);

  // Creating a wrapper to the image - setting image data to NULL.
  // Actual pointer will be set in encode. Setting align to 1, as it
  // is meaningless (no memory allocation is done here).
  vpx_img_wrap(&raw_images_[0], VPX_IMG_FMT_I420, inst->width, inst->height, 1,
               NULL);

  // Note the order we use is different from webm, we have lowest resolution
  // at position 0 and they have highest resolution at position 0.
  int stream_idx = encoders_.size() - 1;
  SimulcastRateAllocator init_allocator(codec_, nullptr);
  BitrateAllocation allocation = init_allocator.GetAllocation(
      inst->startBitrate * 1000, inst->maxFramerate);
  std::vector<uint32_t> stream_bitrates;
  for (int i = 0; i == 0 || i < inst->numberOfSimulcastStreams; ++i) {
    uint32_t bitrate = allocation.GetSpatialLayerSum(i) / 1000;
    stream_bitrates.push_back(bitrate);
  }

  configurations_[0].rc_target_bitrate = stream_bitrates[stream_idx];
  temporal_layers_[stream_idx]->OnRatesUpdated(
      stream_bitrates[stream_idx], inst->maxBitrate, inst->maxFramerate);
  temporal_layers_[stream_idx]->UpdateConfiguration(&configurations_[0]);
  --stream_idx;
  for (size_t i = 1; i < encoders_.size(); ++i, --stream_idx) {
    memcpy(&configurations_[i], &configurations_[0],
           sizeof(configurations_[0]));

    configurations_[i].g_w = inst->simulcastStream[stream_idx].width;
    configurations_[i].g_h = inst->simulcastStream[stream_idx].height;

    // Use 1 thread for lower resolutions.
    configurations_[i].g_threads = 1;

    // Setting alignment to 32 - as that ensures at least 16 for all
    // planes (32 for Y, 16 for U,V). Libvpx sets the requested stride for
    // the y plane, but only half of it to the u and v planes.
    vpx_img_alloc(&raw_images_[i], VPX_IMG_FMT_I420,
                  inst->simulcastStream[stream_idx].width,
                  inst->simulcastStream[stream_idx].height, kVp832ByteAlign);
    SetStreamState(stream_bitrates[stream_idx] > 0, stream_idx);
    configurations_[i].rc_target_bitrate = stream_bitrates[stream_idx];
    temporal_layers_[stream_idx]->OnRatesUpdated(
        stream_bitrates[stream_idx], inst->maxBitrate, inst->maxFramerate);
    temporal_layers_[stream_idx]->UpdateConfiguration(&configurations_[i]);
  }

  return InitAndSetControlSettings();
}

int VP8EncoderImpl::SetCpuSpeed(int width, int height) {
#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID)
  // On mobile platform, use a lower speed setting for lower resolutions for
  // CPUs with 4 or more cores.
  RTC_DCHECK_GT(number_of_cores_, 0);
  if (number_of_cores_ <= 3)
    return -12;

  if (width * height <= 352 * 288)
    return -8;
  else if (width * height <= 640 * 480)
    return -10;
  else
    return -12;
#else
  // For non-ARM, increase encoding complexity (i.e., use lower speed setting)
  // if resolution is below CIF. Otherwise, keep the default/user setting
  // (|cpu_speed_default_|) set on InitEncode via VP8().complexity.
  if (width * height < 352 * 288)
    return (cpu_speed_default_ < -4) ? -4 : cpu_speed_default_;
  else
    return cpu_speed_default_;
#endif
}

int VP8EncoderImpl::NumberOfThreads(int width, int height, int cpus) {
#if defined(ANDROID)
  if (width * height >= 320 * 180) {
    if (cpus >= 4) {
      // 3 threads for CPUs with 4 and more cores since most of the time only
      // 4 cores will be active.
      return 3;
    } else if (cpus == 3 || cpus == 2) {
      return 2;
    } else {
      return 1;
    }
  }
  return 1;
#else
  if (width * height >= 1920 * 1080 && cpus > 8) {
    return 8;  // 8 threads for 1080p on high perf machines.
  } else if (width * height > 1280 * 960 && cpus >= 6) {
    // 3 threads for 1080p.
    return 3;
  } else if (width * height > 640 * 480 && cpus >= 3) {
    // 2 threads for qHD/HD.
    return 2;
  } else {
    // 1 thread for VGA or less.
    return 1;
  }
#endif
}

int VP8EncoderImpl::InitAndSetControlSettings() {
  vpx_codec_flags_t flags = 0;
  flags |= VPX_CODEC_USE_OUTPUT_PARTITION;

  if (encoders_.size() > 1) {
    int error = vpx_codec_enc_init_multi(&encoders_[0], vpx_codec_vp8_cx(),
                                         &configurations_[0], encoders_.size(),
                                         flags, &downsampling_factors_[0]);
    if (error) {
      return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
    }
  } else {
    if (vpx_codec_enc_init(&encoders_[0], vpx_codec_vp8_cx(),
                           &configurations_[0], flags)) {
      return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
    }
  }
  // Enable denoising for the highest resolution stream, and for
  // the second highest resolution if we are doing more than 2
  // spatial layers/streams.
  // TODO(holmer): Investigate possibility of adding a libvpx API
  // for getting the denoised frame from the encoder and using that
  // when encoding lower resolution streams. Would it work with the
  // multi-res encoding feature?
  denoiserState denoiser_state = kDenoiserOnYOnly;
#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID)
  denoiser_state = kDenoiserOnYOnly;
#else
  denoiser_state = kDenoiserOnAdaptive;
#endif
  vpx_codec_control(&encoders_[0], VP8E_SET_NOISE_SENSITIVITY,
                    codec_.VP8()->denoisingOn ? denoiser_state : kDenoiserOff);
  if (encoders_.size() > 2) {
    vpx_codec_control(
        &encoders_[1], VP8E_SET_NOISE_SENSITIVITY,
        codec_.VP8()->denoisingOn ? denoiser_state : kDenoiserOff);
  }
  for (size_t i = 0; i < encoders_.size(); ++i) {
    // Allow more screen content to be detected as static.
    vpx_codec_control(&(encoders_[i]), VP8E_SET_STATIC_THRESHOLD,
                      codec_.mode == kScreensharing ? 300 : 1);
    vpx_codec_control(&(encoders_[i]), VP8E_SET_CPUUSED, cpu_speed_[i]);
    vpx_codec_control(&(encoders_[i]), VP8E_SET_TOKEN_PARTITIONS,
                      static_cast<vp8e_token_partitions>(kTokenPartitions));
    vpx_codec_control(&(encoders_[i]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
                      rc_max_intra_target_);
    // VP8E_SET_SCREEN_CONTENT_MODE 2 = screen content with more aggressive
    // rate control (drop frames on large target bitrate overshoot)
    vpx_codec_control(&(encoders_[i]), VP8E_SET_SCREEN_CONTENT_MODE,
                      codec_.mode == kScreensharing ? 2 : 0);
    // Apply boost on golden frames (has only effect when resilience is off).
    if (use_gf_boost_ && codec_.VP8()->resilience == kResilienceOff) {
      int gf_boost_percent;
      if (GetGfBoostPercentageFromFieldTrialGroup(&gf_boost_percent)) {
        vpx_codec_control(&(encoders_[i]), VP8E_SET_GF_CBR_BOOST_PCT,
                          gf_boost_percent);
      }
    }
  }
  inited_ = true;
  return WEBRTC_VIDEO_CODEC_OK;
}

uint32_t VP8EncoderImpl::MaxIntraTarget(uint32_t optimalBuffersize) {
  // Set max to the optimal buffer level (normalized by target BR),
  // and scaled by a scalePar.
  // Max target size = scalePar * optimalBufferSize * targetBR[Kbps].
  // This value is presented in percentage of perFrameBw:
  // perFrameBw = targetBR[Kbps] * 1000 / frameRate.
  // The target in % is as follows:

  float scalePar = 0.5;
  uint32_t targetPct = optimalBuffersize * scalePar * codec_.maxFramerate / 10;

  // Don't go below 3 times the per frame bandwidth.
  const uint32_t minIntraTh = 300;
  return (targetPct < minIntraTh) ? minIntraTh : targetPct;
}

int VP8EncoderImpl::Encode(const VideoFrame& frame,
                           const CodecSpecificInfo* codec_specific_info,
                           const std::vector<FrameType>* frame_types) {
  RTC_DCHECK_EQ(frame.width(), codec_.width);
  RTC_DCHECK_EQ(frame.height(), codec_.height);

  if (!inited_)
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  if (encoded_complete_callback_ == NULL)
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;

  rtc::scoped_refptr<I420BufferInterface> input_image =
      frame.video_frame_buffer()->ToI420();
  // Since we are extracting raw pointers from |input_image| to
  // |raw_images_[0]|, the resolution of these frames must match.
  RTC_DCHECK_EQ(input_image->width(), raw_images_[0].d_w);
  RTC_DCHECK_EQ(input_image->height(), raw_images_[0].d_h);

  // Image in vpx_image_t format.
  // Input image is const. VP8's raw image is not defined as const.
  raw_images_[0].planes[VPX_PLANE_Y] =
      const_cast<uint8_t*>(input_image->DataY());
  raw_images_[0].planes[VPX_PLANE_U] =
      const_cast<uint8_t*>(input_image->DataU());
  raw_images_[0].planes[VPX_PLANE_V] =
      const_cast<uint8_t*>(input_image->DataV());

  raw_images_[0].stride[VPX_PLANE_Y] = input_image->StrideY();
  raw_images_[0].stride[VPX_PLANE_U] = input_image->StrideU();
  raw_images_[0].stride[VPX_PLANE_V] = input_image->StrideV();

  for (size_t i = 1; i < encoders_.size(); ++i) {
    // Scale the image down a number of times by downsampling factor
    libyuv::I420Scale(
        raw_images_[i - 1].planes[VPX_PLANE_Y],
        raw_images_[i - 1].stride[VPX_PLANE_Y],
        raw_images_[i - 1].planes[VPX_PLANE_U],
        raw_images_[i - 1].stride[VPX_PLANE_U],
        raw_images_[i - 1].planes[VPX_PLANE_V],
        raw_images_[i - 1].stride[VPX_PLANE_V], raw_images_[i - 1].d_w,
        raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y],
        raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U],
        raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].planes[VPX_PLANE_V],
        raw_images_[i].stride[VPX_PLANE_V], raw_images_[i].d_w,
        raw_images_[i].d_h, libyuv::kFilterBilinear);
  }
  vpx_enc_frame_flags_t flags[kMaxSimulcastStreams];
  TemporalLayers::FrameConfig tl_configs[kMaxSimulcastStreams];
  for (size_t i = 0; i < encoders_.size(); ++i) {
    tl_configs[i] = temporal_layers_[i]->UpdateLayerConfig(frame.timestamp());

    if (tl_configs[i].drop_frame) {
      // Drop this frame.
      return WEBRTC_VIDEO_CODEC_OK;
    }
    flags[i] = EncodeFlags(tl_configs[i]);
  }
  bool send_key_frame = false;
  for (size_t i = 0; i < key_frame_request_.size() && i < send_stream_.size();
       ++i) {
    if (key_frame_request_[i] && send_stream_[i]) {
      send_key_frame = true;
      break;
    }
  }
  if (!send_key_frame && frame_types) {
    for (size_t i = 0; i < frame_types->size() && i < send_stream_.size();
         ++i) {
      if ((*frame_types)[i] == kVideoFrameKey && send_stream_[i]) {
        send_key_frame = true;
        break;
      }
    }
  }
  if (send_key_frame) {
    // Adapt the size of the key frame when in screenshare with 1 temporal
    // layer.
    if (encoders_.size() == 1 && codec_.mode == kScreensharing &&
        codec_.VP8()->numberOfTemporalLayers <= 1) {
      const uint32_t forceKeyFrameIntraTh = 100;
      vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
                        forceKeyFrameIntraTh);
    }
    // Key frame request from caller.
    // Will update both golden and alt-ref.
    for (size_t i = 0; i < encoders_.size(); ++i) {
      flags[i] = VPX_EFLAG_FORCE_KF;
    }
    std::fill(key_frame_request_.begin(), key_frame_request_.end(), false);
  }

  // Set the encoder frame flags and temporal layer_id for each spatial stream.
  // Note that |temporal_layers_| are defined starting from lowest resolution at
  // position 0 to highest resolution at position |encoders_.size() - 1|,
  // whereas |encoder_| is from highest to lowest resolution.
  size_t stream_idx = encoders_.size() - 1;
  for (size_t i = 0; i < encoders_.size(); ++i, --stream_idx) {
    // Allow the layers adapter to temporarily modify the configuration. This
    // change isn't stored in configurations_ so change will be discarded at
    // the next update.
    vpx_codec_enc_cfg_t temp_config;
    memcpy(&temp_config, &configurations_[i], sizeof(vpx_codec_enc_cfg_t));
    if (temporal_layers_[stream_idx]->UpdateConfiguration(&temp_config)) {
      if (vpx_codec_enc_config_set(&encoders_[i], &temp_config))
        return WEBRTC_VIDEO_CODEC_ERROR;
    }

    vpx_codec_control(&encoders_[i], VP8E_SET_FRAME_FLAGS, flags[stream_idx]);
    vpx_codec_control(&encoders_[i], VP8E_SET_TEMPORAL_LAYER_ID,
                      tl_configs[i].encoder_layer_id);
  }
  // TODO(holmer): Ideally the duration should be the timestamp diff of this
  // frame and the next frame to be encoded, which we don't have. Instead we
  // would like to use the duration of the previous frame. Unfortunately the
  // rate control seems to be off with that setup. Using the average input
  // frame rate to calculate an average duration for now.
  assert(codec_.maxFramerate > 0);
  uint32_t duration = 90000 / codec_.maxFramerate;

  int error = WEBRTC_VIDEO_CODEC_OK;
  int num_tries = 0;
  // If the first try returns WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT
  // the frame must be reencoded with the same parameters again because
  // target bitrate is exceeded and encoder state has been reset.
  while (num_tries == 0 ||
         (num_tries == 1 &&
          error == WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT)) {
    ++num_tries;
    // Note we must pass 0 for |flags| field in encode call below since they are
    // set above in |vpx_codec_control| function for each encoder/spatial layer.
    error = vpx_codec_encode(&encoders_[0], &raw_images_[0], timestamp_,
                             duration, 0, VPX_DL_REALTIME);
    // Reset specific intra frame thresholds, following the key frame.
    if (send_key_frame) {
      vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
                        rc_max_intra_target_);
    }
    if (error)
      return WEBRTC_VIDEO_CODEC_ERROR;
    timestamp_ += duration;
    // Examines frame timestamps only.
    error = GetEncodedPartitions(tl_configs, frame);
  }
  return error;
}

void VP8EncoderImpl::PopulateCodecSpecific(
    CodecSpecificInfo* codec_specific,
    const TemporalLayers::FrameConfig& tl_config,
    const vpx_codec_cx_pkt_t& pkt,
    int stream_idx,
    uint32_t timestamp) {
  assert(codec_specific != NULL);
  codec_specific->codecType = kVideoCodecVP8;
  codec_specific->codec_name = ImplementationName();
  CodecSpecificInfoVP8* vp8Info = &(codec_specific->codecSpecific.VP8);
  vp8Info->pictureId = picture_id_[stream_idx];
  vp8Info->simulcastIdx = stream_idx;
  vp8Info->keyIdx = kNoKeyIdx;  // TODO(hlundin) populate this
  vp8Info->nonReference = (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) != 0;
  temporal_layers_[stream_idx]->PopulateCodecSpecific(
      (pkt.data.frame.flags & VPX_FRAME_IS_KEY) != 0, tl_config, vp8Info,
      timestamp);
  // Prepare next.
  picture_id_[stream_idx] = (picture_id_[stream_idx] + 1) & 0x7FFF;
}

int VP8EncoderImpl::GetEncodedPartitions(
    const TemporalLayers::FrameConfig tl_configs[],
    const VideoFrame& input_image) {
  int bw_resolutions_disabled =
      (encoders_.size() > 1) ? NumStreamsDisabled(send_stream_) : -1;

  int stream_idx = static_cast<int>(encoders_.size()) - 1;
  int result = WEBRTC_VIDEO_CODEC_OK;
  for (size_t encoder_idx = 0; encoder_idx < encoders_.size();
       ++encoder_idx, --stream_idx) {
    vpx_codec_iter_t iter = NULL;
    int part_idx = 0;
    encoded_images_[encoder_idx]._length = 0;
    encoded_images_[encoder_idx]._frameType = kVideoFrameDelta;
    RTPFragmentationHeader frag_info;
    // kTokenPartitions is number of bits used.
    frag_info.VerifyAndAllocateFragmentationHeader((1 << kTokenPartitions) + 1);
    CodecSpecificInfo codec_specific;
    const vpx_codec_cx_pkt_t* pkt = NULL;
    while ((pkt = vpx_codec_get_cx_data(&encoders_[encoder_idx], &iter)) !=
           NULL) {
      switch (pkt->kind) {
        case VPX_CODEC_CX_FRAME_PKT: {
          size_t length = encoded_images_[encoder_idx]._length;
          if (pkt->data.frame.sz + length >
              encoded_images_[encoder_idx]._size) {
            uint8_t* buffer = new uint8_t[pkt->data.frame.sz + length];
            memcpy(buffer, encoded_images_[encoder_idx]._buffer, length);
            delete[] encoded_images_[encoder_idx]._buffer;
            encoded_images_[encoder_idx]._buffer = buffer;
            encoded_images_[encoder_idx]._size = pkt->data.frame.sz + length;
          }
          memcpy(&encoded_images_[encoder_idx]._buffer[length],
                 pkt->data.frame.buf, pkt->data.frame.sz);
          frag_info.fragmentationOffset[part_idx] = length;
          frag_info.fragmentationLength[part_idx] = pkt->data.frame.sz;
          frag_info.fragmentationPlType[part_idx] = 0;  // not known here
          frag_info.fragmentationTimeDiff[part_idx] = 0;
          encoded_images_[encoder_idx]._length += pkt->data.frame.sz;
          assert(length <= encoded_images_[encoder_idx]._size);
          ++part_idx;
          break;
        }
        default:
          break;
      }
      // End of frame
      if ((pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) {
        // check if encoded frame is a key frame
        if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
          encoded_images_[encoder_idx]._frameType = kVideoFrameKey;
        }
        PopulateCodecSpecific(&codec_specific, tl_configs[stream_idx], *pkt,
                              stream_idx, input_image.timestamp());
        break;
      }
    }
    encoded_images_[encoder_idx]._timeStamp = input_image.timestamp();
    encoded_images_[encoder_idx].capture_time_ms_ =
        input_image.render_time_ms();
    encoded_images_[encoder_idx].rotation_ = input_image.rotation();
    encoded_images_[encoder_idx].content_type_ =
        (codec_.mode == kScreensharing) ? VideoContentType::SCREENSHARE
                                        : VideoContentType::UNSPECIFIED;
    encoded_images_[encoder_idx].timing_.flags = TimingFrameFlags::kInvalid;

    int qp = -1;
    vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp);
    temporal_layers_[stream_idx]->FrameEncoded(
        encoded_images_[encoder_idx]._length, qp);
    if (send_stream_[stream_idx]) {
      if (encoded_images_[encoder_idx]._length > 0) {
        TRACE_COUNTER_ID1("webrtc", "EncodedFrameSize", encoder_idx,
                          encoded_images_[encoder_idx]._length);
        encoded_images_[encoder_idx]._encodedHeight =
            codec_.simulcastStream[stream_idx].height;
        encoded_images_[encoder_idx]._encodedWidth =
            codec_.simulcastStream[stream_idx].width;
        // Report once per frame (lowest stream always sent).
        encoded_images_[encoder_idx].adapt_reason_.bw_resolutions_disabled =
            (stream_idx == 0) ? bw_resolutions_disabled : -1;
        int qp_128 = -1;
        vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER,
                          &qp_128);
        encoded_images_[encoder_idx].qp_ = qp_128;
        encoded_complete_callback_->OnEncodedImage(encoded_images_[encoder_idx],
                                                   &codec_specific, &frag_info);
      } else if (codec_.mode == kScreensharing) {
        result = WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT;
      }
    }
  }
  return result;
}

VideoEncoder::ScalingSettings VP8EncoderImpl::GetScalingSettings() const {
  const bool enable_scaling = encoders_.size() == 1 &&
                              configurations_[0].rc_dropframe_thresh > 0 &&
                              codec_.VP8().automaticResizeOn;
  if (enable_scaling && min_pixels_per_frame_) {
    return VideoEncoder::ScalingSettings(enable_scaling,
                                         *min_pixels_per_frame_);
  }
  return VideoEncoder::ScalingSettings(enable_scaling);
}

int VP8EncoderImpl::SetChannelParameters(uint32_t packetLoss, int64_t rtt) {
  return WEBRTC_VIDEO_CODEC_OK;
}

int VP8EncoderImpl::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  encoded_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

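// Exponentially smooths the decoded-frame QP; used by the ARM post-processing
// path to pick a deblocking strength in Decode().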
asaperssone5d02f92017-08-09 23:37:05 -0700994class VP8DecoderImpl::QpSmoother {
995 public:
996 QpSmoother() : last_sample_ms_(rtc::TimeMillis()), smoother_(kAlpha) {}
997
998 int GetAvg() const {
999 float value = smoother_.filtered();
1000 return (value == rtc::ExpFilter::kValueUndefined) ? 0
1001 : static_cast<int>(value);
1002 }
1003
1004 void Add(float sample) {
1005 int64_t now_ms = rtc::TimeMillis();
1006 smoother_.Apply(static_cast<float>(now_ms - last_sample_ms_), sample);
1007 last_sample_ms_ = now_ms;
1008 }
1009
1010 void Reset() { smoother_.Reset(kAlpha); }
1011
1012 private:
1013 const float kAlpha = 0.95f;
1014 int64_t last_sample_ms_;
1015 rtc::ExpFilter smoother_;
1016};
1017
pwestin@webrtc.org8d89b582012-09-20 20:49:12 +00001018VP8DecoderImpl::VP8DecoderImpl()
asaperssone5d02f92017-08-09 23:37:05 -07001019 : use_postproc_arm_(
1020 webrtc::field_trial::IsEnabled(kVp8PostProcArmFieldTrial)),
Peter Boström1182afe2017-03-21 12:35:51 -04001021 buffer_pool_(false, 300 /* max_number_of_buffers*/),
Per00983572016-11-04 08:57:26 +01001022 decode_complete_callback_(NULL),
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001023 inited_(false),
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001024 decoder_(NULL),
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001025 propagation_cnt_(-1),
pbos@webrtc.org9115cde2014-12-09 10:36:40 +00001026 last_frame_width_(0),
1027 last_frame_height_(0),
asaperssone5d02f92017-08-09 23:37:05 -07001028 key_frame_required_(true),
1029 qp_smoother_(use_postproc_arm_ ? new QpSmoother() : nullptr) {
1030 if (use_postproc_arm_)
1031 GetPostProcParamsFromFieldTrialGroup(&deblock_);
1032}
niklase@google.com470e71d2011-07-07 08:21:25 +00001033
pwestin@webrtc.org8d89b582012-09-20 20:49:12 +00001034VP8DecoderImpl::~VP8DecoderImpl() {
1035 inited_ = true; // in order to do the actual release
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001036 Release();
niklase@google.com470e71d2011-07-07 08:21:25 +00001037}
1038
philipelcce46fc2015-12-21 03:04:49 -08001039int VP8DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) {
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001040 int ret_val = Release();
pwestin@webrtc.org8d89b582012-09-20 20:49:12 +00001041 if (ret_val < 0) {
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001042 return ret_val;
1043 }
1044 if (decoder_ == NULL) {
pbos@webrtc.org9115cde2014-12-09 10:36:40 +00001045 decoder_ = new vpx_codec_ctx_t;
magjed5c711662016-12-02 02:46:18 -08001046 memset(decoder_, 0, sizeof(*decoder_));
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001047 }
philipelcce46fc2015-12-21 03:04:49 -08001048 vpx_codec_dec_cfg_t cfg;
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001049 // Use a single decoding thread.
1050 cfg.threads = 1;
pwestin@webrtc.org8d89b582012-09-20 20:49:12 +00001051 cfg.h = cfg.w = 0; // Set after decode.
niklase@google.com470e71d2011-07-07 08:21:25 +00001052
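// Enable libvpx postprocessing unconditionally on desktop platforms; on
// ARM/Android it is gated on use_postproc_arm_, i.e. on the
// WebRTC-VP8-Postproc-Config-Arm field trial checked in the constructor.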
asapersson55eb6d62017-03-01 23:52:16 -08001053#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID)
Peter Boström1182afe2017-03-21 12:35:51 -04001054 vpx_codec_flags_t flags = use_postproc_arm_ ? VPX_CODEC_USE_POSTPROC : 0;
asapersson55eb6d62017-03-01 23:52:16 -08001055#else
Peter Boström1182afe2017-03-21 12:35:51 -04001056 vpx_codec_flags_t flags = VPX_CODEC_USE_POSTPROC;
mikhal@webrtc.org6724cf82011-08-24 00:51:36 +00001057#endif
holmer@google.com2cdc7b92011-08-18 09:35:45 +00001058
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001059 if (vpx_codec_dec_init(decoder_, vpx_codec_vp8_dx(), &cfg, flags)) {
magjed5c711662016-12-02 02:46:18 -08001060 delete decoder_;
1061 decoder_ = nullptr;
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001062 return WEBRTC_VIDEO_CODEC_MEMORY;
1063 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001064
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001065 propagation_cnt_ = -1;
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001066 inited_ = true;
mikhal@webrtc.orgb2c28c32013-08-23 21:54:50 +00001067
1068 // Always start with a complete key frame.
1069 key_frame_required_ = true;
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001070 return WEBRTC_VIDEO_CODEC_OK;
niklase@google.com470e71d2011-07-07 08:21:25 +00001071}
1072
pwestin@webrtc.org8d89b582012-09-20 20:49:12 +00001073int VP8DecoderImpl::Decode(const EncodedImage& input_image,
philipelcce46fc2015-12-21 03:04:49 -08001074 bool missing_frames,
1075 const RTPFragmentationHeader* fragmentation,
1076 const CodecSpecificInfo* codec_specific_info,
1077 int64_t /*render_time_ms*/) {
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001078 if (!inited_) {
1079 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
1080 }
1081 if (decode_complete_callback_ == NULL) {
1082 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
1083 }
1084 if (input_image._buffer == NULL && input_image._length > 0) {
1085 // Reset to avoid requesting key frames too often.
1086 if (propagation_cnt_ > 0)
1087 propagation_cnt_ = 0;
1088 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
1089 }
stefan@webrtc.org4c059d82011-10-13 07:35:37 +00001090
asapersson55eb6d62017-03-01 23:52:16 -08001091// Post process configurations.
1092#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID)
1093 if (use_postproc_arm_) {
1094 vp8_postproc_cfg_t ppcfg;
1095 ppcfg.post_proc_flag = VP8_MFQE;
asaperssone5d02f92017-08-09 23:37:05 -07001096 // For low resolutions, use stronger deblocking filter.
asapersson55eb6d62017-03-01 23:52:16 -08001097 int last_width_x_height = last_frame_width_ * last_frame_height_;
1098 if (last_width_x_height > 0 && last_width_x_height <= 320 * 240) {
asaperssone5d02f92017-08-09 23:37:05 -07001099 // Enable the deblock and demacroblocker based on qp thresholds.
1100 RTC_DCHECK(qp_smoother_);
1101 int qp = qp_smoother_->GetAvg();
1102 if (qp > deblock_.min_qp) {
1103 int level = deblock_.max_level;
1104 if (qp < deblock_.degrade_qp) {
1105 // Use lower level.
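          // E.g. with hypothetical thresholds min_qp = 30, degrade_qp = 60
          // and max_level = 6, a smoothed qp of 45 maps to a level of
          // 6 * (45 - 30) / (60 - 30) = 3.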
1106 level = deblock_.max_level * (qp - deblock_.min_qp) /
1107 (deblock_.degrade_qp - deblock_.min_qp);
1108 }
1109 // Deblocking level only affects VP8_DEMACROBLOCK.
1110 ppcfg.deblocking_level = std::max(level, 1);
1111 ppcfg.post_proc_flag |= VP8_DEBLOCK | VP8_DEMACROBLOCK;
1112 }
asapersson55eb6d62017-03-01 23:52:16 -08001113 }
1114 vpx_codec_control(decoder_, VP8_SET_POSTPROC, &ppcfg);
1115 }
1116#else
pbos@webrtc.org9115cde2014-12-09 10:36:40 +00001117 vp8_postproc_cfg_t ppcfg;
1118 // MFQE enabled to reduce key frame popping.
1119 ppcfg.post_proc_flag = VP8_MFQE | VP8_DEBLOCK;
1120 // At resolutions of 640x360 and below, also enable the demacroblocker postproc.
1121 if (last_frame_width_ * last_frame_height_ <= 640 * 360) {
1122 ppcfg.post_proc_flag |= VP8_DEMACROBLOCK;
stefan@webrtc.org1bb1da42012-02-27 13:52:34 +00001123 }
pbos@webrtc.org9115cde2014-12-09 10:36:40 +00001124 // Strength of the deblocking filter. Valid range: [0, 16].
1125 ppcfg.deblocking_level = 3;
1126 vpx_codec_control(decoder_, VP8_SET_POSTPROC, &ppcfg);
stefan@webrtc.org1bb1da42012-02-27 13:52:34 +00001127#endif
1128
mikhal@webrtc.orgb2c28c32013-08-23 21:54:50 +00001129 // Always start with a complete key frame.
1130 if (key_frame_required_) {
Peter Boström49e196a2015-10-23 15:58:18 +02001131 if (input_image._frameType != kVideoFrameKey)
mikhal@webrtc.orgb2c28c32013-08-23 21:54:50 +00001132 return WEBRTC_VIDEO_CODEC_ERROR;
1133 // We have a key frame - is it complete?
1134 if (input_image._completeFrame) {
1135 key_frame_required_ = false;
1136 } else {
1137 return WEBRTC_VIDEO_CODEC_ERROR;
1138 }
1139 }
nisse3257b162017-03-21 01:54:13 -07001140 // Restrict error propagation using key frame requests.
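  // propagation_cnt_ counts decoded frames since the first detected loss; if
  // it later exceeds kVp8ErrorPropagationTh, Decode() returns an error so
  // that the receiver can request a new key frame.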
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001141 // Reset on a key frame refresh.
Peter Boström1182afe2017-03-21 12:35:51 -04001142 if (input_image._frameType == kVideoFrameKey && input_image._completeFrame) {
1143 propagation_cnt_ = -1;
1144 // Start count on first loss.
nisse3257b162017-03-21 01:54:13 -07001145 } else if ((!input_image._completeFrame || missing_frames) &&
1146 propagation_cnt_ == -1) {
1147 propagation_cnt_ = 0;
1148 }
1149 if (propagation_cnt_ >= 0) {
1150 propagation_cnt_++;
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001151 }
stefan@webrtc.orgffd28f92011-10-19 15:55:39 +00001152
stefan@webrtc.orgf5d934d2012-06-01 07:43:02 +00001153 vpx_codec_iter_t iter = NULL;
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001154 vpx_image_t* img;
1155 int ret;
niklase@google.com470e71d2011-07-07 08:21:25 +00001156
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001157 // Check for missing frames.
1158 if (missing_frames) {
1159 // Call decoder with zero data length to signal missing frames.
1160 if (vpx_codec_decode(decoder_, NULL, 0, 0, VPX_DL_REALTIME)) {
1161 // Reset to avoid requesting key frames too often.
1162 if (propagation_cnt_ > 0)
1163 propagation_cnt_ = 0;
1164 return WEBRTC_VIDEO_CODEC_ERROR;
niklase@google.com470e71d2011-07-07 08:21:25 +00001165 }
pbos@webrtc.org9115cde2014-12-09 10:36:40 +00001166 img = vpx_codec_get_frame(decoder_, &iter);
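    // Any frame produced by this concealment pass is intentionally dropped;
    // only the frame decoded from the actual input below is delivered.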
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001167 iter = NULL;
1168 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001169
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001170 uint8_t* buffer = input_image._buffer;
1171 if (input_image._length == 0) {
pwestin@webrtc.org8d89b582012-09-20 20:49:12 +00001172 buffer = NULL; // Triggers full frame concealment.
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001173 }
pbos@webrtc.org9115cde2014-12-09 10:36:40 +00001174 if (vpx_codec_decode(decoder_, buffer, input_image._length, 0,
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001175 VPX_DL_REALTIME)) {
1176 // Reset to avoid requesting key frames too often.
pbos@webrtc.org9115cde2014-12-09 10:36:40 +00001177 if (propagation_cnt_ > 0) {
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001178 propagation_cnt_ = 0;
pbos@webrtc.org9115cde2014-12-09 10:36:40 +00001179 }
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001180 return WEBRTC_VIDEO_CODEC_ERROR;
1181 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001182
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001183 img = vpx_codec_get_frame(decoder_, &iter);
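  // Query the QP of the decoded frame; it is forwarded to the decode callback
  // and, when the ARM postproc path is active, fed to the QP smoother.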
sakal5fec1282017-02-20 06:43:58 -08001184 int qp;
1185 vpx_codec_err_t vpx_ret =
1186 vpx_codec_control(decoder_, VPXD_GET_LAST_QUANTIZER, &qp);
1187 RTC_DCHECK_EQ(vpx_ret, VPX_CODEC_OK);
1188 ret = ReturnFrame(img, input_image._timeStamp, input_image.ntp_time_ms_, qp);
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001189 if (ret != 0) {
1190 // Reset to avoid requesting key frames too often.
1191 if (ret < 0 && propagation_cnt_ > 0)
1192 propagation_cnt_ = 0;
1193 return ret;
1194 }
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001195 // Check against the error propagation threshold.
1196 if (propagation_cnt_ > kVp8ErrorPropagationTh) {
1197 // Reset to avoid requesting key frames too often.
1198 propagation_cnt_ = 0;
1199 return WEBRTC_VIDEO_CODEC_ERROR;
1200 }
1201 return WEBRTC_VIDEO_CODEC_OK;
niklase@google.com470e71d2011-07-07 08:21:25 +00001202}
1203
wu@webrtc.org6c75c982014-04-15 17:46:33 +00001204int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
philipelcce46fc2015-12-21 03:04:49 -08001205 uint32_t timestamp,
sakal5fec1282017-02-20 06:43:58 -08001206 int64_t ntp_time_ms,
1207 int qp) {
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001208 if (img == NULL) {
1209 // Decoder OK and NULL image => no show frame, nothing to render.
1210 return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
1211 }
asaperssone5d02f92017-08-09 23:37:05 -07001212 if (qp_smoother_) {
1213 if (last_frame_width_ != static_cast<int>(img->d_w) ||
1214 last_frame_height_ != static_cast<int>(img->d_h)) {
1215 qp_smoother_->Reset();
1216 }
1217 qp_smoother_->Add(qp);
1218 }
pbos@webrtc.org9115cde2014-12-09 10:36:40 +00001219 last_frame_width_ = img->d_w;
1220 last_frame_height_ = img->d_h;
1221 // Allocate memory for decoded image.
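  // The frame is copied out of the libvpx-owned vpx_image_t because that
  // memory is owned by the decoder and may be reused on the next decode call.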
nisse64ec8f82016-09-27 00:17:25 -07001222 rtc::scoped_refptr<I420Buffer> buffer =
1223 buffer_pool_.CreateBuffer(img->d_w, img->d_h);
Per00983572016-11-04 08:57:26 +01001224 if (!buffer.get()) {
1225 // Pool has too many pending frames.
1226 RTC_HISTOGRAM_BOOLEAN("WebRTC.Video.VP8DecoderImpl.TooManyPendingFrames",
1227 1);
1228 return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
1229 }
nisse64ec8f82016-09-27 00:17:25 -07001230
philipelcce46fc2015-12-21 03:04:49 -08001231 libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
1232 img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
1233 img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
nisse64ec8f82016-09-27 00:17:25 -07001234 buffer->MutableDataY(), buffer->StrideY(),
1235 buffer->MutableDataU(), buffer->StrideU(),
1236 buffer->MutableDataV(), buffer->StrideV(),
philipelcce46fc2015-12-21 03:04:49 -08001237 img->d_w, img->d_h);
nisse64ec8f82016-09-27 00:17:25 -07001238
1239 VideoFrame decoded_image(buffer, timestamp, 0, kVideoRotation_0);
magjed@webrtc.org73d763e2015-03-17 11:40:45 +00001240 decoded_image.set_ntp_time_ms(ntp_time_ms);
sakal5fec1282017-02-20 06:43:58 -08001241 decode_complete_callback_->Decoded(decoded_image, rtc::Optional<int32_t>(),
1242 rtc::Optional<uint8_t>(qp));
stefan@webrtc.org93d216c2011-10-04 06:48:11 +00001243
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001244 return WEBRTC_VIDEO_CODEC_OK;
stefan@webrtc.org93d216c2011-10-04 06:48:11 +00001245}
1246
pwestin@webrtc.org8d89b582012-09-20 20:49:12 +00001247int VP8DecoderImpl::RegisterDecodeCompleteCallback(
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001248 DecodedImageCallback* callback) {
1249 decode_complete_callback_ = callback;
1250 return WEBRTC_VIDEO_CODEC_OK;
niklase@google.com470e71d2011-07-07 08:21:25 +00001251}
1252
pwestin@webrtc.org8d89b582012-09-20 20:49:12 +00001253int VP8DecoderImpl::Release() {
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001254 if (decoder_ != NULL) {
pwestin@webrtc.org8d89b582012-09-20 20:49:12 +00001255 if (vpx_codec_destroy(decoder_)) {
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001256 return WEBRTC_VIDEO_CODEC_MEMORY;
niklase@google.com470e71d2011-07-07 08:21:25 +00001257 }
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001258 delete decoder_;
1259 decoder_ = NULL;
1260 }
pbos@webrtc.orga3209a22015-03-20 13:35:56 +00001261 buffer_pool_.Release();
pwestin@webrtc.org4ea57e52012-02-02 12:21:47 +00001262 inited_ = false;
1263 return WEBRTC_VIDEO_CODEC_OK;
niklase@google.com470e71d2011-07-07 08:21:25 +00001264}
1265
Peter Boströmb7d9a972015-12-18 16:01:11 +01001266const char* VP8DecoderImpl::ImplementationName() const {
1267 return "libvpx";
1268}
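
// Illustrative usage of this decoder (a sketch only; the real call sites live
// in the video coding module, and |codec_settings|/|sink| are assumed here):
//   VP8DecoderImpl decoder;
//   decoder.InitDecode(&codec_settings, /*number_of_cores=*/1);
//   decoder.RegisterDecodeCompleteCallback(&sink);
//   decoder.Decode(encoded_image, /*missing_frames=*/false,
//                  /*fragmentation=*/nullptr, /*codec_specific_info=*/nullptr,
//                  /*render_time_ms=*/0);
//   decoder.Release();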
1269
pwestin@webrtc.org8d89b582012-09-20 20:49:12 +00001270} // namespace webrtc