/*
 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 *
 */

#import "WebRTC/RTCVideoCodecH264.h"

#import <VideoToolbox/VideoToolbox.h>
#include <vector>

#if defined(WEBRTC_IOS)
#import "Common/RTCUIApplicationStatusObserver.h"
#import "WebRTC/UIDevice+RTCDevice.h"
#endif
#import "PeerConnection/RTCVideoCodec+Private.h"
#import "WebRTC/RTCVideoCodec.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#import "helpers.h"
#include "libyuv/convert_from.h"
#include "common_video/h264/h264_bitstream_parser.h"
#include "common_video/h264/profile_level_id.h"
#include "common_video/include/bitrate_adjuster.h"
#include "modules/include/module_common_types.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "rtc_base/buffer.h"
#include "rtc_base/logging.h"
#include "rtc_base/timeutils.h"
#include "sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h"
#include "system_wrappers/include/clock.h"

@interface RTCVideoEncoderH264 ()

- (void)frameWasEncoded:(OSStatus)status
                  flags:(VTEncodeInfoFlags)infoFlags
           sampleBuffer:(CMSampleBufferRef)sampleBuffer
      codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
                  width:(int32_t)width
                 height:(int32_t)height
           renderTimeMs:(int64_t)renderTimeMs
              timestamp:(uint32_t)timestamp
               rotation:(RTCVideoRotation)rotation;

@end

// The ratio between kVTCompressionPropertyKey_DataRateLimits and
// kVTCompressionPropertyKey_AverageBitRate. The data rate limit is set higher
// than the average bit rate to avoid undershooting the target.
const float kLimitToAverageBitRateFactor = 1.5f;
// These thresholds deviate from the default H264 QP thresholds, as they
// have been found to work better on devices that support VideoToolbox.
const int kLowH264QpThreshold = 28;
const int kHighH264QpThreshold = 39;
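// These thresholds are surfaced through the scalingSettings property at the
// bottom of this file; WebRTC's quality scaler compares the per-frame QP
// reported from frameWasEncoded: against them when deciding whether to adapt
// the resolution down (sustained QP above the high threshold) or up (QP below
// the low threshold).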

// Struct that we pass to the encoder per frame to encode. We receive it again
// in the encoder callback.
struct RTCFrameEncodeParams {
  RTCFrameEncodeParams(RTCVideoEncoderH264 *e,
                       RTCCodecSpecificInfoH264 *csi,
                       int32_t w,
                       int32_t h,
                       int64_t rtms,
                       uint32_t ts,
                       RTCVideoRotation r)
      : encoder(e), width(w), height(h), render_time_ms(rtms), timestamp(ts), rotation(r) {
    if (csi) {
      codecSpecificInfo = csi;
    } else {
      codecSpecificInfo = [[RTCCodecSpecificInfoH264 alloc] init];
    }
  }

  RTCVideoEncoderH264 *encoder;
  RTCCodecSpecificInfoH264 *codecSpecificInfo;
  int32_t width;
  int32_t height;
  int64_t render_time_ms;
  uint32_t timestamp;
  RTCVideoRotation rotation;
};

// We receive I420Frames as input, but we need to feed CVPixelBuffers into the
// encoder. This performs the copy and format conversion.
// TODO(tkchin): See if encoder will accept i420 frames and compare performance.
bool CopyVideoFrameToPixelBuffer(id<RTCI420Buffer> frameBuffer, CVPixelBufferRef pixelBuffer) {
  RTC_DCHECK(pixelBuffer);
  RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(pixelBuffer),
                kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
  RTC_DCHECK_EQ(CVPixelBufferGetHeightOfPlane(pixelBuffer, 0), frameBuffer.height);
  RTC_DCHECK_EQ(CVPixelBufferGetWidthOfPlane(pixelBuffer, 0), frameBuffer.width);

  CVReturn cvRet = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
  if (cvRet != kCVReturnSuccess) {
    LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
    return false;
  }
  uint8_t *dstY = reinterpret_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
  int dstStrideY = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
  uint8_t *dstUV = reinterpret_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
  int dstStrideUV = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
  // Convert I420 to NV12.
  int ret = libyuv::I420ToNV12(frameBuffer.dataY,
                               frameBuffer.strideY,
                               frameBuffer.dataU,
                               frameBuffer.strideU,
                               frameBuffer.dataV,
                               frameBuffer.strideV,
                               dstY,
                               dstStrideY,
                               dstUV,
                               dstStrideUV,
                               frameBuffer.width,
                               frameBuffer.height);
  CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
  if (ret) {
    LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12: " << ret;
    return false;
  }
  return true;
}

CVPixelBufferRef CreatePixelBuffer(CVPixelBufferPoolRef pixel_buffer_pool) {
  if (!pixel_buffer_pool) {
    LOG(LS_ERROR) << "Failed to get pixel buffer pool.";
    return nullptr;
  }
  CVPixelBufferRef pixel_buffer;
  CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nullptr, pixel_buffer_pool, &pixel_buffer);
  if (ret != kCVReturnSuccess) {
    LOG(LS_ERROR) << "Failed to create pixel buffer: " << ret;
    // We probably want to drop frames here, since failure probably means
    // that the pool is empty.
    return nullptr;
  }
  return pixel_buffer;
}

// This is the callback function that VideoToolbox calls when encode is
// complete. From inspection this happens on its own queue.
void compressionOutputCallback(void *encoder,
                               void *params,
                               OSStatus status,
                               VTEncodeInfoFlags infoFlags,
                               CMSampleBufferRef sampleBuffer) {
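  // Ownership note: encode: hands a heap-allocated RTCFrameEncodeParams to
  // VideoToolbox via encodeParams.release(); wrapping the raw |params| pointer
  // in a unique_ptr here reclaims ownership so the struct is freed when this
  // callback returns.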
  RTC_CHECK(params);
  std::unique_ptr<RTCFrameEncodeParams> encodeParams(
      reinterpret_cast<RTCFrameEncodeParams *>(params));
  RTC_CHECK(encodeParams->encoder);
  [encodeParams->encoder frameWasEncoded:status
                                   flags:infoFlags
                            sampleBuffer:sampleBuffer
                       codecSpecificInfo:encodeParams->codecSpecificInfo
                                   width:encodeParams->width
                                  height:encodeParams->height
                            renderTimeMs:encodeParams->render_time_ms
                               timestamp:encodeParams->timestamp
                                rotation:encodeParams->rotation];
}

// Extract VideoToolbox profile out of the cricket::VideoCodec. If there is no
// specific VideoToolbox profile for the specified level, AutoLevel will be
// returned. The user must initialize the encoder with a resolution and
// framerate conforming to the selected H264 level regardless.
CFStringRef ExtractProfile(const cricket::VideoCodec &codec) {
  const rtc::Optional<webrtc::H264::ProfileLevelId> profile_level_id =
      webrtc::H264::ParseSdpProfileLevelId(codec.params);
  RTC_DCHECK(profile_level_id);
  switch (profile_level_id->profile) {
    case webrtc::H264::kProfileConstrainedBaseline:
    case webrtc::H264::kProfileBaseline:
      switch (profile_level_id->level) {
        case webrtc::H264::kLevel3:
          return kVTProfileLevel_H264_Baseline_3_0;
        case webrtc::H264::kLevel3_1:
          return kVTProfileLevel_H264_Baseline_3_1;
        case webrtc::H264::kLevel3_2:
          return kVTProfileLevel_H264_Baseline_3_2;
        case webrtc::H264::kLevel4:
          return kVTProfileLevel_H264_Baseline_4_0;
        case webrtc::H264::kLevel4_1:
          return kVTProfileLevel_H264_Baseline_4_1;
        case webrtc::H264::kLevel4_2:
          return kVTProfileLevel_H264_Baseline_4_2;
        case webrtc::H264::kLevel5:
          return kVTProfileLevel_H264_Baseline_5_0;
        case webrtc::H264::kLevel5_1:
          return kVTProfileLevel_H264_Baseline_5_1;
        case webrtc::H264::kLevel5_2:
          return kVTProfileLevel_H264_Baseline_5_2;
        case webrtc::H264::kLevel1:
        case webrtc::H264::kLevel1_b:
        case webrtc::H264::kLevel1_1:
        case webrtc::H264::kLevel1_2:
        case webrtc::H264::kLevel1_3:
        case webrtc::H264::kLevel2:
        case webrtc::H264::kLevel2_1:
        case webrtc::H264::kLevel2_2:
          return kVTProfileLevel_H264_Baseline_AutoLevel;
      }

    case webrtc::H264::kProfileMain:
      switch (profile_level_id->level) {
        case webrtc::H264::kLevel3:
          return kVTProfileLevel_H264_Main_3_0;
        case webrtc::H264::kLevel3_1:
          return kVTProfileLevel_H264_Main_3_1;
        case webrtc::H264::kLevel3_2:
          return kVTProfileLevel_H264_Main_3_2;
        case webrtc::H264::kLevel4:
          return kVTProfileLevel_H264_Main_4_0;
        case webrtc::H264::kLevel4_1:
          return kVTProfileLevel_H264_Main_4_1;
        case webrtc::H264::kLevel4_2:
          return kVTProfileLevel_H264_Main_4_2;
        case webrtc::H264::kLevel5:
          return kVTProfileLevel_H264_Main_5_0;
        case webrtc::H264::kLevel5_1:
          return kVTProfileLevel_H264_Main_5_1;
        case webrtc::H264::kLevel5_2:
          return kVTProfileLevel_H264_Main_5_2;
        case webrtc::H264::kLevel1:
        case webrtc::H264::kLevel1_b:
        case webrtc::H264::kLevel1_1:
        case webrtc::H264::kLevel1_2:
        case webrtc::H264::kLevel1_3:
        case webrtc::H264::kLevel2:
        case webrtc::H264::kLevel2_1:
        case webrtc::H264::kLevel2_2:
          return kVTProfileLevel_H264_Main_AutoLevel;
      }

    case webrtc::H264::kProfileConstrainedHigh:
    case webrtc::H264::kProfileHigh:
      switch (profile_level_id->level) {
        case webrtc::H264::kLevel3:
          return kVTProfileLevel_H264_High_3_0;
        case webrtc::H264::kLevel3_1:
          return kVTProfileLevel_H264_High_3_1;
        case webrtc::H264::kLevel3_2:
          return kVTProfileLevel_H264_High_3_2;
        case webrtc::H264::kLevel4:
          return kVTProfileLevel_H264_High_4_0;
        case webrtc::H264::kLevel4_1:
          return kVTProfileLevel_H264_High_4_1;
        case webrtc::H264::kLevel4_2:
          return kVTProfileLevel_H264_High_4_2;
        case webrtc::H264::kLevel5:
          return kVTProfileLevel_H264_High_5_0;
        case webrtc::H264::kLevel5_1:
          return kVTProfileLevel_H264_High_5_1;
        case webrtc::H264::kLevel5_2:
          return kVTProfileLevel_H264_High_5_2;
        case webrtc::H264::kLevel1:
        case webrtc::H264::kLevel1_b:
        case webrtc::H264::kLevel1_1:
        case webrtc::H264::kLevel1_2:
        case webrtc::H264::kLevel1_3:
        case webrtc::H264::kLevel2:
        case webrtc::H264::kLevel2_1:
        case webrtc::H264::kLevel2_2:
          return kVTProfileLevel_H264_High_AutoLevel;
      }
  }
}
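
// Usage sketch (illustrative only; not an API defined in this file): the
// encoder is driven through the methods implemented below. The variables
// h264CodecInfo, settings and videoFrame are assumed to be supplied by the
// caller; h264CodecInfo must be named @"H264" (see the RTC_CHECK in
// initWithCodecInfo:), and settings carries the width, height, mode and
// startBitrate read in startEncodeWithSettings:numberOfCores:. The callback
// block follows the three-argument, BOOL-returning signature used when
// _callback is invoked in frameWasEncoded:.
//
//   RTCVideoEncoderH264 *encoder =
//       [[RTCVideoEncoderH264 alloc] initWithCodecInfo:h264CodecInfo];
//   [encoder setCallback:^BOOL(RTCEncodedImage *frame,
//                              id<RTCCodecSpecificInfo> info,
//                              RTCRtpFragmentationHeader *header) {
//     // Packetize and send the encoded frame here.
//     return YES;
//   }];
//   [encoder startEncodeWithSettings:settings numberOfCores:1];
//   [encoder encode:videoFrame codecSpecificInfo:nil frameTypes:nil];
//   // ...
//   [encoder releaseEncoder];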

@implementation RTCVideoEncoderH264 {
  RTCVideoCodecInfo *_codecInfo;
  webrtc::BitrateAdjuster *_bitrateAdjuster;
  uint32_t _targetBitrateBps;
  uint32_t _encoderBitrateBps;
  RTCH264PacketizationMode _packetizationMode;
  CFStringRef _profile;
  RTCVideoEncoderCallback _callback;
  int32_t _width;
  int32_t _height;
  VTCompressionSessionRef _compressionSession;
  RTCVideoCodecMode _mode;

  webrtc::H264BitstreamParser _h264BitstreamParser;
  std::vector<uint8_t> _nv12ScaleBuffer;
}

// .5 is set as a minimum to prevent overcompensating for large temporary
// overshoots. We don't want to degrade video quality too badly.
// .95 is set to prevent oscillations. When a lower bitrate is set on the
// encoder than previously set, its output seems to have a brief period of
// drastically reduced bitrate, so we want to avoid that. In steady state
// conditions, 0.95 seems to give us better overall bitrate over long periods
// of time.
- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo {
  if (self = [super init]) {
    _codecInfo = codecInfo;
    _bitrateAdjuster = new webrtc::BitrateAdjuster(webrtc::Clock::GetRealTimeClock(), .5, .95);
    _packetizationMode = RTCH264PacketizationModeNonInterleaved;
    _profile = ExtractProfile([codecInfo nativeVideoCodec]);
    LOG(LS_INFO) << "Using profile " << CFStringToString(_profile);
    RTC_CHECK([codecInfo.name isEqualToString:@"H264"]);

#if defined(WEBRTC_IOS)
    [RTCUIApplicationStatusObserver prepareForUse];
#endif
  }
  return self;
}

- (void)dealloc {
  [self destroyCompressionSession];
}

- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings
                       numberOfCores:(int)numberOfCores {
  RTC_DCHECK(settings);
  RTC_DCHECK([settings.name isEqualToString:@"H264"]);

  _width = settings.width;
  _height = settings.height;
  _mode = settings.mode;

  // We can only set average bitrate on the HW encoder.
  _targetBitrateBps = settings.startBitrate;
  _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps);

  // TODO(tkchin): Try setting payload size via
  // kVTCompressionPropertyKey_MaxH264SliceBytes.

  return [self resetCompressionSession];
}

- (NSInteger)encode:(RTCVideoFrame *)frame
    codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
           frameTypes:(NSArray<NSNumber *> *)frameTypes {
  RTC_DCHECK_EQ(frame.width, _width);
  RTC_DCHECK_EQ(frame.height, _height);
  if (!_callback || !_compressionSession) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
#if defined(WEBRTC_IOS)
  if (![[RTCUIApplicationStatusObserver sharedInstance] isApplicationActive]) {
    // Ignore all encode requests when app isn't active. In this state, the
    // hardware encoder has been invalidated by the OS.
    return WEBRTC_VIDEO_CODEC_OK;
  }
#endif
  BOOL isKeyframeRequired = NO;

  // Get a pixel buffer from the pool and copy frame data over.
  CVPixelBufferPoolRef pixelBufferPool =
      VTCompressionSessionGetPixelBufferPool(_compressionSession);

#if defined(WEBRTC_IOS)
  if (!pixelBufferPool) {
    // Kind of a hack. On backgrounding, the compression session seems to get
    // invalidated, which causes this pool call to fail when the application
    // is foregrounded and frames are being sent for encoding again.
    // Resetting the session when this happens fixes the issue.
    // In addition we request a keyframe so video can recover quickly.
    [self resetCompressionSession];
    pixelBufferPool = VTCompressionSessionGetPixelBufferPool(_compressionSession);
    isKeyframeRequired = YES;
    LOG(LS_INFO) << "Resetting compression session due to invalid pool.";
  }
#endif

  CVPixelBufferRef pixelBuffer = nullptr;
  if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
    // Native frame buffer.
    RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
    if (![rtcPixelBuffer requiresCropping]) {
      // This pixel buffer might have a higher resolution than what the
      // compression session is configured to. The compression session can
      // handle that and will output encoded frames in the configured
      // resolution regardless of the input pixel buffer resolution.
      pixelBuffer = rtcPixelBuffer.pixelBuffer;
      CVBufferRetain(pixelBuffer);
    } else {
      // Cropping required, we need to crop and scale to a new pixel buffer.
      pixelBuffer = CreatePixelBuffer(pixelBufferPool);
      if (!pixelBuffer) {
        return WEBRTC_VIDEO_CODEC_ERROR;
      }
      int dstWidth = CVPixelBufferGetWidth(pixelBuffer);
      int dstHeight = CVPixelBufferGetHeight(pixelBuffer);
      if ([rtcPixelBuffer requiresScalingToWidth:dstWidth height:dstHeight]) {
        int size =
            [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth height:dstHeight];
        _nv12ScaleBuffer.resize(size);
      } else {
        _nv12ScaleBuffer.clear();
      }
      _nv12ScaleBuffer.shrink_to_fit();
      if (![rtcPixelBuffer cropAndScaleTo:pixelBuffer withTempBuffer:_nv12ScaleBuffer.data()]) {
        return WEBRTC_VIDEO_CODEC_ERROR;
      }
    }
  }

  if (!pixelBuffer) {
    // We did not have a native frame buffer.
    pixelBuffer = CreatePixelBuffer(pixelBufferPool);
    if (!pixelBuffer) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    RTC_DCHECK(pixelBuffer);
    if (!CopyVideoFrameToPixelBuffer([frame.buffer toI420], pixelBuffer)) {
      LOG(LS_ERROR) << "Failed to copy frame data.";
      CVBufferRelease(pixelBuffer);
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }

  // Check if we need a keyframe.
  if (!isKeyframeRequired && frameTypes) {
    for (NSNumber *frameType in frameTypes) {
      if ((RTCFrameType)frameType.intValue == RTCFrameTypeVideoFrameKey) {
        isKeyframeRequired = YES;
        break;
      }
    }
  }

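  // frame.timeStampNs is in nanoseconds; dividing by kNumNanosecsPerMillisec
  // converts it to milliseconds, and the CMTime timescale of 1000 marks the
  // resulting value as being in units of 1/1000 of a second.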
  CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000);
  CFDictionaryRef frameProperties = nullptr;
  if (isKeyframeRequired) {
    CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame};
    CFTypeRef values[] = {kCFBooleanTrue};
    frameProperties = CreateCFTypeDictionary(keys, values, 1);
  }

  std::unique_ptr<RTCFrameEncodeParams> encodeParams;
  encodeParams.reset(new RTCFrameEncodeParams(self,
                                              codecSpecificInfo,
                                              _width,
                                              _height,
                                              frame.timeStampNs / rtc::kNumNanosecsPerMillisec,
                                              frame.timeStamp,
                                              frame.rotation));
  encodeParams->codecSpecificInfo.packetizationMode = _packetizationMode;

  // Update the bitrate if needed.
  [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps()];

  OSStatus status = VTCompressionSessionEncodeFrame(_compressionSession,
                                                    pixelBuffer,
                                                    presentationTimeStamp,
                                                    kCMTimeInvalid,
                                                    frameProperties,
                                                    encodeParams.release(),
                                                    nullptr);
  if (frameProperties) {
    CFRelease(frameProperties);
  }
  if (pixelBuffer) {
    CVBufferRelease(pixelBuffer);
  }
  if (status != noErr) {
    LOG(LS_ERROR) << "Failed to encode frame with code: " << status;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

- (void)setCallback:(RTCVideoEncoderCallback)callback {
  _callback = callback;
}

- (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate {
  _targetBitrateBps = 1000 * bitrateKbit;
  _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps);
  [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps()];
  return WEBRTC_VIDEO_CODEC_OK;
}

#pragma mark - Private

- (NSInteger)releaseEncoder {
  // Need to destroy so that the session is invalidated and won't use the
  // callback anymore. Do not remove callback until the session is invalidated
  // since async encoder callbacks can occur until invalidation.
  [self destroyCompressionSession];
  _callback = nullptr;
  return WEBRTC_VIDEO_CODEC_OK;
}

- (int)resetCompressionSession {
  [self destroyCompressionSession];

  // Set source image buffer attributes. These attributes will be present on
  // buffers retrieved from the encoder's pixel buffer pool.
  const size_t attributesSize = 3;
  CFTypeRef keys[attributesSize] = {
#if defined(WEBRTC_IOS)
    kCVPixelBufferOpenGLESCompatibilityKey,
#elif defined(WEBRTC_MAC)
    kCVPixelBufferOpenGLCompatibilityKey,
#endif
    kCVPixelBufferIOSurfacePropertiesKey,
    kCVPixelBufferPixelFormatTypeKey
  };
  CFDictionaryRef ioSurfaceValue = CreateCFTypeDictionary(nullptr, nullptr, 0);
  int64_t nv12type = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
  CFNumberRef pixelFormat = CFNumberCreate(nullptr, kCFNumberLongType, &nv12type);
  CFTypeRef values[attributesSize] = {kCFBooleanTrue, ioSurfaceValue, pixelFormat};
  CFDictionaryRef sourceAttributes = CreateCFTypeDictionary(keys, values, attributesSize);
  if (ioSurfaceValue) {
    CFRelease(ioSurfaceValue);
    ioSurfaceValue = nullptr;
  }
  if (pixelFormat) {
    CFRelease(pixelFormat);
    pixelFormat = nullptr;
  }
  CFMutableDictionaryRef encoder_specs = nullptr;
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
  // Currently hw accl is supported above 360p on mac; below 360p
  // the compression session will be created with hw accl disabled.
  encoder_specs = CFDictionaryCreateMutable(
      nullptr, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
  CFDictionarySetValue(encoder_specs,
                       kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder,
                       kCFBooleanTrue);
#endif
  OSStatus status =
      VTCompressionSessionCreate(nullptr,  // use default allocator
                                 _width,
                                 _height,
                                 kCMVideoCodecType_H264,
                                 encoder_specs,  // use hardware accelerated encoder if available
                                 sourceAttributes,
                                 nullptr,  // use default compressed data allocator
                                 compressionOutputCallback,
                                 nullptr,
                                 &_compressionSession);
  if (sourceAttributes) {
    CFRelease(sourceAttributes);
    sourceAttributes = nullptr;
  }
  if (encoder_specs) {
    CFRelease(encoder_specs);
    encoder_specs = nullptr;
  }
  if (status != noErr) {
    LOG(LS_ERROR) << "Failed to create compression session: " << status;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
  CFBooleanRef hwaccl_enabled = nullptr;
  status = VTSessionCopyProperty(_compressionSession,
                                 kVTCompressionPropertyKey_UsingHardwareAcceleratedVideoEncoder,
                                 nullptr,
                                 &hwaccl_enabled);
  if (status == noErr && (CFBooleanGetValue(hwaccl_enabled))) {
    LOG(LS_INFO) << "Compression session created with hw accl enabled";
  } else {
    LOG(LS_INFO) << "Compression session created with hw accl disabled";
  }
#endif
  [self configureCompressionSession];
  return WEBRTC_VIDEO_CODEC_OK;
}

- (void)configureCompressionSession {
  RTC_DCHECK(_compressionSession);
  SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, true);
  SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_ProfileLevel, _profile);
  SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, false);
  [self setEncoderBitrateBps:_targetBitrateBps];
  // TODO(tkchin): Look at entropy mode and colorspace matrices.
  // TODO(tkchin): Investigate to see if there's any way to make this work.
  // May need it to interop with Android. Currently this call just fails.
  // On inspecting encoder output on iOS8, this value is set to 6.
  // internal::SetVTSessionProperty(compression_session_,
  //     kVTCompressionPropertyKey_MaxFrameDelayCount,
  //     1);

  // Set a relatively large value for keyframe emission (7200 frames or 4 minutes).
  SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, 7200);
  SetVTSessionProperty(
      _compressionSession, kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration, 240);
}

- (void)destroyCompressionSession {
  if (_compressionSession) {
    VTCompressionSessionInvalidate(_compressionSession);
    CFRelease(_compressionSession);
    _compressionSession = nullptr;
  }
}

- (NSString *)implementationName {
  return @"VideoToolbox";
}

- (void)setBitrateBps:(uint32_t)bitrateBps {
  if (_encoderBitrateBps != bitrateBps) {
    [self setEncoderBitrateBps:bitrateBps];
  }
}

- (void)setEncoderBitrateBps:(uint32_t)bitrateBps {
  if (_compressionSession) {
    SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AverageBitRate, bitrateBps);

    // TODO(tkchin): Add a helper method to set array value.
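    // kVTCompressionPropertyKey_DataRateLimits takes an array of alternating
    // byte-count and duration values; the single pair below caps output at
    // bitrateBps * kLimitToAverageBitRateFactor / 8 bytes per one-second
    // window, on top of the average bitrate set above.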
    int64_t dataLimitBytesPerSecondValue =
        static_cast<int64_t>(bitrateBps * kLimitToAverageBitRateFactor / 8);
    CFNumberRef bytesPerSecond =
        CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &dataLimitBytesPerSecondValue);
    int64_t oneSecondValue = 1;
    CFNumberRef oneSecond =
        CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &oneSecondValue);
    const void *nums[2] = {bytesPerSecond, oneSecond};
    CFArrayRef dataRateLimits = CFArrayCreate(nullptr, nums, 2, &kCFTypeArrayCallBacks);
    OSStatus status = VTSessionSetProperty(
        _compressionSession, kVTCompressionPropertyKey_DataRateLimits, dataRateLimits);
    if (bytesPerSecond) {
      CFRelease(bytesPerSecond);
    }
    if (oneSecond) {
      CFRelease(oneSecond);
    }
    if (dataRateLimits) {
      CFRelease(dataRateLimits);
    }
    if (status != noErr) {
      LOG(LS_ERROR) << "Failed to set data rate limit";
    }

    _encoderBitrateBps = bitrateBps;
  }
}

- (void)frameWasEncoded:(OSStatus)status
                  flags:(VTEncodeInfoFlags)infoFlags
           sampleBuffer:(CMSampleBufferRef)sampleBuffer
      codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
                  width:(int32_t)width
                 height:(int32_t)height
           renderTimeMs:(int64_t)renderTimeMs
              timestamp:(uint32_t)timestamp
               rotation:(RTCVideoRotation)rotation {
  if (status != noErr) {
    LOG(LS_ERROR) << "H264 encode failed.";
    return;
  }
  if (infoFlags & kVTEncodeInfo_FrameDropped) {
    LOG(LS_INFO) << "H264 encode dropped frame.";
    return;
  }

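  // CoreMedia flags delta (non-sync) samples with kCMSampleAttachmentKey_NotSync,
  // so the absence of that attachment on the first sample means this encoded
  // frame is a sync sample, i.e. a keyframe.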
  BOOL isKeyframe = NO;
  CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, 0);
  if (attachments != nullptr && CFArrayGetCount(attachments)) {
    CFDictionaryRef attachment =
        static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(attachments, 0));
    isKeyframe = !CFDictionaryContainsKey(attachment, kCMSampleAttachmentKey_NotSync);
  }

  if (isKeyframe) {
    LOG(LS_INFO) << "Generated keyframe";
  }

  // Convert the sample buffer into a buffer suitable for RTP packetization.
  // TODO(tkchin): Allocate buffers through a pool.
  std::unique_ptr<rtc::Buffer> buffer(new rtc::Buffer());
  RTCRtpFragmentationHeader *header;
  {
    std::unique_ptr<webrtc::RTPFragmentationHeader> header_cpp;
    bool result =
        H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get(), &header_cpp);
    header = [[RTCRtpFragmentationHeader alloc] initWithNativeFragmentationHeader:header_cpp.get()];
    if (!result) {
      return;
    }
  }

  RTCEncodedImage *frame = [[RTCEncodedImage alloc] init];
  frame.buffer = [NSData dataWithBytesNoCopy:buffer->data() length:buffer->size() freeWhenDone:NO];
  frame.encodedWidth = width;
  frame.encodedHeight = height;
  frame.completeFrame = YES;
  frame.frameType = isKeyframe ? RTCFrameTypeVideoFrameKey : RTCFrameTypeVideoFrameDelta;
  frame.captureTimeMs = renderTimeMs;
  frame.timeStamp = timestamp;
  frame.rotation = rotation;
  frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? RTCVideoContentTypeScreenshare :
                                                                  RTCVideoContentTypeUnspecified;
  frame.flags = webrtc::TimingFrameFlags::kInvalid;

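  // Parse the encoded bitstream to recover the slice QP, which is reported to
  // WebRTC together with the encoded frame.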
  int qp;
  _h264BitstreamParser.ParseBitstream(buffer->data(), buffer->size());
  _h264BitstreamParser.GetLastSliceQp(&qp);
  frame.qp = @(qp);

  BOOL res = _callback(frame, codecSpecificInfo, header);
  if (!res) {
    LOG(LS_ERROR) << "Encode callback failed";
    return;
  }
  _bitrateAdjuster->Update(frame.buffer.length);
}

- (RTCVideoEncoderQpThresholds *)scalingSettings {
  return [[RTCVideoEncoderQpThresholds alloc] initWithThresholdsLow:kLowH264QpThreshold
                                                               high:kHighH264QpThreshold];
}

@end