/*
 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 *
 */

#import "WebRTC/RTCVideoCodecH264.h"

#import <VideoToolbox/VideoToolbox.h>
#include <vector>

#if defined(WEBRTC_IOS)
#import "Common/RTCUIApplicationStatusObserver.h"
#import "WebRTC/UIDevice+RTCDevice.h"
#endif
#import "PeerConnection/RTCVideoCodec+Private.h"
#import "WebRTC/RTCVideoCodec.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "common_video/h264/h264_bitstream_parser.h"
#include "common_video/h264/profile_level_id.h"
#include "common_video/include/bitrate_adjuster.h"
#import "helpers.h"
#include "modules/include/module_common_types.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "rtc_base/buffer.h"
#include "rtc_base/logging.h"
#include "rtc_base/timeutils.h"
#include "sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"

@interface RTCVideoEncoderH264 ()

- (void)frameWasEncoded:(OSStatus)status
                  flags:(VTEncodeInfoFlags)infoFlags
           sampleBuffer:(CMSampleBufferRef)sampleBuffer
      codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
                  width:(int32_t)width
                 height:(int32_t)height
           renderTimeMs:(int64_t)renderTimeMs
              timestamp:(uint32_t)timestamp
               rotation:(RTCVideoRotation)rotation;

@end

namespace {  // anonymous namespace

// The ratio between kVTCompressionPropertyKey_DataRateLimits and
// kVTCompressionPropertyKey_AverageBitRate. The data rate limit is set higher
// than the average bit rate to avoid undershooting the target.
const float kLimitToAverageBitRateFactor = 1.5f;
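// For example, with a 1 Mbps average bit rate, -setEncoderBitrateBps: below
// sets the data rate limit to 1,000,000 * 1.5 / 8 = 187,500 bytes over a
// one-second window.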
// These thresholds deviate from the default h264 QP thresholds, as they
// have been found to work better on devices that support VideoToolbox.
const int kLowH264QpThreshold = 28;
const int kHighH264QpThreshold = 39;

const OSType kNV12PixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;

// Struct that we pass to the encoder per frame to encode. We receive it again
// in the encoder callback.
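// Ownership: -encode: allocates this on the heap and hands it to
// VTCompressionSessionEncodeFrame as the source frame refcon;
// compressionOutputCallback below reclaims it with a std::unique_ptr.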
struct RTCFrameEncodeParams {
  RTCFrameEncodeParams(RTCVideoEncoderH264 *e,
                       RTCCodecSpecificInfoH264 *csi,
                       int32_t w,
                       int32_t h,
                       int64_t rtms,
                       uint32_t ts,
                       RTCVideoRotation r)
      : encoder(e), width(w), height(h), render_time_ms(rtms), timestamp(ts), rotation(r) {
    if (csi) {
      codecSpecificInfo = csi;
    } else {
      codecSpecificInfo = [[RTCCodecSpecificInfoH264 alloc] init];
    }
  }

  RTCVideoEncoderH264 *encoder;
  RTCCodecSpecificInfoH264 *codecSpecificInfo;
  int32_t width;
  int32_t height;
  int64_t render_time_ms;
  uint32_t timestamp;
  RTCVideoRotation rotation;
};

// We receive I420Frames as input, but we need to feed CVPixelBuffers into the
// encoder. This performs the copy and format conversion.
// TODO(tkchin): See if encoder will accept i420 frames and compare performance.
bool CopyVideoFrameToNV12PixelBuffer(id<RTCI420Buffer> frameBuffer, CVPixelBufferRef pixelBuffer) {
  RTC_DCHECK(pixelBuffer);
  RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(pixelBuffer), kNV12PixelFormat);
  RTC_DCHECK_EQ(CVPixelBufferGetHeightOfPlane(pixelBuffer, 0), frameBuffer.height);
  RTC_DCHECK_EQ(CVPixelBufferGetWidthOfPlane(pixelBuffer, 0), frameBuffer.width);

  CVReturn cvRet = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
  if (cvRet != kCVReturnSuccess) {
    RTC_LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
    return false;
  }
  uint8_t *dstY = reinterpret_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
  int dstStrideY = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
  uint8_t *dstUV = reinterpret_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
  int dstStrideUV = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
  // Convert I420 to NV12.
  int ret = libyuv::I420ToNV12(frameBuffer.dataY,
                               frameBuffer.strideY,
                               frameBuffer.dataU,
                               frameBuffer.strideU,
                               frameBuffer.dataV,
                               frameBuffer.strideV,
                               dstY,
                               dstStrideY,
                               dstUV,
                               dstStrideUV,
                               frameBuffer.width,
                               frameBuffer.height);
  CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
  if (ret) {
    RTC_LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12: " << ret;
    return false;
  }
  return true;
}

CVPixelBufferRef CreatePixelBuffer(CVPixelBufferPoolRef pixel_buffer_pool) {
  if (!pixel_buffer_pool) {
    RTC_LOG(LS_ERROR) << "Failed to get pixel buffer pool.";
    return nullptr;
  }
  CVPixelBufferRef pixel_buffer;
  CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nullptr, pixel_buffer_pool, &pixel_buffer);
  if (ret != kCVReturnSuccess) {
    RTC_LOG(LS_ERROR) << "Failed to create pixel buffer: " << ret;
    // We probably want to drop frames here, since failure probably means
    // that the pool is empty.
    return nullptr;
  }
  return pixel_buffer;
}

// This is the callback function that VideoToolbox calls when encode is
// complete. From inspection this happens on its own queue.
void compressionOutputCallback(void *encoder,
                               void *params,
                               OSStatus status,
                               VTEncodeInfoFlags infoFlags,
                               CMSampleBufferRef sampleBuffer) {
  if (!params) {
    // This can happen if there are pending callbacks when the encoder is destroyed.
    return;
  }
  std::unique_ptr<RTCFrameEncodeParams> encodeParams(
      reinterpret_cast<RTCFrameEncodeParams *>(params));
  [encodeParams->encoder frameWasEncoded:status
                                   flags:infoFlags
                            sampleBuffer:sampleBuffer
                       codecSpecificInfo:encodeParams->codecSpecificInfo
                                   width:encodeParams->width
                                  height:encodeParams->height
                            renderTimeMs:encodeParams->render_time_ms
                               timestamp:encodeParams->timestamp
                                rotation:encodeParams->rotation];
}

// Extract VideoToolbox profile out of the webrtc::SdpVideoFormat. If there is
// no specific VideoToolbox profile for the specified level, AutoLevel will be
// returned. The user must initialize the encoder with a resolution and
// framerate conforming to the selected H264 level regardless.
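// For example, the common SDP string profile-level-id=42e01f (Constrained
// Baseline, level 3.1) maps to kVTProfileLevel_H264_Baseline_3_1.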
CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
  const rtc::Optional<webrtc::H264::ProfileLevelId> profile_level_id =
      webrtc::H264::ParseSdpProfileLevelId(videoFormat.parameters);
  RTC_DCHECK(profile_level_id);
  switch (profile_level_id->profile) {
    case webrtc::H264::kProfileConstrainedBaseline:
    case webrtc::H264::kProfileBaseline:
      switch (profile_level_id->level) {
        case webrtc::H264::kLevel3:
          return kVTProfileLevel_H264_Baseline_3_0;
        case webrtc::H264::kLevel3_1:
          return kVTProfileLevel_H264_Baseline_3_1;
        case webrtc::H264::kLevel3_2:
          return kVTProfileLevel_H264_Baseline_3_2;
        case webrtc::H264::kLevel4:
          return kVTProfileLevel_H264_Baseline_4_0;
        case webrtc::H264::kLevel4_1:
          return kVTProfileLevel_H264_Baseline_4_1;
        case webrtc::H264::kLevel4_2:
          return kVTProfileLevel_H264_Baseline_4_2;
        case webrtc::H264::kLevel5:
          return kVTProfileLevel_H264_Baseline_5_0;
        case webrtc::H264::kLevel5_1:
          return kVTProfileLevel_H264_Baseline_5_1;
        case webrtc::H264::kLevel5_2:
          return kVTProfileLevel_H264_Baseline_5_2;
        case webrtc::H264::kLevel1:
        case webrtc::H264::kLevel1_b:
        case webrtc::H264::kLevel1_1:
        case webrtc::H264::kLevel1_2:
        case webrtc::H264::kLevel1_3:
        case webrtc::H264::kLevel2:
        case webrtc::H264::kLevel2_1:
        case webrtc::H264::kLevel2_2:
          return kVTProfileLevel_H264_Baseline_AutoLevel;
      }

    case webrtc::H264::kProfileMain:
      switch (profile_level_id->level) {
        case webrtc::H264::kLevel3:
          return kVTProfileLevel_H264_Main_3_0;
        case webrtc::H264::kLevel3_1:
          return kVTProfileLevel_H264_Main_3_1;
        case webrtc::H264::kLevel3_2:
          return kVTProfileLevel_H264_Main_3_2;
        case webrtc::H264::kLevel4:
          return kVTProfileLevel_H264_Main_4_0;
        case webrtc::H264::kLevel4_1:
          return kVTProfileLevel_H264_Main_4_1;
        case webrtc::H264::kLevel4_2:
          return kVTProfileLevel_H264_Main_4_2;
        case webrtc::H264::kLevel5:
          return kVTProfileLevel_H264_Main_5_0;
        case webrtc::H264::kLevel5_1:
          return kVTProfileLevel_H264_Main_5_1;
        case webrtc::H264::kLevel5_2:
          return kVTProfileLevel_H264_Main_5_2;
        case webrtc::H264::kLevel1:
        case webrtc::H264::kLevel1_b:
        case webrtc::H264::kLevel1_1:
        case webrtc::H264::kLevel1_2:
        case webrtc::H264::kLevel1_3:
        case webrtc::H264::kLevel2:
        case webrtc::H264::kLevel2_1:
        case webrtc::H264::kLevel2_2:
          return kVTProfileLevel_H264_Main_AutoLevel;
      }

    case webrtc::H264::kProfileConstrainedHigh:
    case webrtc::H264::kProfileHigh:
      switch (profile_level_id->level) {
        case webrtc::H264::kLevel3:
          return kVTProfileLevel_H264_High_3_0;
        case webrtc::H264::kLevel3_1:
          return kVTProfileLevel_H264_High_3_1;
        case webrtc::H264::kLevel3_2:
          return kVTProfileLevel_H264_High_3_2;
        case webrtc::H264::kLevel4:
          return kVTProfileLevel_H264_High_4_0;
        case webrtc::H264::kLevel4_1:
          return kVTProfileLevel_H264_High_4_1;
        case webrtc::H264::kLevel4_2:
          return kVTProfileLevel_H264_High_4_2;
        case webrtc::H264::kLevel5:
          return kVTProfileLevel_H264_High_5_0;
        case webrtc::H264::kLevel5_1:
          return kVTProfileLevel_H264_High_5_1;
        case webrtc::H264::kLevel5_2:
          return kVTProfileLevel_H264_High_5_2;
        case webrtc::H264::kLevel1:
        case webrtc::H264::kLevel1_b:
        case webrtc::H264::kLevel1_1:
        case webrtc::H264::kLevel1_2:
        case webrtc::H264::kLevel1_3:
        case webrtc::H264::kLevel2:
        case webrtc::H264::kLevel2_1:
        case webrtc::H264::kLevel2_2:
          return kVTProfileLevel_H264_High_AutoLevel;
      }
  }
}
}  // namespace

@implementation RTCVideoEncoderH264 {
  RTCVideoCodecInfo *_codecInfo;
  std::unique_ptr<webrtc::BitrateAdjuster> _bitrateAdjuster;
  uint32_t _targetBitrateBps;
  uint32_t _encoderBitrateBps;
  RTCH264PacketizationMode _packetizationMode;
  CFStringRef _profile;
  RTCVideoEncoderCallback _callback;
  int32_t _width;
  int32_t _height;
  VTCompressionSessionRef _compressionSession;
  RTCVideoCodecMode _mode;

  webrtc::H264BitstreamParser _h264BitstreamParser;
  std::vector<uint8_t> _frameScaleBuffer;
}

// .5 is set as a minimum to prevent overcompensating for large temporary
// overshoots. We don't want to degrade video quality too badly.
// .95 is set to prevent oscillations. When a lower bitrate is set on the
// encoder than previously set, its output seems to have a brief period of
// drastically reduced bitrate, so we want to avoid that. In steady state
// conditions, 0.95 seems to give us better overall bitrate over long periods
// of time.
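// In other words, the adjusted bitrate requested from the encoder is expected
// to stay within [0.5, 0.95] of the target, e.g. between 500 and 950 kbps for
// a 1000 kbps target.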
- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo {
  if (self = [super init]) {
    _codecInfo = codecInfo;
    _bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95));
    _packetizationMode = RTCH264PacketizationModeNonInterleaved;
    _profile = ExtractProfile([codecInfo nativeSdpVideoFormat]);
    RTC_LOG(LS_INFO) << "Using profile " << CFStringToString(_profile);
    RTC_CHECK([codecInfo.name isEqualToString:kRTCVideoCodecH264Name]);

#if defined(WEBRTC_IOS)
    [RTCUIApplicationStatusObserver prepareForUse];
#endif
  }
  return self;
}

- (void)dealloc {
  [self destroyCompressionSession];
}

- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings
                       numberOfCores:(int)numberOfCores {
  RTC_DCHECK(settings);
  RTC_DCHECK([settings.name isEqualToString:kRTCVideoCodecH264Name]);

  _width = settings.width;
  _height = settings.height;
  _mode = settings.mode;

  // We can only set average bitrate on the HW encoder.
  _targetBitrateBps = settings.startBitrate * 1000;  // startBitrate is in kbps.
  _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps);

  // TODO(tkchin): Try setting payload size via
  // kVTCompressionPropertyKey_MaxH264SliceBytes.

  return [self resetCompressionSessionWithPixelFormat:kNV12PixelFormat];
}

- (NSInteger)encode:(RTCVideoFrame *)frame
  codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)codecSpecificInfo
         frameTypes:(NSArray<NSNumber *> *)frameTypes {
  RTC_DCHECK_EQ(frame.width, _width);
  RTC_DCHECK_EQ(frame.height, _height);
  if (!_callback || !_compressionSession) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
#if defined(WEBRTC_IOS)
  if (![[RTCUIApplicationStatusObserver sharedInstance] isApplicationActive]) {
    // Ignore all encode requests when app isn't active. In this state, the
    // hardware encoder has been invalidated by the OS.
    return WEBRTC_VIDEO_CODEC_OK;
  }
#endif
  BOOL isKeyframeRequired = NO;

  // Get a pixel buffer from the pool and copy frame data over.
  CVPixelBufferPoolRef pixelBufferPool =
      VTCompressionSessionGetPixelBufferPool(_compressionSession);
  if ([self resetCompressionSessionIfNeededForPool:pixelBufferPool withFrame:frame]) {
    pixelBufferPool = VTCompressionSessionGetPixelBufferPool(_compressionSession);
    isKeyframeRequired = YES;
  }

  CVPixelBufferRef pixelBuffer = nullptr;
  if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
    // Native frame buffer
    RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
    if (![rtcPixelBuffer requiresCropping]) {
      // This pixel buffer might have a higher resolution than what the
      // compression session is configured to. The compression session can
      // handle that and will output encoded frames in the configured
      // resolution regardless of the input pixel buffer resolution.
      pixelBuffer = rtcPixelBuffer.pixelBuffer;
      CVBufferRetain(pixelBuffer);
    } else {
      // Cropping required; we need to crop and scale to a new pixel buffer.
      pixelBuffer = CreatePixelBuffer(pixelBufferPool);
      if (!pixelBuffer) {
        return WEBRTC_VIDEO_CODEC_ERROR;
      }
      int dstWidth = CVPixelBufferGetWidth(pixelBuffer);
      int dstHeight = CVPixelBufferGetHeight(pixelBuffer);
      if ([rtcPixelBuffer requiresScalingToWidth:dstWidth height:dstHeight]) {
        int size =
            [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth height:dstHeight];
        _frameScaleBuffer.resize(size);
      } else {
        _frameScaleBuffer.clear();
      }
      _frameScaleBuffer.shrink_to_fit();
      if (![rtcPixelBuffer cropAndScaleTo:pixelBuffer withTempBuffer:_frameScaleBuffer.data()]) {
        return WEBRTC_VIDEO_CODEC_ERROR;
      }
    }
  }

  if (!pixelBuffer) {
    // We did not have a native frame buffer
    pixelBuffer = CreatePixelBuffer(pixelBufferPool);
    if (!pixelBuffer) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    RTC_DCHECK(pixelBuffer);
    if (!CopyVideoFrameToNV12PixelBuffer([frame.buffer toI420], pixelBuffer)) {
      RTC_LOG(LS_ERROR) << "Failed to copy frame data.";
      CVBufferRelease(pixelBuffer);
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }

  // Check if we need a keyframe.
  if (!isKeyframeRequired && frameTypes) {
    for (NSNumber *frameType in frameTypes) {
      if ((RTCFrameType)frameType.intValue == RTCFrameTypeVideoFrameKey) {
        isKeyframeRequired = YES;
        break;
      }
    }
  }

  CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000);
  CFDictionaryRef frameProperties = nullptr;
  if (isKeyframeRequired) {
    CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame};
    CFTypeRef values[] = {kCFBooleanTrue};
    frameProperties = CreateCFTypeDictionary(keys, values, 1);
  }

  std::unique_ptr<RTCFrameEncodeParams> encodeParams;
  encodeParams.reset(new RTCFrameEncodeParams(self,
                                              codecSpecificInfo,
                                              _width,
                                              _height,
                                              frame.timeStampNs / rtc::kNumNanosecsPerMillisec,
                                              frame.timeStamp,
                                              frame.rotation));
  encodeParams->codecSpecificInfo.packetizationMode = _packetizationMode;

  // Update the bitrate if needed.
  [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps()];

  OSStatus status = VTCompressionSessionEncodeFrame(_compressionSession,
                                                    pixelBuffer,
                                                    presentationTimeStamp,
                                                    kCMTimeInvalid,
                                                    frameProperties,
                                                    encodeParams.release(),
                                                    nullptr);
  if (frameProperties) {
    CFRelease(frameProperties);
  }
  if (pixelBuffer) {
    CVBufferRelease(pixelBuffer);
  }
  if (status != noErr) {
    RTC_LOG(LS_ERROR) << "Failed to encode frame with code: " << status;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

- (void)setCallback:(RTCVideoEncoderCallback)callback {
  _callback = callback;
}

- (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate {
  _targetBitrateBps = 1000 * bitrateKbit;
  _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps);
  [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps()];
  return WEBRTC_VIDEO_CODEC_OK;
}

#pragma mark - Private

- (NSInteger)releaseEncoder {
  // Need to destroy so that the session is invalidated and won't use the
  // callback anymore. Do not remove callback until the session is invalidated
  // since async encoder callbacks can occur until invalidation.
  [self destroyCompressionSession];
  _callback = nullptr;
  return WEBRTC_VIDEO_CODEC_OK;
}

- (BOOL)resetCompressionSessionIfNeededForPool:(CVPixelBufferPoolRef)pixelBufferPool
                                     withFrame:(RTCVideoFrame *)frame {
  BOOL resetCompressionSession = NO;

  // If we're capturing native frames in a different pixel format than the one the compression
  // session is configured for, make sure the compression session is reset using the correct
  // pixel format. If we're capturing non-native frames and the compression session is configured
  // with a non-NV12 format, reset it to NV12.
  OSType framePixelFormat = kNV12PixelFormat;
  if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
    RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
    framePixelFormat = CVPixelBufferGetPixelFormatType(rtcPixelBuffer.pixelBuffer);
  }

#if defined(WEBRTC_IOS)
  if (!pixelBufferPool) {
    // Kind of a hack. On backgrounding, the compression session seems to get
    // invalidated, which causes this pool call to fail when the application
    // is foregrounded and frames are being sent for encoding again.
    // Resetting the session when this happens fixes the issue.
    // In addition we request a keyframe so video can recover quickly.
    resetCompressionSession = YES;
    RTC_LOG(LS_INFO) << "Resetting compression session due to invalid pool.";
  }
#endif

  if (pixelBufferPool) {
    // The pool attribute `kCVPixelBufferPixelFormatTypeKey` can contain either an array of pixel
    // formats or a single pixel format.
    NSDictionary *poolAttributes =
        (__bridge NSDictionary *)CVPixelBufferPoolGetPixelBufferAttributes(pixelBufferPool);
    id pixelFormats =
        [poolAttributes objectForKey:(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey];
    NSArray<NSNumber *> *compressionSessionPixelFormats = nil;
    if ([pixelFormats isKindOfClass:[NSArray class]]) {
      compressionSessionPixelFormats = (NSArray *)pixelFormats;
    } else {
      compressionSessionPixelFormats = @[ (NSNumber *)pixelFormats ];
    }

    if (![compressionSessionPixelFormats
            containsObject:[NSNumber numberWithLong:framePixelFormat]]) {
      resetCompressionSession = YES;
      RTC_LOG(LS_INFO) << "Resetting compression session due to non-matching pixel format.";
    }
  }

  if (resetCompressionSession) {
    [self resetCompressionSessionWithPixelFormat:framePixelFormat];
  }
  return resetCompressionSession;
}

- (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat {
  [self destroyCompressionSession];

  // Set source image buffer attributes. These attributes will be present on
  // buffers retrieved from the encoder's pixel buffer pool.
  const size_t attributesSize = 3;
  CFTypeRef keys[attributesSize] = {
#if defined(WEBRTC_IOS)
    kCVPixelBufferOpenGLESCompatibilityKey,
#elif defined(WEBRTC_MAC)
    kCVPixelBufferOpenGLCompatibilityKey,
#endif
    kCVPixelBufferIOSurfacePropertiesKey,
    kCVPixelBufferPixelFormatTypeKey
  };
  CFDictionaryRef ioSurfaceValue = CreateCFTypeDictionary(nullptr, nullptr, 0);
  int64_t pixelFormatType = framePixelFormat;
  CFNumberRef pixelFormat = CFNumberCreate(nullptr, kCFNumberLongType, &pixelFormatType);
  CFTypeRef values[attributesSize] = {kCFBooleanTrue, ioSurfaceValue, pixelFormat};
  CFDictionaryRef sourceAttributes = CreateCFTypeDictionary(keys, values, attributesSize);
  if (ioSurfaceValue) {
    CFRelease(ioSurfaceValue);
    ioSurfaceValue = nullptr;
  }
  if (pixelFormat) {
    CFRelease(pixelFormat);
    pixelFormat = nullptr;
  }
  CFMutableDictionaryRef encoder_specs = nullptr;
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
  // Currently hw accl is supported above 360p on Mac; below 360p, the
  // compression session will be created with hw accl disabled.
  encoder_specs = CFDictionaryCreateMutable(
      nullptr, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
  CFDictionarySetValue(encoder_specs,
                       kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder,
                       kCFBooleanTrue);
#endif
  OSStatus status =
      VTCompressionSessionCreate(nullptr,  // use default allocator
                                 _width,
                                 _height,
                                 kCMVideoCodecType_H264,
                                 encoder_specs,  // use hardware accelerated encoder if available
                                 sourceAttributes,
                                 nullptr,  // use default compressed data allocator
                                 compressionOutputCallback,
                                 nullptr,
                                 &_compressionSession);
  if (sourceAttributes) {
    CFRelease(sourceAttributes);
    sourceAttributes = nullptr;
  }
  if (encoder_specs) {
    CFRelease(encoder_specs);
    encoder_specs = nullptr;
  }
  if (status != noErr) {
    RTC_LOG(LS_ERROR) << "Failed to create compression session: " << status;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
  CFBooleanRef hwaccl_enabled = nullptr;
  status = VTSessionCopyProperty(_compressionSession,
                                 kVTCompressionPropertyKey_UsingHardwareAcceleratedVideoEncoder,
                                 nullptr,
                                 &hwaccl_enabled);
  if (status == noErr && (CFBooleanGetValue(hwaccl_enabled))) {
    RTC_LOG(LS_INFO) << "Compression session created with hw accl enabled";
  } else {
    RTC_LOG(LS_INFO) << "Compression session created with hw accl disabled";
  }
#endif
  [self configureCompressionSession];
  return WEBRTC_VIDEO_CODEC_OK;
}

- (void)configureCompressionSession {
  RTC_DCHECK(_compressionSession);
  SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, true);
  SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_ProfileLevel, _profile);
  SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, false);
  [self setEncoderBitrateBps:_targetBitrateBps];
  // TODO(tkchin): Look at entropy mode and colorspace matrices.
  // TODO(tkchin): Investigate to see if there's any way to make this work.
  // May need it to interop with Android. Currently this call just fails.
  // On inspecting encoder output on iOS8, this value is set to 6.
  // internal::SetVTSessionProperty(compression_session_,
  //     kVTCompressionPropertyKey_MaxFrameDelayCount,
  //     1);

  // Set a relatively large value for keyframe emission (7200 frames or 4 minutes).
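  // At 30 fps the two limits below coincide (7200 frames / 30 fps = 240 s);
  // whichever limit is reached first triggers the next keyframe.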
  SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, 7200);
  SetVTSessionProperty(
      _compressionSession, kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration, 240);
}

- (void)destroyCompressionSession {
  if (_compressionSession) {
    VTCompressionSessionInvalidate(_compressionSession);
    CFRelease(_compressionSession);
    _compressionSession = nullptr;
  }
}

- (NSString *)implementationName {
  return @"VideoToolbox";
}

- (void)setBitrateBps:(uint32_t)bitrateBps {
  if (_encoderBitrateBps != bitrateBps) {
    [self setEncoderBitrateBps:bitrateBps];
  }
}

- (void)setEncoderBitrateBps:(uint32_t)bitrateBps {
  if (_compressionSession) {
    SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AverageBitRate, bitrateBps);

    // TODO(tkchin): Add a helper method to set array value.
    int64_t dataLimitBytesPerSecondValue =
        static_cast<int64_t>(bitrateBps * kLimitToAverageBitRateFactor / 8);
    CFNumberRef bytesPerSecond =
        CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &dataLimitBytesPerSecondValue);
    int64_t oneSecondValue = 1;
    CFNumberRef oneSecond =
        CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &oneSecondValue);
    const void *nums[2] = {bytesPerSecond, oneSecond};
    CFArrayRef dataRateLimits = CFArrayCreate(nullptr, nums, 2, &kCFTypeArrayCallBacks);
    OSStatus status = VTSessionSetProperty(
        _compressionSession, kVTCompressionPropertyKey_DataRateLimits, dataRateLimits);
    if (bytesPerSecond) {
      CFRelease(bytesPerSecond);
    }
    if (oneSecond) {
      CFRelease(oneSecond);
    }
    if (dataRateLimits) {
      CFRelease(dataRateLimits);
    }
    if (status != noErr) {
      RTC_LOG(LS_ERROR) << "Failed to set data rate limit with code: " << status;
    }

    _encoderBitrateBps = bitrateBps;
  }
}

- (void)frameWasEncoded:(OSStatus)status
                  flags:(VTEncodeInfoFlags)infoFlags
           sampleBuffer:(CMSampleBufferRef)sampleBuffer
      codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
                  width:(int32_t)width
                 height:(int32_t)height
           renderTimeMs:(int64_t)renderTimeMs
              timestamp:(uint32_t)timestamp
               rotation:(RTCVideoRotation)rotation {
  if (status != noErr) {
    RTC_LOG(LS_ERROR) << "H264 encode failed with code: " << status;
    return;
  }
  if (infoFlags & kVTEncodeInfo_FrameDropped) {
    RTC_LOG(LS_INFO) << "H264 encode dropped frame.";
    return;
  }

  BOOL isKeyframe = NO;
  CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, 0);
  if (attachments != nullptr && CFArrayGetCount(attachments)) {
    CFDictionaryRef attachment =
        static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(attachments, 0));
    isKeyframe = !CFDictionaryContainsKey(attachment, kCMSampleAttachmentKey_NotSync);
  }

  if (isKeyframe) {
    RTC_LOG(LS_INFO) << "Generated keyframe";
  }

  // Convert the sample buffer into a buffer suitable for RTP packetization.
  // TODO(tkchin): Allocate buffers through a pool.
  std::unique_ptr<rtc::Buffer> buffer(new rtc::Buffer());
  RTCRtpFragmentationHeader *header;
  {
    std::unique_ptr<webrtc::RTPFragmentationHeader> header_cpp;
    bool result =
        H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get(), &header_cpp);
    header = [[RTCRtpFragmentationHeader alloc] initWithNativeFragmentationHeader:header_cpp.get()];
    if (!result) {
      return;
    }
  }

  RTCEncodedImage *frame = [[RTCEncodedImage alloc] init];
  frame.buffer = [NSData dataWithBytesNoCopy:buffer->data() length:buffer->size() freeWhenDone:NO];
  frame.encodedWidth = width;
  frame.encodedHeight = height;
  frame.completeFrame = YES;
  frame.frameType = isKeyframe ? RTCFrameTypeVideoFrameKey : RTCFrameTypeVideoFrameDelta;
  frame.captureTimeMs = renderTimeMs;
  frame.timeStamp = timestamp;
  frame.rotation = rotation;
  frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? RTCVideoContentTypeScreenshare :
                                                                  RTCVideoContentTypeUnspecified;
  frame.flags = webrtc::TimingFrameFlags::kInvalid;

  int qp;
  _h264BitstreamParser.ParseBitstream(buffer->data(), buffer->size());
  _h264BitstreamParser.GetLastSliceQp(&qp);
  frame.qp = @(qp);

  BOOL res = _callback(frame, codecSpecificInfo, header);
  if (!res) {
    RTC_LOG(LS_ERROR) << "Encode callback failed";
    return;
  }
  _bitrateAdjuster->Update(frame.buffer.length);
}

- (RTCVideoEncoderQpThresholds *)scalingSettings {
  return [[RTCVideoEncoderQpThresholds alloc] initWithThresholdsLow:kLowH264QpThreshold
                                                               high:kHighH264QpThreshold];
}

@end