Fixing some of the issues found by the clang static analyzer.
Bug: webrtc:8737
Change-Id: Ib436449c493336e7c35a72a96dc88cccdbb5bbaf
Reviewed-on: https://webrtc-review.googlesource.com/39200
Commit-Queue: Peter Hanspers <peterhanspers@webrtc.org>
Reviewed-by: Henrik Andreasson <henrika@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21607}
diff --git a/sdk/objc/Framework/Classes/Common/RTCFileLogger.mm b/sdk/objc/Framework/Classes/Common/RTCFileLogger.mm
index 15461f4..6acb942 100644
--- a/sdk/objc/Framework/Classes/Common/RTCFileLogger.mm
+++ b/sdk/objc/Framework/Classes/Common/RTCFileLogger.mm
@@ -124,7 +124,7 @@
_logSink.reset();
}
-- (NSData *)logData {
+- (nullable NSData *)logData {
if (_hasStarted) {
return nil;
}
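This hunk, like several of the method and property hunks below, annotates a return value that can legitimately be nil so the analyzer's nullability checks (and Swift bridging) see the real contract. A minimal sketch of the pattern, using a hypothetical LogReader class that is not part of this patch:

#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

// Hypothetical class; only illustrates the nullable-return annotation.
@interface LogReader : NSObject
@property(nonatomic, readonly) BOOL isLogging;
// Declared nullable: callers (and the analyzer) must handle nil while logging is active.
- (nullable NSData *)collectedLogData;
@end

NS_ASSUME_NONNULL_END

@implementation LogReader
- (nullable NSData *)collectedLogData {
  if (self.isLogging) {
    return nil;  // Data is only available once logging has stopped.
  }
  return [NSData data];
}
@end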
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCAudioTrack.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCAudioTrack.mm
index fffbd7c..e26088f 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCAudioTrack.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCAudioTrack.mm
@@ -31,7 +31,7 @@
std::string nativeId = [NSString stdStringForString:trackId];
rtc::scoped_refptr<webrtc::AudioTrackInterface> track =
factory.nativeFactory->CreateAudioTrack(nativeId, source.nativeAudioSource);
- if ([self initWithNativeTrack:track type:RTCMediaStreamTrackTypeAudio]) {
+ if (self = [self initWithNativeTrack:track type:RTCMediaStreamTrackTypeAudio]) {
_source = source;
}
return self;
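Calling -initWithNativeTrack:type: without assigning the result is what the analyzer objects to here: a designated initializer may return a replacement object or nil, so its result must be stored back into self. A short sketch of the idiom with a hypothetical Wrapper class (not from this patch):

#import <Foundation/Foundation.h>

// Hypothetical class; only illustrates the `self = [... init...]` idiom used above.
@interface Wrapper : NSObject
- (instancetype)initWithValue:(int)value;
@end

@implementation Wrapper {
  int _value;
}

- (instancetype)initWithValue:(int)value {
  // Reassign self: the initializer may substitute another object or return nil.
  if (self = [super init]) {
    _value = value;
  }
  return self;
}
@end

The same fix is applied to RTCVideoTrack.mm further down.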
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Private.h
index bf00d18..d9a9916 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Private.h
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Private.h
@@ -62,8 +62,7 @@
* RTCConfiguration struct representation of this RTCConfiguration. This is
* needed to pass to the underlying C++ APIs.
*/
-- (webrtc::PeerConnectionInterface::RTCConfiguration *)
- createNativeConfiguration;
+- (nullable webrtc::PeerConnectionInterface::RTCConfiguration *)createNativeConfiguration;
- (instancetype)initWithNativeConfiguration:
(const webrtc::PeerConnectionInterface::RTCConfiguration &)config NS_DESIGNATED_INITIALIZER;
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm
index ef2840f..467f225 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm
@@ -48,7 +48,7 @@
_frameType = static_cast<RTCFrameType>(encodedImage._frameType);
_rotation = static_cast<RTCVideoRotation>(encodedImage.rotation_);
_completeFrame = encodedImage._completeFrame;
- _qp = encodedImage.qp_ == -1 ? nil : @(encodedImage.qp_);
+ _qp = @(encodedImage.qp_);
_contentType = (encodedImage.content_type_ == webrtc::VideoContentType::SCREENSHARE) ?
RTCVideoContentTypeScreenshare :
RTCVideoContentTypeUnspecified;
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCFileVideoCapturer.m b/sdk/objc/Framework/Classes/PeerConnection/RTCFileVideoCapturer.m
index 07cb2c6..7e27bf2 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCFileVideoCapturer.m
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCFileVideoCapturer.m
@@ -139,6 +139,7 @@
}
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
!CMSampleBufferDataIsReady(sampleBuffer)) {
+ CFRelease(sampleBuffer);
[self readNextBuffer];
return;
}
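-copyNextSampleBuffer follows the Core Foundation Copy rule, so the returned CMSampleBufferRef is owned by the caller and must be balanced on every exit path, including this early return; the missing release is the kind of leak the analyzer flags. A hedged sketch of the ownership pattern with a hypothetical helper (not the capturer's actual code):

#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

// Hypothetical helper; shows balancing the +1 reference on each exit path.
static void ProcessNextSample(AVAssetReaderTrackOutput *output) {
  CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];  // +1 (Copy rule).
  if (sampleBuffer == NULL) {
    return;  // Nothing to release.
  }
  if (!CMSampleBufferIsValid(sampleBuffer) || !CMSampleBufferDataIsReady(sampleBuffer)) {
    CFRelease(sampleBuffer);  // Balance the +1 before bailing out.
    return;
  }
  // ... hand the buffer off for processing ...
  CFRelease(sampleBuffer);
}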
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+DataChannel.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+DataChannel.mm
index 80b5108..d17d9ac 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+DataChannel.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+DataChannel.mm
@@ -16,9 +16,8 @@
@implementation RTCPeerConnection (DataChannel)
-- (RTCDataChannel *)dataChannelForLabel:(NSString *)label
- configuration:
- (RTCDataChannelConfiguration *)configuration {
+- (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label
+ configuration:(RTCDataChannelConfiguration *)configuration {
std::string labelString = [NSString stdStringForString:label];
const webrtc::DataChannelInit nativeInit =
configuration.nativeDataChannelInit;
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpReceiver.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpReceiver.mm
index 0888f24..cb2771a 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpReceiver.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpReceiver.mm
@@ -58,7 +58,7 @@
}
}
-- (RTCMediaStreamTrack *)track {
+- (nullable RTCMediaStreamTrack *)track {
rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
_nativeRtpReceiver->track());
if (nativeTrack) {
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCSessionDescription+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCSessionDescription+Private.h
index 5811a5a..4c58fa7 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCSessionDescription+Private.h
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCSessionDescription+Private.h
@@ -21,8 +21,7 @@
* RTCSessionDescription object. This is needed to pass to the underlying C++
* APIs.
*/
-@property(nonatomic, readonly)
- webrtc::SessionDescriptionInterface *nativeDescription;
+@property(nonatomic, readonly, nullable) webrtc::SessionDescriptionInterface *nativeDescription;
/**
* Initialize an RTCSessionDescription from a native
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoTrack.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoTrack.mm
index 9fa6111..2f03110 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoTrack.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoTrack.mm
@@ -32,7 +32,7 @@
rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
factory.nativeFactory->CreateVideoTrack(nativeId,
source.nativeVideoSource);
- if ([self initWithNativeTrack:track type:RTCMediaStreamTrackTypeVideo]) {
+ if (self = [self initWithNativeTrack:track type:RTCMediaStreamTrackTypeVideo]) {
_source = source;
}
return self;
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoDecoder.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoDecoder.mm
index 7d82c5b..7529ebf 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoDecoder.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoDecoder.mm
@@ -49,7 +49,7 @@
- (NSInteger)decode:(RTCEncodedImage *)encodedImage
missingFrames:(BOOL)missingFrames
fragmentationHeader:(RTCRtpFragmentationHeader *)fragmentationHeader
- codecSpecificInfo:(__nullable id<RTCCodecSpecificInfo>)info
+ codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
renderTimeMs:(int64_t)renderTimeMs {
RTC_NOTREACHED();
return 0;
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.mm
index 9d2f81b..8988e6d 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.mm
@@ -47,7 +47,7 @@
}
- (NSInteger)encode:(RTCVideoFrame *)frame
- codecSpecificInfo:(id<RTCCodecSpecificInfo>)info
+ codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
frameTypes:(NSArray<NSNumber *> *)frameTypes {
RTC_NOTREACHED();
return 0;
diff --git a/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoDecoderH264.mm b/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoDecoderH264.mm
index debd504..2f4b6bd 100644
--- a/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoDecoderH264.mm
+++ b/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoDecoderH264.mm
@@ -22,6 +22,7 @@
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#import "helpers.h"
+#import "scoped_cftyperef.h"
#if defined(WEBRTC_IOS)
#import "Common/RTCUIApplicationStatusObserver.h"
@@ -99,7 +100,7 @@
- (NSInteger)decode:(RTCEncodedImage *)inputImage
missingFrames:(BOOL)missingFrames
fragmentationHeader:(RTCRtpFragmentationHeader *)fragmentationHeader
- codecSpecificInfo:(__nullable id<RTCCodecSpecificInfo>)info
+ codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
renderTimeMs:(int64_t)renderTimeMs {
RTC_DCHECK(inputImage.buffer);
@@ -119,19 +120,22 @@
return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
}
#endif
- CMVideoFormatDescriptionRef inputFormat = nullptr;
if (webrtc::H264AnnexBBufferHasVideoFormatDescription((uint8_t *)inputImage.buffer.bytes,
inputImage.buffer.length)) {
- inputFormat = webrtc::CreateVideoFormatDescription((uint8_t *)inputImage.buffer.bytes,
- inputImage.buffer.length);
+ rtc::ScopedCFTypeRef<CMVideoFormatDescriptionRef> inputFormat =
+ rtc::ScopedCF(webrtc::CreateVideoFormatDescription((uint8_t *)inputImage.buffer.bytes,
+ inputImage.buffer.length));
if (inputFormat) {
// Check if the video format has changed, and reinitialize decoder if
// needed.
- if (!CMFormatDescriptionEqual(inputFormat, _videoFormat)) {
- [self setVideoFormat:inputFormat];
- [self resetDecompressionSession];
+ if (!CMFormatDescriptionEqual(inputFormat.get(), _videoFormat)) {
+ [self setVideoFormat:inputFormat.get()];
+
+ int resetDecompressionSessionError = [self resetDecompressionSession];
+ if (resetDecompressionSessionError != WEBRTC_VIDEO_CODEC_OK) {
+ return resetDecompressionSessionError;
+ }
}
- CFRelease(inputFormat);
}
}
if (!_videoFormat) {
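Wrapping the format description in rtc::ScopedCFTypeRef (via rtc::ScopedCF) turns the manual CFRelease into RAII, so the reference is released on every path out of the scope, including the new early return when -resetDecompressionSession fails. Roughly, such a scoped holder boils down to the following sketch (a simplified stand-in, not the real rtc class):

#include <CoreFoundation/CoreFoundation.h>

// Simplified, hypothetical RAII holder for an owned (+1) CF reference.
template <typename T>
class ScopedCFRef {
 public:
  explicit ScopedCFRef(T ref) : ref_(ref) {}  // Adopts ownership.
  ~ScopedCFRef() {
    if (ref_) CFRelease(ref_);  // Released on every exit path, early returns included.
  }
  ScopedCFRef(const ScopedCFRef &) = delete;
  ScopedCFRef &operator=(const ScopedCFRef &) = delete;
  T get() const { return ref_; }
  explicit operator bool() const { return ref_ != nullptr; }

 private:
  T ref_;
};

With the holder in place, propagating the -resetDecompressionSession error code no longer risks leaking the format description.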
diff --git a/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm b/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
index 86855a2..a818c27 100644
--- a/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
+++ b/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
@@ -340,7 +340,7 @@
}
- (NSInteger)encode:(RTCVideoFrame *)frame
- codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
+ codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)codecSpecificInfo
frameTypes:(NSArray<NSNumber *> *)frameTypes {
RTC_DCHECK_EQ(frame.width, _width);
RTC_DCHECK_EQ(frame.height, _height);