Obj-C SDK Cleanup
This CL separates the files under sdk/objc into logical directories, replacing
the previous file layout under Framework/.
A long-term goal is to have some system set up to generate the files under
sdk/objc/api (the PeerConnection API wrappers) from the C++ code. In the shorter
term, the goal is to abstract out shared concepts from these classes in order to
make them as uniform as possible.
The separation into base/, components/, and helpers/ is meant to differentiate
between the base layer's common protocols, various utilities, and the actual
platform-specific components.
The old directory layout, which resembled a framework's internal layout, is not
necessary, since that layout is generated by the framework target when it is built.
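For illustration, here is a sketch of how include paths change under the new
layout (the destination directories below are assumptions based on this CL's
description, not an exhaustive mapping):

  // Old framework-style layout
  #import "WebRTC/RTCAudioSession.h"
  #import "NSString+StdString.h"

  // New logical layout under sdk/objc/ (assumed destinations)
  #import "components/audio/RTCAudioSession.h"
  #import "helpers/NSString+StdString.h"

Some headers under Framework/ are kept as thin forwarding headers (as seen with
NSString+StdString.h below, which now just contains
#import "helpers/NSString+StdString.h"), so existing include paths keep working
during the transition.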
Bug: webrtc:9627
Change-Id: Ib084fd83f050ae980649ca99e841f4fb0580bd8f
Reviewed-on: https://webrtc-review.googlesource.com/94142
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
Reviewed-by: Henrik Andreassson <henrika@webrtc.org>
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#24493}
diff --git a/sdk/objc/Framework/Classes/Audio/RTCAudioSession+Configuration.mm b/sdk/objc/Framework/Classes/Audio/RTCAudioSession+Configuration.mm
deleted file mode 100644
index c4d0d0c..0000000
--- a/sdk/objc/Framework/Classes/Audio/RTCAudioSession+Configuration.mm
+++ /dev/null
@@ -1,172 +0,0 @@
- /*
- * Copyright 2016 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCAudioSession.h"
-#import "WebRTC/RTCAudioSessionConfiguration.h"
-
-#import "WebRTC/RTCLogging.h"
-#import "RTCAudioSession+Private.h"
-
-
-@implementation RTCAudioSession (Configuration)
-
-- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
- error:(NSError **)outError {
- return [self setConfiguration:configuration
- active:NO
- shouldSetActive:NO
- error:outError];
-}
-
-- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
- active:(BOOL)active
- error:(NSError **)outError {
- return [self setConfiguration:configuration
- active:active
- shouldSetActive:YES
- error:outError];
-}
-
-#pragma mark - Private
-
-- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
- active:(BOOL)active
- shouldSetActive:(BOOL)shouldSetActive
- error:(NSError **)outError {
- NSParameterAssert(configuration);
- if (outError) {
- *outError = nil;
- }
- if (![self checkLock:outError]) {
- return NO;
- }
-
- // Provide an error even if there isn't one so we can log it. We will not
- // return immediately on error in this function and instead try to set
- // everything we can.
- NSError *error = nil;
-
- if (self.category != configuration.category ||
- self.categoryOptions != configuration.categoryOptions) {
- NSError *categoryError = nil;
- if (![self setCategory:configuration.category
- withOptions:configuration.categoryOptions
- error:&categoryError]) {
- RTCLogError(@"Failed to set category: %@",
- categoryError.localizedDescription);
- error = categoryError;
- } else {
- RTCLog(@"Set category to: %@", configuration.category);
- }
- }
-
- if (self.mode != configuration.mode) {
- NSError *modeError = nil;
- if (![self setMode:configuration.mode error:&modeError]) {
- RTCLogError(@"Failed to set mode: %@",
- modeError.localizedDescription);
- error = modeError;
- } else {
- RTCLog(@"Set mode to: %@", configuration.mode);
- }
- }
-
- // Sometimes category options don't stick after setting mode.
- if (self.categoryOptions != configuration.categoryOptions) {
- NSError *categoryError = nil;
- if (![self setCategory:configuration.category
- withOptions:configuration.categoryOptions
- error:&categoryError]) {
- RTCLogError(@"Failed to set category options: %@",
- categoryError.localizedDescription);
- error = categoryError;
- } else {
- RTCLog(@"Set category options to: %ld",
- (long)configuration.categoryOptions);
- }
- }
-
- if (self.preferredSampleRate != configuration.sampleRate) {
- NSError *sampleRateError = nil;
- if (![self setPreferredSampleRate:configuration.sampleRate
- error:&sampleRateError]) {
- RTCLogError(@"Failed to set preferred sample rate: %@",
- sampleRateError.localizedDescription);
- error = sampleRateError;
- } else {
- RTCLog(@"Set preferred sample rate to: %.2f",
- configuration.sampleRate);
- }
- }
-
- if (self.preferredIOBufferDuration != configuration.ioBufferDuration) {
- NSError *bufferDurationError = nil;
- if (![self setPreferredIOBufferDuration:configuration.ioBufferDuration
- error:&bufferDurationError]) {
- RTCLogError(@"Failed to set preferred IO buffer duration: %@",
- bufferDurationError.localizedDescription);
- error = bufferDurationError;
- } else {
- RTCLog(@"Set preferred IO buffer duration to: %f",
- configuration.ioBufferDuration);
- }
- }
-
- if (shouldSetActive) {
- NSError *activeError = nil;
- if (![self setActive:active error:&activeError]) {
- RTCLogError(@"Failed to setActive to %d: %@",
- active, activeError.localizedDescription);
- error = activeError;
- }
- }
-
- if (self.isActive &&
- // TODO(tkchin): Figure out which category/mode numChannels is valid for.
- [self.mode isEqualToString:AVAudioSessionModeVoiceChat]) {
- // Try to set the preferred number of hardware audio channels. These calls
- // must be done after setting the audio session’s category and mode and
- // activating the session.
- NSInteger inputNumberOfChannels = configuration.inputNumberOfChannels;
- if (self.inputNumberOfChannels != inputNumberOfChannels) {
- NSError *inputChannelsError = nil;
- if (![self setPreferredInputNumberOfChannels:inputNumberOfChannels
- error:&inputChannelsError]) {
- RTCLogError(@"Failed to set preferred input number of channels: %@",
- inputChannelsError.localizedDescription);
- error = inputChannelsError;
- } else {
- RTCLog(@"Set input number of channels to: %ld",
- (long)inputNumberOfChannels);
- }
- }
- NSInteger outputNumberOfChannels = configuration.outputNumberOfChannels;
- if (self.outputNumberOfChannels != outputNumberOfChannels) {
- NSError *outputChannelsError = nil;
- if (![self setPreferredOutputNumberOfChannels:outputNumberOfChannels
- error:&outputChannelsError]) {
- RTCLogError(@"Failed to set preferred output number of channels: %@",
- outputChannelsError.localizedDescription);
- error = outputChannelsError;
- } else {
- RTCLog(@"Set output number of channels to: %ld",
- (long)outputNumberOfChannels);
- }
- }
- }
-
- if (outError) {
- *outError = error;
- }
-
- return error == nil;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/Audio/RTCAudioSession+Private.h b/sdk/objc/Framework/Classes/Audio/RTCAudioSession+Private.h
deleted file mode 100644
index f163b5a..0000000
--- a/sdk/objc/Framework/Classes/Audio/RTCAudioSession+Private.h
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright 2016 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "sdk/objc/Framework/Headers/WebRTC/RTCAudioSession.h"
-
-#include <vector>
-
-NS_ASSUME_NONNULL_BEGIN
-
-@class RTCAudioSessionConfiguration;
-
-@interface RTCAudioSession ()
-
-/** Number of times setActive:YES has succeeded without a balanced call to
- * setActive:NO.
- */
-@property(nonatomic, readonly) int activationCount;
-
-/** The number of times |beginWebRTCSession| was called without a balanced call
- * to |endWebRTCSession|.
- */
-@property(nonatomic, readonly) int webRTCSessionCount;
-
-/** Convenience BOOL that checks useManualAudio and isAudioEnabled. */
-@property(readonly) BOOL canPlayOrRecord;
-
-/** Tracks whether we have been sent an interruption event that hasn't been matched by either an
- * interruption end event or a foreground event.
- */
-@property(nonatomic, assign) BOOL isInterrupted;
-
-- (BOOL)checkLock:(NSError **)outError;
-
-/** Adds the delegate to the list of delegates, and places it at the front of
- * the list. This delegate will be notified before other delegates of
- * audio events.
- */
-- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate;
-
-/** Signals RTCAudioSession that a WebRTC session is about to begin and
- * audio configuration is needed. Will configure the audio session for WebRTC
- * if not already configured and if configuration is not delayed.
- * Successful calls must be balanced by a call to endWebRTCSession.
- */
-- (BOOL)beginWebRTCSession:(NSError **)outError;
-
-/** Signals RTCAudioSession that a WebRTC session is about to end and audio
- * unconfiguration is needed. Will unconfigure the audio session for WebRTC
- * if this is the last unmatched call and if configuration is not delayed.
- */
-- (BOOL)endWebRTCSession:(NSError **)outError;
-
-/** Configure the audio session for WebRTC. This call will fail if the session
- * is already configured. On other failures, we will attempt to restore the
- * previously used audio session configuration.
- * |lockForConfiguration| must be called first.
- * Successful calls to configureWebRTCSession must be matched by calls to
- * |unconfigureWebRTCSession|.
- */
-- (BOOL)configureWebRTCSession:(NSError **)outError;
-
-/** Unconfigures the session for WebRTC. This will attempt to restore the
- * audio session to the settings used before |configureWebRTCSession| was
- * called.
- * |lockForConfiguration| must be called first.
- */
-- (BOOL)unconfigureWebRTCSession:(NSError **)outError;
-
-/** Returns a configuration error with the given description. */
-- (NSError *)configurationErrorWithDescription:(NSString *)description;
-
-// Properties and methods for tests.
-@property(nonatomic, readonly) std::vector<__weak id<RTCAudioSessionDelegate> > delegates;
-
-- (void)notifyDidBeginInterruption;
-- (void)notifyDidEndInterruptionWithShouldResumeSession:(BOOL)shouldResumeSession;
-- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
- previousRoute:(AVAudioSessionRouteDescription *)previousRoute;
-- (void)notifyMediaServicesWereLost;
-- (void)notifyMediaServicesWereReset;
-- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
-- (void)notifyDidStartPlayOrRecord;
-- (void)notifyDidStopPlayOrRecord;
-- (void)notifyDidDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/Audio/RTCAudioSession.mm b/sdk/objc/Framework/Classes/Audio/RTCAudioSession.mm
deleted file mode 100644
index 307b979..0000000
--- a/sdk/objc/Framework/Classes/Audio/RTCAudioSession.mm
+++ /dev/null
@@ -1,978 +0,0 @@
-/*
- * Copyright 2016 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCAudioSession.h"
-
-#import <UIKit/UIKit.h>
-
-#include "rtc_base/atomicops.h"
-#include "rtc_base/checks.h"
-#include "rtc_base/criticalsection.h"
-
-#import "WebRTC/RTCAudioSessionConfiguration.h"
-#import "WebRTC/RTCLogging.h"
-
-#import "RTCAudioSession+Private.h"
-
-
-NSString * const kRTCAudioSessionErrorDomain = @"org.webrtc.RTCAudioSession";
-NSInteger const kRTCAudioSessionErrorLockRequired = -1;
-NSInteger const kRTCAudioSessionErrorConfiguration = -2;
-NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
-
-// This class needs to be thread-safe because it is accessed from many threads.
-// TODO(tkchin): Consider more granular locking. We're not expecting a lot of
-// lock contention so coarse locks should be fine for now.
-@implementation RTCAudioSession {
- rtc::CriticalSection _crit;
- AVAudioSession *_session;
- volatile int _activationCount;
- volatile int _lockRecursionCount;
- volatile int _webRTCSessionCount;
- BOOL _isActive;
- BOOL _useManualAudio;
- BOOL _isAudioEnabled;
- BOOL _canPlayOrRecord;
- BOOL _isInterrupted;
-}
-
-@synthesize session = _session;
-@synthesize delegates = _delegates;
-
-+ (instancetype)sharedInstance {
- static dispatch_once_t onceToken;
- static RTCAudioSession *sharedInstance = nil;
- dispatch_once(&onceToken, ^{
- sharedInstance = [[self alloc] init];
- });
- return sharedInstance;
-}
-
-- (instancetype)init {
- return [self initWithAudioSession:[AVAudioSession sharedInstance]];
-}
-
-/** This initializer provides a way for unit tests to inject a fake/mock audio session. */
-- (instancetype)initWithAudioSession:(id)audioSession {
- if (self = [super init]) {
- _session = audioSession;
-
- NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
- [center addObserver:self
- selector:@selector(handleInterruptionNotification:)
- name:AVAudioSessionInterruptionNotification
- object:nil];
- [center addObserver:self
- selector:@selector(handleRouteChangeNotification:)
- name:AVAudioSessionRouteChangeNotification
- object:nil];
- [center addObserver:self
- selector:@selector(handleMediaServicesWereLost:)
- name:AVAudioSessionMediaServicesWereLostNotification
- object:nil];
- [center addObserver:self
- selector:@selector(handleMediaServicesWereReset:)
- name:AVAudioSessionMediaServicesWereResetNotification
- object:nil];
- // Posted on the main thread when the primary audio from other applications
- // starts and stops. Foreground applications may use this notification as a
- // hint to enable or disable audio that is secondary.
- [center addObserver:self
- selector:@selector(handleSilenceSecondaryAudioHintNotification:)
- name:AVAudioSessionSilenceSecondaryAudioHintNotification
- object:nil];
- // Also track foreground event in order to deal with interruption ended situation.
- [center addObserver:self
- selector:@selector(handleApplicationDidBecomeActive:)
- name:UIApplicationDidBecomeActiveNotification
- object:nil];
- [_session addObserver:self
- forKeyPath:kRTCAudioSessionOutputVolumeSelector
- options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
- context:(__bridge void*)RTCAudioSession.class];
-
- RTCLog(@"RTCAudioSession (%p): init.", self);
- }
- return self;
-}
-
-- (void)dealloc {
- [[NSNotificationCenter defaultCenter] removeObserver:self];
- [_session removeObserver:self
- forKeyPath:kRTCAudioSessionOutputVolumeSelector
- context:(__bridge void*)RTCAudioSession.class];
- RTCLog(@"RTCAudioSession (%p): dealloc.", self);
-}
-
-- (NSString *)description {
- NSString *format =
- @"RTCAudioSession: {\n"
- " category: %@\n"
- " categoryOptions: %ld\n"
- " mode: %@\n"
- " isActive: %d\n"
- " sampleRate: %.2f\n"
- " IOBufferDuration: %f\n"
- " outputNumberOfChannels: %ld\n"
- " inputNumberOfChannels: %ld\n"
- " outputLatency: %f\n"
- " inputLatency: %f\n"
- " outputVolume: %f\n"
- "}";
- NSString *description = [NSString stringWithFormat:format,
- self.category, (long)self.categoryOptions, self.mode,
- self.isActive, self.sampleRate, self.IOBufferDuration,
- self.outputNumberOfChannels, self.inputNumberOfChannels,
- self.outputLatency, self.inputLatency, self.outputVolume];
- return description;
-}
-
-- (void)setIsActive:(BOOL)isActive {
- @synchronized(self) {
- _isActive = isActive;
- }
-}
-
-- (BOOL)isActive {
- @synchronized(self) {
- return _isActive;
- }
-}
-
-- (BOOL)isLocked {
- return _lockRecursionCount > 0;
-}
-
-- (void)setUseManualAudio:(BOOL)useManualAudio {
- @synchronized(self) {
- if (_useManualAudio == useManualAudio) {
- return;
- }
- _useManualAudio = useManualAudio;
- }
- [self updateCanPlayOrRecord];
-}
-
-- (BOOL)useManualAudio {
- @synchronized(self) {
- return _useManualAudio;
- }
-}
-
-- (void)setIsAudioEnabled:(BOOL)isAudioEnabled {
- @synchronized(self) {
- if (_isAudioEnabled == isAudioEnabled) {
- return;
- }
- _isAudioEnabled = isAudioEnabled;
- }
- [self updateCanPlayOrRecord];
-}
-
-- (BOOL)isAudioEnabled {
- @synchronized(self) {
- return _isAudioEnabled;
- }
-}
-
-// TODO(tkchin): Check for duplicates.
-- (void)addDelegate:(id<RTCAudioSessionDelegate>)delegate {
- RTCLog(@"Adding delegate: (%p)", delegate);
- if (!delegate) {
- return;
- }
- @synchronized(self) {
- _delegates.push_back(delegate);
- [self removeZeroedDelegates];
- }
-}
-
-- (void)removeDelegate:(id<RTCAudioSessionDelegate>)delegate {
- RTCLog(@"Removing delegate: (%p)", delegate);
- if (!delegate) {
- return;
- }
- @synchronized(self) {
- _delegates.erase(std::remove(_delegates.begin(),
- _delegates.end(),
- delegate),
- _delegates.end());
- [self removeZeroedDelegates];
- }
-}
-
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wthread-safety-analysis"
-
-- (void)lockForConfiguration {
- _crit.Enter();
- rtc::AtomicOps::Increment(&_lockRecursionCount);
-}
-
-- (void)unlockForConfiguration {
- // Don't let threads other than the one that called lockForConfiguration
- // unlock.
- if (_crit.TryEnter()) {
- rtc::AtomicOps::Decrement(&_lockRecursionCount);
- // One unlock for the tryLock, and another one to actually unlock. If this
- // was called without anyone calling lock, we will hit an assertion.
- _crit.Leave();
- _crit.Leave();
- }
-}
-
-#pragma clang diagnostic pop
-
-#pragma mark - AVAudioSession proxy methods
-
-- (NSString *)category {
- return self.session.category;
-}
-
-- (AVAudioSessionCategoryOptions)categoryOptions {
- return self.session.categoryOptions;
-}
-
-- (NSString *)mode {
- return self.session.mode;
-}
-
-- (BOOL)secondaryAudioShouldBeSilencedHint {
- return self.session.secondaryAudioShouldBeSilencedHint;
-}
-
-- (AVAudioSessionRouteDescription *)currentRoute {
- return self.session.currentRoute;
-}
-
-- (NSInteger)maximumInputNumberOfChannels {
- return self.session.maximumInputNumberOfChannels;
-}
-
-- (NSInteger)maximumOutputNumberOfChannels {
- return self.session.maximumOutputNumberOfChannels;
-}
-
-- (float)inputGain {
- return self.session.inputGain;
-}
-
-- (BOOL)inputGainSettable {
- return self.session.inputGainSettable;
-}
-
-- (BOOL)inputAvailable {
- return self.session.inputAvailable;
-}
-
-- (NSArray<AVAudioSessionDataSourceDescription *> *)inputDataSources {
- return self.session.inputDataSources;
-}
-
-- (AVAudioSessionDataSourceDescription *)inputDataSource {
- return self.session.inputDataSource;
-}
-
-- (NSArray<AVAudioSessionDataSourceDescription *> *)outputDataSources {
- return self.session.outputDataSources;
-}
-
-- (AVAudioSessionDataSourceDescription *)outputDataSource {
- return self.session.outputDataSource;
-}
-
-- (double)sampleRate {
- return self.session.sampleRate;
-}
-
-- (double)preferredSampleRate {
- return self.session.preferredSampleRate;
-}
-
-- (NSInteger)inputNumberOfChannels {
- return self.session.inputNumberOfChannels;
-}
-
-- (NSInteger)outputNumberOfChannels {
- return self.session.outputNumberOfChannels;
-}
-
-- (float)outputVolume {
- return self.session.outputVolume;
-}
-
-- (NSTimeInterval)inputLatency {
- return self.session.inputLatency;
-}
-
-- (NSTimeInterval)outputLatency {
- return self.session.outputLatency;
-}
-
-- (NSTimeInterval)IOBufferDuration {
- return self.session.IOBufferDuration;
-}
-
-- (NSTimeInterval)preferredIOBufferDuration {
- return self.session.preferredIOBufferDuration;
-}
-
-// TODO(tkchin): Simplify the amount of locking happening here. Likely that we
-// can just do atomic increments / decrements.
-- (BOOL)setActive:(BOOL)active
- error:(NSError **)outError {
- if (![self checkLock:outError]) {
- return NO;
- }
- int activationCount = _activationCount;
- if (!active && activationCount == 0) {
- RTCLogWarning(@"Attempting to deactivate without prior activation.");
- }
- [self notifyWillSetActive:active];
- BOOL success = YES;
- BOOL isActive = self.isActive;
- // Keep a local error so we can log it.
- NSError *error = nil;
- BOOL shouldSetActive =
- (active && !isActive) || (!active && isActive && activationCount == 1);
- // Attempt to activate if we're not active.
- // Attempt to deactivate if we're active and it's the last unbalanced call.
- if (shouldSetActive) {
- AVAudioSession *session = self.session;
- // AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation is used to ensure
- // that other audio sessions that were interrupted by our session can return
- // to their active state. It is recommended for VoIP apps to use this
- // option.
- AVAudioSessionSetActiveOptions options =
- active ? 0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
- success = [session setActive:active
- withOptions:options
- error:&error];
- if (outError) {
- *outError = error;
- }
- }
- if (success) {
- if (shouldSetActive) {
- self.isActive = active;
- }
- if (active) {
- [self incrementActivationCount];
- }
- [self notifyDidSetActive:active];
- } else {
- RTCLogError(@"Failed to setActive:%d. Error: %@",
- active, error.localizedDescription);
- [self notifyFailedToSetActive:active error:error];
- }
- // Decrement activation count on deactivation whether or not it succeeded.
- if (!active) {
- [self decrementActivationCount];
- }
- RTCLog(@"Number of current activations: %d", _activationCount);
- return success;
-}
-
-- (BOOL)setCategory:(NSString *)category
- withOptions:(AVAudioSessionCategoryOptions)options
- error:(NSError **)outError {
- if (![self checkLock:outError]) {
- return NO;
- }
- return [self.session setCategory:category withOptions:options error:outError];
-}
-
-- (BOOL)setMode:(NSString *)mode error:(NSError **)outError {
- if (![self checkLock:outError]) {
- return NO;
- }
- return [self.session setMode:mode error:outError];
-}
-
-- (BOOL)setInputGain:(float)gain error:(NSError **)outError {
- if (![self checkLock:outError]) {
- return NO;
- }
- return [self.session setInputGain:gain error:outError];
-}
-
-- (BOOL)setPreferredSampleRate:(double)sampleRate error:(NSError **)outError {
- if (![self checkLock:outError]) {
- return NO;
- }
- return [self.session setPreferredSampleRate:sampleRate error:outError];
-}
-
-- (BOOL)setPreferredIOBufferDuration:(NSTimeInterval)duration
- error:(NSError **)outError {
- if (![self checkLock:outError]) {
- return NO;
- }
- return [self.session setPreferredIOBufferDuration:duration error:outError];
-}
-
-- (BOOL)setPreferredInputNumberOfChannels:(NSInteger)count
- error:(NSError **)outError {
- if (![self checkLock:outError]) {
- return NO;
- }
- return [self.session setPreferredInputNumberOfChannels:count error:outError];
-}
-- (BOOL)setPreferredOutputNumberOfChannels:(NSInteger)count
- error:(NSError **)outError {
- if (![self checkLock:outError]) {
- return NO;
- }
- return [self.session setPreferredOutputNumberOfChannels:count error:outError];
-}
-
-- (BOOL)overrideOutputAudioPort:(AVAudioSessionPortOverride)portOverride
- error:(NSError **)outError {
- if (![self checkLock:outError]) {
- return NO;
- }
- return [self.session overrideOutputAudioPort:portOverride error:outError];
-}
-
-- (BOOL)setPreferredInput:(AVAudioSessionPortDescription *)inPort
- error:(NSError **)outError {
- if (![self checkLock:outError]) {
- return NO;
- }
- return [self.session setPreferredInput:inPort error:outError];
-}
-
-- (BOOL)setInputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
- error:(NSError **)outError {
- if (![self checkLock:outError]) {
- return NO;
- }
- return [self.session setInputDataSource:dataSource error:outError];
-}
-
-- (BOOL)setOutputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
- error:(NSError **)outError {
- if (![self checkLock:outError]) {
- return NO;
- }
- return [self.session setOutputDataSource:dataSource error:outError];
-}
-
-#pragma mark - Notifications
-
-- (void)handleInterruptionNotification:(NSNotification *)notification {
- NSNumber* typeNumber =
- notification.userInfo[AVAudioSessionInterruptionTypeKey];
- AVAudioSessionInterruptionType type =
- (AVAudioSessionInterruptionType)typeNumber.unsignedIntegerValue;
- switch (type) {
- case AVAudioSessionInterruptionTypeBegan:
- RTCLog(@"Audio session interruption began.");
- self.isActive = NO;
- self.isInterrupted = YES;
- [self notifyDidBeginInterruption];
- break;
- case AVAudioSessionInterruptionTypeEnded: {
- RTCLog(@"Audio session interruption ended.");
- self.isInterrupted = NO;
- [self updateAudioSessionAfterEvent];
- NSNumber *optionsNumber =
- notification.userInfo[AVAudioSessionInterruptionOptionKey];
- AVAudioSessionInterruptionOptions options =
- optionsNumber.unsignedIntegerValue;
- BOOL shouldResume =
- options & AVAudioSessionInterruptionOptionShouldResume;
- [self notifyDidEndInterruptionWithShouldResumeSession:shouldResume];
- break;
- }
- }
-}
-
-- (void)handleRouteChangeNotification:(NSNotification *)notification {
- // Get reason for current route change.
- NSNumber* reasonNumber =
- notification.userInfo[AVAudioSessionRouteChangeReasonKey];
- AVAudioSessionRouteChangeReason reason =
- (AVAudioSessionRouteChangeReason)reasonNumber.unsignedIntegerValue;
- RTCLog(@"Audio route changed:");
- switch (reason) {
- case AVAudioSessionRouteChangeReasonUnknown:
- RTCLog(@"Audio route changed: ReasonUnknown");
- break;
- case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
- RTCLog(@"Audio route changed: NewDeviceAvailable");
- break;
- case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
- RTCLog(@"Audio route changed: OldDeviceUnavailable");
- break;
- case AVAudioSessionRouteChangeReasonCategoryChange:
- RTCLog(@"Audio route changed: CategoryChange to :%@",
- self.session.category);
- break;
- case AVAudioSessionRouteChangeReasonOverride:
- RTCLog(@"Audio route changed: Override");
- break;
- case AVAudioSessionRouteChangeReasonWakeFromSleep:
- RTCLog(@"Audio route changed: WakeFromSleep");
- break;
- case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
- RTCLog(@"Audio route changed: NoSuitableRouteForCategory");
- break;
- case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
- RTCLog(@"Audio route changed: RouteConfigurationChange");
- break;
- }
- AVAudioSessionRouteDescription* previousRoute =
- notification.userInfo[AVAudioSessionRouteChangePreviousRouteKey];
- // Log previous route configuration.
- RTCLog(@"Previous route: %@\nCurrent route:%@",
- previousRoute, self.session.currentRoute);
- [self notifyDidChangeRouteWithReason:reason previousRoute:previousRoute];
-}
-
-- (void)handleMediaServicesWereLost:(NSNotification *)notification {
- RTCLog(@"Media services were lost.");
- [self updateAudioSessionAfterEvent];
- [self notifyMediaServicesWereLost];
-}
-
-- (void)handleMediaServicesWereReset:(NSNotification *)notification {
- RTCLog(@"Media services were reset.");
- [self updateAudioSessionAfterEvent];
- [self notifyMediaServicesWereReset];
-}
-
-- (void)handleSilenceSecondaryAudioHintNotification:(NSNotification *)notification {
-  // TODO(henrika): just adding logs here for now until we know if we will ever
-  // see this notification and might be affected by it, or if further actions
-  // are required.
- NSNumber *typeNumber =
- notification.userInfo[AVAudioSessionSilenceSecondaryAudioHintTypeKey];
- AVAudioSessionSilenceSecondaryAudioHintType type =
- (AVAudioSessionSilenceSecondaryAudioHintType)typeNumber.unsignedIntegerValue;
- switch (type) {
- case AVAudioSessionSilenceSecondaryAudioHintTypeBegin:
- RTCLog(@"Another application's primary audio has started.");
- break;
- case AVAudioSessionSilenceSecondaryAudioHintTypeEnd:
- RTCLog(@"Another application's primary audio has stopped.");
- break;
- }
-}
-
-- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
- BOOL isInterrupted = self.isInterrupted;
- RTCLog(@"Application became active after an interruption. Treating as interruption "
- "end. isInterrupted changed from %d to 0.",
- isInterrupted);
- if (isInterrupted) {
- self.isInterrupted = NO;
- [self updateAudioSessionAfterEvent];
- }
- // Always treat application becoming active as an interruption end event.
- [self notifyDidEndInterruptionWithShouldResumeSession:YES];
-}
-
-#pragma mark - Private
-
-+ (NSError *)lockError {
- NSDictionary *userInfo = @{
- NSLocalizedDescriptionKey:
- @"Must call lockForConfiguration before calling this method."
- };
- NSError *error =
- [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
- code:kRTCAudioSessionErrorLockRequired
- userInfo:userInfo];
- return error;
-}
-
-- (std::vector<__weak id<RTCAudioSessionDelegate> >)delegates {
- @synchronized(self) {
- // Note: this returns a copy.
- return _delegates;
- }
-}
-
-// TODO(tkchin): check for duplicates.
-- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate {
- @synchronized(self) {
- _delegates.insert(_delegates.begin(), delegate);
- }
-}
-
-- (void)removeZeroedDelegates {
- @synchronized(self) {
- _delegates.erase(
- std::remove_if(_delegates.begin(),
- _delegates.end(),
- [](id delegate) -> bool { return delegate == nil; }),
- _delegates.end());
- }
-}
-
-- (int)activationCount {
- return _activationCount;
-}
-
-- (int)incrementActivationCount {
- RTCLog(@"Incrementing activation count.");
- return rtc::AtomicOps::Increment(&_activationCount);
-}
-
-- (NSInteger)decrementActivationCount {
- RTCLog(@"Decrementing activation count.");
- return rtc::AtomicOps::Decrement(&_activationCount);
-}
-
-- (int)webRTCSessionCount {
- return _webRTCSessionCount;
-}
-
-- (BOOL)canPlayOrRecord {
- return !self.useManualAudio || self.isAudioEnabled;
-}
-
-- (BOOL)isInterrupted {
- @synchronized(self) {
- return _isInterrupted;
- }
-}
-
-- (void)setIsInterrupted:(BOOL)isInterrupted {
- @synchronized(self) {
- if (_isInterrupted == isInterrupted) {
- return;
- }
- _isInterrupted = isInterrupted;
- }
-}
-
-- (BOOL)checkLock:(NSError **)outError {
- // Check ivar instead of trying to acquire lock so that we won't accidentally
-  // acquire the lock if lockForConfiguration hasn't already been called.
- if (!self.isLocked) {
- if (outError) {
- *outError = [RTCAudioSession lockError];
- }
- return NO;
- }
- return YES;
-}
-
-- (BOOL)beginWebRTCSession:(NSError **)outError {
- if (outError) {
- *outError = nil;
- }
- if (![self checkLock:outError]) {
- return NO;
- }
- rtc::AtomicOps::Increment(&_webRTCSessionCount);
- [self notifyDidStartPlayOrRecord];
- return YES;
-}
-
-- (BOOL)endWebRTCSession:(NSError **)outError {
- if (outError) {
- *outError = nil;
- }
- if (![self checkLock:outError]) {
- return NO;
- }
- rtc::AtomicOps::Decrement(&_webRTCSessionCount);
- [self notifyDidStopPlayOrRecord];
- return YES;
-}
-
-- (BOOL)configureWebRTCSession:(NSError **)outError {
- if (outError) {
- *outError = nil;
- }
- if (![self checkLock:outError]) {
- return NO;
- }
- RTCLog(@"Configuring audio session for WebRTC.");
-
- // Configure the AVAudioSession and activate it.
- // Provide an error even if there isn't one so we can log it.
- NSError *error = nil;
- RTCAudioSessionConfiguration *webRTCConfig =
- [RTCAudioSessionConfiguration webRTCConfiguration];
- if (![self setConfiguration:webRTCConfig active:YES error:&error]) {
- RTCLogError(@"Failed to set WebRTC audio configuration: %@",
- error.localizedDescription);
- // Do not call setActive:NO if setActive:YES failed.
- if (outError) {
- *outError = error;
- }
- return NO;
- }
-
- // Ensure that the device currently supports audio input.
- // TODO(tkchin): Figure out if this is really necessary.
- if (!self.inputAvailable) {
- RTCLogError(@"No audio input path is available!");
- [self unconfigureWebRTCSession:nil];
- if (outError) {
- *outError = [self configurationErrorWithDescription:@"No input path."];
- }
- return NO;
- }
-
- // It can happen (e.g. in combination with BT devices) that the attempt to set
- // the preferred sample rate for WebRTC (48kHz) fails. If so, make a new
- // configuration attempt using the sample rate that worked using the active
- // audio session. A typical case is that only 8 or 16kHz can be set, e.g. in
- // combination with BT headsets. Using this "trick" seems to avoid a state
- // where Core Audio asks for a different number of audio frames than what the
- // session's I/O buffer duration corresponds to.
- // TODO(henrika): this fix resolves bugs.webrtc.org/6004 but it has only been
- // tested on a limited set of iOS devices and BT devices.
- double sessionSampleRate = self.sampleRate;
- double preferredSampleRate = webRTCConfig.sampleRate;
- if (sessionSampleRate != preferredSampleRate) {
- RTCLogWarning(
- @"Current sample rate (%.2f) is not the preferred rate (%.2f)",
- sessionSampleRate, preferredSampleRate);
- if (![self setPreferredSampleRate:sessionSampleRate
- error:&error]) {
- RTCLogError(@"Failed to set preferred sample rate: %@",
- error.localizedDescription);
- if (outError) {
- *outError = error;
- }
- }
- }
-
- return YES;
-}
-
-- (BOOL)unconfigureWebRTCSession:(NSError **)outError {
- if (outError) {
- *outError = nil;
- }
- if (![self checkLock:outError]) {
- return NO;
- }
- RTCLog(@"Unconfiguring audio session for WebRTC.");
- [self setActive:NO error:outError];
-
- return YES;
-}
-
-- (NSError *)configurationErrorWithDescription:(NSString *)description {
- NSDictionary* userInfo = @{
- NSLocalizedDescriptionKey: description,
- };
- return [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
- code:kRTCAudioSessionErrorConfiguration
- userInfo:userInfo];
-}
-
-- (void)updateAudioSessionAfterEvent {
- BOOL shouldActivate = self.activationCount > 0;
- AVAudioSessionSetActiveOptions options = shouldActivate ?
- 0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
- NSError *error = nil;
- if ([self.session setActive:shouldActivate
- withOptions:options
- error:&error]) {
- self.isActive = shouldActivate;
- } else {
- RTCLogError(@"Failed to set session active to %d. Error:%@",
- shouldActivate, error.localizedDescription);
- }
-}
-
-- (void)updateCanPlayOrRecord {
- BOOL canPlayOrRecord = NO;
- BOOL shouldNotify = NO;
- @synchronized(self) {
- canPlayOrRecord = !self.useManualAudio || self.isAudioEnabled;
- if (_canPlayOrRecord == canPlayOrRecord) {
- return;
- }
- _canPlayOrRecord = canPlayOrRecord;
- shouldNotify = YES;
- }
- if (shouldNotify) {
- [self notifyDidChangeCanPlayOrRecord:canPlayOrRecord];
- }
-}
-
-- (void)audioSessionDidActivate:(AVAudioSession *)session {
- if (_session != session) {
- RTCLogError(@"audioSessionDidActivate called on different AVAudioSession");
- }
- RTCLog(@"Audio session was externally activated.");
- [self incrementActivationCount];
- self.isActive = YES;
- // When a CallKit call begins, it's possible that we receive an interruption
- // begin without a corresponding end. Since we know that we have an activated
- // audio session at this point, just clear any saved interruption flag since
- // the app may never be foregrounded during the duration of the call.
- if (self.isInterrupted) {
- RTCLog(@"Clearing interrupted state due to external activation.");
- self.isInterrupted = NO;
- }
- // Treat external audio session activation as an end interruption event.
- [self notifyDidEndInterruptionWithShouldResumeSession:YES];
-}
-
-- (void)audioSessionDidDeactivate:(AVAudioSession *)session {
- if (_session != session) {
- RTCLogError(@"audioSessionDidDeactivate called on different AVAudioSession");
- }
- RTCLog(@"Audio session was externally deactivated.");
- self.isActive = NO;
- [self decrementActivationCount];
-}
-
-- (void)observeValueForKeyPath:(NSString *)keyPath
- ofObject:(id)object
- change:(NSDictionary *)change
- context:(void *)context {
- if (context == (__bridge void*)RTCAudioSession.class) {
- if (object == _session) {
- NSNumber *newVolume = change[NSKeyValueChangeNewKey];
- RTCLog(@"OutputVolumeDidChange to %f", newVolume.floatValue);
- [self notifyDidChangeOutputVolume:newVolume.floatValue];
- }
- } else {
- [super observeValueForKeyPath:keyPath
- ofObject:object
- change:change
- context:context];
- }
-}
-
-- (void)notifyDidBeginInterruption {
- for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSessionDidBeginInterruption:);
- if ([delegate respondsToSelector:sel]) {
- [delegate audioSessionDidBeginInterruption:self];
- }
- }
-}
-
-- (void)notifyDidEndInterruptionWithShouldResumeSession:
- (BOOL)shouldResumeSession {
- for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSessionDidEndInterruption:shouldResumeSession:);
- if ([delegate respondsToSelector:sel]) {
- [delegate audioSessionDidEndInterruption:self
- shouldResumeSession:shouldResumeSession];
- }
- }
-}
-
-- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
- previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
- for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSessionDidChangeRoute:reason:previousRoute:);
- if ([delegate respondsToSelector:sel]) {
- [delegate audioSessionDidChangeRoute:self
- reason:reason
- previousRoute:previousRoute];
- }
- }
-}
-
-- (void)notifyMediaServicesWereLost {
- for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSessionMediaServerTerminated:);
- if ([delegate respondsToSelector:sel]) {
- [delegate audioSessionMediaServerTerminated:self];
- }
- }
-}
-
-- (void)notifyMediaServicesWereReset {
- for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSessionMediaServerReset:);
- if ([delegate respondsToSelector:sel]) {
- [delegate audioSessionMediaServerReset:self];
- }
- }
-}
-
-- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
- for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSession:didChangeCanPlayOrRecord:);
- if ([delegate respondsToSelector:sel]) {
- [delegate audioSession:self didChangeCanPlayOrRecord:canPlayOrRecord];
- }
- }
-}
-
-- (void)notifyDidStartPlayOrRecord {
- for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSessionDidStartPlayOrRecord:);
- if ([delegate respondsToSelector:sel]) {
- [delegate audioSessionDidStartPlayOrRecord:self];
- }
- }
-}
-
-- (void)notifyDidStopPlayOrRecord {
- for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSessionDidStopPlayOrRecord:);
- if ([delegate respondsToSelector:sel]) {
- [delegate audioSessionDidStopPlayOrRecord:self];
- }
- }
-}
-
-- (void)notifyDidChangeOutputVolume:(float)volume {
- for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSession:didChangeOutputVolume:);
- if ([delegate respondsToSelector:sel]) {
- [delegate audioSession:self didChangeOutputVolume:volume];
- }
- }
-}
-
-- (void)notifyDidDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches {
- for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSession:didDetectPlayoutGlitch:);
- if ([delegate respondsToSelector:sel]) {
- [delegate audioSession:self didDetectPlayoutGlitch:totalNumberOfGlitches];
- }
- }
-}
-
-- (void)notifyWillSetActive:(BOOL)active {
- for (id delegate : self.delegates) {
- SEL sel = @selector(audioSession:willSetActive:);
- if ([delegate respondsToSelector:sel]) {
- [delegate audioSession:self willSetActive:active];
- }
- }
-}
-
-- (void)notifyDidSetActive:(BOOL)active {
- for (id delegate : self.delegates) {
- SEL sel = @selector(audioSession:didSetActive:);
- if ([delegate respondsToSelector:sel]) {
- [delegate audioSession:self didSetActive:active];
- }
- }
-}
-
-- (void)notifyFailedToSetActive:(BOOL)active error:(NSError *)error {
- for (id delegate : self.delegates) {
- SEL sel = @selector(audioSession:failedToSetActive:error:);
- if ([delegate respondsToSelector:sel]) {
- [delegate audioSession:self failedToSetActive:active error:error];
- }
- }
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/Audio/RTCAudioSessionConfiguration.m b/sdk/objc/Framework/Classes/Audio/RTCAudioSessionConfiguration.m
deleted file mode 100644
index 3bcb034..0000000
--- a/sdk/objc/Framework/Classes/Audio/RTCAudioSessionConfiguration.m
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Copyright 2016 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCAudioSession.h"
-#import "WebRTC/RTCAudioSessionConfiguration.h"
-
-#import "WebRTC/RTCDispatcher.h"
-#import "WebRTC/UIDevice+RTCDevice.h"
-
-
-// Try to use mono to save resources. Also avoids channel format conversion
-// in the I/O audio unit. Initial tests have shown that it is possible to use
-// mono natively for built-in microphones and for BT headsets but not for
-// wired headsets. Wired headsets only support stereo as native channel format
-// but it is a low cost operation to do a format conversion to mono in the
-// audio unit. Hence, we will not hit a RTC_CHECK in
-// VerifyAudioParametersForActiveAudioSession() for a mismatch between the
-// preferred number of channels and the actual number of channels.
-const int kRTCAudioSessionPreferredNumberOfChannels = 1;
-
-// Preferred hardware sample rate (unit is in Hertz). The client sample rate
-// will be set to this value as well to avoid resampling in the audio unit's
-// format converter. Note that some devices, e.g. BT headsets, only support
-// 8000 Hz as the native sample rate.
-const double kRTCAudioSessionHighPerformanceSampleRate = 48000.0;
-
-// A lower sample rate will be used for devices with only one core
-// (e.g. iPhone 4). The goal is to reduce the CPU load of the application.
-const double kRTCAudioSessionLowComplexitySampleRate = 16000.0;
-
-// Use a hardware I/O buffer size (unit is in seconds) that matches the 10ms
-// size used by WebRTC. The exact actual size will differ between devices.
-// Example: using 48kHz on iPhone 6 results in a native buffer size of
-// ~10.6667ms or 512 audio frames per buffer. The FineAudioBuffer instance will
-// take care of any buffering required to convert between native buffers and
-// buffers used by WebRTC. It is beneficial for the performance if the native
-// size is as an even multiple of 10ms as possible since it results in "clean"
-// callback sequence without bursts of callbacks back to back.
-const double kRTCAudioSessionHighPerformanceIOBufferDuration = 0.02;
-
-// Use a larger buffer size on devices with only one core (e.g. iPhone 4).
-// It will result in a lower CPU consumption at the cost of a larger latency.
-// The size of 60ms is based on instrumentation that shows a significant
-// reduction in CPU load compared with 10ms on low-end devices.
-// TODO(henrika): monitor this size and determine if it should be modified.
-const double kRTCAudioSessionLowComplexityIOBufferDuration = 0.06;
-
-static RTCAudioSessionConfiguration *gWebRTCConfiguration = nil;
-
-@implementation RTCAudioSessionConfiguration
-
-@synthesize category = _category;
-@synthesize categoryOptions = _categoryOptions;
-@synthesize mode = _mode;
-@synthesize sampleRate = _sampleRate;
-@synthesize ioBufferDuration = _ioBufferDuration;
-@synthesize inputNumberOfChannels = _inputNumberOfChannels;
-@synthesize outputNumberOfChannels = _outputNumberOfChannels;
-
-- (instancetype)init {
- if (self = [super init]) {
- // Use a category which supports simultaneous recording and playback.
- // By default, using this category implies that our app’s audio is
- // nonmixable, hence activating the session will interrupt any other
- // audio sessions which are also nonmixable.
- _category = AVAudioSessionCategoryPlayAndRecord;
- _categoryOptions = AVAudioSessionCategoryOptionAllowBluetooth;
-
- // Specify mode for two-way voice communication (e.g. VoIP).
- _mode = AVAudioSessionModeVoiceChat;
-
- // Set the session's sample rate or the hardware sample rate.
-    // It is essential that we use the same sample rate as the stream format
- // to ensure that the I/O unit does not have to do sample rate conversion.
- // Set the preferred audio I/O buffer duration, in seconds.
- NSUInteger processorCount = [NSProcessInfo processInfo].processorCount;
- // Use best sample rate and buffer duration if the CPU has more than one
- // core.
- if (processorCount > 1 && [UIDevice deviceType] != RTCDeviceTypeIPhone4S) {
- _sampleRate = kRTCAudioSessionHighPerformanceSampleRate;
- _ioBufferDuration = kRTCAudioSessionHighPerformanceIOBufferDuration;
- } else {
- _sampleRate = kRTCAudioSessionLowComplexitySampleRate;
- _ioBufferDuration = kRTCAudioSessionLowComplexityIOBufferDuration;
- }
-
- // We try to use mono in both directions to save resources and format
-    // conversions in the audio unit. Some devices only support stereo,
-    // e.g. a wired headset on iPhone 6.
- // TODO(henrika): add support for stereo if needed.
- _inputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels;
- _outputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels;
- }
- return self;
-}
-
-+ (void)initialize {
- gWebRTCConfiguration = [[self alloc] init];
-}
-
-+ (instancetype)currentConfiguration {
- RTCAudioSession *session = [RTCAudioSession sharedInstance];
- RTCAudioSessionConfiguration *config =
- [[RTCAudioSessionConfiguration alloc] init];
- config.category = session.category;
- config.categoryOptions = session.categoryOptions;
- config.mode = session.mode;
- config.sampleRate = session.sampleRate;
- config.ioBufferDuration = session.IOBufferDuration;
- config.inputNumberOfChannels = session.inputNumberOfChannels;
- config.outputNumberOfChannels = session.outputNumberOfChannels;
- return config;
-}
-
-+ (instancetype)webRTCConfiguration {
- @synchronized(self) {
- return (RTCAudioSessionConfiguration *)gWebRTCConfiguration;
- }
-}
-
-+ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration {
- @synchronized(self) {
- gWebRTCConfiguration = configuration;
- }
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/Audio/RTCNativeAudioSessionDelegateAdapter.h b/sdk/objc/Framework/Classes/Audio/RTCNativeAudioSessionDelegateAdapter.h
deleted file mode 100644
index 7fb2184..0000000
--- a/sdk/objc/Framework/Classes/Audio/RTCNativeAudioSessionDelegateAdapter.h
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright 2018 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "sdk/objc/Framework/Headers/WebRTC/RTCAudioSession.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-namespace webrtc {
-class AudioSessionObserver;
-}
-
-/** Adapter that forwards RTCAudioSessionDelegate calls to the appropriate
- * methods on the AudioSessionObserver.
- */
-@interface RTCNativeAudioSessionDelegateAdapter : NSObject <RTCAudioSessionDelegate>
-
-- (instancetype)init NS_UNAVAILABLE;
-
-/** |observer| is a raw pointer and should be kept alive
- * for this object's lifetime.
- */
-- (instancetype)initWithObserver:(webrtc::AudioSessionObserver *)observer NS_DESIGNATED_INITIALIZER;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/Audio/RTCNativeAudioSessionDelegateAdapter.mm b/sdk/objc/Framework/Classes/Audio/RTCNativeAudioSessionDelegateAdapter.mm
deleted file mode 100644
index a443e51..0000000
--- a/sdk/objc/Framework/Classes/Audio/RTCNativeAudioSessionDelegateAdapter.mm
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright 2018 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCNativeAudioSessionDelegateAdapter.h"
-
-#include "sdk/objc/Framework/Native/src/audio/audio_session_observer.h"
-
-#import "WebRTC/RTCLogging.h"
-
-@implementation RTCNativeAudioSessionDelegateAdapter {
- webrtc::AudioSessionObserver *_observer;
-}
-
-- (instancetype)initWithObserver:(webrtc::AudioSessionObserver *)observer {
- RTC_DCHECK(observer);
- if (self = [super init]) {
- _observer = observer;
- }
- return self;
-}
-
-#pragma mark - RTCAudioSessionDelegate
-
-- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session {
- _observer->OnInterruptionBegin();
-}
-
-- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session
- shouldResumeSession:(BOOL)shouldResumeSession {
- _observer->OnInterruptionEnd();
-}
-
-- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session
- reason:(AVAudioSessionRouteChangeReason)reason
- previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
- switch (reason) {
- case AVAudioSessionRouteChangeReasonUnknown:
- case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
- case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
- case AVAudioSessionRouteChangeReasonCategoryChange:
- // It turns out that we see a category change (at least in iOS 9.2)
- // when making a switch from a BT device to e.g. Speaker using the
- // iOS Control Center and that we therefore must check if the sample
- // rate has changed. And if so is the case, restart the audio unit.
- case AVAudioSessionRouteChangeReasonOverride:
- case AVAudioSessionRouteChangeReasonWakeFromSleep:
- case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
- _observer->OnValidRouteChange();
- break;
- case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
- // The set of input and output ports has not changed, but their
- // configuration has, e.g., a port’s selected data source has
- // changed. Ignore this type of route change since we are focusing
- // on detecting headset changes.
- RTCLog(@"Ignoring RouteConfigurationChange");
- break;
- }
-}
-
-- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session {
-}
-
-- (void)audioSessionMediaServerReset:(RTCAudioSession *)session {
-}
-
-- (void)audioSession:(RTCAudioSession *)session
- didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
- _observer->OnCanPlayOrRecordChange(canPlayOrRecord);
-}
-
-- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session {
-}
-
-- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session {
-}
-
-- (void)audioSession:(RTCAudioSession *)audioSession
- didChangeOutputVolume:(float)outputVolume {
- _observer->OnChangedOutputVolume();
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/Common/NSString+StdString.h b/sdk/objc/Framework/Classes/Common/NSString+StdString.h
index 7f51a9f..3ec1b61 100644
--- a/sdk/objc/Framework/Classes/Common/NSString+StdString.h
+++ b/sdk/objc/Framework/Classes/Common/NSString+StdString.h
@@ -8,19 +8,4 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import <Foundation/Foundation.h>
-
-#include <string>
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface NSString (StdString)
-
-@property(nonatomic, readonly) std::string stdString;
-
-+ (std::string)stdStringForString:(NSString *)nsString;
-+ (NSString *)stringForStdString:(const std::string &)stdString;
-
-@end
-
-NS_ASSUME_NONNULL_END
+#import "helpers/NSString+StdString.h"
diff --git a/sdk/objc/Framework/Classes/Common/NSString+StdString.mm b/sdk/objc/Framework/Classes/Common/NSString+StdString.mm
deleted file mode 100644
index 3210ff0..0000000
--- a/sdk/objc/Framework/Classes/Common/NSString+StdString.mm
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "NSString+StdString.h"
-
-@implementation NSString (StdString)
-
-- (std::string)stdString {
- return [NSString stdStringForString:self];
-}
-
-+ (std::string)stdStringForString:(NSString *)nsString {
- NSData *charData = [nsString dataUsingEncoding:NSUTF8StringEncoding];
- return std::string(reinterpret_cast<const char *>(charData.bytes),
- charData.length);
-}
-
-+ (NSString *)stringForStdString:(const std::string&)stdString {
- // std::string may contain null termination character so we construct
- // using length.
- return [[NSString alloc] initWithBytes:stdString.data()
- length:stdString.length()
- encoding:NSUTF8StringEncoding];
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/Common/RTCCallbackLogger.mm b/sdk/objc/Framework/Classes/Common/RTCCallbackLogger.mm
deleted file mode 100644
index 4802c12..0000000
--- a/sdk/objc/Framework/Classes/Common/RTCCallbackLogger.mm
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCCallbackLogger.h"
-
-#include <memory>
-
-#include "rtc_base/checks.h"
-#include "rtc_base/logging.h"
-#include "rtc_base/logsinks.h"
-
-class CallbackLogSink : public rtc::LogSink {
- public:
- CallbackLogSink(void (^callbackHandler)(NSString *message)) {
- callback_handler_ = callbackHandler;
- }
-
- ~CallbackLogSink() override { callback_handler_ = nil; }
-
- void OnLogMessage(const std::string &message) override {
- if (callback_handler_) {
- callback_handler_([NSString stringWithUTF8String:message.c_str()]);
- }
- }
-
- private:
- void (^callback_handler_)(NSString *message);
-};
-
-@implementation RTCCallbackLogger {
- BOOL _hasStarted;
- std::unique_ptr<CallbackLogSink> _logSink;
-}
-
-@synthesize severity = _severity;
-
-- (instancetype)init {
- self = [super init];
- if (self != nil) {
- _severity = RTCLoggingSeverityInfo;
- }
- return self;
-}
-
-- (void)dealloc {
- [self stop];
-}
-
-- (void)start:(nullable void (^)(NSString *))callback {
- if (_hasStarted) {
- return;
- }
-
- _logSink.reset(new CallbackLogSink(callback));
-
- rtc::LogMessage::AddLogToStream(_logSink.get(), [self rtcSeverity]);
- _hasStarted = YES;
-}
-
-- (void)stop {
- if (!_hasStarted) {
- return;
- }
- RTC_DCHECK(_logSink);
- rtc::LogMessage::RemoveLogToStream(_logSink.get());
- _hasStarted = NO;
- _logSink.reset();
-}
-
-#pragma mark - Private
-
-- (rtc::LoggingSeverity)rtcSeverity {
- switch (_severity) {
- case RTCLoggingSeverityVerbose:
- return rtc::LS_VERBOSE;
- case RTCLoggingSeverityInfo:
- return rtc::LS_INFO;
- case RTCLoggingSeverityWarning:
- return rtc::LS_WARNING;
- case RTCLoggingSeverityError:
- return rtc::LS_ERROR;
- }
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/Common/RTCDispatcher+Private.h b/sdk/objc/Framework/Classes/Common/RTCDispatcher+Private.h
deleted file mode 100644
index 3c114e5..0000000
--- a/sdk/objc/Framework/Classes/Common/RTCDispatcher+Private.h
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCDispatcher.h"
-
-@interface RTCDispatcher ()
-
-+ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType;
-
-@end
diff --git a/sdk/objc/Framework/Classes/Common/RTCDispatcher.m b/sdk/objc/Framework/Classes/Common/RTCDispatcher.m
deleted file mode 100644
index 530e51a..0000000
--- a/sdk/objc/Framework/Classes/Common/RTCDispatcher.m
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCDispatcher+Private.h"
-
-static dispatch_queue_t kAudioSessionQueue = nil;
-static dispatch_queue_t kCaptureSessionQueue = nil;
-
-@implementation RTCDispatcher
-
-+ (void)initialize {
- static dispatch_once_t onceToken;
- dispatch_once(&onceToken, ^{
- kAudioSessionQueue = dispatch_queue_create(
- "org.webrtc.RTCDispatcherAudioSession",
- DISPATCH_QUEUE_SERIAL);
- kCaptureSessionQueue = dispatch_queue_create(
- "org.webrtc.RTCDispatcherCaptureSession",
- DISPATCH_QUEUE_SERIAL);
- });
-}
-
-+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
- block:(dispatch_block_t)block {
- dispatch_queue_t queue = [self dispatchQueueForType:dispatchType];
- dispatch_async(queue, block);
-}
-
-+ (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType {
- dispatch_queue_t targetQueue = [self dispatchQueueForType:dispatchType];
- const char* targetLabel = dispatch_queue_get_label(targetQueue);
- const char* currentLabel = dispatch_queue_get_label(DISPATCH_CURRENT_QUEUE_LABEL);
-
- NSAssert(strlen(targetLabel) > 0, @"Label is required for the target queue.");
- NSAssert(strlen(currentLabel) > 0, @"Label is required for the current queue.");
-
- return strcmp(targetLabel, currentLabel) == 0;
-}
-
-#pragma mark - Private
-
-+ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType {
- switch (dispatchType) {
- case RTCDispatcherTypeMain:
- return dispatch_get_main_queue();
- case RTCDispatcherTypeCaptureSession:
- return kCaptureSessionQueue;
- case RTCDispatcherTypeAudioSession:
- return kAudioSessionQueue;
- }
-}
-
-@end
-
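A short usage sketch of the dispatcher removed above (illustrative only; the assertion mirrors how +isOnQueueForType: compares queue labels):

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeAudioSession
                               block:^{
                                 // Audio-session work runs on its dedicated serial queue.
                                 NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeAudioSession],
                                          @"Expected to be on the audio session queue.");
                               }];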
diff --git a/sdk/objc/Framework/Classes/Common/RTCFieldTrials.mm b/sdk/objc/Framework/Classes/Common/RTCFieldTrials.mm
deleted file mode 100644
index 9cbd640..0000000
--- a/sdk/objc/Framework/Classes/Common/RTCFieldTrials.mm
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright 2016 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCFieldTrials.h"
-
-#include <memory>
-
-#import "WebRTC/RTCLogging.h"
-
-// Adding 'nogncheck' to disable the gn include headers check.
-// We don't want to depend on 'system_wrappers:field_trial_default' because
-// clients should be able to provide their own implementation.
-#include "system_wrappers/include/field_trial_default.h" // nogncheck
-
-NSString * const kRTCFieldTrialAudioSendSideBweKey = @"WebRTC-Audio-SendSideBwe";
-NSString * const kRTCFieldTrialAudioSendSideBweForVideoKey = @"WebRTC-Audio-SendSideBwe-For-Video";
-NSString * const kRTCFieldTrialAudioForceNoTWCCKey = @"WebRTC-Audio-ForceNoTWCC";
-NSString * const kRTCFieldTrialAudioForceABWENoTWCCKey = @"WebRTC-Audio-ABWENoTWCC";
-NSString * const kRTCFieldTrialSendSideBweWithOverheadKey = @"WebRTC-SendSideBwe-WithOverhead";
-NSString * const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised";
-NSString * const kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03";
-NSString * const kRTCFieldTrialImprovedBitrateEstimateKey = @"WebRTC-ImprovedBitrateEstimate";
-NSString * const kRTCFieldTrialMedianSlopeFilterKey = @"WebRTC-BweMedianSlopeFilter";
-NSString * const kRTCFieldTrialTrendlineFilterKey = @"WebRTC-BweTrendlineFilter";
-NSString * const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile";
-NSString * const kRTCFieldTrialMinimizeResamplingOnMobileKey =
- @"WebRTC-Audio-MinimizeResamplingOnMobile";
-NSString * const kRTCFieldTrialEnabledValue = @"Enabled";
-
-static std::unique_ptr<char[]> gFieldTrialInitString;
-
-NSString *RTCFieldTrialMedianSlopeFilterValue(
- size_t windowSize, double thresholdGain) {
- NSString *format = @"Enabled-%zu,%lf";
- return [NSString stringWithFormat:format, windowSize, thresholdGain];
-}
-
-NSString *RTCFieldTrialTrendlineFilterValue(
- size_t windowSize, double smoothingCoeff, double thresholdGain) {
- NSString *format = @"Enabled-%zu,%lf,%lf";
- return [NSString stringWithFormat:format, windowSize, smoothingCoeff, thresholdGain];
-}
-
-void RTCInitFieldTrialDictionary(NSDictionary<NSString *, NSString *> *fieldTrials) {
- if (!fieldTrials) {
- RTCLogWarning(@"No fieldTrials provided.");
- return;
- }
- // Assemble the keys and values into the field trial string.
- // We don't perform any extra format checking. That should be done by the underlying WebRTC calls.
- NSMutableString *fieldTrialInitString = [NSMutableString string];
- for (NSString *key in fieldTrials) {
- NSString *fieldTrialEntry = [NSString stringWithFormat:@"%@/%@/", key, fieldTrials[key]];
- [fieldTrialInitString appendString:fieldTrialEntry];
- }
- size_t len = fieldTrialInitString.length + 1;
- gFieldTrialInitString.reset(new char[len]);
- if (![fieldTrialInitString getCString:gFieldTrialInitString.get()
- maxLength:len
- encoding:NSUTF8StringEncoding]) {
- RTCLogError(@"Failed to convert field trial string.");
- return;
- }
- webrtc::field_trial::InitFieldTrialsFromString(gFieldTrialInitString.get());
-}
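A hypothetical call site for the field-trial initializer above, kept here only as an illustration; the dictionary keys are the constants defined in this file:

  NSDictionary<NSString *, NSString *> *fieldTrials = @{
    kRTCFieldTrialFlexFec03Key : kRTCFieldTrialEnabledValue,
    kRTCFieldTrialMedianSlopeFilterKey : RTCFieldTrialMedianSlopeFilterValue(20, 0.001),
  };
  // Call before creating the peer connection factory; the assembled string is kept
  // alive in gFieldTrialInitString because the native layer does not copy it.
  RTCInitFieldTrialDictionary(fieldTrials);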
diff --git a/sdk/objc/Framework/Classes/Common/RTCFileLogger.mm b/sdk/objc/Framework/Classes/Common/RTCFileLogger.mm
deleted file mode 100644
index 6acb942..0000000
--- a/sdk/objc/Framework/Classes/Common/RTCFileLogger.mm
+++ /dev/null
@@ -1,175 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCFileLogger.h"
-
-#include <memory>
-
-#include "rtc_base/checks.h"
-#include "rtc_base/filerotatingstream.h"
-#include "rtc_base/logging.h"
-#include "rtc_base/logsinks.h"
-
-NSString *const kDefaultLogDirName = @"webrtc_logs";
-NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB.
-const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log";
-
-@implementation RTCFileLogger {
- BOOL _hasStarted;
- NSString *_dirPath;
- NSUInteger _maxFileSize;
- std::unique_ptr<rtc::FileRotatingLogSink> _logSink;
-}
-
-@synthesize severity = _severity;
-@synthesize rotationType = _rotationType;
-@synthesize shouldDisableBuffering = _shouldDisableBuffering;
-
-- (instancetype)init {
- NSArray *paths = NSSearchPathForDirectoriesInDomains(
- NSDocumentDirectory, NSUserDomainMask, YES);
- NSString *documentsDirPath = [paths firstObject];
- NSString *defaultDirPath =
- [documentsDirPath stringByAppendingPathComponent:kDefaultLogDirName];
- return [self initWithDirPath:defaultDirPath
- maxFileSize:kDefaultMaxFileSize];
-}
-
-- (instancetype)initWithDirPath:(NSString *)dirPath
- maxFileSize:(NSUInteger)maxFileSize {
- return [self initWithDirPath:dirPath
- maxFileSize:maxFileSize
- rotationType:RTCFileLoggerTypeCall];
-}
-
-- (instancetype)initWithDirPath:(NSString *)dirPath
- maxFileSize:(NSUInteger)maxFileSize
- rotationType:(RTCFileLoggerRotationType)rotationType {
- NSParameterAssert(dirPath.length);
- NSParameterAssert(maxFileSize);
- if (self = [super init]) {
- BOOL isDir = NO;
- NSFileManager *fileManager = [NSFileManager defaultManager];
- if ([fileManager fileExistsAtPath:dirPath isDirectory:&isDir]) {
- if (!isDir) {
- // Bail if something already exists there.
- return nil;
- }
- } else {
- if (![fileManager createDirectoryAtPath:dirPath
- withIntermediateDirectories:NO
- attributes:nil
- error:nil]) {
- // Bail if we failed to create a directory.
- return nil;
- }
- }
- _dirPath = dirPath;
- _maxFileSize = maxFileSize;
- _severity = RTCFileLoggerSeverityInfo;
- }
- return self;
-}
-
-- (void)dealloc {
- [self stop];
-}
-
-- (void)start {
- if (_hasStarted) {
- return;
- }
- switch (_rotationType) {
- case RTCFileLoggerTypeApp:
- _logSink.reset(
- new rtc::FileRotatingLogSink(_dirPath.UTF8String,
- kRTCFileLoggerRotatingLogPrefix,
- _maxFileSize,
- _maxFileSize / 10));
- break;
- case RTCFileLoggerTypeCall:
- _logSink.reset(
- new rtc::CallSessionFileRotatingLogSink(_dirPath.UTF8String,
- _maxFileSize));
- break;
- }
- if (!_logSink->Init()) {
- RTC_LOG(LS_ERROR) << "Failed to open log files at path: " << _dirPath.UTF8String;
- _logSink.reset();
- return;
- }
- if (_shouldDisableBuffering) {
- _logSink->DisableBuffering();
- }
- rtc::LogMessage::LogThreads(true);
- rtc::LogMessage::LogTimestamps(true);
- rtc::LogMessage::AddLogToStream(_logSink.get(), [self rtcSeverity]);
- _hasStarted = YES;
-}
-
-- (void)stop {
- if (!_hasStarted) {
- return;
- }
- RTC_DCHECK(_logSink);
- rtc::LogMessage::RemoveLogToStream(_logSink.get());
- _hasStarted = NO;
- _logSink.reset();
-}
-
-- (nullable NSData *)logData {
- if (_hasStarted) {
- return nil;
- }
- NSMutableData* logData = [NSMutableData data];
- std::unique_ptr<rtc::FileRotatingStream> stream;
- switch(_rotationType) {
- case RTCFileLoggerTypeApp:
- stream.reset(
- new rtc::FileRotatingStream(_dirPath.UTF8String,
- kRTCFileLoggerRotatingLogPrefix));
- break;
- case RTCFileLoggerTypeCall:
- stream.reset(new rtc::CallSessionFileRotatingStream(_dirPath.UTF8String));
- break;
- }
- if (!stream->Open()) {
- return logData;
- }
- size_t bufferSize = 0;
- if (!stream->GetSize(&bufferSize) || bufferSize == 0) {
- return logData;
- }
- size_t read = 0;
- // Allocate memory using malloc so we can pass it directly to NSData without
- // copying.
- std::unique_ptr<uint8_t[]> buffer(static_cast<uint8_t*>(malloc(bufferSize)));
- stream->ReadAll(buffer.get(), bufferSize, &read, nullptr);
- logData = [[NSMutableData alloc] initWithBytesNoCopy:buffer.release()
- length:read];
- return logData;
-}
-
-#pragma mark - Private
-
-- (rtc::LoggingSeverity)rtcSeverity {
- switch (_severity) {
- case RTCFileLoggerSeverityVerbose:
- return rtc::LS_VERBOSE;
- case RTCFileLoggerSeverityInfo:
- return rtc::LS_INFO;
- case RTCFileLoggerSeverityWarning:
- return rtc::LS_WARNING;
- case RTCFileLoggerSeverityError:
- return rtc::LS_ERROR;
- }
-}
-
-@end
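A minimal usage sketch of the file logger deleted above (hypothetical call site); as the implementation shows, -logData returns nil while logging is active, so stop the logger before reading:

  RTCFileLogger *fileLogger = [[RTCFileLogger alloc] init];  // Documents/webrtc_logs by default.
  fileLogger.severity = RTCFileLoggerSeverityWarning;
  [fileLogger start];
  // ... run the call ...
  [fileLogger stop];
  NSData *logs = [fileLogger logData];  // Concatenated contents of the rotated log files.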
diff --git a/sdk/objc/Framework/Classes/Common/RTCLogging.mm b/sdk/objc/Framework/Classes/Common/RTCLogging.mm
deleted file mode 100644
index ab76a1d..0000000
--- a/sdk/objc/Framework/Classes/Common/RTCLogging.mm
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCLogging.h"
-
-#include "rtc_base/logging.h"
-
-rtc::LoggingSeverity RTCGetNativeLoggingSeverity(RTCLoggingSeverity severity) {
- switch (severity) {
- case RTCLoggingSeverityVerbose:
- return rtc::LS_VERBOSE;
- case RTCLoggingSeverityInfo:
- return rtc::LS_INFO;
- case RTCLoggingSeverityWarning:
- return rtc::LS_WARNING;
- case RTCLoggingSeverityError:
- return rtc::LS_ERROR;
- }
-}
-
-void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string) {
- if (log_string.length) {
- const char* utf8_string = log_string.UTF8String;
- RTC_LOG_V(RTCGetNativeLoggingSeverity(severity)) << utf8_string;
- }
-}
-
-void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity) {
- rtc::LogMessage::LogToDebug(RTCGetNativeLoggingSeverity(severity));
-}
-
-NSString* RTCFileName(const char* file_path) {
- NSString* ns_file_path =
- [[NSString alloc] initWithBytesNoCopy:const_cast<char*>(file_path)
- length:strlen(file_path)
- encoding:NSUTF8StringEncoding
- freeWhenDone:NO];
- return ns_file_path.lastPathComponent;
-}
-
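For illustration, a hypothetical use of the logging entry points defined above:

  RTCSetMinDebugLogLevel(RTCLoggingSeverityInfo);
  RTCLogEx(RTCLoggingSeverityWarning, @"Dropped a frame while backgrounded");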
diff --git a/sdk/objc/Framework/Classes/Common/RTCUIApplicationStatusObserver.h b/sdk/objc/Framework/Classes/Common/RTCUIApplicationStatusObserver.h
index 0c03295..f380d3f 100644
--- a/sdk/objc/Framework/Classes/Common/RTCUIApplicationStatusObserver.h
+++ b/sdk/objc/Framework/Classes/Common/RTCUIApplicationStatusObserver.h
@@ -8,18 +8,4 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#if defined(WEBRTC_IOS)
-
-#import <Foundation/Foundation.h>
-
-NS_EXTENSION_UNAVAILABLE_IOS("Application status not available in app extensions.")
-@interface RTCUIApplicationStatusObserver : NSObject
-
-+ (instancetype)sharedInstance;
-+ (void)prepareForUse;
-
-- (BOOL)isApplicationActive;
-
-@end
-
-#endif // WEBRTC_IOS
+#import "helpers/RTCUIApplicationStatusObserver.h"
diff --git a/sdk/objc/Framework/Classes/Common/RTCUIApplicationStatusObserver.m b/sdk/objc/Framework/Classes/Common/RTCUIApplicationStatusObserver.m
deleted file mode 100644
index 37554e7..0000000
--- a/sdk/objc/Framework/Classes/Common/RTCUIApplicationStatusObserver.m
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Copyright 2016 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "RTCUIApplicationStatusObserver.h"
-
-#if defined(WEBRTC_IOS)
-
-#import <UIKit/UIKit.h>
-
-#include "rtc_base/checks.h"
-
-@interface RTCUIApplicationStatusObserver ()
-
-@property(nonatomic, assign) BOOL initialized;
-@property(nonatomic, assign) UIApplicationState state;
-
-@end
-
-@implementation RTCUIApplicationStatusObserver {
- BOOL _initialized;
- dispatch_block_t _initializeBlock;
- dispatch_semaphore_t _waitForInitializeSemaphore;
- UIApplicationState _state;
-
- id<NSObject> _activeObserver;
- id<NSObject> _backgroundObserver;
-}
-
-@synthesize initialized = _initialized;
-@synthesize state = _state;
-
-+ (instancetype)sharedInstance {
- static id sharedInstance;
- static dispatch_once_t onceToken;
- dispatch_once(&onceToken, ^{
- sharedInstance = [[self alloc] init];
- });
-
- return sharedInstance;
-}
-
-// Method to make sure observers are added and the initialization block is
-// scheduled to run on the main queue.
-+ (void)prepareForUse {
- __unused RTCUIApplicationStatusObserver *observer = [self sharedInstance];
-}
-
-- (id)init {
- if (self = [super init]) {
- NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
- __weak RTCUIApplicationStatusObserver *weakSelf = self;
- _activeObserver = [center addObserverForName:UIApplicationDidBecomeActiveNotification
- object:nil
- queue:[NSOperationQueue mainQueue]
- usingBlock:^(NSNotification *note) {
- weakSelf.state =
- [UIApplication sharedApplication].applicationState;
- }];
-
- _backgroundObserver = [center addObserverForName:UIApplicationDidEnterBackgroundNotification
- object:nil
- queue:[NSOperationQueue mainQueue]
- usingBlock:^(NSNotification *note) {
- weakSelf.state =
- [UIApplication sharedApplication].applicationState;
- }];
-
- _waitForInitializeSemaphore = dispatch_semaphore_create(1);
- _initialized = NO;
- _initializeBlock = dispatch_block_create(DISPATCH_BLOCK_INHERIT_QOS_CLASS, ^{
- weakSelf.state = [UIApplication sharedApplication].applicationState;
- weakSelf.initialized = YES;
- });
-
- dispatch_async(dispatch_get_main_queue(), _initializeBlock);
- }
-
- return self;
-}
-
-- (void)dealloc {
- NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
- [center removeObserver:_activeObserver];
- [center removeObserver:_backgroundObserver];
-}
-
-- (BOOL)isApplicationActive {
- // NOTE: The function `dispatch_block_wait` can only legally be called once per block.
- // Because of this, if several threads call the `isApplicationActive` method before
- // `_initializeBlock` has executed, only the first thread may call `dispatch_block_wait`;
- // the semaphore makes the remaining threads wait for that first waiting thread to
- // finish instead.
- if (!_initialized) {
- dispatch_semaphore_wait(_waitForInitializeSemaphore, DISPATCH_TIME_FOREVER);
- if (!_initialized) {
- long ret = dispatch_block_wait(_initializeBlock,
- dispatch_time(DISPATCH_TIME_NOW, 10.0 * NSEC_PER_SEC));
- RTC_DCHECK_EQ(ret, 0);
- }
- dispatch_semaphore_signal(_waitForInitializeSemaphore);
- }
- return _state == UIApplicationStateActive;
-}
-
-@end
-
-#endif // WEBRTC_IOS
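An illustrative sketch of how the observer above is intended to be used (hypothetical call sites): +prepareForUse runs early on the main thread so the state gets cached, after which -isApplicationActive can be queried cheaply from background threads:

  // Early during startup, on the main thread.
  [RTCUIApplicationStatusObserver prepareForUse];

  // Later, e.g. inside a hypothetical capture callback on a background queue.
  BOOL active = [[RTCUIApplicationStatusObserver sharedInstance] isApplicationActive];
  if (!active) {
    // Skip encoding work while the app is backgrounded.
  }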
diff --git a/sdk/objc/Framework/Classes/Common/UIDevice+RTCDevice.mm b/sdk/objc/Framework/Classes/Common/UIDevice+RTCDevice.mm
deleted file mode 100644
index 997666d..0000000
--- a/sdk/objc/Framework/Classes/Common/UIDevice+RTCDevice.mm
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/UIDevice+RTCDevice.h"
-
-#include <memory>
-#import <sys/utsname.h>
-
-@implementation UIDevice (RTCDevice)
-
-+ (RTCDeviceType)deviceType {
- NSDictionary *machineNameToType = @{
- @"iPhone1,1": @(RTCDeviceTypeIPhone1G),
- @"iPhone1,2": @(RTCDeviceTypeIPhone3G),
- @"iPhone2,1": @(RTCDeviceTypeIPhone3GS),
- @"iPhone3,1": @(RTCDeviceTypeIPhone4),
- @"iPhone3,2": @(RTCDeviceTypeIPhone4),
- @"iPhone3,3": @(RTCDeviceTypeIPhone4Verizon),
- @"iPhone4,1": @(RTCDeviceTypeIPhone4S),
- @"iPhone5,1": @(RTCDeviceTypeIPhone5GSM),
- @"iPhone5,2": @(RTCDeviceTypeIPhone5GSM_CDMA),
- @"iPhone5,3": @(RTCDeviceTypeIPhone5CGSM),
- @"iPhone5,4": @(RTCDeviceTypeIPhone5CGSM_CDMA),
- @"iPhone6,1": @(RTCDeviceTypeIPhone5SGSM),
- @"iPhone6,2": @(RTCDeviceTypeIPhone5SGSM_CDMA),
- @"iPhone7,1": @(RTCDeviceTypeIPhone6Plus),
- @"iPhone7,2": @(RTCDeviceTypeIPhone6),
- @"iPhone8,1": @(RTCDeviceTypeIPhone6S),
- @"iPhone8,2": @(RTCDeviceTypeIPhone6SPlus),
- @"iPhone8,4": @(RTCDeviceTypeIPhoneSE),
- @"iPhone9,1": @(RTCDeviceTypeIPhone7),
- @"iPhone9,2": @(RTCDeviceTypeIPhone7Plus),
- @"iPhone9,3": @(RTCDeviceTypeIPhone7),
- @"iPhone9,4": @(RTCDeviceTypeIPhone7Plus),
- @"iPhone10,1": @(RTCDeviceTypeIPhone8),
- @"iPhone10,2": @(RTCDeviceTypeIPhone8Plus),
- @"iPhone10,3": @(RTCDeviceTypeIPhoneX),
- @"iPhone10,4": @(RTCDeviceTypeIPhone8),
- @"iPhone10,5": @(RTCDeviceTypeIPhone8Plus),
- @"iPhone10,6": @(RTCDeviceTypeIPhoneX),
- @"iPod1,1": @(RTCDeviceTypeIPodTouch1G),
- @"iPod2,1": @(RTCDeviceTypeIPodTouch2G),
- @"iPod3,1": @(RTCDeviceTypeIPodTouch3G),
- @"iPod4,1": @(RTCDeviceTypeIPodTouch4G),
- @"iPod5,1": @(RTCDeviceTypeIPodTouch5G),
- @"iPod7,1": @(RTCDeviceTypeIPodTouch6G),
- @"iPad1,1": @(RTCDeviceTypeIPad),
- @"iPad2,1": @(RTCDeviceTypeIPad2Wifi),
- @"iPad2,2": @(RTCDeviceTypeIPad2GSM),
- @"iPad2,3": @(RTCDeviceTypeIPad2CDMA),
- @"iPad2,4": @(RTCDeviceTypeIPad2Wifi2),
- @"iPad2,5": @(RTCDeviceTypeIPadMiniWifi),
- @"iPad2,6": @(RTCDeviceTypeIPadMiniGSM),
- @"iPad2,7": @(RTCDeviceTypeIPadMiniGSM_CDMA),
- @"iPad3,1": @(RTCDeviceTypeIPad3Wifi),
- @"iPad3,2": @(RTCDeviceTypeIPad3GSM_CDMA),
- @"iPad3,3": @(RTCDeviceTypeIPad3GSM),
- @"iPad3,4": @(RTCDeviceTypeIPad4Wifi),
- @"iPad3,5": @(RTCDeviceTypeIPad4GSM),
- @"iPad3,6": @(RTCDeviceTypeIPad4GSM_CDMA),
- @"iPad4,1": @(RTCDeviceTypeIPadAirWifi),
- @"iPad4,2": @(RTCDeviceTypeIPadAirCellular),
- @"iPad4,3": @(RTCDeviceTypeIPadAirWifiCellular),
- @"iPad4,4": @(RTCDeviceTypeIPadMini2GWifi),
- @"iPad4,5": @(RTCDeviceTypeIPadMini2GCellular),
- @"iPad4,6": @(RTCDeviceTypeIPadMini2GWifiCellular),
- @"iPad4,7": @(RTCDeviceTypeIPadMini3),
- @"iPad4,8": @(RTCDeviceTypeIPadMini3),
- @"iPad4,9": @(RTCDeviceTypeIPadMini3),
- @"iPad5,1": @(RTCDeviceTypeIPadMini4),
- @"iPad5,2": @(RTCDeviceTypeIPadMini4),
- @"iPad5,3": @(RTCDeviceTypeIPadAir2),
- @"iPad5,4": @(RTCDeviceTypeIPadAir2),
- @"iPad6,3": @(RTCDeviceTypeIPadPro9Inch),
- @"iPad6,4": @(RTCDeviceTypeIPadPro9Inch),
- @"iPad6,7": @(RTCDeviceTypeIPadPro12Inch),
- @"iPad6,8": @(RTCDeviceTypeIPadPro12Inch),
- @"iPad6,11": @(RTCDeviceTypeIPad5),
- @"iPad6,12": @(RTCDeviceTypeIPad5),
- @"iPad7,1": @(RTCDeviceTypeIPadPro12Inch2),
- @"iPad7,2": @(RTCDeviceTypeIPadPro12Inch2),
- @"iPad7,3": @(RTCDeviceTypeIPadPro10Inch),
- @"iPad7,4": @(RTCDeviceTypeIPadPro10Inch),
- @"iPad7,5": @(RTCDeviceTypeIPad6),
- @"iPad7,6": @(RTCDeviceTypeIPad6),
- @"i386": @(RTCDeviceTypeSimulatori386),
- @"x86_64": @(RTCDeviceTypeSimulatorx86_64),
- };
-
- RTCDeviceType deviceType = RTCDeviceTypeUnknown;
- NSNumber *typeNumber = machineNameToType[[self machineName]];
- if (typeNumber) {
- deviceType = static_cast<RTCDeviceType>(typeNumber.integerValue);
- }
- return deviceType;
-}
-
-+ (NSString *)machineName {
- struct utsname systemInfo;
- uname(&systemInfo);
- return [[NSString alloc] initWithCString:systemInfo.machine
- encoding:NSUTF8StringEncoding];
-}
-
-+ (double)currentDeviceSystemVersion {
- return [self currentDevice].systemVersion.doubleValue;
-}
-
-+ (BOOL)isIOS11OrLater {
- return [self currentDeviceSystemVersion] >= 11.0;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/Common/helpers.h b/sdk/objc/Framework/Classes/Common/helpers.h
deleted file mode 100644
index 7fd9b34..0000000
--- a/sdk/objc/Framework/Classes/Common/helpers.h
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef SDK_OBJC_FRAMEWORK_CLASSES_COMMON_HELPERS_H_
-#define SDK_OBJC_FRAMEWORK_CLASSES_COMMON_HELPERS_H_
-
-#include <string>
-
-namespace webrtc {
-namespace ios {
-
-bool CheckAndLogError(BOOL success, NSError* error);
-
-NSString* NSStringFromStdString(const std::string& stdString);
-std::string StdStringFromNSString(NSString* nsString);
-
-// Return thread ID as a string.
-std::string GetThreadId();
-
-// Return thread ID as string suitable for debug logging.
-std::string GetThreadInfo();
-
-// Returns [NSThread currentThread] description as string.
-// Example: <NSThread: 0x170066d80>{number = 1, name = main}
-std::string GetCurrentThreadDescription();
-
-#if defined(WEBRTC_IOS)
-// Returns the current name of the operating system.
-std::string GetSystemName();
-
-// Returns the current version of the operating system as a string.
-std::string GetSystemVersionAsString();
-
-// Returns the version of the operating system in double representation.
-// Uses a cached value of the system version.
-double GetSystemVersion();
-
-// Returns the device type.
-// Examples: "iPhone" and "iPod touch".
-std::string GetDeviceType();
-#endif // defined(WEBRTC_IOS)
-
-// Returns a more detailed device name.
-// Examples: "iPhone 5s (GSM)" and "iPhone 6 Plus".
-std::string GetDeviceName();
-
-// Returns the name of the process. Does not uniquely identify the process.
-std::string GetProcessName();
-
-// Returns the identifier of the process (often called process ID).
-int GetProcessID();
-
-// Returns a string containing the version of the operating system on which the
-// process is executing. The string is human readable, localized, and
-// is appropriate for displaying to the user.
-std::string GetOSVersionString();
-
-// Returns the number of processing cores available on the device.
-int GetProcessorCount();
-
-#if defined(WEBRTC_IOS)
-// Indicates whether Low Power Mode is enabled on the iOS device.
-bool GetLowPowerModeEnabled();
-#endif
-
-} // namespace ios
-} // namespace webrtc
-
-#endif // SDK_OBJC_FRAMEWORK_CLASSES_COMMON_HELPERS_H_
diff --git a/sdk/objc/Framework/Classes/Common/helpers.mm b/sdk/objc/Framework/Classes/Common/helpers.mm
deleted file mode 100644
index 657a4a2..0000000
--- a/sdk/objc/Framework/Classes/Common/helpers.mm
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-
-#import <Foundation/Foundation.h>
-#import <sys/sysctl.h>
-#if defined(WEBRTC_IOS)
-#import <UIKit/UIKit.h>
-#endif
-
-#include <memory>
-
-#include "rtc_base/checks.h"
-#include "rtc_base/logging.h"
-#include "sdk/objc/Framework/Classes/Common/helpers.h"
-
-namespace webrtc {
-namespace ios {
-
-NSString* NSStringFromStdString(const std::string& stdString) {
- // std::string may contain a null termination character, so we construct
- // using the length explicitly.
- return [[NSString alloc] initWithBytes:stdString.data()
- length:stdString.length()
- encoding:NSUTF8StringEncoding];
-}
-
-std::string StdStringFromNSString(NSString* nsString) {
- NSData* charData = [nsString dataUsingEncoding:NSUTF8StringEncoding];
- return std::string(reinterpret_cast<const char*>([charData bytes]),
- [charData length]);
-}
-
-bool CheckAndLogError(BOOL success, NSError* error) {
- if (!success) {
- NSString* msg =
- [NSString stringWithFormat:@"Error: %ld, %@, %@", (long)error.code,
- error.localizedDescription,
- error.localizedFailureReason];
- RTC_LOG(LS_ERROR) << StdStringFromNSString(msg);
- return false;
- }
- return true;
-}
-
-// TODO(henrika): see if it is possible to move to GetThreadName in
-// platform_thread.h and base it on pthread methods instead.
-std::string GetCurrentThreadDescription() {
- NSString* name = [NSString stringWithFormat:@"%@", [NSThread currentThread]];
- return StdStringFromNSString(name);
-}
-
-#if defined(WEBRTC_IOS)
-std::string GetSystemName() {
- NSString* osName = [[UIDevice currentDevice] systemName];
- return StdStringFromNSString(osName);
-}
-
-std::string GetSystemVersionAsString() {
- NSString* osVersion = [[UIDevice currentDevice] systemVersion];
- return StdStringFromNSString(osVersion);
-}
-
-std::string GetDeviceType() {
- NSString* deviceModel = [[UIDevice currentDevice] model];
- return StdStringFromNSString(deviceModel);
-}
-
-bool GetLowPowerModeEnabled() {
- return [NSProcessInfo processInfo].lowPowerModeEnabled;
-}
-#endif
-
-std::string GetDeviceName() {
- size_t size;
- sysctlbyname("hw.machine", NULL, &size, NULL, 0);
- std::unique_ptr<char[]> machine;
- machine.reset(new char[size]);
- sysctlbyname("hw.machine", machine.get(), &size, NULL, 0);
- return std::string(machine.get());
-}
-
-std::string GetProcessName() {
- NSString* processName = [NSProcessInfo processInfo].processName;
- return StdStringFromNSString(processName);
-}
-
-int GetProcessID() {
- return [NSProcessInfo processInfo].processIdentifier;
-}
-
-std::string GetOSVersionString() {
- NSString* osVersion =
- [NSProcessInfo processInfo].operatingSystemVersionString;
- return StdStringFromNSString(osVersion);
-}
-
-int GetProcessorCount() {
- return [NSProcessInfo processInfo].processorCount;
-}
-
-} // namespace ios
-} // namespace webrtc
-
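For illustration, a hypothetical Objective-C++ caller of the helpers above:

  std::string device = webrtc::ios::GetDeviceName();  // e.g. "iPhone9,1" (hw.machine).
  RTC_LOG(LS_INFO) << "Running on " << device << " with "
                   << webrtc::ios::GetProcessorCount() << " cores, OS "
                   << webrtc::ios::GetOSVersionString();
  NSLog(@"%@", webrtc::ios::NSStringFromStdString(device));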
diff --git a/sdk/objc/Framework/Classes/Common/noop.mm b/sdk/objc/Framework/Classes/Common/noop.mm
deleted file mode 100644
index 16a8e6d..0000000
--- a/sdk/objc/Framework/Classes/Common/noop.mm
+++ /dev/null
@@ -1,13 +0,0 @@
-/*
- * Copyright 2015 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This file is only needed to make ninja happy on some platforms.
-// On some platforms it is not possible to link an rtc_static_library
-// without any source file listed in the GN target.
diff --git a/sdk/objc/Framework/Classes/Common/scoped_cftyperef.h b/sdk/objc/Framework/Classes/Common/scoped_cftyperef.h
index c54b039..e5e376b 100644
--- a/sdk/objc/Framework/Classes/Common/scoped_cftyperef.h
+++ b/sdk/objc/Framework/Classes/Common/scoped_cftyperef.h
@@ -9,108 +9,4 @@
*
*/
-#ifndef WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_COMMON_SCOPED_CFTYPEREF_H_
-#define WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_COMMON_SCOPED_CFTYPEREF_H_
-
-#include <CoreFoundation/CoreFoundation.h>
-namespace rtc {
-
-// RETAIN: ScopedTypeRef should retain the object when it takes
-// ownership.
-// ASSUME: Assume the object has already been retained.
-// ScopedTypeRef takes over ownership.
-enum class RetainPolicy { RETAIN, ASSUME };
-
-namespace internal {
-template <typename T>
-struct CFTypeRefTraits {
- static T InvalidValue() { return nullptr; }
- static void Release(T ref) { CFRelease(ref); }
- static T Retain(T ref) {
- CFRetain(ref);
- return ref;
- }
-};
-
-template <typename T, typename Traits>
-class ScopedTypeRef {
- public:
- ScopedTypeRef() : ptr_(Traits::InvalidValue()) {}
- explicit ScopedTypeRef(T ptr) : ptr_(ptr) {}
- ScopedTypeRef(T ptr, RetainPolicy policy) : ScopedTypeRef(ptr) {
- if (ptr_ && policy == RetainPolicy::RETAIN)
- Traits::Retain(ptr_);
- }
-
- ScopedTypeRef(const ScopedTypeRef<T, Traits>& rhs) : ptr_(rhs.ptr_) {
- if (ptr_)
- ptr_ = Traits::Retain(ptr_);
- }
-
- ~ScopedTypeRef() {
- if (ptr_) {
- Traits::Release(ptr_);
- }
- }
-
- T get() const { return ptr_; }
- T operator->() const { return ptr_; }
- explicit operator bool() const { return ptr_; }
-
- bool operator!() const { return !ptr_; }
-
- ScopedTypeRef& operator=(const T& rhs) {
- if (ptr_)
- Traits::Release(ptr_);
- ptr_ = rhs;
- return *this;
- }
-
- ScopedTypeRef& operator=(const ScopedTypeRef<T, Traits>& rhs) {
- reset(rhs.get(), RetainPolicy::RETAIN);
- return *this;
- }
-
- // This is intended to take ownership of objects that are
- // created by pass-by-pointer initializers.
- T* InitializeInto() {
- RTC_DCHECK(!ptr_);
- return &ptr_;
- }
-
- void reset(T ptr, RetainPolicy policy = RetainPolicy::ASSUME) {
- if (ptr && policy == RetainPolicy::RETAIN)
- Traits::Retain(ptr);
- if (ptr_)
- Traits::Release(ptr_);
- ptr_ = ptr;
- }
-
- T release() {
- T temp = ptr_;
- ptr_ = Traits::InvalidValue();
- return temp;
- }
-
- private:
- T ptr_;
-};
-} // namespace internal
-
-template <typename T>
-using ScopedCFTypeRef =
- internal::ScopedTypeRef<T, internal::CFTypeRefTraits<T>>;
-
-template <typename T>
-static ScopedCFTypeRef<T> AdoptCF(T cftype) {
- return ScopedCFTypeRef<T>(cftype, RetainPolicy::RETAIN);
-}
-
-template <typename T>
-static ScopedCFTypeRef<T> ScopedCF(T cftype) {
- return ScopedCFTypeRef<T>(cftype);
-}
-
-} // namespace rtc
-
-#endif // WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_COMMON_SCOPED_CFTYPEREF_H_
+#import "helpers/scoped_cftyperef.h"
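The relocated header presumably keeps the same API; a short sketch of the two ownership policies (hypothetical usage, illustration only):

  // ScopedCF() assumes ownership of the +1 reference returned by a Create function.
  rtc::ScopedCFTypeRef<CFStringRef> owned = rtc::ScopedCF(
      CFStringCreateWithCString(kCFAllocatorDefault, "tag", kCFStringEncodingUTF8));
  // AdoptCF() retains, for references the caller does not already own.
  rtc::ScopedCFTypeRef<CFStringRef> shared = rtc::AdoptCF(owned.get());
  // Both wrappers release their reference automatically when they go out of scope.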
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.h b/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.h
deleted file mode 100644
index e5987fe..0000000
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.h
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Foundation/Foundation.h>
-
-#import "RTCMTLRenderer.h"
-
-NS_AVAILABLE(10_11, 9_0)
-@interface RTCMTLI420Renderer : RTCMTLRenderer
-@end
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm b/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
deleted file mode 100644
index f7a75d8..0000000
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCMTLI420Renderer.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-#import <Metal/Metal.h>
-#import <MetalKit/MetalKit.h>
-
-#import "WebRTC/RTCLogging.h"
-#import "WebRTC/RTCVideoFrame.h"
-
-#import "RTCMTLRenderer+Private.h"
-
-static NSString *const shaderSource = MTL_STRINGIFY(
- using namespace metal;
-
- typedef struct {
- packed_float2 position;
- packed_float2 texcoord;
- } Vertex;
-
- typedef struct {
- float4 position[[position]];
- float2 texcoord;
- } Varyings;
-
- vertex Varyings vertexPassthrough(device Vertex * verticies[[buffer(0)]],
- unsigned int vid[[vertex_id]]) {
- Varyings out;
- device Vertex &v = verticies[vid];
- out.position = float4(float2(v.position), 0.0, 1.0);
- out.texcoord = v.texcoord;
-
- return out;
- }
-
- fragment half4 fragmentColorConversion(
- Varyings in[[stage_in]], texture2d<float, access::sample> textureY[[texture(0)]],
- texture2d<float, access::sample> textureU[[texture(1)]],
- texture2d<float, access::sample> textureV[[texture(2)]]) {
- constexpr sampler s(address::clamp_to_edge, filter::linear);
- float y;
- float u;
- float v;
- float r;
- float g;
- float b;
- // Conversion for YUV to rgb from http://www.fourcc.org/fccyvrgb.php
- y = textureY.sample(s, in.texcoord).r;
- u = textureU.sample(s, in.texcoord).r;
- v = textureV.sample(s, in.texcoord).r;
- u = u - 0.5;
- v = v - 0.5;
- r = y + 1.403 * v;
- g = y - 0.344 * u - 0.714 * v;
- b = y + 1.770 * u;
-
- float4 out = float4(r, g, b, 1.0);
-
- return half4(out);
- });
-
-@implementation RTCMTLI420Renderer {
- // Textures.
- id<MTLTexture> _yTexture;
- id<MTLTexture> _uTexture;
- id<MTLTexture> _vTexture;
-
- MTLTextureDescriptor *_descriptor;
- MTLTextureDescriptor *_chromaDescriptor;
-
- int _width;
- int _height;
- int _chromaWidth;
- int _chromaHeight;
-}
-
-#pragma mark - Virtual
-
-- (NSString *)shaderSource {
- return shaderSource;
-}
-
-- (void)getWidth:(nonnull int *)width
- height:(nonnull int *)height
- cropWidth:(nonnull int *)cropWidth
- cropHeight:(nonnull int *)cropHeight
- cropX:(nonnull int *)cropX
- cropY:(nonnull int *)cropY
- ofFrame:(nonnull RTCVideoFrame *)frame {
- *width = frame.width;
- *height = frame.height;
- *cropWidth = frame.width;
- *cropHeight = frame.height;
- *cropX = 0;
- *cropY = 0;
-}
-
-- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
- if (![super setupTexturesForFrame:frame]) {
- return NO;
- }
-
- id<MTLDevice> device = [self currentMetalDevice];
- if (!device) {
- return NO;
- }
-
- id<RTCI420Buffer> buffer = [frame.buffer toI420];
-
- // Luma (y) texture.
- if (!_descriptor || (_width != frame.width && _height != frame.height)) {
- _width = frame.width;
- _height = frame.height;
- _descriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
- width:_width
- height:_height
- mipmapped:NO];
- _descriptor.usage = MTLTextureUsageShaderRead;
- _yTexture = [device newTextureWithDescriptor:_descriptor];
- }
-
- // Chroma (u,v) textures
- [_yTexture replaceRegion:MTLRegionMake2D(0, 0, _width, _height)
- mipmapLevel:0
- withBytes:buffer.dataY
- bytesPerRow:buffer.strideY];
-
- if (!_chromaDescriptor ||
- (_chromaWidth != frame.width / 2 && _chromaHeight != frame.height / 2)) {
- _chromaWidth = frame.width / 2;
- _chromaHeight = frame.height / 2;
- _chromaDescriptor =
- [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
- width:_chromaWidth
- height:_chromaHeight
- mipmapped:NO];
- _chromaDescriptor.usage = MTLTextureUsageShaderRead;
- _uTexture = [device newTextureWithDescriptor:_chromaDescriptor];
- _vTexture = [device newTextureWithDescriptor:_chromaDescriptor];
- }
-
- [_uTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
- mipmapLevel:0
- withBytes:buffer.dataU
- bytesPerRow:buffer.strideU];
- [_vTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
- mipmapLevel:0
- withBytes:buffer.dataV
- bytesPerRow:buffer.strideV];
-
- return (_uTexture != nil) && (_yTexture != nil) && (_vTexture != nil);
-}
-
-- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
- [renderEncoder setFragmentTexture:_yTexture atIndex:0];
- [renderEncoder setFragmentTexture:_uTexture atIndex:1];
- [renderEncoder setFragmentTexture:_vTexture atIndex:2];
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLNSVideoView.m b/sdk/objc/Framework/Classes/Metal/RTCMTLNSVideoView.m
deleted file mode 100644
index 869858e..0000000
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLNSVideoView.m
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCMTLNSVideoView.h"
-
-#import <Metal/Metal.h>
-#import <MetalKit/MetalKit.h>
-
-#import "WebRTC/RTCVideoFrame.h"
-
-#import "RTCMTLI420Renderer.h"
-
-@interface RTCMTLNSVideoView ()<MTKViewDelegate>
-@property(nonatomic) id<RTCMTLRenderer> renderer;
-@property(nonatomic, strong) MTKView *metalView;
-@property(atomic, strong) RTCVideoFrame *videoFrame;
-@end
-
-@implementation RTCMTLNSVideoView {
- id<RTCMTLRenderer> _renderer;
-}
-
-@synthesize delegate = _delegate;
-@synthesize renderer = _renderer;
-@synthesize metalView = _metalView;
-@synthesize videoFrame = _videoFrame;
-
-- (instancetype)initWithFrame:(CGRect)frameRect {
- self = [super initWithFrame:frameRect];
- if (self) {
- [self configure];
- }
- return self;
-}
-
-- (instancetype)initWithCoder:(NSCoder *)aCoder {
- self = [super initWithCoder:aCoder];
- if (self) {
- [self configure];
- }
- return self;
-}
-
-#pragma mark - Private
-
-+ (BOOL)isMetalAvailable {
- return [MTLCopyAllDevices() count] > 0;
-}
-
-- (void)configure {
- if ([[self class] isMetalAvailable]) {
- _metalView = [[MTKView alloc] initWithFrame:self.bounds];
- [self addSubview:_metalView];
- _metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit;
- _metalView.translatesAutoresizingMaskIntoConstraints = NO;
- _metalView.framebufferOnly = YES;
- _metalView.delegate = self;
-
- _renderer = [[RTCMTLI420Renderer alloc] init];
- if (![(RTCMTLI420Renderer *)_renderer addRenderingDestination:_metalView]) {
- _renderer = nil;
- };
- }
-}
-
-- (void)updateConstraints {
- NSDictionary *views = NSDictionaryOfVariableBindings(_metalView);
-
- NSArray *constraintsHorizontal =
- [NSLayoutConstraint constraintsWithVisualFormat:@"H:|-0-[_metalView]-0-|"
- options:0
- metrics:nil
- views:views];
- [self addConstraints:constraintsHorizontal];
-
- NSArray *constraintsVertical =
- [NSLayoutConstraint constraintsWithVisualFormat:@"V:|-0-[_metalView]-0-|"
- options:0
- metrics:nil
- views:views];
- [self addConstraints:constraintsVertical];
- [super updateConstraints];
-}
-
-#pragma mark - MTKViewDelegate methods
-- (void)drawInMTKView:(nonnull MTKView *)view {
- if (self.videoFrame == nil) {
- return;
- }
- if (view == self.metalView) {
- [_renderer drawFrame:self.videoFrame];
- }
-}
-
-- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
-}
-
-#pragma mark - RTCVideoRenderer
-
-- (void)setSize:(CGSize)size {
- _metalView.drawableSize = size;
- dispatch_async(dispatch_get_main_queue(), ^{
- [self.delegate videoView:self didChangeVideoSize:size];
- });
- [_metalView draw];
-}
-
-- (void)renderFrame:(nullable RTCVideoFrame *)frame {
- if (frame == nil) {
- return;
- }
- self.videoFrame = [frame newI420VideoFrame];
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.h b/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.h
deleted file mode 100644
index 866b7ea..0000000
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.h
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Copyright 2017 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Foundation/Foundation.h>
-
-#import "RTCMTLRenderer.h"
-
-NS_AVAILABLE(10_11, 9_0)
-@interface RTCMTLNV12Renderer : RTCMTLRenderer
-
-@end
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm b/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm
deleted file mode 100644
index a968014..0000000
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * Copyright 2017 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCMTLNV12Renderer.h"
-
-#import <Metal/Metal.h>
-#import <MetalKit/MetalKit.h>
-
-#import "WebRTC/RTCLogging.h"
-#import "WebRTC/RTCVideoFrame.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-#import "RTCMTLRenderer+Private.h"
-#include "rtc_base/checks.h"
-
-static NSString *const shaderSource = MTL_STRINGIFY(
- using namespace metal;
-
- typedef struct {
- packed_float2 position;
- packed_float2 texcoord;
- } Vertex;
-
- typedef struct {
- float4 position[[position]];
- float2 texcoord;
- } Varyings;
-
- vertex Varyings vertexPassthrough(device Vertex * verticies[[buffer(0)]],
- unsigned int vid[[vertex_id]]) {
- Varyings out;
- device Vertex &v = verticies[vid];
- out.position = float4(float2(v.position), 0.0, 1.0);
- out.texcoord = v.texcoord;
- return out;
- }
-
- // Receiving YCrCb textures.
- fragment half4 fragmentColorConversion(
- Varyings in[[stage_in]], texture2d<float, access::sample> textureY[[texture(0)]],
- texture2d<float, access::sample> textureCbCr[[texture(1)]]) {
- constexpr sampler s(address::clamp_to_edge, filter::linear);
- float y;
- float2 uv;
- y = textureY.sample(s, in.texcoord).r;
- uv = textureCbCr.sample(s, in.texcoord).rg - float2(0.5, 0.5);
-
- // Conversion for YUV to rgb from http://www.fourcc.org/fccyvrgb.php
- float4 out = float4(y + 1.403 * uv.y, y - 0.344 * uv.x - 0.714 * uv.y, y + 1.770 * uv.x, 1.0);
-
- return half4(out);
- });
-
-@implementation RTCMTLNV12Renderer {
- // Textures.
- CVMetalTextureCacheRef _textureCache;
- id<MTLTexture> _yTexture;
- id<MTLTexture> _CrCbTexture;
-}
-
-- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
- if ([super addRenderingDestination:view]) {
- return [self initializeTextureCache];
- }
- return NO;
-}
-
-- (BOOL)initializeTextureCache {
- CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice],
- nil, &_textureCache);
- if (status != kCVReturnSuccess) {
- RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status);
- return NO;
- }
-
- return YES;
-}
-
-- (NSString *)shaderSource {
- return shaderSource;
-}
-
-- (void)getWidth:(nonnull int *)width
- height:(nonnull int *)height
- cropWidth:(nonnull int *)cropWidth
- cropHeight:(nonnull int *)cropHeight
- cropX:(nonnull int *)cropX
- cropY:(nonnull int *)cropY
- ofFrame:(nonnull RTCVideoFrame *)frame {
- RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
- *width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
- *height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
- *cropWidth = pixelBuffer.cropWidth;
- *cropHeight = pixelBuffer.cropHeight;
- *cropX = pixelBuffer.cropX;
- *cropY = pixelBuffer.cropY;
-}
-
-- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
- RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]);
- if (![super setupTexturesForFrame:frame]) {
- return NO;
- }
- CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
-
- id<MTLTexture> lumaTexture = nil;
- id<MTLTexture> chromaTexture = nil;
- CVMetalTextureRef outTexture = nullptr;
-
- // Luma (y) texture.
- int lumaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
- int lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
-
- int indexPlane = 0;
- CVReturn result = CVMetalTextureCacheCreateTextureFromImage(
- kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatR8Unorm, lumaWidth,
- lumaHeight, indexPlane, &outTexture);
-
- if (result == kCVReturnSuccess) {
- lumaTexture = CVMetalTextureGetTexture(outTexture);
- }
-
- // Same as CFRelease except it can be passed NULL without crashing.
- CVBufferRelease(outTexture);
- outTexture = nullptr;
-
- // Chroma (CrCb) texture.
- indexPlane = 1;
- result = CVMetalTextureCacheCreateTextureFromImage(
- kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatRG8Unorm, lumaWidth / 2,
- lumaHeight / 2, indexPlane, &outTexture);
- if (result == kCVReturnSuccess) {
- chromaTexture = CVMetalTextureGetTexture(outTexture);
- }
- CVBufferRelease(outTexture);
-
- if (lumaTexture != nil && chromaTexture != nil) {
- _yTexture = lumaTexture;
- _CrCbTexture = chromaTexture;
- return YES;
- }
- return NO;
-}
-
-- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
- [renderEncoder setFragmentTexture:_yTexture atIndex:0];
- [renderEncoder setFragmentTexture:_CrCbTexture atIndex:1];
-}
-
-- (void)dealloc {
- if (_textureCache) {
- CFRelease(_textureCache);
- }
-}
-@end
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLRGBRenderer.h b/sdk/objc/Framework/Classes/Metal/RTCMTLRGBRenderer.h
deleted file mode 100644
index 9db422c..0000000
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLRGBRenderer.h
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright 2018 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Foundation/Foundation.h>
-
-#import "RTCMTLRenderer.h"
-
-/** @abstract RGB/BGR renderer.
- * @discussion This renderer handles both kCVPixelFormatType_32BGRA and
- * kCVPixelFormatType_32ARGB.
- */
-NS_AVAILABLE(10_11, 9_0)
-@interface RTCMTLRGBRenderer : RTCMTLRenderer
-
-@end
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLRGBRenderer.mm b/sdk/objc/Framework/Classes/Metal/RTCMTLRGBRenderer.mm
deleted file mode 100644
index e5a3704..0000000
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLRGBRenderer.mm
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Copyright 2018 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCMTLRGBRenderer.h"
-
-#import <Metal/Metal.h>
-#import <MetalKit/MetalKit.h>
-
-#import "WebRTC/RTCLogging.h"
-#import "WebRTC/RTCVideoFrame.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-#import "RTCMTLRenderer+Private.h"
-#include "rtc_base/checks.h"
-
-static NSString *const shaderSource = MTL_STRINGIFY(
- using namespace metal;
-
- typedef struct {
- packed_float2 position;
- packed_float2 texcoord;
- } Vertex;
-
- typedef struct {
- float4 position[[position]];
- float2 texcoord;
- } VertexIO;
-
- vertex VertexIO vertexPassthrough(device Vertex * verticies[[buffer(0)]],
- uint vid[[vertex_id]]) {
- VertexIO out;
- device Vertex &v = verticies[vid];
- out.position = float4(float2(v.position), 0.0, 1.0);
- out.texcoord = v.texcoord;
- return out;
- }
-
- fragment half4 fragmentColorConversion(
- VertexIO in[[stage_in]], texture2d<half, access::sample> texture[[texture(0)]],
- constant bool &isARGB[[buffer(0)]]) {
- constexpr sampler s(address::clamp_to_edge, filter::linear);
-
- half4 out = texture.sample(s, in.texcoord);
- if (isARGB) {
- out = half4(out.g, out.b, out.a, out.r);
- }
-
- return out;
- });
-
-@implementation RTCMTLRGBRenderer {
- // Textures.
- CVMetalTextureCacheRef _textureCache;
- id<MTLTexture> _texture;
-
- // Uniforms.
- id<MTLBuffer> _uniformsBuffer;
-}
-
-- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
- if ([super addRenderingDestination:view]) {
- return [self initializeTextureCache];
- }
- return NO;
-}
-
-- (BOOL)initializeTextureCache {
- CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice],
- nil, &_textureCache);
- if (status != kCVReturnSuccess) {
- RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status);
- return NO;
- }
-
- return YES;
-}
-
-- (NSString *)shaderSource {
- return shaderSource;
-}
-
-- (void)getWidth:(nonnull int *)width
- height:(nonnull int *)height
- cropWidth:(nonnull int *)cropWidth
- cropHeight:(nonnull int *)cropHeight
- cropX:(nonnull int *)cropX
- cropY:(nonnull int *)cropY
- ofFrame:(nonnull RTCVideoFrame *)frame {
- RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
- *width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
- *height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
- *cropWidth = pixelBuffer.cropWidth;
- *cropHeight = pixelBuffer.cropHeight;
- *cropX = pixelBuffer.cropX;
- *cropY = pixelBuffer.cropY;
-}
-
-- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
- RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]);
- if (![super setupTexturesForFrame:frame]) {
- return NO;
- }
- CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
-
- id<MTLTexture> gpuTexture = nil;
- CVMetalTextureRef textureOut = nullptr;
- bool isARGB;
-
- int width = CVPixelBufferGetWidth(pixelBuffer);
- int height = CVPixelBufferGetHeight(pixelBuffer);
- OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
-
- MTLPixelFormat mtlPixelFormat;
- if (pixelFormat == kCVPixelFormatType_32BGRA) {
- mtlPixelFormat = MTLPixelFormatBGRA8Unorm;
- isARGB = false;
- } else if (pixelFormat == kCVPixelFormatType_32ARGB) {
- mtlPixelFormat = MTLPixelFormatRGBA8Unorm;
- isARGB = true;
- } else {
- RTC_NOTREACHED();
- return NO;
- }
-
- CVReturn result = CVMetalTextureCacheCreateTextureFromImage(
- kCFAllocatorDefault, _textureCache, pixelBuffer, nil, mtlPixelFormat,
- width, height, 0, &textureOut);
- if (result == kCVReturnSuccess) {
- gpuTexture = CVMetalTextureGetTexture(textureOut);
- }
- CVBufferRelease(textureOut);
-
- if (gpuTexture != nil) {
- _texture = gpuTexture;
- _uniformsBuffer =
- [[self currentMetalDevice] newBufferWithBytes:&isARGB
- length:sizeof(isARGB)
- options:MTLResourceCPUCacheModeDefaultCache];
- return YES;
- }
-
- return NO;
-}
-
-- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
- [renderEncoder setFragmentTexture:_texture atIndex:0];
- [renderEncoder setFragmentBuffer:_uniformsBuffer offset:0 atIndex:0];
-}
-
-- (void)dealloc {
- if (_textureCache) {
- CFRelease(_textureCache);
- }
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer+Private.h b/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer+Private.h
deleted file mode 100644
index e50ae77..0000000
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer+Private.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Metal/Metal.h>
-#import "RTCMTLRenderer.h"
-
-#define MTL_STRINGIFY(s) @ #s
-
-NS_ASSUME_NONNULL_BEGIN
-@interface RTCMTLRenderer (Private)
-- (nullable id<MTLDevice>)currentMetalDevice;
-- (NSString *)shaderSource;
-- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame;
-- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder;
-- (void)getWidth:(nonnull int *)width
- height:(nonnull int *)height
- cropWidth:(nonnull int *)cropWidth
- cropHeight:(nonnull int *)cropHeight
- cropX:(nonnull int *)cropX
- cropY:(nonnull int *)cropY
- ofFrame:(nonnull RTCVideoFrame *)frame;
-@end
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.h b/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.h
deleted file mode 100644
index f4a293b..0000000
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Foundation/Foundation.h>
-#if TARGET_OS_IPHONE
-#import <UIKit/UIKit.h>
-#else
-#import <AppKit/AppKit.h>
-#endif
-
-#import "WebRTC/RTCVideoFrame.h"
-
-NS_ASSUME_NONNULL_BEGIN
-/**
- * Protocol defining ability to render RTCVideoFrame in Metal enabled views.
- */
-@protocol RTCMTLRenderer <NSObject>
-
-/**
- * Method to be implemented to perform actual rendering of the provided frame.
- *
- * @param frame The frame to be rendered.
- */
-- (void)drawFrame:(RTCVideoFrame *)frame;
-
-/**
- * Sets the provided view as rendering destination if possible.
- *
- * If this is not possible, the method returns NO and callers of the method are responsible
- * for performing cleanup.
- */
-
-#if TARGET_OS_IOS
-- (BOOL)addRenderingDestination:(__kindof UIView *)view;
-#else
-- (BOOL)addRenderingDestination:(__kindof NSView *)view;
-#endif
-
-@end
-
-/**
- * Implementation of RTCMTLRenderer protocol.
- */
-NS_AVAILABLE(10_11, 9_0)
-@interface RTCMTLRenderer : NSObject <RTCMTLRenderer>
-
-/** @abstract A wrapped RTCVideoRotation, or nil.
- @discussion When not nil, the rotation of the actual frame is ignored when rendering.
- */
-@property(atomic, nullable) NSValue *rotationOverride;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.mm b/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.mm
deleted file mode 100644
index 5363aae..0000000
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.mm
+++ /dev/null
@@ -1,325 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCMTLRenderer+Private.h"
-
-#import <Metal/Metal.h>
-#import <MetalKit/MetalKit.h>
-
-#import "WebRTC/RTCLogging.h"
-#import "WebRTC/RTCVideoFrame.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-#include "api/video/video_rotation.h"
-#include "rtc_base/checks.h"
-
-// As defined in shaderSource.
-static NSString *const vertexFunctionName = @"vertexPassthrough";
-static NSString *const fragmentFunctionName = @"fragmentColorConversion";
-
-static NSString *const pipelineDescriptorLabel = @"RTCPipeline";
-static NSString *const commandBufferLabel = @"RTCCommandBuffer";
-static NSString *const renderEncoderLabel = @"RTCEncoder";
-static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame";
-
-// Computes the texture coordinates given rotation and cropping.
-static inline void getCubeVertexData(int cropX,
- int cropY,
- int cropWidth,
- int cropHeight,
- size_t frameWidth,
- size_t frameHeight,
- RTCVideoRotation rotation,
- float *buffer) {
- // The computed values are the adjusted texture coordinates, in [0..1].
- // For the left and top, 0.0 means no cropping and e.g. 0.2 means we're skipping 20% of the
- // left/top edge.
- // For the right and bottom, 1.0 means no cropping and e.g. 0.8 means we're skipping 20% of the
- // right/bottom edge (i.e. render up to 80% of the width/height).
- float cropLeft = cropX / (float)frameWidth;
- float cropRight = (cropX + cropWidth) / (float)frameWidth;
- float cropTop = cropY / (float)frameHeight;
- float cropBottom = (cropY + cropHeight) / (float)frameHeight;
-
- // These arrays map the view coordinates to texture coordinates, taking cropping and rotation
- // into account. The first two columns are view coordinates, the last two are texture coordinates.
- switch (rotation) {
- case RTCVideoRotation_0: {
- float values[16] = {-1.0, -1.0, cropLeft, cropBottom,
- 1.0, -1.0, cropRight, cropBottom,
- -1.0, 1.0, cropLeft, cropTop,
- 1.0, 1.0, cropRight, cropTop};
- memcpy(buffer, &values, sizeof(values));
- } break;
- case RTCVideoRotation_90: {
- float values[16] = {-1.0, -1.0, cropRight, cropBottom,
- 1.0, -1.0, cropRight, cropTop,
- -1.0, 1.0, cropLeft, cropBottom,
- 1.0, 1.0, cropLeft, cropTop};
- memcpy(buffer, &values, sizeof(values));
- } break;
- case RTCVideoRotation_180: {
- float values[16] = {-1.0, -1.0, cropRight, cropTop,
- 1.0, -1.0, cropLeft, cropTop,
- -1.0, 1.0, cropRight, cropBottom,
- 1.0, 1.0, cropLeft, cropBottom};
- memcpy(buffer, &values, sizeof(values));
- } break;
- case RTCVideoRotation_270: {
- float values[16] = {-1.0, -1.0, cropLeft, cropTop,
- 1.0, -1.0, cropLeft, cropBottom,
- -1.0, 1.0, cropRight, cropTop,
- 1.0, 1.0, cropRight, cropBottom};
- memcpy(buffer, &values, sizeof(values));
- } break;
- }
-}
-
-// The max number of command buffers in flight (submitted to GPU).
-// For now it is set to 1.
-// In the future we might use triple buffering if it improves performance.
-static const NSInteger kMaxInflightBuffers = 1;
-
-@implementation RTCMTLRenderer {
- __kindof MTKView *_view;
-
- // Controller.
- dispatch_semaphore_t _inflight_semaphore;
-
- // Renderer.
- id<MTLDevice> _device;
- id<MTLCommandQueue> _commandQueue;
- id<MTLLibrary> _defaultLibrary;
- id<MTLRenderPipelineState> _pipelineState;
-
- // Buffers.
- id<MTLBuffer> _vertexBuffer;
-
- // Values affecting the vertex buffer. Stored for comparison to avoid unnecessary recreation.
- int _oldFrameWidth;
- int _oldFrameHeight;
- int _oldCropWidth;
- int _oldCropHeight;
- int _oldCropX;
- int _oldCropY;
- RTCVideoRotation _oldRotation;
-}
-
-@synthesize rotationOverride = _rotationOverride;
-
-- (instancetype)init {
- if (self = [super init]) {
- _inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers);
- }
-
- return self;
-}
-
-- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
- return [self setupWithView:view];
-}
-
-#pragma mark - Private
-
-- (BOOL)setupWithView:(__kindof MTKView *)view {
- BOOL success = NO;
- if ([self setupMetal]) {
- _view = view;
- view.device = _device;
- view.preferredFramesPerSecond = 30;
- view.autoResizeDrawable = NO;
-
- [self loadAssets];
-
- float vertexBufferArray[16] = {0};
- _vertexBuffer = [_device newBufferWithBytes:vertexBufferArray
- length:sizeof(vertexBufferArray)
- options:MTLResourceCPUCacheModeWriteCombined];
- success = YES;
- }
- return success;
-}
-#pragma mark - Inheritance
-
-- (id<MTLDevice>)currentMetalDevice {
- return _device;
-}
-
-- (NSString *)shaderSource {
- RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
- return nil;
-}
-
-- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
- RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
-}
-
-- (void)getWidth:(int *)width
- height:(int *)height
- cropWidth:(int *)cropWidth
- cropHeight:(int *)cropHeight
- cropX:(int *)cropX
- cropY:(int *)cropY
- ofFrame:(nonnull RTCVideoFrame *)frame {
- RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
-}
-
-- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
- // Apply rotation override if set.
- RTCVideoRotation rotation;
- NSValue *rotationOverride = self.rotationOverride;
- if (rotationOverride) {
-#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
- if (@available(iOS 11, *)) {
- [rotationOverride getValue:&rotation size:sizeof(rotation)];
- } else
-#endif
- {
- [rotationOverride getValue:&rotation];
- }
- } else {
- rotation = frame.rotation;
- }
-
- int frameWidth, frameHeight, cropWidth, cropHeight, cropX, cropY;
- [self getWidth:&frameWidth
- height:&frameHeight
- cropWidth:&cropWidth
- cropHeight:&cropHeight
- cropX:&cropX
- cropY:&cropY
- ofFrame:frame];
-
- // Recompute the texture cropping and recreate vertexBuffer if necessary.
- if (cropX != _oldCropX || cropY != _oldCropY || cropWidth != _oldCropWidth ||
- cropHeight != _oldCropHeight || rotation != _oldRotation || frameWidth != _oldFrameWidth ||
- frameHeight != _oldFrameHeight) {
- getCubeVertexData(cropX,
- cropY,
- cropWidth,
- cropHeight,
- frameWidth,
- frameHeight,
- rotation,
- (float *)_vertexBuffer.contents);
- _oldCropX = cropX;
- _oldCropY = cropY;
- _oldCropWidth = cropWidth;
- _oldCropHeight = cropHeight;
- _oldRotation = rotation;
- _oldFrameWidth = frameWidth;
- _oldFrameHeight = frameHeight;
- }
-
- return YES;
-}
-
-#pragma mark - GPU methods
-
-- (BOOL)setupMetal {
- // Set the view to use the default device.
- _device = MTLCreateSystemDefaultDevice();
- if (!_device) {
- return NO;
- }
-
- // Create a new command queue.
- _commandQueue = [_device newCommandQueue];
-
- // Load metal library from source.
- NSError *libraryError = nil;
- NSString *shaderSource = [self shaderSource];
-
- id<MTLLibrary> sourceLibrary =
- [_device newLibraryWithSource:shaderSource options:NULL error:&libraryError];
-
- if (libraryError) {
- RTCLogError(@"Metal: Library with source failed\n%@", libraryError);
- return NO;
- }
-
- if (!sourceLibrary) {
- RTCLogError(@"Metal: Failed to load library. %@", libraryError);
- return NO;
- }
- _defaultLibrary = sourceLibrary;
-
- return YES;
-}
-
-- (void)loadAssets {
- id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
- id<MTLFunction> fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];
-
- MTLRenderPipelineDescriptor *pipelineDescriptor = [[MTLRenderPipelineDescriptor alloc] init];
- pipelineDescriptor.label = pipelineDescriptorLabel;
- pipelineDescriptor.vertexFunction = vertexFunction;
- pipelineDescriptor.fragmentFunction = fragmentFunction;
- pipelineDescriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat;
- pipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid;
- NSError *error = nil;
- _pipelineState = [_device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:&error];
-
- if (!_pipelineState) {
- RTCLogError(@"Metal: Failed to create pipeline state. %@", error);
- }
-}
-
-- (void)render {
- // Wait until the inflight (currently sent to GPU) command buffer
- // has completed the GPU work.
- dispatch_semaphore_wait(_inflight_semaphore, DISPATCH_TIME_FOREVER);
-
- id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
- commandBuffer.label = commandBufferLabel;
-
- __block dispatch_semaphore_t block_semaphore = _inflight_semaphore;
- [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> _Nonnull) {
- // GPU work completed.
- dispatch_semaphore_signal(block_semaphore);
- }];
-
- MTLRenderPassDescriptor *renderPassDescriptor = _view.currentRenderPassDescriptor;
- if (renderPassDescriptor) { // Valid drawable.
- id<MTLRenderCommandEncoder> renderEncoder =
- [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];
- renderEncoder.label = renderEncoderLabel;
-
- // Set context state.
- [renderEncoder pushDebugGroup:renderEncoderDebugGroup];
- [renderEncoder setRenderPipelineState:_pipelineState];
- [renderEncoder setVertexBuffer:_vertexBuffer offset:0 atIndex:0];
- [self uploadTexturesToRenderEncoder:renderEncoder];
-
- [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip
- vertexStart:0
- vertexCount:4
- instanceCount:1];
- [renderEncoder popDebugGroup];
- [renderEncoder endEncoding];
-
- [commandBuffer presentDrawable:_view.currentDrawable];
- }
-
- // CPU work is completed, GPU work can be started.
- [commandBuffer commit];
-}
-
-#pragma mark - RTCMTLRenderer
-
-- (void)drawFrame:(RTCVideoFrame *)frame {
- @autoreleasepool {
- if ([self setupTexturesForFrame:frame]) {
- [self render];
- }
- }
-}
-
-@end
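To make the cropping math in getCubeVertexData concrete, here is a standalone C illustration with values chosen purely for this example (a 1280x720 frame cropped to its centered 640x360 region):

#include <stddef.h>
#include <stdio.h>

int main(void) {
  int cropX = 320, cropY = 180, cropWidth = 640, cropHeight = 360;
  size_t frameWidth = 1280, frameHeight = 720;

  float cropLeft = cropX / (float)frameWidth;                    // 0.25
  float cropRight = (cropX + cropWidth) / (float)frameWidth;     // 0.75
  float cropTop = cropY / (float)frameHeight;                    // 0.25
  float cropBottom = (cropY + cropHeight) / (float)frameHeight;  // 0.75

  // With RTCVideoRotation_0 the quad corners map view coordinates (-1..1)
  // to these texture coordinates, so only the middle 50% of the frame in
  // each dimension is sampled.
  printf("left=%.2f right=%.2f top=%.2f bottom=%.2f\n",
         cropLeft, cropRight, cropTop, cropBottom);
  return 0;
}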
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m b/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m
deleted file mode 100644
index e0fd524..0000000
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m
+++ /dev/null
@@ -1,257 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCMTLVideoView.h"
-
-#import <Metal/Metal.h>
-#import <MetalKit/MetalKit.h>
-
-#import "WebRTC/RTCLogging.h"
-#import "WebRTC/RTCVideoFrame.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-#import "RTCMTLI420Renderer.h"
-#import "RTCMTLNV12Renderer.h"
-#import "RTCMTLRGBRenderer.h"
-
-// To avoid unrecognized symbol linker errors, we're taking advantage of the objc runtime.
-// Linking errors occur when compiling for architectures that don't support Metal.
-#define MTKViewClass NSClassFromString(@"MTKView")
-#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer")
-#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer")
-#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer")
-
-@interface RTCMTLVideoView () <MTKViewDelegate>
-@property(nonatomic) RTCMTLI420Renderer *rendererI420;
-@property(nonatomic) RTCMTLNV12Renderer *rendererNV12;
-@property(nonatomic) RTCMTLRGBRenderer *rendererRGB;
-@property(nonatomic) MTKView *metalView;
-@property(atomic) RTCVideoFrame *videoFrame;
-@property(nonatomic) CGSize videoFrameSize;
-@property(nonatomic) int64_t lastFrameTimeNs;
-@end
-
-@implementation RTCMTLVideoView
-
-@synthesize delegate = _delegate;
-@synthesize rendererI420 = _rendererI420;
-@synthesize rendererNV12 = _rendererNV12;
-@synthesize rendererRGB = _rendererRGB;
-@synthesize metalView = _metalView;
-@synthesize videoFrame = _videoFrame;
-@synthesize videoFrameSize = _videoFrameSize;
-@synthesize lastFrameTimeNs = _lastFrameTimeNs;
-@synthesize rotationOverride = _rotationOverride;
-
-- (instancetype)initWithFrame:(CGRect)frameRect {
- self = [super initWithFrame:frameRect];
- if (self) {
- [self configure];
- }
- return self;
-}
-
-- (instancetype)initWithCoder:(NSCoder *)aCoder {
- self = [super initWithCoder:aCoder];
- if (self) {
- [self configure];
- }
- return self;
-}
-
-- (BOOL)isEnabled {
- return !self.metalView.paused;
-}
-
-- (void)setEnabled:(BOOL)enabled {
- self.metalView.paused = !enabled;
-}
-
-- (UIViewContentMode)videoContentMode {
- return self.metalView.contentMode;
-}
-
-- (void)setVideoContentMode:(UIViewContentMode)mode {
- self.metalView.contentMode = mode;
-}
-
-#pragma mark - Private
-
-+ (BOOL)isMetalAvailable {
-#if defined(RTC_SUPPORTS_METAL)
- return MTLCreateSystemDefaultDevice() != nil;
-#else
- return NO;
-#endif
-}
-
-+ (MTKView *)createMetalView:(CGRect)frame {
- return [[MTKViewClass alloc] initWithFrame:frame];
-}
-
-+ (RTCMTLNV12Renderer *)createNV12Renderer {
- return [[RTCMTLNV12RendererClass alloc] init];
-}
-
-+ (RTCMTLI420Renderer *)createI420Renderer {
- return [[RTCMTLI420RendererClass alloc] init];
-}
-
-+ (RTCMTLRGBRenderer *)createRGBRenderer {
- return [[RTCMTLRGBRenderer alloc] init];
-}
-
-- (void)configure {
- NSAssert([RTCMTLVideoView isMetalAvailable], @"Metal not available on this device");
-
- self.metalView = [RTCMTLVideoView createMetalView:self.bounds];
- self.metalView.delegate = self;
- self.metalView.contentMode = UIViewContentModeScaleAspectFill;
- [self addSubview:self.metalView];
- self.videoFrameSize = CGSizeZero;
-}
-
-- (void)layoutSubviews {
- [super layoutSubviews];
-
- CGRect bounds = self.bounds;
- self.metalView.frame = bounds;
- if (!CGSizeEqualToSize(self.videoFrameSize, CGSizeZero)) {
- self.metalView.drawableSize = [self drawableSize];
- } else {
- self.metalView.drawableSize = bounds.size;
- }
-}
-
-#pragma mark - MTKViewDelegate methods
-
-- (void)drawInMTKView:(nonnull MTKView *)view {
- NSAssert(view == self.metalView, @"Receiving draw callbacks from foreign instance.");
- RTCVideoFrame *videoFrame = self.videoFrame;
- // Skip rendering if we've already rendered this frame.
- if (!videoFrame || videoFrame.timeStampNs == self.lastFrameTimeNs) {
- return;
- }
-
- RTCMTLRenderer *renderer;
- if ([videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
- RTCCVPixelBuffer *buffer = (RTCCVPixelBuffer*)videoFrame.buffer;
- const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
- if (pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB) {
- if (!self.rendererRGB) {
- self.rendererRGB = [RTCMTLVideoView createRGBRenderer];
- if (![self.rendererRGB addRenderingDestination:self.metalView]) {
- self.rendererRGB = nil;
- RTCLogError(@"Failed to create RGB renderer");
- return;
- }
- }
- renderer = self.rendererRGB;
- } else {
- if (!self.rendererNV12) {
- self.rendererNV12 = [RTCMTLVideoView createNV12Renderer];
- if (![self.rendererNV12 addRenderingDestination:self.metalView]) {
- self.rendererNV12 = nil;
- RTCLogError(@"Failed to create NV12 renderer");
- return;
- }
- }
- renderer = self.rendererNV12;
- }
- } else {
- if (!self.rendererI420) {
- self.rendererI420 = [RTCMTLVideoView createI420Renderer];
- if (![self.rendererI420 addRenderingDestination:self.metalView]) {
- self.rendererI420 = nil;
- RTCLogError(@"Failed to create I420 renderer");
- return;
- }
- }
- renderer = self.rendererI420;
- }
-
- renderer.rotationOverride = self.rotationOverride;
-
- [renderer drawFrame:videoFrame];
- self.lastFrameTimeNs = videoFrame.timeStampNs;
-}
-
-- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
-}
-
-#pragma mark -
-
-- (void)setRotationOverride:(NSValue *)rotationOverride {
- _rotationOverride = rotationOverride;
-
- self.metalView.drawableSize = [self drawableSize];
- [self setNeedsLayout];
-}
-
-- (RTCVideoRotation)frameRotation {
- if (self.rotationOverride) {
- RTCVideoRotation rotation;
- if (@available(iOS 11, *)) {
- [self.rotationOverride getValue:&rotation size:sizeof(rotation)];
- } else {
- [self.rotationOverride getValue:&rotation];
- }
- return rotation;
- }
-
- return self.videoFrame.rotation;
-}
-
-- (CGSize)drawableSize {
- // Flip width/height if the rotations are not the same.
- CGSize videoFrameSize = self.videoFrameSize;
- RTCVideoRotation frameRotation = [self frameRotation];
-
- BOOL useLandscape =
- (frameRotation == RTCVideoRotation_0) || (frameRotation == RTCVideoRotation_180);
- BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) ||
- (self.videoFrame.rotation == RTCVideoRotation_180);
-
- if (useLandscape == sizeIsLandscape) {
- return videoFrameSize;
- } else {
- return CGSizeMake(videoFrameSize.height, videoFrameSize.width);
- }
-}
-
-#pragma mark - RTCVideoRenderer
-
-- (void)setSize:(CGSize)size {
- __weak RTCMTLVideoView *weakSelf = self;
- dispatch_async(dispatch_get_main_queue(), ^{
- RTCMTLVideoView *strongSelf = weakSelf;
-
- strongSelf.videoFrameSize = size;
- CGSize drawableSize = [strongSelf drawableSize];
-
- strongSelf.metalView.drawableSize = drawableSize;
- [strongSelf setNeedsLayout];
- [strongSelf.delegate videoView:self didChangeVideoSize:size];
- });
-}
-
-- (void)renderFrame:(nullable RTCVideoFrame *)frame {
- if (!self.isEnabled) {
- return;
- }
-
- if (frame == nil) {
- RTCLogInfo(@"Incoming frame is nil. Exiting render callback.");
- return;
- }
- self.videoFrame = frame;
-}
-
-@end
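The width/height flip in -drawableSize above is easy to misread; here is a small self-contained sketch of the same decision, with the rotation comparison reduced to two booleans (the function name is invented for illustration):

#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>

// Flip width/height only when the orientation used for rendering and the
// orientation the frame size was reported in disagree.
static CGSize DrawableSizeFor(CGSize videoFrameSize,
                              BOOL renderedAsLandscape,
                              BOOL sizeIsLandscape) {
  if (renderedAsLandscape == sizeIsLandscape) {
    return videoFrameSize;
  }
  return CGSizeMake(videoFrameSize.height, videoFrameSize.width);
}

// Example: a 640x360 frame captured with RTCVideoRotation_0 (landscape) but
// rendered with a 90 degree rotationOverride yields a 360x640 drawable.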
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCAudioSource+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCAudioSource+Private.h
deleted file mode 100644
index 63fff68..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCAudioSource+Private.h
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCAudioSource.h"
-
-#import "RTCMediaSource+Private.h"
-
-@interface RTCAudioSource ()
-
-/**
- * The AudioSourceInterface object passed to this RTCAudioSource during
- * construction.
- */
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::AudioSourceInterface> nativeAudioSource;
-
-/** Initialize an RTCAudioSource from a native AudioSourceInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
- nativeAudioSource:(rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource
- NS_DESIGNATED_INITIALIZER;
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
- nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
- type:(RTCMediaSourceType)type NS_UNAVAILABLE;
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCAudioSource.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCAudioSource.mm
deleted file mode 100644
index a6822f6..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCAudioSource.mm
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCAudioSource+Private.h"
-
-#include "rtc_base/checks.h"
-
-@implementation RTCAudioSource {
-}
-
-@synthesize volume = _volume;
-@synthesize nativeAudioSource = _nativeAudioSource;
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeAudioSource:
- (rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource {
- RTC_DCHECK(factory);
- RTC_DCHECK(nativeAudioSource);
-
- if (self = [super initWithFactory:factory
- nativeMediaSource:nativeAudioSource
- type:RTCMediaSourceTypeAudio]) {
- _nativeAudioSource = nativeAudioSource;
- }
- return self;
-}
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
- type:(RTCMediaSourceType)type {
- RTC_NOTREACHED();
- return nil;
-}
-
-- (NSString *)description {
- NSString *stateString = [[self class] stringForState:self.state];
- return [NSString stringWithFormat:@"RTCAudioSource( %p ): %@", self, stateString];
-}
-
-- (void)setVolume:(double)volume {
- _volume = volume;
- _nativeAudioSource->SetVolume(volume);
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCAudioTrack+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCAudioTrack+Private.h
deleted file mode 100644
index 7a2f2c8..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCAudioTrack+Private.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCAudioTrack.h"
-
-#include "api/mediastreaminterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@class RTCPeerConnectionFactory;
-@interface RTCAudioTrack ()
-
-/** AudioTrackInterface created or passed in at construction. */
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::AudioTrackInterface> nativeAudioTrack;
-
-/** Initialize an RTCAudioTrack with an id. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- source:(RTCAudioSource *)source
- trackId:(NSString *)trackId;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCAudioTrack.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCAudioTrack.mm
deleted file mode 100644
index 73de401..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCAudioTrack.mm
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCAudioTrack+Private.h"
-
-#import "NSString+StdString.h"
-#import "RTCAudioSource+Private.h"
-#import "RTCMediaStreamTrack+Private.h"
-#import "RTCPeerConnectionFactory+Private.h"
-
-#include "rtc_base/checks.h"
-
-@implementation RTCAudioTrack
-
-@synthesize source = _source;
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- source:(RTCAudioSource *)source
- trackId:(NSString *)trackId {
- RTC_DCHECK(factory);
- RTC_DCHECK(source);
- RTC_DCHECK(trackId.length);
-
- std::string nativeId = [NSString stdStringForString:trackId];
- rtc::scoped_refptr<webrtc::AudioTrackInterface> track =
- factory.nativeFactory->CreateAudioTrack(nativeId, source.nativeAudioSource);
- if (self = [self initWithFactory:factory nativeTrack:track type:RTCMediaStreamTrackTypeAudio]) {
- _source = source;
- }
- return self;
-}
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
- type:(RTCMediaStreamTrackType)type {
- NSParameterAssert(factory);
- NSParameterAssert(nativeTrack);
- NSParameterAssert(type == RTCMediaStreamTrackTypeAudio);
- return [super initWithFactory:factory nativeTrack:nativeTrack type:type];
-}
-
-
-- (RTCAudioSource *)source {
- if (!_source) {
- rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
- self.nativeAudioTrack->GetSource();
- if (source) {
- _source =
- [[RTCAudioSource alloc] initWithFactory:self.factory nativeAudioSource:source.get()];
- }
- }
- return _source;
-}
-
-#pragma mark - Private
-
-- (rtc::scoped_refptr<webrtc::AudioTrackInterface>)nativeAudioTrack {
- return static_cast<webrtc::AudioTrackInterface *>(self.nativeTrack.get());
-}
-
-@end
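For context, a sketch of typical application-side creation of these objects, assuming the audioSourceWithConstraints: and audioTrackWithSource:trackId: methods declared on RTCPeerConnectionFactory; the track id is arbitrary.

#import "WebRTC/RTCAudioSource.h"
#import "WebRTC/RTCAudioTrack.h"
#import "WebRTC/RTCPeerConnectionFactory.h"

static RTCAudioTrack *CreateAudioTrack(RTCPeerConnectionFactory *factory) {
  RTCAudioSource *source = [factory audioSourceWithConstraints:nil];
  RTCAudioTrack *track = [factory audioTrackWithSource:source trackId:@"audio0"];
  // The setter forwards to AudioSourceInterface::SetVolume() on the wrapped
  // native source (see RTCAudioSource.mm above).
  source.volume = 5.0;
  return track;
}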
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m b/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m
deleted file mode 100644
index 8f93937..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m
+++ /dev/null
@@ -1,498 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Foundation/Foundation.h>
-
-#import "WebRTC/RTCCameraVideoCapturer.h"
-#import "WebRTC/RTCLogging.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-#if TARGET_OS_IPHONE
-#import "WebRTC/UIDevice+RTCDevice.h"
-#endif
-
-#import "AVCaptureSession+DevicePosition.h"
-#import "RTCDispatcher+Private.h"
-
-const int64_t kNanosecondsPerSecond = 1000000000;
-
-@interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate>
-@property(nonatomic, readonly) dispatch_queue_t frameQueue;
-@end
-
-@implementation RTCCameraVideoCapturer {
- AVCaptureVideoDataOutput *_videoDataOutput;
- AVCaptureSession *_captureSession;
- AVCaptureDevice *_currentDevice;
- FourCharCode _preferredOutputPixelFormat;
- FourCharCode _outputPixelFormat;
- BOOL _hasRetriedOnFatalError;
- BOOL _isRunning;
- // Will the session be running once all asynchronous operations have been completed?
- BOOL _willBeRunning;
- RTCVideoRotation _rotation;
-#if TARGET_OS_IPHONE
- UIDeviceOrientation _orientation;
-#endif
-}
-
-@synthesize frameQueue = _frameQueue;
-@synthesize captureSession = _captureSession;
-
-- (instancetype)init {
- return [self initWithDelegate:nil captureSession:[[AVCaptureSession alloc] init]];
-}
-
-- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
- return [self initWithDelegate:delegate captureSession:[[AVCaptureSession alloc] init]];
-}
-
-// This initializer is used for testing.
-- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate
- captureSession:(AVCaptureSession *)captureSession {
- if (self = [super initWithDelegate:delegate]) {
- // Create the capture session and all relevant inputs and outputs. We need
- // to do this in init because the application may want the capture session
- // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All objects
- // created here are retained until dealloc and never recreated.
- if (![self setupCaptureSession:captureSession]) {
- return nil;
- }
- NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
-#if TARGET_OS_IPHONE
- _orientation = UIDeviceOrientationPortrait;
- _rotation = RTCVideoRotation_90;
- [center addObserver:self
- selector:@selector(deviceOrientationDidChange:)
- name:UIDeviceOrientationDidChangeNotification
- object:nil];
- [center addObserver:self
- selector:@selector(handleCaptureSessionInterruption:)
- name:AVCaptureSessionWasInterruptedNotification
- object:_captureSession];
- [center addObserver:self
- selector:@selector(handleCaptureSessionInterruptionEnded:)
- name:AVCaptureSessionInterruptionEndedNotification
- object:_captureSession];
- [center addObserver:self
- selector:@selector(handleApplicationDidBecomeActive:)
- name:UIApplicationDidBecomeActiveNotification
- object:[UIApplication sharedApplication]];
-#endif
- [center addObserver:self
- selector:@selector(handleCaptureSessionRuntimeError:)
- name:AVCaptureSessionRuntimeErrorNotification
- object:_captureSession];
- [center addObserver:self
- selector:@selector(handleCaptureSessionDidStartRunning:)
- name:AVCaptureSessionDidStartRunningNotification
- object:_captureSession];
- [center addObserver:self
- selector:@selector(handleCaptureSessionDidStopRunning:)
- name:AVCaptureSessionDidStopRunningNotification
- object:_captureSession];
- }
- return self;
-}
-
-- (void)dealloc {
- NSAssert(
- !_willBeRunning,
- @"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?");
- [[NSNotificationCenter defaultCenter] removeObserver:self];
-}
-
-+ (NSArray<AVCaptureDevice *> *)captureDevices {
-#if defined(WEBRTC_IOS) && defined(__IPHONE_10_0) && \
- __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0
- AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession
- discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
- mediaType:AVMediaTypeVideo
- position:AVCaptureDevicePositionUnspecified];
- return session.devices;
-#else
- return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
-#endif
-}
-
-+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
- // Support opening the device in any format. We make sure it's converted to a format we
- // can handle, if needed, in the method `-setupVideoDataOutput`.
- return device.formats;
-}
-
-- (FourCharCode)preferredOutputPixelFormat {
- return _preferredOutputPixelFormat;
-}
-
-- (void)startCaptureWithDevice:(AVCaptureDevice *)device
- format:(AVCaptureDeviceFormat *)format
- fps:(NSInteger)fps {
- [self startCaptureWithDevice:device format:format fps:fps completionHandler:nil];
-}
-
-- (void)stopCapture {
- [self stopCaptureWithCompletionHandler:nil];
-}
-
-- (void)startCaptureWithDevice:(AVCaptureDevice *)device
- format:(AVCaptureDeviceFormat *)format
- fps:(NSInteger)fps
- completionHandler:(nullable void (^)(NSError *))completionHandler {
- _willBeRunning = YES;
- [RTCDispatcher
- dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
- block:^{
- RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps);
-
-#if TARGET_OS_IPHONE
- [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
-#endif
-
- _currentDevice = device;
-
- NSError *error = nil;
- if (![_currentDevice lockForConfiguration:&error]) {
- RTCLogError(
- @"Failed to lock device %@. Error: %@", _currentDevice, error.userInfo);
- if (completionHandler) {
- completionHandler(error);
- }
- _willBeRunning = NO;
- return;
- }
- [self reconfigureCaptureSessionInput];
- [self updateOrientation];
- [self updateDeviceCaptureFormat:format fps:fps];
- [self updateVideoDataOutputPixelFormat:format];
- [_captureSession startRunning];
- [_currentDevice unlockForConfiguration];
- _isRunning = YES;
- if (completionHandler) {
- completionHandler(nil);
- }
- }];
-}
-
-- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
- _willBeRunning = NO;
- [RTCDispatcher
- dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
- block:^{
- RTCLogInfo("Stop");
- _currentDevice = nil;
- for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
- [_captureSession removeInput:oldInput];
- }
- [_captureSession stopRunning];
-
-#if TARGET_OS_IPHONE
- [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
-#endif
- _isRunning = NO;
- if (completionHandler) {
- completionHandler();
- }
- }];
-}
-
-#pragma mark iOS notifications
-
-#if TARGET_OS_IPHONE
-- (void)deviceOrientationDidChange:(NSNotification *)notification {
- [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
- block:^{
- [self updateOrientation];
- }];
-}
-#endif
-
-#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
-
-- (void)captureOutput:(AVCaptureOutput *)captureOutput
- didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
- fromConnection:(AVCaptureConnection *)connection {
- NSParameterAssert(captureOutput == _videoDataOutput);
-
- if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
- !CMSampleBufferDataIsReady(sampleBuffer)) {
- return;
- }
-
- CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
- if (pixelBuffer == nil) {
- return;
- }
-
-#if TARGET_OS_IPHONE
- // Default to portrait orientation on iPhone.
- BOOL usingFrontCamera = NO;
- // Check the image's EXIF for the camera the image came from, because the image could have
- // been delayed since we set alwaysDiscardsLateVideoFrames to NO.
- AVCaptureDevicePosition cameraPosition =
- [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
- if (cameraPosition != AVCaptureDevicePositionUnspecified) {
- usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition;
- } else {
- AVCaptureDeviceInput *deviceInput =
- (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
- usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position;
- }
- switch (_orientation) {
- case UIDeviceOrientationPortrait:
- _rotation = RTCVideoRotation_90;
- break;
- case UIDeviceOrientationPortraitUpsideDown:
- _rotation = RTCVideoRotation_270;
- break;
- case UIDeviceOrientationLandscapeLeft:
- _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
- break;
- case UIDeviceOrientationLandscapeRight:
- _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
- break;
- case UIDeviceOrientationFaceUp:
- case UIDeviceOrientationFaceDown:
- case UIDeviceOrientationUnknown:
- // Ignore.
- break;
- }
-#else
- // No rotation on Mac.
- _rotation = RTCVideoRotation_0;
-#endif
-
- RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
- int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
- kNanosecondsPerSecond;
- RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
- rotation:_rotation
- timeStampNs:timeStampNs];
- [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
-}
-
-- (void)captureOutput:(AVCaptureOutput *)captureOutput
- didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
- fromConnection:(AVCaptureConnection *)connection {
- RTCLogError(@"Dropped sample buffer.");
-}
-
-#pragma mark - AVCaptureSession notifications
-
-- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
- NSString *reasonString = nil;
-#if TARGET_OS_IPHONE
- NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
- if (reason) {
- switch (reason.intValue) {
- case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
- reasonString = @"VideoDeviceNotAvailableInBackground";
- break;
- case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
- reasonString = @"AudioDeviceInUseByAnotherClient";
- break;
- case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
- reasonString = @"VideoDeviceInUseByAnotherClient";
- break;
- case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
- reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
- break;
- }
- }
-#endif
- RTCLog(@"Capture session interrupted: %@", reasonString);
-}
-
-- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
- RTCLog(@"Capture session interruption ended.");
-}
-
-- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
- NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
- RTCLogError(@"Capture session runtime error: %@", error);
-
- [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
- block:^{
-#if TARGET_OS_IPHONE
- if (error.code == AVErrorMediaServicesWereReset) {
- [self handleNonFatalError];
- } else {
- [self handleFatalError];
- }
-#else
- [self handleFatalError];
-#endif
- }];
-}
-
-- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
- RTCLog(@"Capture session started.");
-
- [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
- block:^{
- // If we successfully restarted after an unknown error,
- // allow future retries on fatal errors.
- _hasRetriedOnFatalError = NO;
- }];
-}
-
-- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
- RTCLog(@"Capture session stopped.");
-}
-
-- (void)handleFatalError {
- [RTCDispatcher
- dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
- block:^{
- if (!_hasRetriedOnFatalError) {
- RTCLogWarning(@"Attempting to recover from fatal capture error.");
- [self handleNonFatalError];
- _hasRetriedOnFatalError = YES;
- } else {
- RTCLogError(@"Previous fatal error recovery failed.");
- }
- }];
-}
-
-- (void)handleNonFatalError {
- [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
- block:^{
- RTCLog(@"Restarting capture session after error.");
- if (_isRunning) {
- [_captureSession startRunning];
- }
- }];
-}
-
-#if TARGET_OS_IPHONE
-
-#pragma mark - UIApplication notifications
-
-- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
- [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
- block:^{
- if (_isRunning && !_captureSession.isRunning) {
- RTCLog(@"Restarting capture session on active.");
- [_captureSession startRunning];
- }
- }];
-}
-
-#endif // TARGET_OS_IPHONE
-
-#pragma mark - Private
-
-- (dispatch_queue_t)frameQueue {
- if (!_frameQueue) {
- _frameQueue =
- dispatch_queue_create("org.webrtc.cameravideocapturer.video", DISPATCH_QUEUE_SERIAL);
- dispatch_set_target_queue(_frameQueue,
- dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
- }
- return _frameQueue;
-}
-
-- (BOOL)setupCaptureSession:(AVCaptureSession *)captureSession {
- NSAssert(_captureSession == nil, @"Setup capture session called twice.");
- _captureSession = captureSession;
-#if defined(WEBRTC_IOS)
- _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
- _captureSession.usesApplicationAudioSession = NO;
-#endif
- [self setupVideoDataOutput];
- // Add the output.
- if (![_captureSession canAddOutput:_videoDataOutput]) {
- RTCLogError(@"Video data output unsupported.");
- return NO;
- }
- [_captureSession addOutput:_videoDataOutput];
-
- return YES;
-}
-
-- (void)setupVideoDataOutput {
- NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
- AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
-
- // `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel formats supported by the
- // device with the most efficient output format first. Find the first format that we support.
- NSSet<NSNumber *> *supportedPixelFormats = [RTCCVPixelBuffer supportedPixelFormats];
- NSMutableOrderedSet *availablePixelFormats =
- [NSMutableOrderedSet orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes];
- [availablePixelFormats intersectSet:supportedPixelFormats];
- NSNumber *pixelFormat = availablePixelFormats.firstObject;
- NSAssert(pixelFormat, @"Output device has no supported formats.");
-
- _preferredOutputPixelFormat = [pixelFormat unsignedIntValue];
- _outputPixelFormat = _preferredOutputPixelFormat;
- videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : pixelFormat};
- videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
- [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
- _videoDataOutput = videoDataOutput;
-}
-
-- (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format {
- FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
- if (![[RTCCVPixelBuffer supportedPixelFormats] containsObject:@(mediaSubType)]) {
- mediaSubType = _preferredOutputPixelFormat;
- }
-
- if (mediaSubType != _outputPixelFormat) {
- _outputPixelFormat = mediaSubType;
- _videoDataOutput.videoSettings =
- @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(mediaSubType) };
- }
-}
-
-#pragma mark - Private, called inside capture queue
-
-- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
- NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
- @"updateDeviceCaptureFormat must be called on the capture queue.");
- @try {
- _currentDevice.activeFormat = format;
- _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
- } @catch (NSException *exception) {
- RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
- return;
- }
-}
-
-- (void)reconfigureCaptureSessionInput {
- NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
- @"reconfigureCaptureSessionInput must be called on the capture queue.");
- NSError *error = nil;
- AVCaptureDeviceInput *input =
- [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
- if (!input) {
- RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
- return;
- }
- [_captureSession beginConfiguration];
- for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
- [_captureSession removeInput:oldInput];
- }
- if ([_captureSession canAddInput:input]) {
- [_captureSession addInput:input];
- } else {
- RTCLogError(@"Cannot add camera as an input to the session.");
- }
- [_captureSession commitConfiguration];
-}
-
-- (void)updateOrientation {
- NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
- @"updateOrientation must be called on the capture queue.");
-#if TARGET_OS_IPHONE
- _orientation = [UIDevice currentDevice].orientation;
-#endif
-}
-
-@end
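A sketch of typical capturer startup built from the methods in this file: pick a device and a format, then start capturing at 30 fps. Real callers usually filter devices by position and formats by resolution; the "first object" choices here are for brevity only.

#import <AVFoundation/AVFoundation.h>

#import "WebRTC/RTCCameraVideoCapturer.h"

// Returns the running capturer; the caller must keep it alive and call
// -stopCapture before releasing it (see the dealloc assertion above).
static RTCCameraVideoCapturer *StartCamera(id<RTCVideoCapturerDelegate> delegate) {
  RTCCameraVideoCapturer *capturer =
      [[RTCCameraVideoCapturer alloc] initWithDelegate:delegate];
  AVCaptureDevice *device = [RTCCameraVideoCapturer captureDevices].firstObject;
  AVCaptureDeviceFormat *format =
      [RTCCameraVideoCapturer supportedFormatsForDevice:device].firstObject;
  [capturer startCaptureWithDevice:device
                            format:format
                               fps:30
                 completionHandler:^(NSError *error) {
                   if (error) {
                     NSLog(@"Capture failed to start: %@", error);
                   }
                 }];
  return capturer;
}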
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCCertificate.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCCertificate.mm
deleted file mode 100644
index 34fa837..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCCertificate.mm
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCCertificate.h"
-#import "WebRTC/RTCLogging.h"
-
-#include "rtc_base/logging.h"
-#include "rtc_base/rtccertificategenerator.h"
-#include "rtc_base/sslidentity.h"
-
-@implementation RTCCertificate
-
-@synthesize private_key = _private_key;
-@synthesize certificate = _certificate;
-
-- (id)copyWithZone:(NSZone *)zone {
- id copy = [[[self class] alloc] initWithPrivateKey:[self.private_key copyWithZone:zone]
- certificate:[self.certificate copyWithZone:zone]];
- return copy;
-}
-
-- (instancetype)initWithPrivateKey:(NSString *)private_key certificate:(NSString *)certificate {
- if (self = [super init]) {
- _private_key = [private_key copy];
- _certificate = [certificate copy];
- }
- return self;
-}
-
-+ (nullable RTCCertificate *)generateCertificateWithParams:(NSDictionary *)params {
- rtc::KeyType keyType = rtc::KT_ECDSA;
- NSString *keyTypeString = [params valueForKey:@"name"];
- if (keyTypeString && [keyTypeString isEqualToString:@"RSASSA-PKCS1-v1_5"]) {
- keyType = rtc::KT_RSA;
- }
-
- NSNumber *expires = [params valueForKey:@"expires"];
- rtc::scoped_refptr<rtc::RTCCertificate> cc_certificate = nullptr;
- if (expires != nil) {
- uint64_t expirationTimestamp = [expires unsignedLongLongValue];
- cc_certificate = rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType),
- expirationTimestamp);
- } else {
- cc_certificate =
- rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType), absl::nullopt);
- }
- if (!cc_certificate) {
- RTCLogError(@"Failed to generate certificate.");
- return nullptr;
- }
- // Grab the PEMs and create an NS RTCCertificate.
- rtc::RTCCertificatePEM pem = cc_certificate->ToPEM();
- std::string pem_private_key = pem.private_key();
- std::string pem_certificate = pem.certificate();
- RTC_LOG(LS_INFO) << "CERT PEM ";
- RTC_LOG(LS_INFO) << pem_certificate;
-
- RTCCertificate *cert = [[RTCCertificate alloc] initWithPrivateKey:@(pem_private_key.c_str())
- certificate:@(pem_certificate.c_str())];
- return cert;
-}
-
-@end
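A sketch of the params dictionary handled above: "name" selects the key type (the string "RSASSA-PKCS1-v1_5" selects RSA, anything else falls back to ECDSA) and "expires" is an expiration value passed straight through to the native certificate generator. The helper name is for illustration only.

#import "WebRTC/RTCCertificate.h"

static RTCCertificate *GenerateRSACertificate(void) {
  NSDictionary *params = @{@"name" : @"RSASSA-PKCS1-v1_5", @"expires" : @100000};
  // Returns nil (logged above as an error) if generation fails.
  return [RTCCertificate generateCertificateWithParams:params];
}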
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Native.h b/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Native.h
index 64653e4..529aa8d 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Native.h
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Native.h
@@ -8,21 +8,4 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "WebRTC/RTCConfiguration.h"
-
-#include "api/peerconnectioninterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCConfiguration ()
-
-/** Optional TurnCustomizer.
- * With this class one can modify outgoing TURN messages.
- * The object passed in must remain valid until PeerConnection::Close() is
- * called.
- */
-@property(nonatomic, nullable) webrtc::TurnCustomizer* turnCustomizer;
-
-@end
-
-NS_ASSUME_NONNULL_END
+#import "api/peerconnection/RTCConfiguration+Native.h"
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Private.h
deleted file mode 100644
index ad57336..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Private.h
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCConfiguration.h"
-
-#include "api/peerconnectioninterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCConfiguration ()
-
-+ (webrtc::PeerConnectionInterface::IceTransportsType)nativeTransportsTypeForTransportPolicy:
- (RTCIceTransportPolicy)policy;
-
-+ (RTCIceTransportPolicy)transportPolicyForTransportsType:
- (webrtc::PeerConnectionInterface::IceTransportsType)nativeType;
-
-+ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy;
-
-+ (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy:
- (RTCBundlePolicy)policy;
-
-+ (RTCBundlePolicy)bundlePolicyForNativePolicy:
- (webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy;
-
-+ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy;
-
-+ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy:
- (RTCRtcpMuxPolicy)policy;
-
-+ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy:
- (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy;
-
-+ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy;
-
-+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeTcpCandidatePolicyForPolicy:
- (RTCTcpCandidatePolicy)policy;
-
-+ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy:
- (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy;
-
-+ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy;
-
-+ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativeCandidateNetworkPolicyForPolicy:
- (RTCCandidateNetworkPolicy)policy;
-
-+ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy:
- (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativePolicy;
-
-+ (NSString *)stringForCandidateNetworkPolicy:(RTCCandidateNetworkPolicy)policy;
-
-+ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:(RTCEncryptionKeyType)keyType;
-
-+ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics:(RTCSdpSemantics)sdpSemantics;
-
-+ (RTCSdpSemantics)sdpSemanticsForNativeSdpSemantics:(webrtc::SdpSemantics)sdpSemantics;
-
-+ (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics;
-
-/**
- * RTCConfiguration struct representation of this RTCConfiguration. This is
- * needed to pass to the underlying C++ APIs.
- */
-- (nullable webrtc::PeerConnectionInterface::RTCConfiguration *)createNativeConfiguration;
-
-- (instancetype)initWithNativeConfiguration:
- (const webrtc::PeerConnectionInterface::RTCConfiguration &)config NS_DESIGNATED_INITIALIZER;
-
-@end
-
-NS_ASSUME_NONNULL_END
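For orientation, a brief Objective-C++ sketch of how this private category is consumed inside the SDK: the wrapper is converted to the native configuration before a peer connection is created, and the caller takes ownership of the returned pointer. The helper name and the surrounding flow are assumptions for illustration.

#import "RTCConfiguration+Private.h"

#include <memory>

static void ApplyConfiguration(RTCConfiguration *config) {
  std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> nativeConfig(
      [config createNativeConfiguration]);
  if (!nativeConfig) {
    return;  // e.g. building a certificate from the configured PEM failed.
  }
  // *nativeConfig can now be passed to the native CreatePeerConnection() call.
}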
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm
deleted file mode 100644
index 1748377..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm
+++ /dev/null
@@ -1,460 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCConfiguration+Private.h"
-
-#include <memory>
-
-#import "RTCConfiguration+Native.h"
-#import "RTCIceServer+Private.h"
-#import "RTCIntervalRange+Private.h"
-#import "WebRTC/RTCLogging.h"
-
-#include "rtc_base/rtccertificategenerator.h"
-#include "rtc_base/sslidentity.h"
-
-@implementation RTCConfiguration
-
-@synthesize iceServers = _iceServers;
-@synthesize certificate = _certificate;
-@synthesize iceTransportPolicy = _iceTransportPolicy;
-@synthesize bundlePolicy = _bundlePolicy;
-@synthesize rtcpMuxPolicy = _rtcpMuxPolicy;
-@synthesize tcpCandidatePolicy = _tcpCandidatePolicy;
-@synthesize candidateNetworkPolicy = _candidateNetworkPolicy;
-@synthesize continualGatheringPolicy = _continualGatheringPolicy;
-@synthesize maxIPv6Networks = _maxIPv6Networks;
-@synthesize disableLinkLocalNetworks = _disableLinkLocalNetworks;
-@synthesize audioJitterBufferMaxPackets = _audioJitterBufferMaxPackets;
-@synthesize audioJitterBufferFastAccelerate = _audioJitterBufferFastAccelerate;
-@synthesize iceConnectionReceivingTimeout = _iceConnectionReceivingTimeout;
-@synthesize iceBackupCandidatePairPingInterval =
- _iceBackupCandidatePairPingInterval;
-@synthesize keyType = _keyType;
-@synthesize iceCandidatePoolSize = _iceCandidatePoolSize;
-@synthesize shouldPruneTurnPorts = _shouldPruneTurnPorts;
-@synthesize shouldPresumeWritableWhenFullyRelayed =
- _shouldPresumeWritableWhenFullyRelayed;
-@synthesize iceCheckMinInterval = _iceCheckMinInterval;
-@synthesize iceRegatherIntervalRange = _iceRegatherIntervalRange;
-@synthesize sdpSemantics = _sdpSemantics;
-@synthesize turnCustomizer = _turnCustomizer;
-@synthesize activeResetSrtpParams = _activeResetSrtpParams;
-
-- (instancetype)init {
- // Copy defaults.
- webrtc::PeerConnectionInterface::RTCConfiguration config(
- webrtc::PeerConnectionInterface::RTCConfigurationType::kAggressive);
- return [self initWithNativeConfiguration:config];
-}
-
-- (instancetype)initWithNativeConfiguration:
- (const webrtc::PeerConnectionInterface::RTCConfiguration &)config {
- if (self = [super init]) {
- NSMutableArray *iceServers = [NSMutableArray array];
- for (const webrtc::PeerConnectionInterface::IceServer& server : config.servers) {
- RTCIceServer *iceServer = [[RTCIceServer alloc] initWithNativeServer:server];
- [iceServers addObject:iceServer];
- }
- _iceServers = iceServers;
- if (!config.certificates.empty()) {
- rtc::scoped_refptr<rtc::RTCCertificate> native_cert;
- native_cert = config.certificates[0];
- rtc::RTCCertificatePEM native_pem = native_cert->ToPEM();
- _certificate =
- [[RTCCertificate alloc] initWithPrivateKey:@(native_pem.private_key().c_str())
- certificate:@(native_pem.certificate().c_str())];
- }
- _iceTransportPolicy =
- [[self class] transportPolicyForTransportsType:config.type];
- _bundlePolicy =
- [[self class] bundlePolicyForNativePolicy:config.bundle_policy];
- _rtcpMuxPolicy =
- [[self class] rtcpMuxPolicyForNativePolicy:config.rtcp_mux_policy];
- _tcpCandidatePolicy = [[self class] tcpCandidatePolicyForNativePolicy:
- config.tcp_candidate_policy];
- _candidateNetworkPolicy = [[self class]
- candidateNetworkPolicyForNativePolicy:config.candidate_network_policy];
- webrtc::PeerConnectionInterface::ContinualGatheringPolicy nativePolicy =
- config.continual_gathering_policy;
- _continualGatheringPolicy =
- [[self class] continualGatheringPolicyForNativePolicy:nativePolicy];
- _maxIPv6Networks = config.max_ipv6_networks;
- _disableLinkLocalNetworks = config.disable_link_local_networks;
- _audioJitterBufferMaxPackets = config.audio_jitter_buffer_max_packets;
- _audioJitterBufferFastAccelerate = config.audio_jitter_buffer_fast_accelerate;
- _iceConnectionReceivingTimeout = config.ice_connection_receiving_timeout;
- _iceBackupCandidatePairPingInterval =
- config.ice_backup_candidate_pair_ping_interval;
- _keyType = RTCEncryptionKeyTypeECDSA;
- _iceCandidatePoolSize = config.ice_candidate_pool_size;
- _shouldPruneTurnPorts = config.prune_turn_ports;
- _shouldPresumeWritableWhenFullyRelayed =
- config.presume_writable_when_fully_relayed;
- if (config.ice_check_min_interval) {
- _iceCheckMinInterval =
- [NSNumber numberWithInt:*config.ice_check_min_interval];
- }
- if (config.ice_regather_interval_range) {
- const rtc::IntervalRange &nativeIntervalRange = config.ice_regather_interval_range.value();
- _iceRegatherIntervalRange =
- [[RTCIntervalRange alloc] initWithNativeIntervalRange:nativeIntervalRange];
- }
- _sdpSemantics = [[self class] sdpSemanticsForNativeSdpSemantics:config.sdp_semantics];
- _turnCustomizer = config.turn_customizer;
- _activeResetSrtpParams = config.active_reset_srtp_params;
- }
- return self;
-}
-
-- (NSString *)description {
- static NSString *formatString =
- @"RTCConfiguration: "
- @"{\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%d\n%d\n%d\n%d\n%d\n%d\n%d\n%@\n%@\n%d\n%d\n%d\n}\n";
-
- return [NSString
- stringWithFormat:formatString,
- _iceServers,
- [[self class] stringForTransportPolicy:_iceTransportPolicy],
- [[self class] stringForBundlePolicy:_bundlePolicy],
- [[self class] stringForRtcpMuxPolicy:_rtcpMuxPolicy],
- [[self class] stringForTcpCandidatePolicy:_tcpCandidatePolicy],
- [[self class] stringForCandidateNetworkPolicy:_candidateNetworkPolicy],
- [[self class] stringForContinualGatheringPolicy:_continualGatheringPolicy],
- [[self class] stringForSdpSemantics:_sdpSemantics],
- _audioJitterBufferMaxPackets,
- _audioJitterBufferFastAccelerate,
- _iceConnectionReceivingTimeout,
- _iceBackupCandidatePairPingInterval,
- _iceCandidatePoolSize,
- _shouldPruneTurnPorts,
- _shouldPresumeWritableWhenFullyRelayed,
- _iceCheckMinInterval,
- _iceRegatherIntervalRange,
- _disableLinkLocalNetworks,
- _maxIPv6Networks,
- _activeResetSrtpParams];
-}
-
-#pragma mark - Private
-
-- (webrtc::PeerConnectionInterface::RTCConfiguration *)
- createNativeConfiguration {
- std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration>
- nativeConfig(new webrtc::PeerConnectionInterface::RTCConfiguration(
- webrtc::PeerConnectionInterface::RTCConfigurationType::kAggressive));
-
- for (RTCIceServer *iceServer in _iceServers) {
- nativeConfig->servers.push_back(iceServer.nativeServer);
- }
- nativeConfig->type =
- [[self class] nativeTransportsTypeForTransportPolicy:_iceTransportPolicy];
- nativeConfig->bundle_policy =
- [[self class] nativeBundlePolicyForPolicy:_bundlePolicy];
- nativeConfig->rtcp_mux_policy =
- [[self class] nativeRtcpMuxPolicyForPolicy:_rtcpMuxPolicy];
- nativeConfig->tcp_candidate_policy =
- [[self class] nativeTcpCandidatePolicyForPolicy:_tcpCandidatePolicy];
- nativeConfig->candidate_network_policy = [[self class]
- nativeCandidateNetworkPolicyForPolicy:_candidateNetworkPolicy];
- nativeConfig->continual_gathering_policy = [[self class]
- nativeContinualGatheringPolicyForPolicy:_continualGatheringPolicy];
- nativeConfig->max_ipv6_networks = _maxIPv6Networks;
- nativeConfig->disable_link_local_networks = _disableLinkLocalNetworks;
- nativeConfig->audio_jitter_buffer_max_packets = _audioJitterBufferMaxPackets;
- nativeConfig->audio_jitter_buffer_fast_accelerate =
- _audioJitterBufferFastAccelerate ? true : false;
- nativeConfig->ice_connection_receiving_timeout =
- _iceConnectionReceivingTimeout;
- nativeConfig->ice_backup_candidate_pair_ping_interval =
- _iceBackupCandidatePairPingInterval;
- rtc::KeyType keyType =
- [[self class] nativeEncryptionKeyTypeForKeyType:_keyType];
- if (_certificate != nullptr) {
- // If offered a PEM cert, use it.
- RTC_LOG(LS_INFO) << "Have configured cert - using it.";
- std::string pem_private_key = [[_certificate private_key] UTF8String];
- std::string pem_certificate = [[_certificate certificate] UTF8String];
- rtc::RTCCertificatePEM pem = rtc::RTCCertificatePEM(pem_private_key, pem_certificate);
- rtc::scoped_refptr<rtc::RTCCertificate> certificate = rtc::RTCCertificate::FromPEM(pem);
- RTC_LOG(LS_INFO) << "Created cert from PEM strings.";
- if (!certificate) {
- RTC_LOG(LS_ERROR) << "Failed to generate certificate from PEM.";
- return nullptr;
- }
- nativeConfig->certificates.push_back(certificate);
- } else {
- RTC_LOG(LS_INFO) << "Don't have configured cert.";
- // Generate non-default certificate.
- if (keyType != rtc::KT_DEFAULT) {
- rtc::scoped_refptr<rtc::RTCCertificate> certificate =
- rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType),
- absl::optional<uint64_t>());
- if (!certificate) {
- RTCLogError(@"Failed to generate certificate.");
- return nullptr;
- }
- nativeConfig->certificates.push_back(certificate);
- }
- }
- nativeConfig->ice_candidate_pool_size = _iceCandidatePoolSize;
- nativeConfig->prune_turn_ports = _shouldPruneTurnPorts ? true : false;
- nativeConfig->presume_writable_when_fully_relayed =
- _shouldPresumeWritableWhenFullyRelayed ? true : false;
- if (_iceCheckMinInterval != nil) {
- nativeConfig->ice_check_min_interval = absl::optional<int>(_iceCheckMinInterval.intValue);
- }
- if (_iceRegatherIntervalRange != nil) {
- std::unique_ptr<rtc::IntervalRange> nativeIntervalRange(
- _iceRegatherIntervalRange.nativeIntervalRange);
- nativeConfig->ice_regather_interval_range =
- absl::optional<rtc::IntervalRange>(*nativeIntervalRange);
- }
- nativeConfig->sdp_semantics = [[self class] nativeSdpSemanticsForSdpSemantics:_sdpSemantics];
- if (_turnCustomizer) {
- nativeConfig->turn_customizer = _turnCustomizer;
- }
- nativeConfig->active_reset_srtp_params = _activeResetSrtpParams ? true : false;
- return nativeConfig.release();
-}
-
-+ (webrtc::PeerConnectionInterface::IceTransportsType)
- nativeTransportsTypeForTransportPolicy:(RTCIceTransportPolicy)policy {
- switch (policy) {
- case RTCIceTransportPolicyNone:
- return webrtc::PeerConnectionInterface::kNone;
- case RTCIceTransportPolicyRelay:
- return webrtc::PeerConnectionInterface::kRelay;
- case RTCIceTransportPolicyNoHost:
- return webrtc::PeerConnectionInterface::kNoHost;
- case RTCIceTransportPolicyAll:
- return webrtc::PeerConnectionInterface::kAll;
- }
-}
-
-+ (RTCIceTransportPolicy)transportPolicyForTransportsType:
- (webrtc::PeerConnectionInterface::IceTransportsType)nativeType {
- switch (nativeType) {
- case webrtc::PeerConnectionInterface::kNone:
- return RTCIceTransportPolicyNone;
- case webrtc::PeerConnectionInterface::kRelay:
- return RTCIceTransportPolicyRelay;
- case webrtc::PeerConnectionInterface::kNoHost:
- return RTCIceTransportPolicyNoHost;
- case webrtc::PeerConnectionInterface::kAll:
- return RTCIceTransportPolicyAll;
- }
-}
-
-+ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy {
- switch (policy) {
- case RTCIceTransportPolicyNone:
- return @"NONE";
- case RTCIceTransportPolicyRelay:
- return @"RELAY";
- case RTCIceTransportPolicyNoHost:
- return @"NO_HOST";
- case RTCIceTransportPolicyAll:
- return @"ALL";
- }
-}
-
-+ (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy:
- (RTCBundlePolicy)policy {
- switch (policy) {
- case RTCBundlePolicyBalanced:
- return webrtc::PeerConnectionInterface::kBundlePolicyBalanced;
- case RTCBundlePolicyMaxCompat:
- return webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat;
- case RTCBundlePolicyMaxBundle:
- return webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle;
- }
-}
-
-+ (RTCBundlePolicy)bundlePolicyForNativePolicy:
- (webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy {
- switch (nativePolicy) {
- case webrtc::PeerConnectionInterface::kBundlePolicyBalanced:
- return RTCBundlePolicyBalanced;
- case webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat:
- return RTCBundlePolicyMaxCompat;
- case webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle:
- return RTCBundlePolicyMaxBundle;
- }
-}
-
-+ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy {
- switch (policy) {
- case RTCBundlePolicyBalanced:
- return @"BALANCED";
- case RTCBundlePolicyMaxCompat:
- return @"MAX_COMPAT";
- case RTCBundlePolicyMaxBundle:
- return @"MAX_BUNDLE";
- }
-}
-
-+ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy:
- (RTCRtcpMuxPolicy)policy {
- switch (policy) {
- case RTCRtcpMuxPolicyNegotiate:
- return webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
- case RTCRtcpMuxPolicyRequire:
- return webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire;
- }
-}
-
-+ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy:
- (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy {
- switch (nativePolicy) {
- case webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate:
- return RTCRtcpMuxPolicyNegotiate;
- case webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire:
- return RTCRtcpMuxPolicyRequire;
- }
-}
-
-+ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy {
- switch (policy) {
- case RTCRtcpMuxPolicyNegotiate:
- return @"NEGOTIATE";
- case RTCRtcpMuxPolicyRequire:
- return @"REQUIRE";
- }
-}
-
-+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)
- nativeTcpCandidatePolicyForPolicy:(RTCTcpCandidatePolicy)policy {
- switch (policy) {
- case RTCTcpCandidatePolicyEnabled:
- return webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled;
- case RTCTcpCandidatePolicyDisabled:
- return webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled;
- }
-}
-
-+ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)
- nativeCandidateNetworkPolicyForPolicy:(RTCCandidateNetworkPolicy)policy {
- switch (policy) {
- case RTCCandidateNetworkPolicyAll:
- return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll;
- case RTCCandidateNetworkPolicyLowCost:
- return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost;
- }
-}
-
-+ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy:
- (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy {
- switch (nativePolicy) {
- case webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled:
- return RTCTcpCandidatePolicyEnabled;
- case webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled:
- return RTCTcpCandidatePolicyDisabled;
- }
-}
-
-+ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy {
- switch (policy) {
- case RTCTcpCandidatePolicyEnabled:
- return @"TCP_ENABLED";
- case RTCTcpCandidatePolicyDisabled:
- return @"TCP_DISABLED";
- }
-}
-
-+ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy:
- (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativePolicy {
- switch (nativePolicy) {
- case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll:
- return RTCCandidateNetworkPolicyAll;
- case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost:
- return RTCCandidateNetworkPolicyLowCost;
- }
-}
-
-+ (NSString *)stringForCandidateNetworkPolicy:
- (RTCCandidateNetworkPolicy)policy {
- switch (policy) {
- case RTCCandidateNetworkPolicyAll:
- return @"CANDIDATE_ALL_NETWORKS";
- case RTCCandidateNetworkPolicyLowCost:
- return @"CANDIDATE_LOW_COST_NETWORKS";
- }
-}
-
-+ (webrtc::PeerConnectionInterface::ContinualGatheringPolicy)
- nativeContinualGatheringPolicyForPolicy:
- (RTCContinualGatheringPolicy)policy {
- switch (policy) {
- case RTCContinualGatheringPolicyGatherOnce:
- return webrtc::PeerConnectionInterface::GATHER_ONCE;
- case RTCContinualGatheringPolicyGatherContinually:
- return webrtc::PeerConnectionInterface::GATHER_CONTINUALLY;
- }
-}
-
-+ (RTCContinualGatheringPolicy)continualGatheringPolicyForNativePolicy:
- (webrtc::PeerConnectionInterface::ContinualGatheringPolicy)nativePolicy {
- switch (nativePolicy) {
- case webrtc::PeerConnectionInterface::GATHER_ONCE:
- return RTCContinualGatheringPolicyGatherOnce;
- case webrtc::PeerConnectionInterface::GATHER_CONTINUALLY:
- return RTCContinualGatheringPolicyGatherContinually;
- }
-}
-
-+ (NSString *)stringForContinualGatheringPolicy:
- (RTCContinualGatheringPolicy)policy {
- switch (policy) {
- case RTCContinualGatheringPolicyGatherOnce:
- return @"GATHER_ONCE";
- case RTCContinualGatheringPolicyGatherContinually:
- return @"GATHER_CONTINUALLY";
- }
-}
-
-+ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:
- (RTCEncryptionKeyType)keyType {
- switch (keyType) {
- case RTCEncryptionKeyTypeRSA:
- return rtc::KT_RSA;
- case RTCEncryptionKeyTypeECDSA:
- return rtc::KT_ECDSA;
- }
-}
-
-+ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics:(RTCSdpSemantics)sdpSemantics {
- switch (sdpSemantics) {
- case RTCSdpSemanticsPlanB:
- return webrtc::SdpSemantics::kPlanB;
- case RTCSdpSemanticsUnifiedPlan:
- return webrtc::SdpSemantics::kUnifiedPlan;
- }
-}
-
-+ (RTCSdpSemantics)sdpSemanticsForNativeSdpSemantics:(webrtc::SdpSemantics)sdpSemantics {
- switch (sdpSemantics) {
- case webrtc::SdpSemantics::kPlanB:
- return RTCSdpSemanticsPlanB;
- case webrtc::SdpSemantics::kUnifiedPlan:
- return RTCSdpSemanticsUnifiedPlan;
- }
-}
-
-+ (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics {
- switch (sdpSemantics) {
- case RTCSdpSemanticsPlanB:
- return @"PLAN_B";
- case RTCSdpSemanticsUnifiedPlan:
- return @"UNIFIED_PLAN";
- }
-}
-
-@end
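
A minimal usage sketch of the RTCConfiguration wrapper above, for context: the Objective-C properties set here are what createNativeConfiguration translates into the native webrtc::PeerConnectionInterface::RTCConfiguration. The `factory` and `delegate` parameters, the STUN URL, and the function name are assumptions for illustration only.

    #import "WebRTC/RTCConfiguration.h"
    #import "WebRTC/RTCIceServer.h"
    #import "WebRTC/RTCMediaConstraints.h"
    #import "WebRTC/RTCPeerConnection.h"
    #import "WebRTC/RTCPeerConnectionFactory.h"

    // `factory` and `delegate` are supplied by the caller; both are assumptions
    // for illustration and are not part of this change.
    static RTCPeerConnection *CreateExamplePeerConnection(
        RTCPeerConnectionFactory *factory, id<RTCPeerConnectionDelegate> delegate) {
      RTCConfiguration *config = [[RTCConfiguration alloc] init];
      config.iceServers =
          @[ [[RTCIceServer alloc] initWithURLStrings:@[ @"stun:stun.example.org:3478" ]] ];
      config.iceTransportPolicy = RTCIceTransportPolicyAll;
      config.bundlePolicy = RTCBundlePolicyMaxBundle;
      config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
      RTCMediaConstraints *constraints =
          [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil
                                                 optionalConstraints:nil];
      // The properties above are converted to the native configuration by
      // createNativeConfiguration in the file shown in this hunk.
      return [factory peerConnectionWithConfiguration:config
                                          constraints:constraints
                                             delegate:delegate];
    }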
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannel+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannel+Private.h
deleted file mode 100644
index 9849fd9..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannel+Private.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCDataChannel.h"
-
-#include "api/datachannelinterface.h"
-#include "rtc_base/scoped_ref_ptr.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@class RTCPeerConnectionFactory;
-
-@interface RTCDataBuffer ()
-
-/**
- * The native DataBuffer representation of this RTCDataBuffer object. This is
- * needed to pass to the underlying C++ APIs.
- */
-@property(nonatomic, readonly) const webrtc::DataBuffer *nativeDataBuffer;
-
-/** Initialize an RTCDataBuffer from a native DataBuffer. */
-- (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer &)nativeBuffer;
-
-@end
-
-@interface RTCDataChannel ()
-
-/** Initialize an RTCDataChannel from a native DataChannelInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeDataChannel:(rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel
- NS_DESIGNATED_INITIALIZER;
-
-+ (webrtc::DataChannelInterface::DataState)nativeDataChannelStateForState:
- (RTCDataChannelState)state;
-
-+ (RTCDataChannelState)dataChannelStateForNativeState:
- (webrtc::DataChannelInterface::DataState)nativeState;
-
-+ (NSString *)stringForState:(RTCDataChannelState)state;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannel.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannel.mm
deleted file mode 100644
index 06ca453..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannel.mm
+++ /dev/null
@@ -1,223 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCDataChannel+Private.h"
-
-#import "NSString+StdString.h"
-
-#include <memory>
-
-namespace webrtc {
-
-class DataChannelDelegateAdapter : public DataChannelObserver {
- public:
- DataChannelDelegateAdapter(RTCDataChannel *channel) { channel_ = channel; }
-
- void OnStateChange() override {
- [channel_.delegate dataChannelDidChangeState:channel_];
- }
-
- void OnMessage(const DataBuffer& buffer) override {
- RTCDataBuffer *data_buffer =
- [[RTCDataBuffer alloc] initWithNativeBuffer:buffer];
- [channel_.delegate dataChannel:channel_
- didReceiveMessageWithBuffer:data_buffer];
- }
-
- void OnBufferedAmountChange(uint64_t previousAmount) override {
- id<RTCDataChannelDelegate> delegate = channel_.delegate;
- SEL sel = @selector(dataChannel:didChangeBufferedAmount:);
- if ([delegate respondsToSelector:sel]) {
- [delegate dataChannel:channel_ didChangeBufferedAmount:previousAmount];
- }
- }
-
- private:
- __weak RTCDataChannel *channel_;
-};
-}
-
-
-@implementation RTCDataBuffer {
- std::unique_ptr<webrtc::DataBuffer> _dataBuffer;
-}
-
-- (instancetype)initWithData:(NSData *)data isBinary:(BOOL)isBinary {
- NSParameterAssert(data);
- if (self = [super init]) {
- rtc::CopyOnWriteBuffer buffer(
- reinterpret_cast<const uint8_t*>(data.bytes), data.length);
- _dataBuffer.reset(new webrtc::DataBuffer(buffer, isBinary));
- }
- return self;
-}
-
-- (NSData *)data {
- return [NSData dataWithBytes:_dataBuffer->data.data()
- length:_dataBuffer->data.size()];
-}
-
-- (BOOL)isBinary {
- return _dataBuffer->binary;
-}
-
-#pragma mark - Private
-
-- (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer&)nativeBuffer {
- if (self = [super init]) {
- _dataBuffer.reset(new webrtc::DataBuffer(nativeBuffer));
- }
- return self;
-}
-
-- (const webrtc::DataBuffer *)nativeDataBuffer {
- return _dataBuffer.get();
-}
-
-@end
-
-
-@implementation RTCDataChannel {
- RTCPeerConnectionFactory *_factory;
- rtc::scoped_refptr<webrtc::DataChannelInterface> _nativeDataChannel;
- std::unique_ptr<webrtc::DataChannelDelegateAdapter> _observer;
- BOOL _isObserverRegistered;
-}
-
-@synthesize delegate = _delegate;
-
-- (void)dealloc {
- // Handles unregistering the observer properly. We need to do this because
- // there may still be other references to the underlying data channel.
- _nativeDataChannel->UnregisterObserver();
-}
-
-- (NSString *)label {
- return [NSString stringForStdString:_nativeDataChannel->label()];
-}
-
-- (BOOL)isReliable {
- return _nativeDataChannel->reliable();
-}
-
-- (BOOL)isOrdered {
- return _nativeDataChannel->ordered();
-}
-
-- (NSUInteger)maxRetransmitTime {
- return self.maxPacketLifeTime;
-}
-
-- (uint16_t)maxPacketLifeTime {
- return _nativeDataChannel->maxRetransmitTime();
-}
-
-- (uint16_t)maxRetransmits {
- return _nativeDataChannel->maxRetransmits();
-}
-
-- (NSString *)protocol {
- return [NSString stringForStdString:_nativeDataChannel->protocol()];
-}
-
-- (BOOL)isNegotiated {
- return _nativeDataChannel->negotiated();
-}
-
-- (NSInteger)streamId {
- return self.channelId;
-}
-
-- (int)channelId {
- return _nativeDataChannel->id();
-}
-
-- (RTCDataChannelState)readyState {
- return [[self class] dataChannelStateForNativeState:
- _nativeDataChannel->state()];
-}
-
-- (uint64_t)bufferedAmount {
- return _nativeDataChannel->buffered_amount();
-}
-
-- (void)close {
- _nativeDataChannel->Close();
-}
-
-- (BOOL)sendData:(RTCDataBuffer *)data {
- return _nativeDataChannel->Send(*data.nativeDataBuffer);
-}
-
-- (NSString *)description {
- return [NSString stringWithFormat:@"RTCDataChannel:\n%ld\n%@\n%@",
- (long)self.channelId,
- self.label,
- [[self class]
- stringForState:self.readyState]];
-}
-
-#pragma mark - Private
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeDataChannel:
- (rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel {
- NSParameterAssert(nativeDataChannel);
- if (self = [super init]) {
- _factory = factory;
- _nativeDataChannel = nativeDataChannel;
- _observer.reset(new webrtc::DataChannelDelegateAdapter(self));
- _nativeDataChannel->RegisterObserver(_observer.get());
- }
- return self;
-}
-
-+ (webrtc::DataChannelInterface::DataState)
- nativeDataChannelStateForState:(RTCDataChannelState)state {
- switch (state) {
- case RTCDataChannelStateConnecting:
- return webrtc::DataChannelInterface::DataState::kConnecting;
- case RTCDataChannelStateOpen:
- return webrtc::DataChannelInterface::DataState::kOpen;
- case RTCDataChannelStateClosing:
- return webrtc::DataChannelInterface::DataState::kClosing;
- case RTCDataChannelStateClosed:
- return webrtc::DataChannelInterface::DataState::kClosed;
- }
-}
-
-+ (RTCDataChannelState)dataChannelStateForNativeState:
- (webrtc::DataChannelInterface::DataState)nativeState {
- switch (nativeState) {
- case webrtc::DataChannelInterface::DataState::kConnecting:
- return RTCDataChannelStateConnecting;
- case webrtc::DataChannelInterface::DataState::kOpen:
- return RTCDataChannelStateOpen;
- case webrtc::DataChannelInterface::DataState::kClosing:
- return RTCDataChannelStateClosing;
- case webrtc::DataChannelInterface::DataState::kClosed:
- return RTCDataChannelStateClosed;
- }
-}
-
-+ (NSString *)stringForState:(RTCDataChannelState)state {
- switch (state) {
- case RTCDataChannelStateConnecting:
- return @"Connecting";
- case RTCDataChannelStateOpen:
- return @"Open";
- case RTCDataChannelStateClosing:
- return @"Closing";
- case RTCDataChannelStateClosed:
- return @"Closed";
- }
-}
-
-@end
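
A short sketch of the public surface wrapped by RTCDataChannel above: opening a channel and sending a buffer. The peer connection `pc`, the channel label, and the function name are assumptions for illustration; the send should happen only after the delegate reports RTCDataChannelStateOpen.

    #import "WebRTC/RTCDataChannel.h"
    #import "WebRTC/RTCDataChannelConfiguration.h"
    #import "WebRTC/RTCPeerConnection.h"

    // `pc` is an already-configured RTCPeerConnection supplied by the caller.
    static void SendExampleMessage(RTCPeerConnection *pc) {
      RTCDataChannelConfiguration *dcConfig = [[RTCDataChannelConfiguration alloc] init];
      RTCDataChannel *channel = [pc dataChannelForLabel:@"chat" configuration:dcConfig];
      // Wrap bytes in an RTCDataBuffer (the thin wrapper around
      // webrtc::DataBuffer above) and hand them to the native channel.
      NSData *payload = [@"hello" dataUsingEncoding:NSUTF8StringEncoding];
      [channel sendData:[[RTCDataBuffer alloc] initWithData:payload isBinary:NO]];
    }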
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannelConfiguration+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannelConfiguration+Private.h
deleted file mode 100644
index 68d2022..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannelConfiguration+Private.h
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCDataChannelConfiguration.h"
-
-#include "api/datachannelinterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCDataChannelConfiguration ()
-
-@property(nonatomic, readonly) webrtc::DataChannelInit nativeDataChannelInit;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannelConfiguration.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannelConfiguration.mm
deleted file mode 100644
index 89c56de..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCDataChannelConfiguration.mm
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCDataChannelConfiguration+Private.h"
-
-#import "NSString+StdString.h"
-
-@implementation RTCDataChannelConfiguration
-
-@synthesize nativeDataChannelInit = _nativeDataChannelInit;
-
-- (BOOL)isOrdered {
- return _nativeDataChannelInit.ordered;
-}
-
-- (void)setIsOrdered:(BOOL)isOrdered {
- _nativeDataChannelInit.ordered = isOrdered;
-}
-
-- (NSInteger)maxRetransmitTimeMs {
- return self.maxPacketLifeTime;
-}
-
-- (void)setMaxRetransmitTimeMs:(NSInteger)maxRetransmitTimeMs {
- self.maxPacketLifeTime = maxRetransmitTimeMs;
-}
-
-- (int)maxPacketLifeTime {
- return _nativeDataChannelInit.maxRetransmitTime;
-}
-
-- (void)setMaxPacketLifeTime:(int)maxPacketLifeTime {
- _nativeDataChannelInit.maxRetransmitTime = maxPacketLifeTime;
-}
-
-- (int)maxRetransmits {
- return _nativeDataChannelInit.maxRetransmits;
-}
-
-- (void)setMaxRetransmits:(int)maxRetransmits {
- _nativeDataChannelInit.maxRetransmits = maxRetransmits;
-}
-
-- (NSString *)protocol {
- return [NSString stringForStdString:_nativeDataChannelInit.protocol];
-}
-
-- (void)setProtocol:(NSString *)protocol {
- _nativeDataChannelInit.protocol = [NSString stdStringForString:protocol];
-}
-
-- (BOOL)isNegotiated {
- return _nativeDataChannelInit.negotiated;
-}
-
-- (void)setIsNegotiated:(BOOL)isNegotiated {
- _nativeDataChannelInit.negotiated = isNegotiated;
-}
-
-- (int)streamId {
- return self.channelId;
-}
-
-- (void)setStreamId:(int)streamId {
- self.channelId = streamId;
-}
-
-- (int)channelId {
- return _nativeDataChannelInit.id;
-}
-
-- (void)setChannelId:(int)channelId {
- _nativeDataChannelInit.id = channelId;
-}
-
-@end
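
The configuration object above is a one-to-one mirror of webrtc::DataChannelInit; a small sketch of the mapping (values are examples only):

    RTCDataChannelConfiguration *dcConfig = [[RTCDataChannelConfiguration alloc] init];
    dcConfig.isOrdered = YES;        // DataChannelInit::ordered
    dcConfig.maxRetransmits = 3;     // DataChannelInit::maxRetransmits
    dcConfig.protocol = @"chat-v1";  // DataChannelInit::protocol
    dcConfig.isNegotiated = YES;     // out-of-band negotiation
    dcConfig.channelId = 1;          // DataChannelInit::id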
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCDefaultVideoDecoderFactory.m b/sdk/objc/Framework/Classes/PeerConnection/RTCDefaultVideoDecoderFactory.m
deleted file mode 100644
index 5f292c9..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCDefaultVideoDecoderFactory.m
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright 2017 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCVideoCodecFactory.h"
-
-#import "WebRTC/RTCVideoCodecH264.h"
-#import "WebRTC/RTCVideoDecoderVP8.h"
-#if !defined(RTC_DISABLE_VP9)
-#import "WebRTC/RTCVideoDecoderVP9.h"
-#endif
-
-@implementation RTCDefaultVideoDecoderFactory
-
-- (id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info {
- if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
- return [[RTCVideoDecoderH264 alloc] init];
- } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {
- return [RTCVideoDecoderVP8 vp8Decoder];
-#if !defined(RTC_DISABLE_VP9)
- } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) {
- return [RTCVideoDecoderVP9 vp9Decoder];
-#endif
- }
-
- return nil;
-}
-
-- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
- return @[
- [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name],
- [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name],
-#if !defined(RTC_DISABLE_VP9)
- [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp9Name],
-#endif
- ];
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCDefaultVideoEncoderFactory.m b/sdk/objc/Framework/Classes/PeerConnection/RTCDefaultVideoEncoderFactory.m
deleted file mode 100644
index a4b8d71..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCDefaultVideoEncoderFactory.m
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright 2017 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCVideoCodecFactory.h"
-
-#import "WebRTC/RTCVideoCodec.h"
-#import "WebRTC/RTCVideoCodecH264.h"
-#import "WebRTC/RTCVideoEncoderVP8.h"
-#if !defined(RTC_DISABLE_VP9)
-#import "WebRTC/RTCVideoEncoderVP9.h"
-#endif
-
-@implementation RTCDefaultVideoEncoderFactory
-
-@synthesize preferredCodec;
-
-+ (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
- NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
- @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
- @"level-asymmetry-allowed" : @"1",
- @"packetization-mode" : @"1",
- };
- RTCVideoCodecInfo *constrainedHighInfo =
- [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
- parameters:constrainedHighParams];
-
- NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
- @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
- @"level-asymmetry-allowed" : @"1",
- @"packetization-mode" : @"1",
- };
- RTCVideoCodecInfo *constrainedBaselineInfo =
- [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
- parameters:constrainedBaselineParams];
-
- RTCVideoCodecInfo *vp8Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name];
-
-#if !defined(RTC_DISABLE_VP9)
- RTCVideoCodecInfo *vp9Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp9Name];
-#endif
-
- return @[
- constrainedHighInfo,
- constrainedBaselineInfo,
- vp8Info,
-#if !defined(RTC_DISABLE_VP9)
- vp9Info,
-#endif
- ];
-}
-
-- (id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info {
- if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
- return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info];
- } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {
- return [RTCVideoEncoderVP8 vp8Encoder];
-#if !defined(RTC_DISABLE_VP9)
- } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) {
- return [RTCVideoEncoderVP9 vp9Encoder];
-#endif
- }
-
- return nil;
-}
-
-- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
- NSMutableArray<RTCVideoCodecInfo *> *codecs = [[[self class] supportedCodecs] mutableCopy];
-
- NSMutableArray<RTCVideoCodecInfo *> *orderedCodecs = [NSMutableArray array];
- NSUInteger index = [codecs indexOfObject:self.preferredCodec];
- if (index != NSNotFound) {
- [orderedCodecs addObject:[codecs objectAtIndex:index]];
- [codecs removeObjectAtIndex:index];
- }
- [orderedCodecs addObjectsFromArray:codecs];
-
- return [orderedCodecs copy];
-}
-
-@end
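
A sketch of how the default encoder/decoder factories above are typically handed to the peer connection factory, with preferredCodec used to reorder -supportedCodecs. The VP8 preference and the function name are examples, not part of this change.

    #import "WebRTC/RTCPeerConnectionFactory.h"
    #import "WebRTC/RTCVideoCodec.h"
    #import "WebRTC/RTCVideoCodecFactory.h"

    static RTCPeerConnectionFactory *CreateExampleFactory(void) {
      RTCDefaultVideoEncoderFactory *encoderFactory =
          [[RTCDefaultVideoEncoderFactory alloc] init];
      RTCDefaultVideoDecoderFactory *decoderFactory =
          [[RTCDefaultVideoDecoderFactory alloc] init];
      // Moves VP8 to the front of -supportedCodecs (see the reordering above).
      encoderFactory.preferredCodec =
          [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name];
      return [[RTCPeerConnectionFactory alloc] initWithEncoderFactory:encoderFactory
                                                        decoderFactory:decoderFactory];
    }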
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCDtmfSender+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCDtmfSender+Private.h
deleted file mode 100644
index 197aeb8..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCDtmfSender+Private.h
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCDtmfSender.h"
-
-#include "api/dtmfsenderinterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCDtmfSender : NSObject <RTCDtmfSender>
-
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::DtmfSenderInterface> nativeDtmfSender;
-
-- (instancetype)init NS_UNAVAILABLE;
-
-/** Initialize an RTCDtmfSender with a native DtmfSenderInterface. */
-- (instancetype)initWithNativeDtmfSender:
- (rtc::scoped_refptr<webrtc::DtmfSenderInterface>)nativeDtmfSender NS_DESIGNATED_INITIALIZER;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCDtmfSender.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCDtmfSender.mm
deleted file mode 100644
index 7a5c97f..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCDtmfSender.mm
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCDtmfSender+Private.h"
-
-#import "NSString+StdString.h"
-#import "WebRTC/RTCLogging.h"
-
-#include "rtc_base/timeutils.h"
-
-@implementation RTCDtmfSender {
- rtc::scoped_refptr<webrtc::DtmfSenderInterface> _nativeDtmfSender;
-}
-
-- (BOOL)canInsertDtmf {
- return _nativeDtmfSender->CanInsertDtmf();
-}
-
-- (BOOL)insertDtmf:(nonnull NSString *)tones
- duration:(NSTimeInterval)duration
- interToneGap:(NSTimeInterval)interToneGap {
- RTC_DCHECK(tones != nil);
-
- int durationMs = static_cast<int>(duration * rtc::kNumMillisecsPerSec);
- int interToneGapMs = static_cast<int>(interToneGap * rtc::kNumMillisecsPerSec);
- return _nativeDtmfSender->InsertDtmf(
- [NSString stdStringForString:tones], durationMs, interToneGapMs);
-}
-
-- (nonnull NSString *)remainingTones {
- return [NSString stringForStdString:_nativeDtmfSender->tones()];
-}
-
-- (NSTimeInterval)duration {
- return static_cast<NSTimeInterval>(_nativeDtmfSender->duration()) / rtc::kNumMillisecsPerSec;
-}
-
-- (NSTimeInterval)interToneGap {
- return static_cast<NSTimeInterval>(_nativeDtmfSender->inter_tone_gap()) /
- rtc::kNumMillisecsPerSec;
-}
-
-- (NSString *)description {
- return [NSString
- stringWithFormat:
- @"RTCDtmfSender {\n remainingTones: %@\n duration: %f sec\n interToneGap: %f sec\n}",
- [self remainingTones],
- [self duration],
- [self interToneGap]];
-}
-
-#pragma mark - Private
-
-- (rtc::scoped_refptr<webrtc::DtmfSenderInterface>)nativeDtmfSender {
- return _nativeDtmfSender;
-}
-
-- (instancetype)initWithNativeDtmfSender:
- (rtc::scoped_refptr<webrtc::DtmfSenderInterface>)nativeDtmfSender {
- NSParameterAssert(nativeDtmfSender);
- if (self = [super init]) {
- _nativeDtmfSender = nativeDtmfSender;
- RTCLogInfo(@"RTCDtmfSender(%p): created DTMF sender: %@", self, self.description);
- }
- return self;
-}
-@end
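
A usage sketch for the DTMF wrapper above. The `audioSender` parameter (the RTCRtpSender carrying the audio track) and the tone string are assumptions for illustration; durations are NSTimeInterval seconds, converted to milliseconds by insertDtmf: above.

    #import "WebRTC/RTCDtmfSender.h"
    #import "WebRTC/RTCRtpSender.h"

    static void SendExampleTones(RTCRtpSender *audioSender) {
      id<RTCDtmfSender> dtmf = audioSender.dtmfSender;
      if (dtmf.canInsertDtmf) {
        // 0.1 s per tone with a 0.07 s gap between tones.
        [dtmf insertDtmf:@"1234#" duration:0.1 interToneGap:0.07];
      }
    }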
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm
deleted file mode 100644
index e9c0a8c..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCVideoCodec.h"
-
-#import "RTCVideoCodec+Private.h"
-
-#include "rtc_base/numerics/safe_conversions.h"
-
-@implementation RTCEncodedImage
-
-@synthesize buffer = _buffer;
-@synthesize encodedWidth = _encodedWidth;
-@synthesize encodedHeight = _encodedHeight;
-@synthesize timeStamp = _timeStamp;
-@synthesize captureTimeMs = _captureTimeMs;
-@synthesize ntpTimeMs = _ntpTimeMs;
-@synthesize flags = _flags;
-@synthesize encodeStartMs = _encodeStartMs;
-@synthesize encodeFinishMs = _encodeFinishMs;
-@synthesize frameType = _frameType;
-@synthesize rotation = _rotation;
-@synthesize completeFrame = _completeFrame;
-@synthesize qp = _qp;
-@synthesize contentType = _contentType;
-
-- (instancetype)initWithNativeEncodedImage:(webrtc::EncodedImage)encodedImage {
- if (self = [super init]) {
- // Wrap the buffer in NSData without copying; do not take ownership.
- _buffer = [NSData dataWithBytesNoCopy:encodedImage._buffer
- length:encodedImage._length
- freeWhenDone:NO];
- _encodedWidth = rtc::dchecked_cast<int32_t>(encodedImage._encodedWidth);
- _encodedHeight = rtc::dchecked_cast<int32_t>(encodedImage._encodedHeight);
- _timeStamp = encodedImage.Timestamp();
- _captureTimeMs = encodedImage.capture_time_ms_;
- _ntpTimeMs = encodedImage.ntp_time_ms_;
- _flags = encodedImage.timing_.flags;
- _encodeStartMs = encodedImage.timing_.encode_start_ms;
- _encodeFinishMs = encodedImage.timing_.encode_finish_ms;
- _frameType = static_cast<RTCFrameType>(encodedImage._frameType);
- _rotation = static_cast<RTCVideoRotation>(encodedImage.rotation_);
- _completeFrame = encodedImage._completeFrame;
- _qp = @(encodedImage.qp_);
- _contentType = (encodedImage.content_type_ == webrtc::VideoContentType::SCREENSHARE) ?
- RTCVideoContentTypeScreenshare :
- RTCVideoContentTypeUnspecified;
- }
-
- return self;
-}
-
-- (webrtc::EncodedImage)nativeEncodedImage {
- // Construct an EncodedImage that references the buffer without copying it.
- webrtc::EncodedImage encodedImage(
- (uint8_t *)_buffer.bytes, (size_t)_buffer.length, (size_t)_buffer.length);
- encodedImage._encodedWidth = rtc::dchecked_cast<uint32_t>(_encodedWidth);
- encodedImage._encodedHeight = rtc::dchecked_cast<uint32_t>(_encodedHeight);
- encodedImage.SetTimestamp(_timeStamp);
- encodedImage.capture_time_ms_ = _captureTimeMs;
- encodedImage.ntp_time_ms_ = _ntpTimeMs;
- encodedImage.timing_.flags = _flags;
- encodedImage.timing_.encode_start_ms = _encodeStartMs;
- encodedImage.timing_.encode_finish_ms = _encodeFinishMs;
- encodedImage._frameType = webrtc::FrameType(_frameType);
- encodedImage.rotation_ = webrtc::VideoRotation(_rotation);
- encodedImage._completeFrame = _completeFrame;
- encodedImage.qp_ = _qp ? _qp.intValue : -1;
- encodedImage.content_type_ = (_contentType == RTCVideoContentTypeScreenshare) ?
- webrtc::VideoContentType::SCREENSHARE :
- webrtc::VideoContentType::UNSPECIFIED;
-
- return encodedImage;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCFileVideoCapturer.m b/sdk/objc/Framework/Classes/PeerConnection/RTCFileVideoCapturer.m
deleted file mode 100644
index 7e27bf2..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCFileVideoCapturer.m
+++ /dev/null
@@ -1,202 +0,0 @@
-/**
- * Copyright 2017 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCFileVideoCapturer.h"
-
-#import "WebRTC/RTCLogging.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-NSString *const kRTCFileVideoCapturerErrorDomain = @"org.webrtc.RTCFileVideoCapturer";
-
-typedef NS_ENUM(NSInteger, RTCFileVideoCapturerErrorCode) {
- RTCFileVideoCapturerErrorCode_CapturerRunning = 2000,
- RTCFileVideoCapturerErrorCode_FileNotFound
-};
-
-typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
- RTCFileVideoCapturerStatusNotInitialized,
- RTCFileVideoCapturerStatusStarted,
- RTCFileVideoCapturerStatusStopped
-};
-
-@implementation RTCFileVideoCapturer {
- AVAssetReader *_reader;
- AVAssetReaderTrackOutput *_outTrack;
- RTCFileVideoCapturerStatus _status;
- CMTime _lastPresentationTime;
- dispatch_queue_t _frameQueue;
- NSURL *_fileURL;
-}
-
-- (void)startCapturingFromFileNamed:(NSString *)nameOfFile
- onError:(RTCFileVideoCapturerErrorBlock)errorBlock {
- if (_status == RTCFileVideoCapturerStatusStarted) {
- NSError *error =
- [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
- code:RTCFileVideoCapturerErrorCode_CapturerRunning
- userInfo:@{NSUnderlyingErrorKey : @"Capturer has been started."}];
-
- errorBlock(error);
- return;
- } else {
- _status = RTCFileVideoCapturerStatusStarted;
- }
-
- dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
- NSString *pathForFile = [self pathForFileName:nameOfFile];
- if (!pathForFile) {
- NSString *errorString =
- [NSString stringWithFormat:@"File %@ not found in bundle", nameOfFile];
- NSError *error = [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
- code:RTCFileVideoCapturerErrorCode_FileNotFound
- userInfo:@{NSUnderlyingErrorKey : errorString}];
- errorBlock(error);
- return;
- }
-
- _lastPresentationTime = CMTimeMake(0, 0);
-
- _fileURL = [NSURL fileURLWithPath:pathForFile];
- [self setupReaderOnError:errorBlock];
- });
-}
-
-- (void)setupReaderOnError:(RTCFileVideoCapturerErrorBlock)errorBlock {
- AVURLAsset *asset = [AVURLAsset URLAssetWithURL:_fileURL options:nil];
-
- NSArray *allTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
- NSError *error = nil;
-
- _reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
- if (error) {
- errorBlock(error);
- return;
- }
-
- NSDictionary *options = @{
- (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
- };
- _outTrack =
- [[AVAssetReaderTrackOutput alloc] initWithTrack:allTracks.firstObject outputSettings:options];
- [_reader addOutput:_outTrack];
-
- [_reader startReading];
- RTCLog(@"File capturer started reading");
- [self readNextBuffer];
-}
-- (void)stopCapture {
- _status = RTCFileVideoCapturerStatusStopped;
- RTCLog(@"File capturer stopped.");
-}
-
-#pragma mark - Private
-
-- (nullable NSString *)pathForFileName:(NSString *)fileName {
- NSArray *nameComponents = [fileName componentsSeparatedByString:@"."];
- if (nameComponents.count != 2) {
- return nil;
- }
-
- NSString *path =
- [[NSBundle mainBundle] pathForResource:nameComponents[0] ofType:nameComponents[1]];
- return path;
-}
-
-- (dispatch_queue_t)frameQueue {
- if (!_frameQueue) {
- _frameQueue = dispatch_queue_create("org.webrtc.filecapturer.video", DISPATCH_QUEUE_SERIAL);
- dispatch_set_target_queue(_frameQueue,
- dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0));
- }
- return _frameQueue;
-}
-
-- (void)readNextBuffer {
- if (_status == RTCFileVideoCapturerStatusStopped) {
- [_reader cancelReading];
- _reader = nil;
- return;
- }
-
- if (_reader.status == AVAssetReaderStatusCompleted) {
- [_reader cancelReading];
- _reader = nil;
- [self setupReaderOnError:nil];
- return;
- }
-
- CMSampleBufferRef sampleBuffer = [_outTrack copyNextSampleBuffer];
- if (!sampleBuffer) {
- [self readNextBuffer];
- return;
- }
- if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
- !CMSampleBufferDataIsReady(sampleBuffer)) {
- CFRelease(sampleBuffer);
- [self readNextBuffer];
- return;
- }
-
- [self publishSampleBuffer:sampleBuffer];
-}
-
-- (void)publishSampleBuffer:(CMSampleBufferRef)sampleBuffer {
- CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
- Float64 presentationDifference =
- CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime));
- _lastPresentationTime = presentationTime;
- int64_t presentationDifferenceRound = lroundf(presentationDifference * NSEC_PER_SEC);
-
- __block dispatch_source_t timer = [self createStrictTimer];
- // Strict timer that will fire |presentationDifferenceRound| ns from now and never again.
- dispatch_source_set_timer(timer,
- dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound),
- DISPATCH_TIME_FOREVER,
- 0);
- dispatch_source_set_event_handler(timer, ^{
- dispatch_source_cancel(timer);
- timer = nil;
-
- CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
- if (!pixelBuffer) {
- CFRelease(sampleBuffer);
- dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
- [self readNextBuffer];
- });
- return;
- }
-
- RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
- NSTimeInterval timeStampSeconds = CACurrentMediaTime();
- int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
- RTCVideoFrame *videoFrame =
- [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer rotation:0 timeStampNs:timeStampNs];
- CFRelease(sampleBuffer);
-
- dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
- [self readNextBuffer];
- });
-
- [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
- });
- dispatch_activate(timer);
-}
-
-- (dispatch_source_t)createStrictTimer {
- dispatch_source_t timer = dispatch_source_create(
- DISPATCH_SOURCE_TYPE_TIMER, 0, DISPATCH_TIMER_STRICT, [self frameQueue]);
- return timer;
-}
-
-- (void)dealloc {
- [self stopCapture];
-}
-
-@end
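
An illustrative driver for the file capturer above. The `factory` parameter, the bundled file name, and the function name are assumptions; the capturer delivers frames to its delegate, typically an RTCVideoSource.

    #import "WebRTC/RTCFileVideoCapturer.h"
    #import "WebRTC/RTCPeerConnectionFactory.h"
    #import "WebRTC/RTCVideoSource.h"

    static RTCFileVideoCapturer *StartExampleFileCapture(RTCPeerConnectionFactory *factory) {
      // RTCVideoSource conforms to RTCVideoCapturerDelegate and receives the frames.
      RTCVideoSource *source = [factory videoSource];
      RTCFileVideoCapturer *capturer =
          [[RTCFileVideoCapturer alloc] initWithDelegate:source];
      [capturer startCapturingFromFileNamed:@"sample.mp4"
                                    onError:^(NSError *error) {
                                      NSLog(@"File capture failed: %@", error);
                                    }];
      return capturer;
    }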
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCH264ProfileLevelId.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCH264ProfileLevelId.mm
deleted file mode 100644
index 04a5689..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCH264ProfileLevelId.mm
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- *
- */
-
-#import "WebRTC/RTCVideoCodecH264.h"
-
-#include "media/base/h264_profile_level_id.h"
-
-@interface RTCH264ProfileLevelId ()
-
-@property(nonatomic, assign) RTCH264Profile profile;
-@property(nonatomic, assign) RTCH264Level level;
-@property(nonatomic, strong) NSString *hexString;
-
-@end
-
-@implementation RTCH264ProfileLevelId
-
-@synthesize profile = _profile;
-@synthesize level = _level;
-@synthesize hexString = _hexString;
-
-- (instancetype)initWithHexString:(NSString *)hexString {
- if (self = [super init]) {
- self.hexString = hexString;
-
- absl::optional<webrtc::H264::ProfileLevelId> profile_level_id =
- webrtc::H264::ParseProfileLevelId([hexString cStringUsingEncoding:NSUTF8StringEncoding]);
- if (profile_level_id.has_value()) {
- self.profile = static_cast<RTCH264Profile>(profile_level_id->profile);
- self.level = static_cast<RTCH264Level>(profile_level_id->level);
- }
- }
- return self;
-}
-
-- (instancetype)initWithProfile:(RTCH264Profile)profile level:(RTCH264Level)level {
- if (self = [super init]) {
- self.profile = profile;
- self.level = level;
-
- absl::optional<std::string> hex_string =
- webrtc::H264::ProfileLevelIdToString(webrtc::H264::ProfileLevelId(
- static_cast<webrtc::H264::Profile>(profile), static_cast<webrtc::H264::Level>(level)));
- self.hexString =
- [NSString stringWithCString:hex_string.value_or("").c_str() encoding:NSUTF8StringEncoding];
- }
- return self;
-}
-
-@end
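
A small sketch of the profile-level-id helper above; "42e01f" is the standard hex encoding of H.264 Constrained Baseline at level 3.1, used here purely as an example input.

    #import "WebRTC/RTCVideoCodecH264.h"

    RTCH264ProfileLevelId *plid =
        [[RTCH264ProfileLevelId alloc] initWithHexString:@"42e01f"];
    NSLog(@"profile=%lu level=%lu hex=%@",
          (unsigned long)plid.profile, (unsigned long)plid.level, plid.hexString);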
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCIceCandidate+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCIceCandidate+Private.h
deleted file mode 100644
index 47e934c..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCIceCandidate+Private.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCIceCandidate.h"
-
-#include <memory>
-
-#include "api/jsep.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCIceCandidate ()
-
-/**
- * The native IceCandidateInterface representation of this RTCIceCandidate
- * object. This is needed to pass to the underlying C++ APIs.
- */
-@property(nonatomic, readonly) std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate;
-
-/**
- * Initialize an RTCIceCandidate from a native IceCandidateInterface. No
- * ownership is taken of the native candidate.
- */
-- (instancetype)initWithNativeCandidate:(const webrtc::IceCandidateInterface *)candidate;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCIceCandidate.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCIceCandidate.mm
deleted file mode 100644
index 0e2cc8a..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCIceCandidate.mm
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCIceCandidate+Private.h"
-
-#include <memory>
-
-#import "NSString+StdString.h"
-#import "WebRTC/RTCLogging.h"
-
-@implementation RTCIceCandidate
-
-@synthesize sdpMid = _sdpMid;
-@synthesize sdpMLineIndex = _sdpMLineIndex;
-@synthesize sdp = _sdp;
-@synthesize serverUrl = _serverUrl;
-
-- (instancetype)initWithSdp:(NSString *)sdp
- sdpMLineIndex:(int)sdpMLineIndex
- sdpMid:(NSString *)sdpMid {
- NSParameterAssert(sdp.length);
- if (self = [super init]) {
- _sdpMid = [sdpMid copy];
- _sdpMLineIndex = sdpMLineIndex;
- _sdp = [sdp copy];
- }
- return self;
-}
-
-- (NSString *)description {
- return [NSString stringWithFormat:@"RTCIceCandidate:\n%@\n%d\n%@\n%@",
- _sdpMid,
- _sdpMLineIndex,
- _sdp,
- _serverUrl];
-}
-
-#pragma mark - Private
-
-- (instancetype)initWithNativeCandidate:
- (const webrtc::IceCandidateInterface *)candidate {
- NSParameterAssert(candidate);
- std::string sdp;
- candidate->ToString(&sdp);
-
- RTCIceCandidate *rtcCandidate =
- [self initWithSdp:[NSString stringForStdString:sdp]
- sdpMLineIndex:candidate->sdp_mline_index()
- sdpMid:[NSString stringForStdString:candidate->sdp_mid()]];
- rtcCandidate->_serverUrl = [NSString stringForStdString:candidate->server_url()];
- return rtcCandidate;
-}
-
-- (std::unique_ptr<webrtc::IceCandidateInterface>)nativeCandidate {
- webrtc::SdpParseError error;
-
- webrtc::IceCandidateInterface *candidate = webrtc::CreateIceCandidate(
- _sdpMid.stdString, _sdpMLineIndex, _sdp.stdString, &error);
-
- if (!candidate) {
- RTCLog(@"Failed to create ICE candidate: %s\nline: %s",
- error.description.c_str(),
- error.line.c_str());
- }
-
- return std::unique_ptr<webrtc::IceCandidateInterface>(candidate);
-}
-
-@end
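
A sketch of how a remote candidate received over signaling is wrapped by the class above and handed to the peer connection; `pc`, the candidate line, the m-line index, and the mid are example values supplied by the application.

    #import "WebRTC/RTCIceCandidate.h"
    #import "WebRTC/RTCPeerConnection.h"

    static void AddExampleRemoteCandidate(RTCPeerConnection *pc) {
      // These fields normally arrive over the application's signaling channel.
      RTCIceCandidate *candidate = [[RTCIceCandidate alloc]
              initWithSdp:@"candidate:1 1 udp 2122260223 192.0.2.1 54321 typ host"
            sdpMLineIndex:0
                   sdpMid:@"audio"];
      [pc addIceCandidate:candidate];
    }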
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCIceServer+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCIceServer+Private.h
deleted file mode 100644
index 0f6b7b7..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCIceServer+Private.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCIceServer.h"
-
-#include "api/peerconnectioninterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCIceServer ()
-
-/**
- * IceServer struct representation of this RTCIceServer object's data.
- * This is needed to pass to the underlying C++ APIs.
- */
-@property(nonatomic, readonly) webrtc::PeerConnectionInterface::IceServer nativeServer;
-
-/** Initialize an RTCIceServer from a native IceServer. */
-- (instancetype)initWithNativeServer:(webrtc::PeerConnectionInterface::IceServer)nativeServer;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCIceServer.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCIceServer.mm
deleted file mode 100644
index eeb1177..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCIceServer.mm
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCIceServer+Private.h"
-
-#import "NSString+StdString.h"
-
-@implementation RTCIceServer
-
-@synthesize urlStrings = _urlStrings;
-@synthesize username = _username;
-@synthesize credential = _credential;
-@synthesize tlsCertPolicy = _tlsCertPolicy;
-@synthesize hostname = _hostname;
-@synthesize tlsAlpnProtocols = _tlsAlpnProtocols;
-@synthesize tlsEllipticCurves = _tlsEllipticCurves;
-
-- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings {
- return [self initWithURLStrings:urlStrings
- username:nil
- credential:nil];
-}
-
-- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
- username:(NSString *)username
- credential:(NSString *)credential {
- return [self initWithURLStrings:urlStrings
- username:username
- credential:credential
- tlsCertPolicy:RTCTlsCertPolicySecure];
-}
-
-- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
- username:(NSString *)username
- credential:(NSString *)credential
- tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy {
- return [self initWithURLStrings:urlStrings
- username:username
- credential:credential
- tlsCertPolicy:tlsCertPolicy
- hostname:nil];
-}
-
-- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
- username:(NSString *)username
- credential:(NSString *)credential
- tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
- hostname:(NSString *)hostname {
- return [self initWithURLStrings:urlStrings
- username:username
- credential:credential
- tlsCertPolicy:tlsCertPolicy
- hostname:hostname
- tlsAlpnProtocols:[NSArray array]];
-}
-
-- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
- username:(NSString *)username
- credential:(NSString *)credential
- tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
- hostname:(NSString *)hostname
- tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols {
- return [self initWithURLStrings:urlStrings
- username:username
- credential:credential
- tlsCertPolicy:tlsCertPolicy
- hostname:hostname
- tlsAlpnProtocols:tlsAlpnProtocols
- tlsEllipticCurves:[NSArray array]];
-}
-
-- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
- username:(NSString *)username
- credential:(NSString *)credential
- tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
- hostname:(NSString *)hostname
- tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols
- tlsEllipticCurves:(NSArray<NSString *> *)tlsEllipticCurves {
- NSParameterAssert(urlStrings.count);
- if (self = [super init]) {
- _urlStrings = [[NSArray alloc] initWithArray:urlStrings copyItems:YES];
- _username = [username copy];
- _credential = [credential copy];
- _tlsCertPolicy = tlsCertPolicy;
- _hostname = [hostname copy];
- _tlsAlpnProtocols = [[NSArray alloc] initWithArray:tlsAlpnProtocols copyItems:YES];
- _tlsEllipticCurves = [[NSArray alloc] initWithArray:tlsEllipticCurves copyItems:YES];
- }
- return self;
-}
-
-- (NSString *)description {
- return [NSString stringWithFormat:@"RTCIceServer:\n%@\n%@\n%@\n%@\n%@\n%@\n%@",
- _urlStrings,
- _username,
- _credential,
- [self stringForTlsCertPolicy:_tlsCertPolicy],
- _hostname,
- _tlsAlpnProtocols,
- _tlsEllipticCurves];
-}
-
-#pragma mark - Private
-
-- (NSString *)stringForTlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy {
- switch (tlsCertPolicy) {
- case RTCTlsCertPolicySecure:
- return @"RTCTlsCertPolicySecure";
- case RTCTlsCertPolicyInsecureNoCheck:
- return @"RTCTlsCertPolicyInsecureNoCheck";
- }
-}
-
-- (webrtc::PeerConnectionInterface::IceServer)nativeServer {
- __block webrtc::PeerConnectionInterface::IceServer iceServer;
-
- iceServer.username = [NSString stdStringForString:_username];
- iceServer.password = [NSString stdStringForString:_credential];
- iceServer.hostname = [NSString stdStringForString:_hostname];
-
- [_tlsAlpnProtocols enumerateObjectsUsingBlock:^(NSString *proto, NSUInteger idx, BOOL *stop) {
- iceServer.tls_alpn_protocols.push_back(proto.stdString);
- }];
-
- [_tlsEllipticCurves enumerateObjectsUsingBlock:^(NSString *curve, NSUInteger idx, BOOL *stop) {
- iceServer.tls_elliptic_curves.push_back(curve.stdString);
- }];
-
- [_urlStrings enumerateObjectsUsingBlock:^(NSString *url,
- NSUInteger idx,
- BOOL *stop) {
- iceServer.urls.push_back(url.stdString);
- }];
-
- switch (_tlsCertPolicy) {
- case RTCTlsCertPolicySecure:
- iceServer.tls_cert_policy =
- webrtc::PeerConnectionInterface::kTlsCertPolicySecure;
- break;
- case RTCTlsCertPolicyInsecureNoCheck:
- iceServer.tls_cert_policy =
- webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck;
- break;
- }
- return iceServer;
-}
-
-- (instancetype)initWithNativeServer:
- (webrtc::PeerConnectionInterface::IceServer)nativeServer {
- NSMutableArray *urls =
- [NSMutableArray arrayWithCapacity:nativeServer.urls.size()];
- for (auto const &url : nativeServer.urls) {
- [urls addObject:[NSString stringForStdString:url]];
- }
- NSString *username = [NSString stringForStdString:nativeServer.username];
- NSString *credential = [NSString stringForStdString:nativeServer.password];
- NSString *hostname = [NSString stringForStdString:nativeServer.hostname];
- NSMutableArray *tlsAlpnProtocols =
- [NSMutableArray arrayWithCapacity:nativeServer.tls_alpn_protocols.size()];
- for (auto const &proto : nativeServer.tls_alpn_protocols) {
- [tlsAlpnProtocols addObject:[NSString stringForStdString:proto]];
- }
- NSMutableArray *tlsEllipticCurves =
- [NSMutableArray arrayWithCapacity:nativeServer.tls_elliptic_curves.size()];
- for (auto const &curve : nativeServer.tls_elliptic_curves) {
- [tlsEllipticCurves addObject:[NSString stringForStdString:curve]];
- }
- RTCTlsCertPolicy tlsCertPolicy;
-
- switch (nativeServer.tls_cert_policy) {
- case webrtc::PeerConnectionInterface::kTlsCertPolicySecure:
- tlsCertPolicy = RTCTlsCertPolicySecure;
- break;
- case webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck:
- tlsCertPolicy = RTCTlsCertPolicyInsecureNoCheck;
- break;
- }
-
- self = [self initWithURLStrings:urls
- username:username
- credential:credential
- tlsCertPolicy:tlsCertPolicy
- hostname:hostname
- tlsAlpnProtocols:tlsAlpnProtocols
- tlsEllipticCurves:tlsEllipticCurves];
- return self;
-}
-
-@end
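
A short example of constructing the ICE server wrapper above with TURN credentials; the URL, username, and credential are placeholder values. The urls/username/credential land in webrtc::PeerConnectionInterface::IceServer via -nativeServer above.

    #import "WebRTC/RTCIceServer.h"

    static RTCIceServer *ExampleTurnServer(void) {
      return [[RTCIceServer alloc]
          initWithURLStrings:@[ @"turn:turn.example.org:3478?transport=udp" ]
                    username:@"user"
                  credential:@"secret"];
    }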
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCIntervalRange+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCIntervalRange+Private.h
deleted file mode 100644
index 690daf3..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCIntervalRange+Private.h
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCIntervalRange.h"
-
-#include "rtc_base/timeutils.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCIntervalRange ()
-
-@property(nonatomic, readonly) std::unique_ptr<rtc::IntervalRange> nativeIntervalRange;
-
-- (instancetype)initWithNativeIntervalRange:(const rtc::IntervalRange &)config;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCIntervalRange.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCIntervalRange.mm
deleted file mode 100644
index 0a861ea..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCIntervalRange.mm
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCIntervalRange+Private.h"
-
-#include "rtc_base/checks.h"
-
-@implementation RTCIntervalRange
-
-@synthesize min = _min;
-@synthesize max = _max;
-
-- (instancetype)init {
- return [self initWithMin:0 max:0];
-}
-
-- (instancetype)initWithMin:(NSInteger)min
- max:(NSInteger)max {
- RTC_DCHECK_LE(min, max);
- if (self = [super init]) {
- _min = min;
- _max = max;
- }
- return self;
-}
-
-- (instancetype)initWithNativeIntervalRange:(const rtc::IntervalRange &)config {
- return [self initWithMin:config.min() max:config.max()];
-}
-
-- (NSString *)description {
- return [NSString stringWithFormat:@"[%ld, %ld]", (long)_min, (long)_max];
-}
-
-#pragma mark - Private
-
-- (std::unique_ptr<rtc::IntervalRange>)nativeIntervalRange {
- std::unique_ptr<rtc::IntervalRange> nativeIntervalRange(
- new rtc::IntervalRange((int)_min, (int)_max));
- return nativeIntervalRange;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCLegacyStatsReport+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCLegacyStatsReport+Private.h
deleted file mode 100644
index 111fc2b..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCLegacyStatsReport+Private.h
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCLegacyStatsReport.h"
-
-#include "api/statstypes.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCLegacyStatsReport ()
-
-/** Initialize an RTCLegacyStatsReport object from a native StatsReport. */
-- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCLegacyStatsReport.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCLegacyStatsReport.mm
deleted file mode 100644
index 89b205b..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCLegacyStatsReport.mm
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCLegacyStatsReport+Private.h"
-
-#import "NSString+StdString.h"
-#import "WebRTC/RTCLogging.h"
-
-#include "rtc_base/checks.h"
-
-@implementation RTCLegacyStatsReport
-
-@synthesize timestamp = _timestamp;
-@synthesize type = _type;
-@synthesize reportId = _reportId;
-@synthesize values = _values;
-
-- (NSString *)description {
- return [NSString stringWithFormat:@"RTCLegacyStatsReport:\n%@\n%@\n%f\n%@",
- _reportId,
- _type,
- _timestamp,
- _values];
-}
-
-#pragma mark - Private
-
-- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport {
- if (self = [super init]) {
- _timestamp = nativeReport.timestamp();
- _type = [NSString stringForStdString:nativeReport.TypeToString()];
- _reportId = [NSString stringForStdString:
- nativeReport.id()->ToString()];
-
- NSUInteger capacity = nativeReport.values().size();
- NSMutableDictionary *values =
- [NSMutableDictionary dictionaryWithCapacity:capacity];
- for (auto const &valuePair : nativeReport.values()) {
- NSString *key = [NSString stringForStdString:
- valuePair.second->display_name()];
- NSString *value = [NSString stringForStdString:
- valuePair.second->ToString()];
-
- // Not expecting duplicate keys.
- RTC_DCHECK(![values objectForKey:key]);
- [values setObject:value forKey:key];
- }
- _values = values;
- }
- return self;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaConstraints+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCMediaConstraints+Private.h
deleted file mode 100644
index 1c8eb28..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaConstraints+Private.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCMediaConstraints.h"
-
-#include <memory>
-
-#include "api/mediaconstraintsinterface.h"
-
-namespace webrtc {
-
-class MediaConstraints : public MediaConstraintsInterface {
- public:
- ~MediaConstraints() override;
- MediaConstraints();
- MediaConstraints(const MediaConstraintsInterface::Constraints& mandatory,
- const MediaConstraintsInterface::Constraints& optional);
- const Constraints& GetMandatory() const override;
- const Constraints& GetOptional() const override;
-
- private:
- MediaConstraintsInterface::Constraints mandatory_;
- MediaConstraintsInterface::Constraints optional_;
-};
-
-} // namespace webrtc
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCMediaConstraints ()
-
-/**
- * A MediaConstraints representation of this RTCMediaConstraints object. This is
- * needed to pass to the underlying C++ APIs.
- */
-- (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints;
-
-/** Return a native Constraints object representing these constraints */
-+ (webrtc::MediaConstraintsInterface::Constraints)nativeConstraintsForConstraints:
- (NSDictionary<NSString*, NSString*>*)constraints;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaConstraints.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCMediaConstraints.mm
deleted file mode 100644
index 34e5899..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaConstraints.mm
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCMediaConstraints+Private.h"
-
-#import "NSString+StdString.h"
-
-#include <memory>
-
-NSString * const kRTCMediaConstraintsMinAspectRatio =
- @(webrtc::MediaConstraintsInterface::kMinAspectRatio);
-NSString * const kRTCMediaConstraintsMaxAspectRatio =
- @(webrtc::MediaConstraintsInterface::kMaxAspectRatio);
-NSString * const kRTCMediaConstraintsMinWidth =
- @(webrtc::MediaConstraintsInterface::kMinWidth);
-NSString * const kRTCMediaConstraintsMaxWidth =
- @(webrtc::MediaConstraintsInterface::kMaxWidth);
-NSString * const kRTCMediaConstraintsMinHeight =
- @(webrtc::MediaConstraintsInterface::kMinHeight);
-NSString * const kRTCMediaConstraintsMaxHeight =
- @(webrtc::MediaConstraintsInterface::kMaxHeight);
-NSString * const kRTCMediaConstraintsMinFrameRate =
- @(webrtc::MediaConstraintsInterface::kMinFrameRate);
-NSString * const kRTCMediaConstraintsMaxFrameRate =
- @(webrtc::MediaConstraintsInterface::kMaxFrameRate);
-NSString * const kRTCMediaConstraintsAudioNetworkAdaptorConfig =
- @(webrtc::MediaConstraintsInterface::kAudioNetworkAdaptorConfig);
-
-NSString * const kRTCMediaConstraintsIceRestart =
- @(webrtc::MediaConstraintsInterface::kIceRestart);
-NSString * const kRTCMediaConstraintsOfferToReceiveAudio =
- @(webrtc::MediaConstraintsInterface::kOfferToReceiveAudio);
-NSString * const kRTCMediaConstraintsOfferToReceiveVideo =
- @(webrtc::MediaConstraintsInterface::kOfferToReceiveVideo);
-NSString * const kRTCMediaConstraintsVoiceActivityDetection =
- @(webrtc::MediaConstraintsInterface::kVoiceActivityDetection);
-
-NSString * const kRTCMediaConstraintsValueTrue =
- @(webrtc::MediaConstraintsInterface::kValueTrue);
-NSString * const kRTCMediaConstraintsValueFalse =
- @(webrtc::MediaConstraintsInterface::kValueFalse);
-
-namespace webrtc {
-
-MediaConstraints::~MediaConstraints() {}
-
-MediaConstraints::MediaConstraints() {}
-
-MediaConstraints::MediaConstraints(
- const MediaConstraintsInterface::Constraints& mandatory,
- const MediaConstraintsInterface::Constraints& optional)
- : mandatory_(mandatory), optional_(optional) {}
-
-const MediaConstraintsInterface::Constraints&
-MediaConstraints::GetMandatory() const {
- return mandatory_;
-}
-
-const MediaConstraintsInterface::Constraints&
-MediaConstraints::GetOptional() const {
- return optional_;
-}
-
-} // namespace webrtc
-
-
-@implementation RTCMediaConstraints {
- NSDictionary<NSString *, NSString *> *_mandatory;
- NSDictionary<NSString *, NSString *> *_optional;
-}
-
-- (instancetype)initWithMandatoryConstraints:
- (NSDictionary<NSString *, NSString *> *)mandatory
- optionalConstraints:
- (NSDictionary<NSString *, NSString *> *)optional {
- if (self = [super init]) {
- _mandatory = [[NSDictionary alloc] initWithDictionary:mandatory
- copyItems:YES];
- _optional = [[NSDictionary alloc] initWithDictionary:optional
- copyItems:YES];
- }
- return self;
-}
-
-- (NSString *)description {
- return [NSString stringWithFormat:@"RTCMediaConstraints:\n%@\n%@",
- _mandatory,
- _optional];
-}
-
-#pragma mark - Private
-
-- (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints {
- webrtc::MediaConstraintsInterface::Constraints mandatory =
- [[self class] nativeConstraintsForConstraints:_mandatory];
- webrtc::MediaConstraintsInterface::Constraints optional =
- [[self class] nativeConstraintsForConstraints:_optional];
-
- webrtc::MediaConstraints *nativeConstraints =
- new webrtc::MediaConstraints(mandatory, optional);
- return std::unique_ptr<webrtc::MediaConstraints>(nativeConstraints);
-}
-
-+ (webrtc::MediaConstraintsInterface::Constraints)
- nativeConstraintsForConstraints:
- (NSDictionary<NSString *, NSString *> *)constraints {
- webrtc::MediaConstraintsInterface::Constraints nativeConstraints;
- for (NSString *key in constraints) {
- NSAssert([key isKindOfClass:[NSString class]],
- @"%@ is not an NSString.", key);
- NSString *value = [constraints objectForKey:key];
- NSAssert([value isKindOfClass:[NSString class]],
- @"%@ is not an NSString.", value);
- if ([kRTCMediaConstraintsAudioNetworkAdaptorConfig isEqualToString:key]) {
- // This value is base64 encoded.
- NSData *charData = [[NSData alloc] initWithBase64EncodedString:value options:0];
- std::string configValue =
- std::string(reinterpret_cast<const char *>(charData.bytes), charData.length);
- nativeConstraints.push_back(webrtc::MediaConstraintsInterface::Constraint(
- key.stdString, configValue));
- } else {
- nativeConstraints.push_back(webrtc::MediaConstraintsInterface::Constraint(
- key.stdString, value.stdString));
- }
- }
- return nativeConstraints;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaSource+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCMediaSource+Private.h
deleted file mode 100644
index 9883faf..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaSource+Private.h
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCMediaSource.h"
-
-#include "api/mediastreaminterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@class RTCPeerConnectionFactory;
-
-typedef NS_ENUM(NSInteger, RTCMediaSourceType) {
- RTCMediaSourceTypeAudio,
- RTCMediaSourceTypeVideo,
-};
-
-@interface RTCMediaSource ()
-
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::MediaSourceInterface> nativeMediaSource;
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
- type:(RTCMediaSourceType)type NS_DESIGNATED_INITIALIZER;
-
-+ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:(RTCSourceState)state;
-
-+ (RTCSourceState)sourceStateForNativeState:(webrtc::MediaSourceInterface::SourceState)nativeState;
-
-+ (NSString *)stringForState:(RTCSourceState)state;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaSource.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCMediaSource.mm
deleted file mode 100644
index 6ec41c3..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaSource.mm
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCMediaSource+Private.h"
-
-#include "rtc_base/checks.h"
-
-@implementation RTCMediaSource {
- RTCPeerConnectionFactory *_factory;
- RTCMediaSourceType _type;
-}
-
-@synthesize nativeMediaSource = _nativeMediaSource;
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
- type:(RTCMediaSourceType)type {
- RTC_DCHECK(factory);
- RTC_DCHECK(nativeMediaSource);
- if (self = [super init]) {
- _factory = factory;
- _nativeMediaSource = nativeMediaSource;
- _type = type;
- }
- return self;
-}
-
-- (RTCSourceState)state {
- return [[self class] sourceStateForNativeState:_nativeMediaSource->state()];
-}
-
-#pragma mark - Private
-
-+ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:
- (RTCSourceState)state {
- switch (state) {
- case RTCSourceStateInitializing:
- return webrtc::MediaSourceInterface::kInitializing;
- case RTCSourceStateLive:
- return webrtc::MediaSourceInterface::kLive;
- case RTCSourceStateEnded:
- return webrtc::MediaSourceInterface::kEnded;
- case RTCSourceStateMuted:
- return webrtc::MediaSourceInterface::kMuted;
- }
-}
-
-+ (RTCSourceState)sourceStateForNativeState:
- (webrtc::MediaSourceInterface::SourceState)nativeState {
- switch (nativeState) {
- case webrtc::MediaSourceInterface::kInitializing:
- return RTCSourceStateInitializing;
- case webrtc::MediaSourceInterface::kLive:
- return RTCSourceStateLive;
- case webrtc::MediaSourceInterface::kEnded:
- return RTCSourceStateEnded;
- case webrtc::MediaSourceInterface::kMuted:
- return RTCSourceStateMuted;
- }
-}
-
-+ (NSString *)stringForState:(RTCSourceState)state {
- switch (state) {
- case RTCSourceStateInitializing:
- return @"Initializing";
- case RTCSourceStateLive:
- return @"Live";
- case RTCSourceStateEnded:
- return @"Ended";
- case RTCSourceStateMuted:
- return @"Muted";
- }
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStream+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStream+Private.h
deleted file mode 100644
index 3986066..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStream+Private.h
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCMediaStream.h"
-
-#include "api/mediastreaminterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCMediaStream ()
-
-/**
- * MediaStreamInterface representation of this RTCMediaStream object. This is
- * needed to pass to the underlying C++ APIs.
- */
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::MediaStreamInterface> nativeMediaStream;
-
-/** Initialize an RTCMediaStream with an id. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory streamId:(NSString *)streamId;
-
-/** Initialize an RTCMediaStream from a native MediaStreamInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeMediaStream:(rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStream.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStream.mm
deleted file mode 100644
index c8bcfd9..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStream.mm
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCMediaStream+Private.h"
-
-#include <vector>
-
-#import "NSString+StdString.h"
-#import "RTCAudioTrack+Private.h"
-#import "RTCMediaStreamTrack+Private.h"
-#import "RTCPeerConnectionFactory+Private.h"
-#import "RTCVideoTrack+Private.h"
-
-@implementation RTCMediaStream {
- RTCPeerConnectionFactory *_factory;
- NSMutableArray *_audioTracks;
- NSMutableArray *_videoTracks;
- rtc::scoped_refptr<webrtc::MediaStreamInterface> _nativeMediaStream;
-}
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- streamId:(NSString *)streamId {
- NSParameterAssert(factory);
- NSParameterAssert(streamId.length);
- std::string nativeId = [NSString stdStringForString:streamId];
- rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
- factory.nativeFactory->CreateLocalMediaStream(nativeId);
- return [self initWithFactory:factory nativeMediaStream:stream];
-}
-
-- (NSArray<RTCAudioTrack *> *)audioTracks {
- return [_audioTracks copy];
-}
-
-- (NSArray<RTCVideoTrack *> *)videoTracks {
- return [_videoTracks copy];
-}
-
-- (NSString *)streamId {
- return [NSString stringForStdString:_nativeMediaStream->id()];
-}
-
-- (void)addAudioTrack:(RTCAudioTrack *)audioTrack {
- if (_nativeMediaStream->AddTrack(audioTrack.nativeAudioTrack)) {
- [_audioTracks addObject:audioTrack];
- }
-}
-
-- (void)addVideoTrack:(RTCVideoTrack *)videoTrack {
- if (_nativeMediaStream->AddTrack(videoTrack.nativeVideoTrack)) {
- [_videoTracks addObject:videoTrack];
- }
-}
-
-- (void)removeAudioTrack:(RTCAudioTrack *)audioTrack {
- NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:audioTrack];
- NSAssert(index != NSNotFound,
- @"|removeAudioTrack| called on unexpected RTCAudioTrack");
- if (index != NSNotFound &&
- _nativeMediaStream->RemoveTrack(audioTrack.nativeAudioTrack)) {
- [_audioTracks removeObjectAtIndex:index];
- }
-}
-
-- (void)removeVideoTrack:(RTCVideoTrack *)videoTrack {
- NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:videoTrack];
- NSAssert(index != NSNotFound,
- @"|removeVideoTrack| called on unexpected RTCVideoTrack");
- if (index != NSNotFound &&
- _nativeMediaStream->RemoveTrack(videoTrack.nativeVideoTrack)) {
- [_videoTracks removeObjectAtIndex:index];
- }
-}
-
-- (NSString *)description {
- return [NSString stringWithFormat:@"RTCMediaStream:\n%@\nA=%lu\nV=%lu",
- self.streamId,
- (unsigned long)self.audioTracks.count,
- (unsigned long)self.videoTracks.count];
-}
-
-#pragma mark - Private
-
-- (rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
- return _nativeMediaStream;
-}
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeMediaStream:
- (rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
- NSParameterAssert(nativeMediaStream);
- if (self = [super init]) {
- _factory = factory;
-
- webrtc::AudioTrackVector audioTracks = nativeMediaStream->GetAudioTracks();
- webrtc::VideoTrackVector videoTracks = nativeMediaStream->GetVideoTracks();
-
- _audioTracks = [NSMutableArray arrayWithCapacity:audioTracks.size()];
- _videoTracks = [NSMutableArray arrayWithCapacity:videoTracks.size()];
- _nativeMediaStream = nativeMediaStream;
-
- for (auto &track : audioTracks) {
- RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeAudio;
- RTCAudioTrack *audioTrack =
- [[RTCAudioTrack alloc] initWithFactory:_factory nativeTrack:track type:type];
- [_audioTracks addObject:audioTrack];
- }
-
- for (auto &track : videoTracks) {
- RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeVideo;
- RTCVideoTrack *videoTrack =
- [[RTCVideoTrack alloc] initWithFactory:_factory nativeTrack:track type:type];
- [_videoTracks addObject:videoTrack];
- }
- }
- return self;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStreamTrack+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStreamTrack+Private.h
deleted file mode 100644
index 6effeaa..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStreamTrack+Private.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCMediaStreamTrack.h"
-
-#include "api/mediastreaminterface.h"
-
-typedef NS_ENUM(NSInteger, RTCMediaStreamTrackType) {
- RTCMediaStreamTrackTypeAudio,
- RTCMediaStreamTrackTypeVideo,
-};
-
-NS_ASSUME_NONNULL_BEGIN
-
-@class RTCPeerConnectionFactory;
-
-@interface RTCMediaStreamTrack ()
-
-@property(nonatomic, readonly) RTCPeerConnectionFactory *factory;
-
-/**
- * The native MediaStreamTrackInterface passed in or created during
- * construction.
- */
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack;
-
-/**
- * Initialize an RTCMediaStreamTrack from a native MediaStreamTrackInterface.
- */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
- type:(RTCMediaStreamTrackType)type NS_DESIGNATED_INITIALIZER;
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack;
-
-- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track;
-
-+ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
- (RTCMediaStreamTrackState)state;
-
-+ (RTCMediaStreamTrackState)trackStateForNativeState:
- (webrtc::MediaStreamTrackInterface::TrackState)nativeState;
-
-+ (NSString *)stringForState:(RTCMediaStreamTrackState)state;
-
-+ (RTCMediaStreamTrack *)mediaTrackForNativeTrack:
- (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
- factory:(RTCPeerConnectionFactory *)factory;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStreamTrack.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStreamTrack.mm
deleted file mode 100644
index 07bb009..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCMediaStreamTrack.mm
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCAudioTrack+Private.h"
-#import "RTCMediaStreamTrack+Private.h"
-#import "RTCVideoTrack+Private.h"
-
-#import "NSString+StdString.h"
-
-NSString * const kRTCMediaStreamTrackKindAudio =
- @(webrtc::MediaStreamTrackInterface::kAudioKind);
-NSString * const kRTCMediaStreamTrackKindVideo =
- @(webrtc::MediaStreamTrackInterface::kVideoKind);
-
-@implementation RTCMediaStreamTrack {
- RTCPeerConnectionFactory *_factory;
- rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> _nativeTrack;
- RTCMediaStreamTrackType _type;
-}
-
-- (NSString *)kind {
- return [NSString stringForStdString:_nativeTrack->kind()];
-}
-
-- (NSString *)trackId {
- return [NSString stringForStdString:_nativeTrack->id()];
-}
-
-- (BOOL)isEnabled {
- return _nativeTrack->enabled();
-}
-
-- (void)setIsEnabled:(BOOL)isEnabled {
- _nativeTrack->set_enabled(isEnabled);
-}
-
-- (RTCMediaStreamTrackState)readyState {
- return [[self class] trackStateForNativeState:_nativeTrack->state()];
-}
-
-- (NSString *)description {
- NSString *readyState = [[self class] stringForState:self.readyState];
- return [NSString stringWithFormat:@"RTCMediaStreamTrack:\n%@\n%@\n%@\n%@",
- self.kind,
- self.trackId,
- self.isEnabled ? @"enabled" : @"disabled",
- readyState];
-}
-
-- (BOOL)isEqual:(id)object {
- if (self == object) {
- return YES;
- }
- if (![object isMemberOfClass:[self class]]) {
- return NO;
- }
- return [self isEqualToTrack:(RTCMediaStreamTrack *)object];
-}
-
-- (NSUInteger)hash {
- return (NSUInteger)_nativeTrack.get();
-}
-
-#pragma mark - Private
-
-- (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
- return _nativeTrack;
-}
-
-@synthesize factory = _factory;
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
- type:(RTCMediaStreamTrackType)type {
- NSParameterAssert(nativeTrack);
- NSParameterAssert(factory);
- if (self = [super init]) {
- _factory = factory;
- _nativeTrack = nativeTrack;
- _type = type;
- }
- return self;
-}
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
- NSParameterAssert(nativeTrack);
- if (nativeTrack->kind() ==
- std::string(webrtc::MediaStreamTrackInterface::kAudioKind)) {
- return [self initWithFactory:factory nativeTrack:nativeTrack type:RTCMediaStreamTrackTypeAudio];
- }
- if (nativeTrack->kind() ==
- std::string(webrtc::MediaStreamTrackInterface::kVideoKind)) {
- return [self initWithFactory:factory nativeTrack:nativeTrack type:RTCMediaStreamTrackTypeVideo];
- }
- return nil;
-}
-
-- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track {
- if (!track) {
- return NO;
- }
- return _nativeTrack == track.nativeTrack;
-}
-
-+ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
- (RTCMediaStreamTrackState)state {
- switch (state) {
- case RTCMediaStreamTrackStateLive:
- return webrtc::MediaStreamTrackInterface::kLive;
- case RTCMediaStreamTrackStateEnded:
- return webrtc::MediaStreamTrackInterface::kEnded;
- }
-}
-
-+ (RTCMediaStreamTrackState)trackStateForNativeState:
- (webrtc::MediaStreamTrackInterface::TrackState)nativeState {
- switch (nativeState) {
- case webrtc::MediaStreamTrackInterface::kLive:
- return RTCMediaStreamTrackStateLive;
- case webrtc::MediaStreamTrackInterface::kEnded:
- return RTCMediaStreamTrackStateEnded;
- }
-}
-
-+ (NSString *)stringForState:(RTCMediaStreamTrackState)state {
- switch (state) {
- case RTCMediaStreamTrackStateLive:
- return @"Live";
- case RTCMediaStreamTrackStateEnded:
- return @"Ended";
- }
-}
-
-+ (RTCMediaStreamTrack *)mediaTrackForNativeTrack:
- (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
- factory:(RTCPeerConnectionFactory *)factory {
- NSParameterAssert(nativeTrack);
- NSParameterAssert(factory);
- if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kAudioKind) {
- return [[RTCAudioTrack alloc] initWithFactory:factory
- nativeTrack:nativeTrack
- type:RTCMediaStreamTrackTypeAudio];
- } else if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
- return [[RTCVideoTrack alloc] initWithFactory:factory
- nativeTrack:nativeTrack
- type:RTCMediaStreamTrackTypeVideo];
- } else {
- return [[RTCMediaStreamTrack alloc] initWithFactory:factory nativeTrack:nativeTrack];
- }
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCMetrics.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCMetrics.mm
deleted file mode 100644
index 2e80216..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCMetrics.mm
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCMetrics.h"
-
-#import "RTCMetricsSampleInfo+Private.h"
-
-void RTCEnableMetrics(void) {
- webrtc::metrics::Enable();
-}
-
-NSArray<RTCMetricsSampleInfo *> *RTCGetAndResetMetrics(void) {
- std::map<std::string, std::unique_ptr<webrtc::metrics::SampleInfo>>
- histograms;
- webrtc::metrics::GetAndReset(&histograms);
-
- NSMutableArray *metrics =
- [NSMutableArray arrayWithCapacity:histograms.size()];
- for (auto const &histogram : histograms) {
- RTCMetricsSampleInfo *metric = [[RTCMetricsSampleInfo alloc]
- initWithNativeSampleInfo:*histogram.second];
- [metrics addObject:metric];
- }
- return metrics;
-}
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCMetricsSampleInfo+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCMetricsSampleInfo+Private.h
deleted file mode 100644
index ceaca8f..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCMetricsSampleInfo+Private.h
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCMetricsSampleInfo.h"
-
-// Adding 'nogncheck' to disable the gn include headers check.
-// We don't want to depend on 'system_wrappers:metrics_default' because
-// clients should be able to provide their own implementation.
-#include "system_wrappers/include/metrics_default.h" // nogncheck
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCMetricsSampleInfo ()
-
-/** Initialize an RTCMetricsSampleInfo object from native SampleInfo. */
-- (instancetype)initWithNativeSampleInfo:(const webrtc::metrics::SampleInfo &)info;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCMetricsSampleInfo.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCMetricsSampleInfo.mm
deleted file mode 100644
index 628ba9e..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCMetricsSampleInfo.mm
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCMetricsSampleInfo+Private.h"
-
-#import "NSString+StdString.h"
-
-@implementation RTCMetricsSampleInfo
-
-@synthesize name = _name;
-@synthesize min = _min;
-@synthesize max = _max;
-@synthesize bucketCount = _bucketCount;
-@synthesize samples = _samples;
-
-#pragma mark - Private
-
-- (instancetype)initWithNativeSampleInfo:
- (const webrtc::metrics::SampleInfo &)info {
- if (self = [super init]) {
- _name = [NSString stringForStdString:info.name];
- _min = info.min;
- _max = info.max;
- _bucketCount = info.bucket_count;
-
- NSMutableDictionary *samples =
- [NSMutableDictionary dictionaryWithCapacity:info.samples.size()];
- for (auto const &sample : info.samples) {
- [samples setObject:@(sample.second) forKey:@(sample.first)];
- }
- _samples = samples;
- }
- return self;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+DataChannel.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+DataChannel.mm
deleted file mode 100644
index c6f2b0b..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+DataChannel.mm
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCPeerConnection+Private.h"
-
-#import "NSString+StdString.h"
-#import "RTCDataChannel+Private.h"
-#import "RTCDataChannelConfiguration+Private.h"
-
-@implementation RTCPeerConnection (DataChannel)
-
-- (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label
- configuration:(RTCDataChannelConfiguration *)configuration {
- std::string labelString = [NSString stdStringForString:label];
- const webrtc::DataChannelInit nativeInit =
- configuration.nativeDataChannelInit;
- rtc::scoped_refptr<webrtc::DataChannelInterface> dataChannel =
- self.nativePeerConnection->CreateDataChannel(labelString,
- &nativeInit);
- if (!dataChannel) {
- return nil;
- }
- return [[RTCDataChannel alloc] initWithFactory:self.factory nativeDataChannel:dataChannel];
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Native.h b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Native.h
index 238f808..7d8ab9a 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Native.h
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Native.h
@@ -8,27 +8,4 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "WebRTC/RTCPeerConnection.h"
-
-#include <memory>
-
-namespace rtc {
-class BitrateAllocationStrategy;
-} // namespace rtc
-
-NS_ASSUME_NONNULL_BEGIN
-
-/**
- * This class extension exposes methods that work directly with injectable C++ components.
- */
-@interface RTCPeerConnection ()
-
-/** Sets current strategy. If not set default WebRTC allocator will be used. May be changed during
- * an active session.
- */
-- (void)setBitrateAllocationStrategy:
- (std::unique_ptr<rtc::BitrateAllocationStrategy>)bitrateAllocationStrategy;
-
-@end
-
-NS_ASSUME_NONNULL_END
+#import "api/peerconnection/RTCPeerConnection+Native.h"
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Private.h
deleted file mode 100644
index 5451a49..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Private.h
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCPeerConnection.h"
-
-#include "api/peerconnectioninterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-namespace webrtc {
-
-/**
- * These objects are created by RTCPeerConnectionFactory to wrap an
- * id<RTCPeerConnectionDelegate> and call methods on that interface.
- */
-class PeerConnectionDelegateAdapter : public PeerConnectionObserver {
- public:
- PeerConnectionDelegateAdapter(RTCPeerConnection *peerConnection);
- ~PeerConnectionDelegateAdapter() override;
-
- void OnSignalingChange(PeerConnectionInterface::SignalingState new_state) override;
-
- void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
-
- void OnRemoveStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
-
- void OnTrack(rtc::scoped_refptr<RtpTransceiverInterface> transceiver) override;
-
- void OnDataChannel(rtc::scoped_refptr<DataChannelInterface> data_channel) override;
-
- void OnRenegotiationNeeded() override;
-
- void OnIceConnectionChange(PeerConnectionInterface::IceConnectionState new_state) override;
-
- void OnIceGatheringChange(PeerConnectionInterface::IceGatheringState new_state) override;
-
- void OnIceCandidate(const IceCandidateInterface *candidate) override;
-
- void OnIceCandidatesRemoved(const std::vector<cricket::Candidate> &candidates) override;
-
- void OnAddTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver,
- const std::vector<rtc::scoped_refptr<MediaStreamInterface>> &streams) override;
-
- void OnRemoveTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver) override;
-
- private:
- __weak RTCPeerConnection *peer_connection_;
-};
-
-} // namespace webrtc
-
-@interface RTCPeerConnection ()
-
-/** The factory used to create this RTCPeerConnection */
-@property(nonatomic, readonly) RTCPeerConnectionFactory *factory;
-
-/** The native PeerConnectionInterface created during construction. */
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::PeerConnectionInterface>
- nativePeerConnection;
-
-/** Initialize an RTCPeerConnection with a configuration, constraints, and
- * delegate.
- */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- configuration:(RTCConfiguration *)configuration
- constraints:(RTCMediaConstraints *)constraints
- delegate:(nullable id<RTCPeerConnectionDelegate>)delegate
- NS_DESIGNATED_INITIALIZER;
-
-+ (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
- (RTCSignalingState)state;
-
-+ (RTCSignalingState)signalingStateForNativeState:
- (webrtc::PeerConnectionInterface::SignalingState)nativeState;
-
-+ (NSString *)stringForSignalingState:(RTCSignalingState)state;
-
-+ (webrtc::PeerConnectionInterface::IceConnectionState)nativeIceConnectionStateForState:
- (RTCIceConnectionState)state;
-
-+ (RTCIceConnectionState)iceConnectionStateForNativeState:
- (webrtc::PeerConnectionInterface::IceConnectionState)nativeState;
-
-+ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state;
-
-+ (webrtc::PeerConnectionInterface::IceGatheringState)nativeIceGatheringStateForState:
- (RTCIceGatheringState)state;
-
-+ (RTCIceGatheringState)iceGatheringStateForNativeState:
- (webrtc::PeerConnectionInterface::IceGatheringState)nativeState;
-
-+ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state;
-
-+ (webrtc::PeerConnectionInterface::StatsOutputLevel)nativeStatsOutputLevelForLevel:
- (RTCStatsOutputLevel)level;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Stats.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Stats.mm
deleted file mode 100644
index e399722..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Stats.mm
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCPeerConnection+Private.h"
-
-#import "NSString+StdString.h"
-#import "RTCMediaStreamTrack+Private.h"
-#import "RTCLegacyStatsReport+Private.h"
-
-#include "rtc_base/checks.h"
-
-namespace webrtc {
-
-class StatsObserverAdapter : public StatsObserver {
- public:
- StatsObserverAdapter(void (^completionHandler)
- (NSArray<RTCLegacyStatsReport *> *stats)) {
- completion_handler_ = completionHandler;
- }
-
- ~StatsObserverAdapter() override { completion_handler_ = nil; }
-
- void OnComplete(const StatsReports& reports) override {
- RTC_DCHECK(completion_handler_);
- NSMutableArray *stats = [NSMutableArray arrayWithCapacity:reports.size()];
- for (const auto* report : reports) {
- RTCLegacyStatsReport *statsReport =
- [[RTCLegacyStatsReport alloc] initWithNativeReport:*report];
- [stats addObject:statsReport];
- }
- completion_handler_(stats);
- completion_handler_ = nil;
- }
-
- private:
- void (^completion_handler_)(NSArray<RTCLegacyStatsReport *> *stats);
-};
-} // namespace webrtc
-
-@implementation RTCPeerConnection (Stats)
-
-- (void)statsForTrack:(RTCMediaStreamTrack *)mediaStreamTrack
- statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel
- completionHandler:
- (void (^)(NSArray<RTCLegacyStatsReport *> *stats))completionHandler {
- rtc::scoped_refptr<webrtc::StatsObserverAdapter> observer(
- new rtc::RefCountedObject<webrtc::StatsObserverAdapter>
- (completionHandler));
- webrtc::PeerConnectionInterface::StatsOutputLevel nativeOutputLevel =
- [[self class] nativeStatsOutputLevelForLevel:statsOutputLevel];
- self.nativePeerConnection->GetStats(
- observer, mediaStreamTrack.nativeTrack, nativeOutputLevel);
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm
deleted file mode 100644
index 11c3a6d..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm
+++ /dev/null
@@ -1,748 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCPeerConnection+Private.h"
-
-#import "NSString+StdString.h"
-#import "RTCConfiguration+Private.h"
-#import "RTCDataChannel+Private.h"
-#import "RTCIceCandidate+Private.h"
-#import "RTCLegacyStatsReport+Private.h"
-#import "RTCMediaConstraints+Private.h"
-#import "RTCMediaStream+Private.h"
-#import "RTCMediaStreamTrack+Private.h"
-#import "RTCPeerConnection+Native.h"
-#import "RTCPeerConnectionFactory+Private.h"
-#import "RTCRtpReceiver+Private.h"
-#import "RTCRtpSender+Private.h"
-#import "RTCRtpTransceiver+Private.h"
-#import "RTCSessionDescription+Private.h"
-#import "WebRTC/RTCLogging.h"
-
-#include <memory>
-
-#include "api/jsepicecandidate.h"
-#include "rtc_base/checks.h"
-
-NSString * const kRTCPeerConnectionErrorDomain =
- @"org.webrtc.RTCPeerConnection";
-int const kRTCPeerConnnectionSessionDescriptionError = -1;
-
-namespace webrtc {
-
-class CreateSessionDescriptionObserverAdapter
- : public CreateSessionDescriptionObserver {
- public:
- CreateSessionDescriptionObserverAdapter(
- void (^completionHandler)(RTCSessionDescription *sessionDescription,
- NSError *error)) {
- completion_handler_ = completionHandler;
- }
-
- ~CreateSessionDescriptionObserverAdapter() override { completion_handler_ = nil; }
-
- void OnSuccess(SessionDescriptionInterface *desc) override {
- RTC_DCHECK(completion_handler_);
- std::unique_ptr<webrtc::SessionDescriptionInterface> description =
- std::unique_ptr<webrtc::SessionDescriptionInterface>(desc);
- RTCSessionDescription* session =
- [[RTCSessionDescription alloc] initWithNativeDescription:
- description.get()];
- completion_handler_(session, nil);
- completion_handler_ = nil;
- }
-
- void OnFailure(RTCError error) override {
- RTC_DCHECK(completion_handler_);
- // TODO(hta): Add handling of error.type()
- NSString *str = [NSString stringForStdString:error.message()];
- NSError* err =
- [NSError errorWithDomain:kRTCPeerConnectionErrorDomain
- code:kRTCPeerConnnectionSessionDescriptionError
- userInfo:@{ NSLocalizedDescriptionKey : str }];
- completion_handler_(nil, err);
- completion_handler_ = nil;
- }
-
- private:
- void (^completion_handler_)
- (RTCSessionDescription *sessionDescription, NSError *error);
-};
-
-class SetSessionDescriptionObserverAdapter :
- public SetSessionDescriptionObserver {
- public:
- SetSessionDescriptionObserverAdapter(void (^completionHandler)
- (NSError *error)) {
- completion_handler_ = completionHandler;
- }
-
- ~SetSessionDescriptionObserverAdapter() override { completion_handler_ = nil; }
-
- void OnSuccess() override {
- RTC_DCHECK(completion_handler_);
- completion_handler_(nil);
- completion_handler_ = nil;
- }
-
- void OnFailure(RTCError error) override {
- RTC_DCHECK(completion_handler_);
- // TODO(hta): Add handling of error.type()
- NSString *str = [NSString stringForStdString:error.message()];
- NSError* err =
- [NSError errorWithDomain:kRTCPeerConnectionErrorDomain
- code:kRTCPeerConnnectionSessionDescriptionError
- userInfo:@{ NSLocalizedDescriptionKey : str }];
- completion_handler_(err);
- completion_handler_ = nil;
- }
-
- private:
- void (^completion_handler_)(NSError *error);
-};
-
-PeerConnectionDelegateAdapter::PeerConnectionDelegateAdapter(
- RTCPeerConnection *peerConnection) {
- peer_connection_ = peerConnection;
-}
-
-PeerConnectionDelegateAdapter::~PeerConnectionDelegateAdapter() {
- peer_connection_ = nil;
-}
-
-void PeerConnectionDelegateAdapter::OnSignalingChange(
- PeerConnectionInterface::SignalingState new_state) {
- RTCSignalingState state =
- [[RTCPeerConnection class] signalingStateForNativeState:new_state];
- RTCPeerConnection *peer_connection = peer_connection_;
- [peer_connection.delegate peerConnection:peer_connection
- didChangeSignalingState:state];
-}
-
-void PeerConnectionDelegateAdapter::OnAddStream(
- rtc::scoped_refptr<MediaStreamInterface> stream) {
- RTCPeerConnection *peer_connection = peer_connection_;
- RTCMediaStream *mediaStream =
- [[RTCMediaStream alloc] initWithFactory:peer_connection.factory nativeMediaStream:stream];
- [peer_connection.delegate peerConnection:peer_connection
- didAddStream:mediaStream];
-}
-
-void PeerConnectionDelegateAdapter::OnRemoveStream(
- rtc::scoped_refptr<MediaStreamInterface> stream) {
- RTCPeerConnection *peer_connection = peer_connection_;
- RTCMediaStream *mediaStream =
- [[RTCMediaStream alloc] initWithFactory:peer_connection.factory nativeMediaStream:stream];
-
- [peer_connection.delegate peerConnection:peer_connection
- didRemoveStream:mediaStream];
-}
-
-void PeerConnectionDelegateAdapter::OnTrack(
- rtc::scoped_refptr<RtpTransceiverInterface> nativeTransceiver) {
- RTCPeerConnection *peer_connection = peer_connection_;
- RTCRtpTransceiver *transceiver =
- [[RTCRtpTransceiver alloc] initWithFactory:peer_connection.factory
- nativeRtpTransceiver:nativeTransceiver];
- if ([peer_connection.delegate
- respondsToSelector:@selector(peerConnection:didStartReceivingOnTransceiver:)]) {
- [peer_connection.delegate peerConnection:peer_connection
- didStartReceivingOnTransceiver:transceiver];
- }
-}
-
-void PeerConnectionDelegateAdapter::OnDataChannel(
- rtc::scoped_refptr<DataChannelInterface> data_channel) {
- RTCPeerConnection *peer_connection = peer_connection_;
- RTCDataChannel *dataChannel = [[RTCDataChannel alloc] initWithFactory:peer_connection.factory
- nativeDataChannel:data_channel];
- [peer_connection.delegate peerConnection:peer_connection
- didOpenDataChannel:dataChannel];
-}
-
-void PeerConnectionDelegateAdapter::OnRenegotiationNeeded() {
- RTCPeerConnection *peer_connection = peer_connection_;
- [peer_connection.delegate peerConnectionShouldNegotiate:peer_connection];
-}
-
-void PeerConnectionDelegateAdapter::OnIceConnectionChange(
- PeerConnectionInterface::IceConnectionState new_state) {
- RTCIceConnectionState state =
- [[RTCPeerConnection class] iceConnectionStateForNativeState:new_state];
- RTCPeerConnection *peer_connection = peer_connection_;
- [peer_connection.delegate peerConnection:peer_connection
- didChangeIceConnectionState:state];
-}
-
-void PeerConnectionDelegateAdapter::OnIceGatheringChange(
- PeerConnectionInterface::IceGatheringState new_state) {
- RTCIceGatheringState state =
- [[RTCPeerConnection class] iceGatheringStateForNativeState:new_state];
- RTCPeerConnection *peer_connection = peer_connection_;
- [peer_connection.delegate peerConnection:peer_connection
- didChangeIceGatheringState:state];
-}
-
-void PeerConnectionDelegateAdapter::OnIceCandidate(
- const IceCandidateInterface *candidate) {
- RTCIceCandidate *iceCandidate =
- [[RTCIceCandidate alloc] initWithNativeCandidate:candidate];
- RTCPeerConnection *peer_connection = peer_connection_;
- [peer_connection.delegate peerConnection:peer_connection
- didGenerateIceCandidate:iceCandidate];
-}
-
-void PeerConnectionDelegateAdapter::OnIceCandidatesRemoved(
- const std::vector<cricket::Candidate>& candidates) {
- NSMutableArray* ice_candidates =
- [NSMutableArray arrayWithCapacity:candidates.size()];
- for (const auto& candidate : candidates) {
- std::unique_ptr<JsepIceCandidate> candidate_wrapper(
- new JsepIceCandidate(candidate.transport_name(), -1, candidate));
- RTCIceCandidate* ice_candidate = [[RTCIceCandidate alloc]
- initWithNativeCandidate:candidate_wrapper.get()];
- [ice_candidates addObject:ice_candidate];
- }
- RTCPeerConnection* peer_connection = peer_connection_;
- [peer_connection.delegate peerConnection:peer_connection
- didRemoveIceCandidates:ice_candidates];
-}
-
-void PeerConnectionDelegateAdapter::OnAddTrack(
- rtc::scoped_refptr<RtpReceiverInterface> receiver,
- const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
- RTCPeerConnection *peer_connection = peer_connection_;
- if ([peer_connection.delegate
- respondsToSelector:@selector(peerConnection:didAddReceiver:streams:)]) {
- NSMutableArray *mediaStreams = [NSMutableArray arrayWithCapacity:streams.size()];
- for (const auto& nativeStream : streams) {
- RTCMediaStream *mediaStream = [[RTCMediaStream alloc] initWithFactory:peer_connection.factory
- nativeMediaStream:nativeStream];
- [mediaStreams addObject:mediaStream];
- }
- RTCRtpReceiver *rtpReceiver =
- [[RTCRtpReceiver alloc] initWithFactory:peer_connection.factory nativeRtpReceiver:receiver];
-
- [peer_connection.delegate peerConnection:peer_connection
- didAddReceiver:rtpReceiver
- streams:mediaStreams];
- }
-}
-
-void PeerConnectionDelegateAdapter::OnRemoveTrack(
- rtc::scoped_refptr<RtpReceiverInterface> receiver) {
- RTCPeerConnection *peer_connection = peer_connection_;
- if ([peer_connection.delegate respondsToSelector:@selector(peerConnection:didRemoveReceiver:)]) {
- RTCRtpReceiver *rtpReceiver =
- [[RTCRtpReceiver alloc] initWithFactory:peer_connection.factory nativeRtpReceiver:receiver];
- [peer_connection.delegate peerConnection:peer_connection didRemoveReceiver:rtpReceiver];
- }
-}
-
-} // namespace webrtc
-
-
-@implementation RTCPeerConnection {
- RTCPeerConnectionFactory *_factory;
- NSMutableArray<RTCMediaStream *> *_localStreams;
- std::unique_ptr<webrtc::PeerConnectionDelegateAdapter> _observer;
- rtc::scoped_refptr<webrtc::PeerConnectionInterface> _peerConnection;
- std::unique_ptr<webrtc::MediaConstraints> _nativeConstraints;
- BOOL _hasStartedRtcEventLog;
-}
-
-@synthesize delegate = _delegate;
-@synthesize factory = _factory;
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- configuration:(RTCConfiguration *)configuration
- constraints:(RTCMediaConstraints *)constraints
- delegate:(id<RTCPeerConnectionDelegate>)delegate {
- NSParameterAssert(factory);
- std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
- [configuration createNativeConfiguration]);
- if (!config) {
- return nil;
- }
- if (self = [super init]) {
- _observer.reset(new webrtc::PeerConnectionDelegateAdapter(self));
- _nativeConstraints = constraints.nativeConstraints;
- CopyConstraintsIntoRtcConfiguration(_nativeConstraints.get(),
- config.get());
- _peerConnection =
- factory.nativeFactory->CreatePeerConnection(*config,
- nullptr,
- nullptr,
- _observer.get());
- if (!_peerConnection) {
- return nil;
- }
- _factory = factory;
- _localStreams = [[NSMutableArray alloc] init];
- _delegate = delegate;
- }
- return self;
-}
-
-- (NSArray<RTCMediaStream *> *)localStreams {
- return [_localStreams copy];
-}
-
-- (RTCSessionDescription *)localDescription {
- const webrtc::SessionDescriptionInterface *description =
- _peerConnection->local_description();
- return description ?
- [[RTCSessionDescription alloc] initWithNativeDescription:description]
- : nil;
-}
-
-- (RTCSessionDescription *)remoteDescription {
- const webrtc::SessionDescriptionInterface *description =
- _peerConnection->remote_description();
- return description ?
- [[RTCSessionDescription alloc] initWithNativeDescription:description]
- : nil;
-}
-
-- (RTCSignalingState)signalingState {
- return [[self class]
- signalingStateForNativeState:_peerConnection->signaling_state()];
-}
-
-- (RTCIceConnectionState)iceConnectionState {
- return [[self class] iceConnectionStateForNativeState:
- _peerConnection->ice_connection_state()];
-}
-
-- (RTCIceGatheringState)iceGatheringState {
- return [[self class] iceGatheringStateForNativeState:
- _peerConnection->ice_gathering_state()];
-}
-
-- (BOOL)setConfiguration:(RTCConfiguration *)configuration {
- std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
- [configuration createNativeConfiguration]);
- if (!config) {
- return NO;
- }
- CopyConstraintsIntoRtcConfiguration(_nativeConstraints.get(),
- config.get());
- return _peerConnection->SetConfiguration(*config);
-}
-
-- (RTCConfiguration *)configuration {
- webrtc::PeerConnectionInterface::RTCConfiguration config =
- _peerConnection->GetConfiguration();
- return [[RTCConfiguration alloc] initWithNativeConfiguration:config];
-}
-
-- (void)close {
- _peerConnection->Close();
-}
-
-- (void)addIceCandidate:(RTCIceCandidate *)candidate {
- std::unique_ptr<const webrtc::IceCandidateInterface> iceCandidate(
- candidate.nativeCandidate);
- _peerConnection->AddIceCandidate(iceCandidate.get());
-}
-
-- (void)removeIceCandidates:(NSArray<RTCIceCandidate *> *)iceCandidates {
- std::vector<cricket::Candidate> candidates;
- for (RTCIceCandidate *iceCandidate in iceCandidates) {
- std::unique_ptr<const webrtc::IceCandidateInterface> candidate(
- iceCandidate.nativeCandidate);
- if (candidate) {
- candidates.push_back(candidate->candidate());
- // Need to fill the transport name from the sdp_mid.
- candidates.back().set_transport_name(candidate->sdp_mid());
- }
- }
- if (!candidates.empty()) {
- _peerConnection->RemoveIceCandidates(candidates);
- }
-}
-
-- (void)addStream:(RTCMediaStream *)stream {
- if (!_peerConnection->AddStream(stream.nativeMediaStream)) {
- RTCLogError(@"Failed to add stream: %@", stream);
- return;
- }
- [_localStreams addObject:stream];
-}
-
-- (void)removeStream:(RTCMediaStream *)stream {
- _peerConnection->RemoveStream(stream.nativeMediaStream);
- [_localStreams removeObject:stream];
-}
-
-- (RTCRtpSender *)addTrack:(RTCMediaStreamTrack *)track streamIds:(NSArray<NSString *> *)streamIds {
- std::vector<std::string> nativeStreamIds;
- for (NSString *streamId in streamIds) {
- nativeStreamIds.push_back([streamId UTF8String]);
- }
- webrtc::RTCErrorOr<rtc::scoped_refptr<webrtc::RtpSenderInterface>> nativeSenderOrError =
- _peerConnection->AddTrack(track.nativeTrack, nativeStreamIds);
- if (!nativeSenderOrError.ok()) {
- RTCLogError(@"Failed to add track %@: %s", track, nativeSenderOrError.error().message());
- return nil;
- }
- return [[RTCRtpSender alloc] initWithFactory:self.factory
- nativeRtpSender:nativeSenderOrError.MoveValue()];
-}
-
-- (BOOL)removeTrack:(RTCRtpSender *)sender {
- bool result = _peerConnection->RemoveTrack(sender.nativeRtpSender);
- if (!result) {
- RTCLogError(@"Failed to remote track %@", sender);
- }
- return result;
-}
-
-- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track {
- return [self addTransceiverWithTrack:track init:[[RTCRtpTransceiverInit alloc] init]];
-}
-
-- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track
- init:(RTCRtpTransceiverInit *)init {
- webrtc::RTCErrorOr<rtc::scoped_refptr<webrtc::RtpTransceiverInterface>> nativeTransceiverOrError =
- _peerConnection->AddTransceiver(track.nativeTrack, init.nativeInit);
- if (!nativeTransceiverOrError.ok()) {
- RTCLogError(
- @"Failed to add transceiver %@: %s", track, nativeTransceiverOrError.error().message());
- return nil;
- }
- return [[RTCRtpTransceiver alloc] initWithFactory:self.factory
- nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()];
-}
-
-- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType {
- return [self addTransceiverOfType:mediaType init:[[RTCRtpTransceiverInit alloc] init]];
-}
-
-- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType
- init:(RTCRtpTransceiverInit *)init {
- webrtc::RTCErrorOr<rtc::scoped_refptr<webrtc::RtpTransceiverInterface>> nativeTransceiverOrError =
- _peerConnection->AddTransceiver([RTCRtpReceiver nativeMediaTypeForMediaType:mediaType],
- init.nativeInit);
- if (!nativeTransceiverOrError.ok()) {
- RTCLogError(@"Failed to add transceiver %@: %s",
- [RTCRtpReceiver stringForMediaType:mediaType],
- nativeTransceiverOrError.error().message());
- return nil;
- }
- return [[RTCRtpTransceiver alloc] initWithFactory:self.factory
- nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()];
-}
-
-- (void)offerForConstraints:(RTCMediaConstraints *)constraints
- completionHandler:
- (void (^)(RTCSessionDescription *sessionDescription,
- NSError *error))completionHandler {
- rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
- observer(new rtc::RefCountedObject
- <webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
- webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
- CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
-
- _peerConnection->CreateOffer(observer, options);
-}
-
-- (void)answerForConstraints:(RTCMediaConstraints *)constraints
- completionHandler:
- (void (^)(RTCSessionDescription *sessionDescription,
- NSError *error))completionHandler {
- rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
- observer(new rtc::RefCountedObject
- <webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
- webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
- CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
-
- _peerConnection->CreateAnswer(observer, options);
-}
-
-- (void)setLocalDescription:(RTCSessionDescription *)sdp
- completionHandler:(void (^)(NSError *error))completionHandler {
- rtc::scoped_refptr<webrtc::SetSessionDescriptionObserverAdapter> observer(
- new rtc::RefCountedObject<webrtc::SetSessionDescriptionObserverAdapter>(
- completionHandler));
- _peerConnection->SetLocalDescription(observer, sdp.nativeDescription);
-}
-
-- (void)setRemoteDescription:(RTCSessionDescription *)sdp
- completionHandler:(void (^)(NSError *error))completionHandler {
- rtc::scoped_refptr<webrtc::SetSessionDescriptionObserverAdapter> observer(
- new rtc::RefCountedObject<webrtc::SetSessionDescriptionObserverAdapter>(
- completionHandler));
- _peerConnection->SetRemoteDescription(observer, sdp.nativeDescription);
-}
-
-- (BOOL)setBweMinBitrateBps:(nullable NSNumber *)minBitrateBps
- currentBitrateBps:(nullable NSNumber *)currentBitrateBps
- maxBitrateBps:(nullable NSNumber *)maxBitrateBps {
- webrtc::PeerConnectionInterface::BitrateParameters params;
- if (minBitrateBps != nil) {
- params.min_bitrate_bps = absl::optional<int>(minBitrateBps.intValue);
- }
- if (currentBitrateBps != nil) {
- params.current_bitrate_bps = absl::optional<int>(currentBitrateBps.intValue);
- }
- if (maxBitrateBps != nil) {
- params.max_bitrate_bps = absl::optional<int>(maxBitrateBps.intValue);
- }
- return _peerConnection->SetBitrate(params).ok();
-}
-
-- (void)setBitrateAllocationStrategy:
- (std::unique_ptr<rtc::BitrateAllocationStrategy>)bitrateAllocationStrategy {
- _peerConnection->SetBitrateAllocationStrategy(std::move(bitrateAllocationStrategy));
-}
-
-- (BOOL)startRtcEventLogWithFilePath:(NSString *)filePath
- maxSizeInBytes:(int64_t)maxSizeInBytes {
- RTC_DCHECK(filePath.length);
- RTC_DCHECK_GT(maxSizeInBytes, 0);
- RTC_DCHECK(!_hasStartedRtcEventLog);
- if (_hasStartedRtcEventLog) {
- RTCLogError(@"Event logging already started.");
- return NO;
- }
- int fd = open(filePath.UTF8String, O_WRONLY | O_CREAT | O_TRUNC,
- S_IRUSR | S_IWUSR);
- if (fd < 0) {
- RTCLogError(@"Error opening file: %@. Error: %d", filePath, errno);
- return NO;
- }
- _hasStartedRtcEventLog =
- _peerConnection->StartRtcEventLog(fd, maxSizeInBytes);
- return _hasStartedRtcEventLog;
-}
-
-- (void)stopRtcEventLog {
- _peerConnection->StopRtcEventLog();
- _hasStartedRtcEventLog = NO;
-}
-
-- (RTCRtpSender *)senderWithKind:(NSString *)kind
- streamId:(NSString *)streamId {
- std::string nativeKind = [NSString stdStringForString:kind];
- std::string nativeStreamId = [NSString stdStringForString:streamId];
- rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeSender(
- _peerConnection->CreateSender(nativeKind, nativeStreamId));
- return nativeSender ?
- [[RTCRtpSender alloc] initWithFactory:self.factory nativeRtpSender:nativeSender] :
- nil;
-}
-
-- (NSArray<RTCRtpSender *> *)senders {
- std::vector<rtc::scoped_refptr<webrtc::RtpSenderInterface>> nativeSenders(
- _peerConnection->GetSenders());
- NSMutableArray *senders = [[NSMutableArray alloc] init];
- for (const auto &nativeSender : nativeSenders) {
- RTCRtpSender *sender =
- [[RTCRtpSender alloc] initWithFactory:self.factory nativeRtpSender:nativeSender];
- [senders addObject:sender];
- }
- return senders;
-}
-
-- (NSArray<RTCRtpReceiver *> *)receivers {
- std::vector<rtc::scoped_refptr<webrtc::RtpReceiverInterface>> nativeReceivers(
- _peerConnection->GetReceivers());
- NSMutableArray *receivers = [[NSMutableArray alloc] init];
- for (const auto &nativeReceiver : nativeReceivers) {
- RTCRtpReceiver *receiver =
- [[RTCRtpReceiver alloc] initWithFactory:self.factory nativeRtpReceiver:nativeReceiver];
- [receivers addObject:receiver];
- }
- return receivers;
-}
-
-- (NSArray<RTCRtpTransceiver *> *)transceivers {
- std::vector<rtc::scoped_refptr<webrtc::RtpTransceiverInterface>> nativeTransceivers(
- _peerConnection->GetTransceivers());
- NSMutableArray *transceivers = [[NSMutableArray alloc] init];
- for (auto nativeTransceiver : nativeTransceivers) {
- RTCRtpTransceiver *transceiver = [[RTCRtpTransceiver alloc] initWithFactory:self.factory
- nativeRtpTransceiver:nativeTransceiver];
- [transceivers addObject:transceiver];
- }
- return transceivers;
-}
-
-#pragma mark - Private
-
-+ (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
- (RTCSignalingState)state {
- switch (state) {
- case RTCSignalingStateStable:
- return webrtc::PeerConnectionInterface::kStable;
- case RTCSignalingStateHaveLocalOffer:
- return webrtc::PeerConnectionInterface::kHaveLocalOffer;
- case RTCSignalingStateHaveLocalPrAnswer:
- return webrtc::PeerConnectionInterface::kHaveLocalPrAnswer;
- case RTCSignalingStateHaveRemoteOffer:
- return webrtc::PeerConnectionInterface::kHaveRemoteOffer;
- case RTCSignalingStateHaveRemotePrAnswer:
- return webrtc::PeerConnectionInterface::kHaveRemotePrAnswer;
- case RTCSignalingStateClosed:
- return webrtc::PeerConnectionInterface::kClosed;
- }
-}
-
-+ (RTCSignalingState)signalingStateForNativeState:
- (webrtc::PeerConnectionInterface::SignalingState)nativeState {
- switch (nativeState) {
- case webrtc::PeerConnectionInterface::kStable:
- return RTCSignalingStateStable;
- case webrtc::PeerConnectionInterface::kHaveLocalOffer:
- return RTCSignalingStateHaveLocalOffer;
- case webrtc::PeerConnectionInterface::kHaveLocalPrAnswer:
- return RTCSignalingStateHaveLocalPrAnswer;
- case webrtc::PeerConnectionInterface::kHaveRemoteOffer:
- return RTCSignalingStateHaveRemoteOffer;
- case webrtc::PeerConnectionInterface::kHaveRemotePrAnswer:
- return RTCSignalingStateHaveRemotePrAnswer;
- case webrtc::PeerConnectionInterface::kClosed:
- return RTCSignalingStateClosed;
- }
-}
-
-+ (NSString *)stringForSignalingState:(RTCSignalingState)state {
- switch (state) {
- case RTCSignalingStateStable:
- return @"STABLE";
- case RTCSignalingStateHaveLocalOffer:
- return @"HAVE_LOCAL_OFFER";
- case RTCSignalingStateHaveLocalPrAnswer:
- return @"HAVE_LOCAL_PRANSWER";
- case RTCSignalingStateHaveRemoteOffer:
- return @"HAVE_REMOTE_OFFER";
- case RTCSignalingStateHaveRemotePrAnswer:
- return @"HAVE_REMOTE_PRANSWER";
- case RTCSignalingStateClosed:
- return @"CLOSED";
- }
-}
-
-+ (webrtc::PeerConnectionInterface::IceConnectionState)
- nativeIceConnectionStateForState:(RTCIceConnectionState)state {
- switch (state) {
- case RTCIceConnectionStateNew:
- return webrtc::PeerConnectionInterface::kIceConnectionNew;
- case RTCIceConnectionStateChecking:
- return webrtc::PeerConnectionInterface::kIceConnectionChecking;
- case RTCIceConnectionStateConnected:
- return webrtc::PeerConnectionInterface::kIceConnectionConnected;
- case RTCIceConnectionStateCompleted:
- return webrtc::PeerConnectionInterface::kIceConnectionCompleted;
- case RTCIceConnectionStateFailed:
- return webrtc::PeerConnectionInterface::kIceConnectionFailed;
- case RTCIceConnectionStateDisconnected:
- return webrtc::PeerConnectionInterface::kIceConnectionDisconnected;
- case RTCIceConnectionStateClosed:
- return webrtc::PeerConnectionInterface::kIceConnectionClosed;
- case RTCIceConnectionStateCount:
- return webrtc::PeerConnectionInterface::kIceConnectionMax;
- }
-}
-
-+ (RTCIceConnectionState)iceConnectionStateForNativeState:
- (webrtc::PeerConnectionInterface::IceConnectionState)nativeState {
- switch (nativeState) {
- case webrtc::PeerConnectionInterface::kIceConnectionNew:
- return RTCIceConnectionStateNew;
- case webrtc::PeerConnectionInterface::kIceConnectionChecking:
- return RTCIceConnectionStateChecking;
- case webrtc::PeerConnectionInterface::kIceConnectionConnected:
- return RTCIceConnectionStateConnected;
- case webrtc::PeerConnectionInterface::kIceConnectionCompleted:
- return RTCIceConnectionStateCompleted;
- case webrtc::PeerConnectionInterface::kIceConnectionFailed:
- return RTCIceConnectionStateFailed;
- case webrtc::PeerConnectionInterface::kIceConnectionDisconnected:
- return RTCIceConnectionStateDisconnected;
- case webrtc::PeerConnectionInterface::kIceConnectionClosed:
- return RTCIceConnectionStateClosed;
- case webrtc::PeerConnectionInterface::kIceConnectionMax:
- return RTCIceConnectionStateCount;
- }
-}
-
-+ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state {
- switch (state) {
- case RTCIceConnectionStateNew:
- return @"NEW";
- case RTCIceConnectionStateChecking:
- return @"CHECKING";
- case RTCIceConnectionStateConnected:
- return @"CONNECTED";
- case RTCIceConnectionStateCompleted:
- return @"COMPLETED";
- case RTCIceConnectionStateFailed:
- return @"FAILED";
- case RTCIceConnectionStateDisconnected:
- return @"DISCONNECTED";
- case RTCIceConnectionStateClosed:
- return @"CLOSED";
- case RTCIceConnectionStateCount:
- return @"COUNT";
- }
-}
-
-+ (webrtc::PeerConnectionInterface::IceGatheringState)
- nativeIceGatheringStateForState:(RTCIceGatheringState)state {
- switch (state) {
- case RTCIceGatheringStateNew:
- return webrtc::PeerConnectionInterface::kIceGatheringNew;
- case RTCIceGatheringStateGathering:
- return webrtc::PeerConnectionInterface::kIceGatheringGathering;
- case RTCIceGatheringStateComplete:
- return webrtc::PeerConnectionInterface::kIceGatheringComplete;
- }
-}
-
-+ (RTCIceGatheringState)iceGatheringStateForNativeState:
- (webrtc::PeerConnectionInterface::IceGatheringState)nativeState {
- switch (nativeState) {
- case webrtc::PeerConnectionInterface::kIceGatheringNew:
- return RTCIceGatheringStateNew;
- case webrtc::PeerConnectionInterface::kIceGatheringGathering:
- return RTCIceGatheringStateGathering;
- case webrtc::PeerConnectionInterface::kIceGatheringComplete:
- return RTCIceGatheringStateComplete;
- }
-}
-
-+ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state {
- switch (state) {
- case RTCIceGatheringStateNew:
- return @"NEW";
- case RTCIceGatheringStateGathering:
- return @"GATHERING";
- case RTCIceGatheringStateComplete:
- return @"COMPLETE";
- }
-}
-
-+ (webrtc::PeerConnectionInterface::StatsOutputLevel)
- nativeStatsOutputLevelForLevel:(RTCStatsOutputLevel)level {
- switch (level) {
- case RTCStatsOutputLevelStandard:
- return webrtc::PeerConnectionInterface::kStatsOutputLevelStandard;
- case RTCStatsOutputLevelDebug:
- return webrtc::PeerConnectionInterface::kStatsOutputLevelDebug;
- }
-}
-
-- (rtc::scoped_refptr<webrtc::PeerConnectionInterface>)nativePeerConnection {
- return _peerConnection;
-}
-
-@end
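(For context on the RTCPeerConnection category deleted above: its asynchronous offer/answer and event-log wrappers are typically driven from application code roughly as in this minimal sketch. The `peerConnection` object and its delegate are assumed to exist already; this is an illustration of the ObjC API surface being moved, not part of the CL.)

    // Hedged usage sketch, Objective-C.
    RTCMediaConstraints *constraints =
        [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil
                                              optionalConstraints:nil];
    [peerConnection offerForConstraints:constraints
                      completionHandler:^(RTCSessionDescription *sdp, NSError *error) {
      if (error) {
        RTCLogError(@"Failed to create offer: %@", error.localizedDescription);
        return;
      }
      // Apply the local description before signaling it to the remote side.
      [peerConnection setLocalDescription:sdp
                        completionHandler:^(NSError *setError) {
        if (setError) {
          RTCLogError(@"Failed to set local description: %@",
                      setError.localizedDescription);
        }
      }];
    }];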
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h
index 3d460fe..222e06e 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h
@@ -8,47 +8,4 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "WebRTC/RTCPeerConnectionFactory.h"
-
-#include "rtc_base/scoped_ref_ptr.h"
-
-namespace webrtc {
-
-class AudioDeviceModule;
-class AudioEncoderFactory;
-class AudioDecoderFactory;
-class VideoEncoderFactory;
-class VideoDecoderFactory;
-class AudioProcessing;
-
-} // namespace webrtc
-
-NS_ASSUME_NONNULL_BEGIN
-
-/**
- * This class extension exposes methods that work directly with injectable C++ components.
- */
-@interface RTCPeerConnectionFactory ()
-
-- (instancetype)initNative NS_DESIGNATED_INITIALIZER;
-
-/* Initializer used when WebRTC is compiled with no media support */
-- (instancetype)initWithNoMedia;
-
-/* Initialize object with injectable native audio/video encoder/decoder factories */
-- (instancetype)initWithNativeAudioEncoderFactory:
- (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
- nativeAudioDecoderFactory:
- (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
- nativeVideoEncoderFactory:
- (std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
- nativeVideoDecoderFactory:
- (std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
- audioDeviceModule:
- (nullable webrtc::AudioDeviceModule *)audioDeviceModule
- audioProcessingModule:
- (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule;
-
-@end
-
-NS_ASSUME_NONNULL_END
+#import "api/peerconnection/RTCPeerConnectionFactory+Native.h"
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Private.h
deleted file mode 100644
index 50944ee..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Private.h
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCPeerConnectionFactory.h"
-
-#include "api/peerconnectioninterface.h"
-#include "rtc_base/scoped_ref_ptr.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCPeerConnectionFactory ()
-
-/**
- * PeerConnectionFactoryInterface created and held by this
- * RTCPeerConnectionFactory object. This is needed to pass to the underlying
- * C++ APIs.
- */
-@property(nonatomic, readonly)
- rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
- nativeFactory;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory.mm
deleted file mode 100644
index 4c801f0..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory.mm
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCPeerConnectionFactory+Native.h"
-#import "RTCPeerConnectionFactory+Private.h"
-#import "RTCPeerConnectionFactoryOptions+Private.h"
-
-#import "NSString+StdString.h"
-#import "RTCAudioSource+Private.h"
-#import "RTCAudioTrack+Private.h"
-#import "RTCMediaConstraints+Private.h"
-#import "RTCMediaStream+Private.h"
-#import "RTCPeerConnection+Private.h"
-#import "RTCVideoSource+Private.h"
-#import "RTCVideoTrack+Private.h"
-#import "WebRTC/RTCLogging.h"
-#import "WebRTC/RTCVideoCodecFactory.h"
-#ifndef HAVE_NO_MEDIA
-#import "WebRTC/RTCVideoCodecH264.h"
-// The no-media version PeerConnectionFactory doesn't depend on these files, but the gn check tool
-// is not smart enough to take the #ifdef into account.
-#include "api/audio_codecs/builtin_audio_decoder_factory.h" // nogncheck
-#include "api/audio_codecs/builtin_audio_encoder_factory.h" // nogncheck
-#include "media/engine/convert_legacy_video_factory.h" // nogncheck
-#include "modules/audio_device/include/audio_device.h" // nogncheck
-#include "modules/audio_processing/include/audio_processing.h" // nogncheck
-
-#include "sdk/objc/Framework/Native/api/video_decoder_factory.h"
-#include "sdk/objc/Framework/Native/api/video_encoder_factory.h"
-#include "sdk/objc/Framework/Native/src/objc_video_decoder_factory.h"
-#include "sdk/objc/Framework/Native/src/objc_video_encoder_factory.h"
-#endif
-
-#if defined(WEBRTC_IOS)
-#import "sdk/objc/Framework/Native/api/audio_device_module.h"
-#endif
-
-// Adding the nogncheck to disable the including header check.
-// The no-media version PeerConnectionFactory doesn't depend on media related
-// C++ target.
-// TODO(zhihuang): Remove nogncheck once MediaEngineInterface is moved to C++
-// API layer.
-#include "absl/memory/memory.h"
-#include "media/engine/webrtcmediaengine.h" // nogncheck
-
-@implementation RTCPeerConnectionFactory {
- std::unique_ptr<rtc::Thread> _networkThread;
- std::unique_ptr<rtc::Thread> _workerThread;
- std::unique_ptr<rtc::Thread> _signalingThread;
- BOOL _hasStartedAecDump;
-}
-
-@synthesize nativeFactory = _nativeFactory;
-
-- (rtc::scoped_refptr<webrtc::AudioDeviceModule>)audioDeviceModule {
-#if defined(WEBRTC_IOS)
- return webrtc::CreateAudioDeviceModule();
-#else
- return nullptr;
-#endif
-}
-
-- (instancetype)init {
-#ifdef HAVE_NO_MEDIA
- return [self initWithNoMedia];
-#else
- return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory()
- nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory()
- nativeVideoEncoderFactory:webrtc::ObjCToNativeVideoEncoderFactory(
- [[RTCVideoEncoderFactoryH264 alloc] init])
- nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory(
- [[RTCVideoDecoderFactoryH264 alloc] init])
- audioDeviceModule:[self audioDeviceModule]
- audioProcessingModule:nullptr];
-#endif
-}
-
-- (instancetype)initWithEncoderFactory:(nullable id<RTCVideoEncoderFactory>)encoderFactory
- decoderFactory:(nullable id<RTCVideoDecoderFactory>)decoderFactory {
-#ifdef HAVE_NO_MEDIA
- return [self initWithNoMedia];
-#else
- std::unique_ptr<webrtc::VideoEncoderFactory> native_encoder_factory;
- std::unique_ptr<webrtc::VideoDecoderFactory> native_decoder_factory;
- if (encoderFactory) {
- native_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory(encoderFactory);
- }
- if (decoderFactory) {
- native_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory(decoderFactory);
- }
- return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory()
- nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory()
- nativeVideoEncoderFactory:std::move(native_encoder_factory)
- nativeVideoDecoderFactory:std::move(native_decoder_factory)
- audioDeviceModule:[self audioDeviceModule]
- audioProcessingModule:nullptr];
-#endif
-}
-
-- (instancetype)initNative {
- if (self = [super init]) {
- _networkThread = rtc::Thread::CreateWithSocketServer();
- _networkThread->SetName("network_thread", _networkThread.get());
- BOOL result = _networkThread->Start();
- NSAssert(result, @"Failed to start network thread.");
-
- _workerThread = rtc::Thread::Create();
- _workerThread->SetName("worker_thread", _workerThread.get());
- result = _workerThread->Start();
- NSAssert(result, @"Failed to start worker thread.");
-
- _signalingThread = rtc::Thread::Create();
- _signalingThread->SetName("signaling_thread", _signalingThread.get());
- result = _signalingThread->Start();
- NSAssert(result, @"Failed to start signaling thread.");
- }
- return self;
-}
-
-- (instancetype)initWithNoMedia {
- if (self = [self initNative]) {
- _nativeFactory = webrtc::CreateModularPeerConnectionFactory(
- _networkThread.get(),
- _workerThread.get(),
- _signalingThread.get(),
- std::unique_ptr<cricket::MediaEngineInterface>(),
- std::unique_ptr<webrtc::CallFactoryInterface>(),
- std::unique_ptr<webrtc::RtcEventLogFactoryInterface>());
- NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
- }
- return self;
-}
-
-- (instancetype)initWithNativeAudioEncoderFactory:
- (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
- nativeAudioDecoderFactory:
- (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
- nativeVideoEncoderFactory:
- (std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
- nativeVideoDecoderFactory:
- (std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
- audioDeviceModule:
- (nullable webrtc::AudioDeviceModule *)audioDeviceModule
- audioProcessingModule:
- (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule {
-#ifdef HAVE_NO_MEDIA
- return [self initWithNoMedia];
-#else
- if (self = [self initNative]) {
- _nativeFactory = webrtc::CreatePeerConnectionFactory(_networkThread.get(),
- _workerThread.get(),
- _signalingThread.get(),
- audioDeviceModule,
- audioEncoderFactory,
- audioDecoderFactory,
- std::move(videoEncoderFactory),
- std::move(videoDecoderFactory),
- nullptr, // audio mixer
- audioProcessingModule);
- NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
- }
- return self;
-#endif
-}
-
-- (RTCAudioSource *)audioSourceWithConstraints:(nullable RTCMediaConstraints *)constraints {
- std::unique_ptr<webrtc::MediaConstraints> nativeConstraints;
- if (constraints) {
- nativeConstraints = constraints.nativeConstraints;
- }
- cricket::AudioOptions options;
- CopyConstraintsIntoAudioOptions(nativeConstraints.get(), &options);
-
- rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
- _nativeFactory->CreateAudioSource(options);
- return [[RTCAudioSource alloc] initWithFactory:self nativeAudioSource:source];
-}
-
-- (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId {
- RTCAudioSource *audioSource = [self audioSourceWithConstraints:nil];
- return [self audioTrackWithSource:audioSource trackId:trackId];
-}
-
-- (RTCAudioTrack *)audioTrackWithSource:(RTCAudioSource *)source
- trackId:(NSString *)trackId {
- return [[RTCAudioTrack alloc] initWithFactory:self
- source:source
- trackId:trackId];
-}
-
-- (RTCVideoSource *)videoSource {
- return [[RTCVideoSource alloc] initWithFactory:self
- signalingThread:_signalingThread.get()
- workerThread:_workerThread.get()];
-}
-
-- (RTCVideoTrack *)videoTrackWithSource:(RTCVideoSource *)source
- trackId:(NSString *)trackId {
- return [[RTCVideoTrack alloc] initWithFactory:self
- source:source
- trackId:trackId];
-}
-
-- (RTCMediaStream *)mediaStreamWithStreamId:(NSString *)streamId {
- return [[RTCMediaStream alloc] initWithFactory:self
- streamId:streamId];
-}
-
-- (RTCPeerConnection *)peerConnectionWithConfiguration:
- (RTCConfiguration *)configuration
- constraints:
- (RTCMediaConstraints *)constraints
- delegate:
- (nullable id<RTCPeerConnectionDelegate>)delegate {
- return [[RTCPeerConnection alloc] initWithFactory:self
- configuration:configuration
- constraints:constraints
- delegate:delegate];
-}
-
-- (void)setOptions:(nonnull RTCPeerConnectionFactoryOptions *)options {
- RTC_DCHECK(options != nil);
- _nativeFactory->SetOptions(options.nativeOptions);
-}
-
-- (BOOL)startAecDumpWithFilePath:(NSString *)filePath
- maxSizeInBytes:(int64_t)maxSizeInBytes {
- RTC_DCHECK(filePath.length);
- RTC_DCHECK_GT(maxSizeInBytes, 0);
-
- if (_hasStartedAecDump) {
- RTCLogError(@"Aec dump already started.");
- return NO;
- }
- int fd = open(filePath.UTF8String, O_WRONLY | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR);
- if (fd < 0) {
- RTCLogError(@"Error opening file: %@. Error: %d", filePath, errno);
- return NO;
- }
- _hasStartedAecDump = _nativeFactory->StartAecDump(fd, maxSizeInBytes);
- return _hasStartedAecDump;
-}
-
-- (void)stopAecDump {
- _nativeFactory->StopAecDump();
- _hasStartedAecDump = NO;
-}
-
-@end
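(The factory implementation deleted above is consumed through its public initializers; a hedged sketch of the usual call sequence follows. `myDelegate` is a hypothetical object conforming to RTCPeerConnectionDelegate supplied by the application.)

    // Hedged usage sketch, Objective-C.
    RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc]
        initWithEncoderFactory:[[RTCVideoEncoderFactoryH264 alloc] init]
                decoderFactory:[[RTCVideoDecoderFactoryH264 alloc] init]];
    RTCConfiguration *config = [[RTCConfiguration alloc] init];
    RTCMediaConstraints *constraints =
        [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil
                                              optionalConstraints:nil];
    RTCPeerConnection *pc = [factory peerConnectionWithConfiguration:config
                                                         constraints:constraints
                                                            delegate:myDelegate];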
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h
deleted file mode 100644
index 070a0e7..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCPeerConnectionFactoryBuilder.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCPeerConnectionFactoryBuilder (DefaultComponents)
-
-+ (RTCPeerConnectionFactoryBuilder *)defaultBuilder;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm
deleted file mode 100644
index 9fb50de..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCPeerConnectionFactory+Native.h"
-#import "RTCPeerConnectionFactoryBuilder+DefaultComponents.h"
-
-#import "WebRTC/RTCVideoCodecH264.h"
-#include "api/audio_codecs/builtin_audio_decoder_factory.h"
-#include "api/audio_codecs/builtin_audio_encoder_factory.h"
-#include "sdk/objc/Framework/Native/api/video_decoder_factory.h"
-#include "sdk/objc/Framework/Native/api/video_encoder_factory.h"
-
-#if defined(WEBRTC_IOS)
-#import "sdk/objc/Framework/Native/api/audio_device_module.h"
-#endif
-
-@implementation RTCPeerConnectionFactoryBuilder (DefaultComponents)
-
-+ (RTCPeerConnectionFactoryBuilder *)defaultBuilder {
- RTCPeerConnectionFactoryBuilder *builder = [[RTCPeerConnectionFactoryBuilder alloc] init];
- auto audioEncoderFactory = webrtc::CreateBuiltinAudioEncoderFactory();
- [builder setAudioEncoderFactory:audioEncoderFactory];
-
- auto audioDecoderFactory = webrtc::CreateBuiltinAudioDecoderFactory();
- [builder setAudioDecoderFactory:audioDecoderFactory];
-
- auto videoEncoderFactory =
- webrtc::ObjCToNativeVideoEncoderFactory([[RTCVideoEncoderFactoryH264 alloc] init]);
- [builder setVideoEncoderFactory:std::move(videoEncoderFactory)];
-
- auto videoDecoderFactory =
- webrtc::ObjCToNativeVideoDecoderFactory([[RTCVideoDecoderFactoryH264 alloc] init]);
- [builder setVideoDecoderFactory:std::move(videoDecoderFactory)];
-
-#if defined(WEBRTC_IOS)
- [builder setAudioDeviceModule:webrtc::CreateAudioDeviceModule()];
-#endif
- return builder;
-}
-
-@end
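(The default-components category above exists so callers can get a fully wired factory in two lines; a hedged sketch of that intended usage:)

    // Hedged usage sketch, Objective-C.
    RTCPeerConnectionFactoryBuilder *builder = [RTCPeerConnectionFactoryBuilder defaultBuilder];
    RTCPeerConnectionFactory *factory = [builder createPeerConnectionFactory];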
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryBuilder.h b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryBuilder.h
deleted file mode 100644
index 27bdcd9..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryBuilder.h
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCPeerConnectionFactory.h"
-
-#include "rtc_base/scoped_ref_ptr.h"
-
-namespace webrtc {
-
-class AudioDeviceModule;
-class AudioEncoderFactory;
-class AudioDecoderFactory;
-class VideoEncoderFactory;
-class VideoDecoderFactory;
-class AudioProcessing;
-
-} // namespace webrtc
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCPeerConnectionFactoryBuilder : NSObject
-
-+ (RTCPeerConnectionFactoryBuilder *)builder;
-
-- (RTCPeerConnectionFactory *)createPeerConnectionFactory;
-
-- (void)setVideoEncoderFactory:(std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory;
-
-- (void)setVideoDecoderFactory:(std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory;
-
-- (void)setAudioEncoderFactory:(rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory;
-
-- (void)setAudioDecoderFactory:(rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory;
-
-- (void)setAudioDeviceModule:(rtc::scoped_refptr<webrtc::AudioDeviceModule>)audioDeviceModule;
-
-- (void)setAudioProcessingModule:(rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryBuilder.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryBuilder.mm
deleted file mode 100644
index a26a639..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryBuilder.mm
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCPeerConnectionFactoryBuilder.h"
-#import "RTCPeerConnectionFactory+Native.h"
-
-#include "api/audio_codecs/audio_decoder_factory.h"
-#include "api/audio_codecs/audio_encoder_factory.h"
-#include "api/video_codecs/video_decoder_factory.h"
-#include "api/video_codecs/video_encoder_factory.h"
-#include "modules/audio_device/include/audio_device.h"
-#include "modules/audio_processing/include/audio_processing.h"
-
-@implementation RTCPeerConnectionFactoryBuilder {
- std::unique_ptr<webrtc::VideoEncoderFactory> _videoEncoderFactory;
- std::unique_ptr<webrtc::VideoDecoderFactory> _videoDecoderFactory;
- rtc::scoped_refptr<webrtc::AudioEncoderFactory> _audioEncoderFactory;
- rtc::scoped_refptr<webrtc::AudioDecoderFactory> _audioDecoderFactory;
- rtc::scoped_refptr<webrtc::AudioDeviceModule> _audioDeviceModule;
- rtc::scoped_refptr<webrtc::AudioProcessing> _audioProcessingModule;
-}
-
-+ (RTCPeerConnectionFactoryBuilder *)builder {
- return [[RTCPeerConnectionFactoryBuilder alloc] init];
-}
-
-- (RTCPeerConnectionFactory *)createPeerConnectionFactory {
- RTCPeerConnectionFactory *factory = [RTCPeerConnectionFactory alloc];
- return [factory initWithNativeAudioEncoderFactory:_audioEncoderFactory
- nativeAudioDecoderFactory:_audioDecoderFactory
- nativeVideoEncoderFactory:std::move(_videoEncoderFactory)
- nativeVideoDecoderFactory:std::move(_videoDecoderFactory)
- audioDeviceModule:_audioDeviceModule
- audioProcessingModule:_audioProcessingModule];
-}
-
-- (void)setVideoEncoderFactory:(std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory {
- _videoEncoderFactory = std::move(videoEncoderFactory);
-}
-
-- (void)setVideoDecoderFactory:(std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory {
- _videoDecoderFactory = std::move(videoDecoderFactory);
-}
-
-- (void)setAudioEncoderFactory:
- (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory {
- _audioEncoderFactory = audioEncoderFactory;
-}
-
-- (void)setAudioDecoderFactory:
- (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory {
- _audioDecoderFactory = audioDecoderFactory;
-}
-
-- (void)setAudioDeviceModule:(rtc::scoped_refptr<webrtc::AudioDeviceModule>)audioDeviceModule {
- _audioDeviceModule = audioDeviceModule;
-}
-
-- (void)setAudioProcessingModule:
- (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule {
- _audioProcessingModule = audioProcessingModule;
-}
-
-@end
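(The builder deleted above is the injection point for native components; a hedged Objective-C++ sketch of swapping in components explicitly, mirroring the defaults used elsewhere in this CL. Any component not set is simply left null in the resulting factory.)

    // Hedged usage sketch, Objective-C++ (.mm).
    RTCPeerConnectionFactoryBuilder *builder = [RTCPeerConnectionFactoryBuilder builder];
    [builder setAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory()];
    [builder setAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory()];
    [builder setVideoEncoderFactory:webrtc::ObjCToNativeVideoEncoderFactory(
                                        [[RTCVideoEncoderFactoryH264 alloc] init])];
    [builder setVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory(
                                        [[RTCVideoDecoderFactoryH264 alloc] init])];
    RTCPeerConnectionFactory *factory = [builder createPeerConnectionFactory];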
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryOptions+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryOptions+Private.h
deleted file mode 100644
index 131e8ff..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryOptions+Private.h
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCPeerConnectionFactoryOptions.h"
-
-#include "api/peerconnectioninterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCPeerConnectionFactoryOptions ()
-
-/** Returns the equivalent native PeerConnectionFactoryInterface::Options
- * structure. */
-@property(nonatomic, readonly)
- webrtc::PeerConnectionFactoryInterface::Options nativeOptions;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryOptions.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryOptions.mm
deleted file mode 100644
index 103a130..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactoryOptions.mm
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCPeerConnectionFactoryOptions+Private.h"
-
-#include "rtc_base/network_constants.h"
-
-namespace {
-
-void setNetworkBit(webrtc::PeerConnectionFactoryInterface::Options* options,
- rtc::AdapterType type,
- bool ignore) {
- if (ignore) {
- options->network_ignore_mask |= type;
- } else {
- options->network_ignore_mask &= ~type;
- }
-}
-} // namespace
-
-@implementation RTCPeerConnectionFactoryOptions
-
-@synthesize disableEncryption = _disableEncryption;
-@synthesize disableNetworkMonitor = _disableNetworkMonitor;
-@synthesize ignoreLoopbackNetworkAdapter = _ignoreLoopbackNetworkAdapter;
-@synthesize ignoreVPNNetworkAdapter = _ignoreVPNNetworkAdapter;
-@synthesize ignoreCellularNetworkAdapter = _ignoreCellularNetworkAdapter;
-@synthesize ignoreWiFiNetworkAdapter = _ignoreWiFiNetworkAdapter;
-@synthesize ignoreEthernetNetworkAdapter = _ignoreEthernetNetworkAdapter;
-@synthesize enableAes128Sha1_32CryptoCipher = _enableAes128Sha1_32CryptoCipher;
-@synthesize enableGcmCryptoSuites = _enableGcmCryptoSuites;
-
-- (instancetype)init {
- return [super init];
-}
-
-- (webrtc::PeerConnectionFactoryInterface::Options)nativeOptions {
- webrtc::PeerConnectionFactoryInterface::Options options;
- options.disable_encryption = self.disableEncryption;
- options.disable_network_monitor = self.disableNetworkMonitor;
-
- setNetworkBit(&options, rtc::ADAPTER_TYPE_LOOPBACK, self.ignoreLoopbackNetworkAdapter);
- setNetworkBit(&options, rtc::ADAPTER_TYPE_VPN, self.ignoreVPNNetworkAdapter);
- setNetworkBit(&options, rtc::ADAPTER_TYPE_CELLULAR, self.ignoreCellularNetworkAdapter);
- setNetworkBit(&options, rtc::ADAPTER_TYPE_WIFI, self.ignoreWiFiNetworkAdapter);
- setNetworkBit(&options, rtc::ADAPTER_TYPE_ETHERNET, self.ignoreEthernetNetworkAdapter);
-
- options.crypto_options.enable_aes128_sha1_32_crypto_cipher = self.enableAes128Sha1_32CryptoCipher;
- options.crypto_options.enable_gcm_crypto_suites = self.enableGcmCryptoSuites;
-
- return options;
-}
-
-@end
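(The nativeOptions getter above maps the ObjC properties onto PeerConnectionFactoryInterface::Options, with each ignore* flag toggling a bit in network_ignore_mask. A hedged sketch of setting them from the application side; `factory` is assumed to be an RTCPeerConnectionFactory created as above.)

    // Hedged usage sketch, Objective-C.
    RTCPeerConnectionFactoryOptions *options = [[RTCPeerConnectionFactoryOptions alloc] init];
    options.ignoreVPNNetworkAdapter = YES;       // sets ADAPTER_TYPE_VPN in the ignore mask
    options.ignoreCellularNetworkAdapter = YES;  // sets ADAPTER_TYPE_CELLULAR
    options.enableGcmCryptoSuites = YES;
    [factory setOptions:options];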
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtcpParameters+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCRtcpParameters+Private.h
deleted file mode 100644
index 4157ffe..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtcpParameters+Private.h
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCRtcpParameters.h"
-
-#include "api/rtpparameters.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCRtcpParameters ()
-
-/** Returns the equivalent native RtcpParameters structure. */
-@property(nonatomic, readonly) webrtc::RtcpParameters nativeParameters;
-
-/** Initialize the object with a native RtcpParameters structure. */
-- (instancetype)initWithNativeParameters:(const webrtc::RtcpParameters &)nativeParameters;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtcpParameters.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtcpParameters.mm
deleted file mode 100644
index 1c8a31b..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtcpParameters.mm
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCRtcpParameters+Private.h"
-
-#import "NSString+StdString.h"
-
-@implementation RTCRtcpParameters
-
-@synthesize cname = _cname;
-@synthesize isReducedSize = _isReducedSize;
-
-- (instancetype)init {
- return [super init];
-}
-
-- (instancetype)initWithNativeParameters:(const webrtc::RtcpParameters &)nativeParameters {
- if (self = [self init]) {
- _cname = [NSString stringForStdString:nativeParameters.cname];
- _isReducedSize = nativeParameters.reduced_size;
- }
- return self;
-}
-
-- (webrtc::RtcpParameters)nativeParameters {
- webrtc::RtcpParameters parameters;
- parameters.cname = [NSString stdStringForString:_cname];
- parameters.reduced_size = _isReducedSize;
- return parameters;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters+Private.h
deleted file mode 100644
index c6ca462..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters+Private.h
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCRtpCodecParameters.h"
-
-#include "api/rtpparameters.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCRtpCodecParameters ()
-
-/** Returns the equivalent native RtpCodecParameters structure. */
-@property(nonatomic, readonly) webrtc::RtpCodecParameters nativeParameters;
-
-/** Initialize the object with a native RtpCodecParameters structure. */
-- (instancetype)initWithNativeParameters:(const webrtc::RtpCodecParameters &)nativeParameters;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm
deleted file mode 100644
index 7951cee..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCRtpCodecParameters+Private.h"
-
-#import "NSString+StdString.h"
-#import "WebRTC/RTCMediaStreamTrack.h" // For "kind" strings.
-
-#include "media/base/mediaconstants.h"
-#include "rtc_base/checks.h"
-
-const NSString * const kRTCRtxCodecName = @(cricket::kRtxCodecName);
-const NSString * const kRTCRedCodecName = @(cricket::kRedCodecName);
-const NSString * const kRTCUlpfecCodecName = @(cricket::kUlpfecCodecName);
-const NSString * const kRTCFlexfecCodecName = @(cricket::kFlexfecCodecName);
-const NSString * const kRTCOpusCodecName = @(cricket::kOpusCodecName);
-const NSString * const kRTCIsacCodecName = @(cricket::kIsacCodecName);
-const NSString * const kRTCL16CodecName = @(cricket::kL16CodecName);
-const NSString * const kRTCG722CodecName = @(cricket::kG722CodecName);
-const NSString * const kRTCIlbcCodecName = @(cricket::kIlbcCodecName);
-const NSString * const kRTCPcmuCodecName = @(cricket::kPcmuCodecName);
-const NSString * const kRTCPcmaCodecName = @(cricket::kPcmaCodecName);
-const NSString * const kRTCDtmfCodecName = @(cricket::kDtmfCodecName);
-const NSString * const kRTCComfortNoiseCodecName =
- @(cricket::kComfortNoiseCodecName);
-const NSString * const kRTCVp8CodecName = @(cricket::kVp8CodecName);
-const NSString * const kRTCVp9CodecName = @(cricket::kVp9CodecName);
-const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName);
-
-@implementation RTCRtpCodecParameters
-
-@synthesize payloadType = _payloadType;
-@synthesize name = _name;
-@synthesize kind = _kind;
-@synthesize clockRate = _clockRate;
-@synthesize numChannels = _numChannels;
-@synthesize parameters = _parameters;
-
-- (instancetype)init {
- return [super init];
-}
-
-- (instancetype)initWithNativeParameters:
- (const webrtc::RtpCodecParameters &)nativeParameters {
- if (self = [self init]) {
- _payloadType = nativeParameters.payload_type;
- _name = [NSString stringForStdString:nativeParameters.name];
- switch (nativeParameters.kind) {
- case cricket::MEDIA_TYPE_AUDIO:
- _kind = kRTCMediaStreamTrackKindAudio;
- break;
- case cricket::MEDIA_TYPE_VIDEO:
- _kind = kRTCMediaStreamTrackKindVideo;
- break;
- case cricket::MEDIA_TYPE_DATA:
- RTC_NOTREACHED();
- break;
- }
- if (nativeParameters.clock_rate) {
- _clockRate = [NSNumber numberWithInt:*nativeParameters.clock_rate];
- }
- if (nativeParameters.num_channels) {
- _numChannels = [NSNumber numberWithInt:*nativeParameters.num_channels];
- }
- NSMutableDictionary *parameters = [NSMutableDictionary dictionary];
- for (const auto &parameter : nativeParameters.parameters) {
- [parameters setObject:[NSString stringForStdString:parameter.second]
- forKey:[NSString stringForStdString:parameter.first]];
- }
- _parameters = parameters;
- }
- return self;
-}
-
-- (webrtc::RtpCodecParameters)nativeParameters {
- webrtc::RtpCodecParameters parameters;
- parameters.payload_type = _payloadType;
- parameters.name = [NSString stdStringForString:_name];
- // NSString pointer comparison is safe here since "kind" is readonly and only
- // populated above.
- if (_kind == kRTCMediaStreamTrackKindAudio) {
- parameters.kind = cricket::MEDIA_TYPE_AUDIO;
- } else if (_kind == kRTCMediaStreamTrackKindVideo) {
- parameters.kind = cricket::MEDIA_TYPE_VIDEO;
- } else {
- RTC_NOTREACHED();
- }
- if (_clockRate != nil) {
- parameters.clock_rate = absl::optional<int>(_clockRate.intValue);
- }
- if (_numChannels != nil) {
- parameters.num_channels = absl::optional<int>(_numChannels.intValue);
- }
- for (NSString *paramKey in _parameters.allKeys) {
- std::string key = [NSString stdStringForString:paramKey];
- std::string value = [NSString stdStringForString:_parameters[paramKey]];
- parameters.parameters[key] = value;
- }
- return parameters;
-}
-
-@end
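(The conversion above round-trips codec descriptions between RTCRtpCodecParameters and webrtc::RtpCodecParameters. A hedged sketch of inspecting those values on the ObjC side; `sender` is assumed to be an RTCRtpSender obtained from the peer connection.)

    // Hedged usage sketch, Objective-C.
    RTCRtpParameters *parameters = sender.parameters;
    for (RTCRtpCodecParameters *codec in parameters.codecs) {
      RTCLog(@"codec %@/%@ pt=%d", codec.kind, codec.name, codec.payloadType);
    }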
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters+Private.h
deleted file mode 100644
index c64501f..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters+Private.h
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCRtpEncodingParameters.h"
-
-#include "api/rtpparameters.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCRtpEncodingParameters ()
-
-/** Returns the equivalent native RtpEncodingParameters structure. */
-@property(nonatomic, readonly) webrtc::RtpEncodingParameters nativeParameters;
-
-/** Initialize the object with a native RtpEncodingParameters structure. */
-- (instancetype)initWithNativeParameters:(const webrtc::RtpEncodingParameters &)nativeParameters;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm
deleted file mode 100644
index 299e318..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCRtpEncodingParameters+Private.h"
-
-@implementation RTCRtpEncodingParameters
-
-@synthesize isActive = _isActive;
-@synthesize maxBitrateBps = _maxBitrateBps;
-@synthesize minBitrateBps = _minBitrateBps;
-@synthesize ssrc = _ssrc;
-
-- (instancetype)init {
- return [super init];
-}
-
-- (instancetype)initWithNativeParameters:
- (const webrtc::RtpEncodingParameters &)nativeParameters {
- if (self = [self init]) {
- _isActive = nativeParameters.active;
- if (nativeParameters.max_bitrate_bps) {
- _maxBitrateBps =
- [NSNumber numberWithInt:*nativeParameters.max_bitrate_bps];
- }
- if (nativeParameters.min_bitrate_bps) {
- _minBitrateBps =
- [NSNumber numberWithInt:*nativeParameters.min_bitrate_bps];
- }
- if (nativeParameters.ssrc) {
- _ssrc = [NSNumber numberWithUnsignedLong:*nativeParameters.ssrc];
- }
- }
- return self;
-}
-
-- (webrtc::RtpEncodingParameters)nativeParameters {
- webrtc::RtpEncodingParameters parameters;
- parameters.active = _isActive;
- if (_maxBitrateBps != nil) {
- parameters.max_bitrate_bps = absl::optional<int>(_maxBitrateBps.intValue);
- }
- if (_minBitrateBps != nil) {
- parameters.min_bitrate_bps = absl::optional<int>(_minBitrateBps.intValue);
- }
- if (_ssrc != nil) {
- parameters.ssrc = absl::optional<uint32_t>(_ssrc.unsignedLongValue);
- }
- return parameters;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpFragmentationHeader.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpFragmentationHeader.mm
deleted file mode 100644
index 87c88a4..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpFragmentationHeader.mm
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCVideoCodec.h"
-
-#include "modules/include/module_common_types.h"
-
-@implementation RTCRtpFragmentationHeader
-
-@synthesize fragmentationOffset = _fragmentationOffset;
-@synthesize fragmentationLength = _fragmentationLength;
-@synthesize fragmentationTimeDiff = _fragmentationTimeDiff;
-@synthesize fragmentationPlType = _fragmentationPlType;
-
-- (instancetype)initWithNativeFragmentationHeader:
- (const webrtc::RTPFragmentationHeader *)fragmentationHeader {
- if (self = [super init]) {
- if (fragmentationHeader) {
- int count = fragmentationHeader->fragmentationVectorSize;
- NSMutableArray *offsets = [NSMutableArray array];
- NSMutableArray *lengths = [NSMutableArray array];
- NSMutableArray *timeDiffs = [NSMutableArray array];
- NSMutableArray *plTypes = [NSMutableArray array];
- for (int i = 0; i < count; ++i) {
- [offsets addObject:@(fragmentationHeader->fragmentationOffset[i])];
- [lengths addObject:@(fragmentationHeader->fragmentationLength[i])];
- [timeDiffs addObject:@(fragmentationHeader->fragmentationTimeDiff[i])];
- [plTypes addObject:@(fragmentationHeader->fragmentationPlType[i])];
- }
- _fragmentationOffset = [offsets copy];
- _fragmentationLength = [lengths copy];
- _fragmentationTimeDiff = [timeDiffs copy];
- _fragmentationPlType = [plTypes copy];
- }
- }
-
- return self;
-}
-
-- (std::unique_ptr<webrtc::RTPFragmentationHeader>)createNativeFragmentationHeader {
- auto fragmentationHeader =
- std::unique_ptr<webrtc::RTPFragmentationHeader>(new webrtc::RTPFragmentationHeader);
- fragmentationHeader->VerifyAndAllocateFragmentationHeader(_fragmentationOffset.count);
- for (NSUInteger i = 0; i < _fragmentationOffset.count; ++i) {
- fragmentationHeader->fragmentationOffset[i] = (size_t)_fragmentationOffset[i].unsignedIntValue;
- fragmentationHeader->fragmentationLength[i] = (size_t)_fragmentationLength[i].unsignedIntValue;
- fragmentationHeader->fragmentationTimeDiff[i] =
- (uint16_t)_fragmentationTimeDiff[i].unsignedIntValue;
- fragmentationHeader->fragmentationPlType[i] = (uint8_t)_fragmentationPlType[i].unsignedIntValue;
- }
-
- return fragmentationHeader;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpHeaderExtension+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpHeaderExtension+Private.h
deleted file mode 100644
index 0bc236b..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpHeaderExtension+Private.h
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCRtpHeaderExtension.h"
-
-#include "api/rtpparameters.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCRtpHeaderExtension ()
-
-/** Returns the equivalent native RtpExtension structure. */
-@property(nonatomic, readonly) webrtc::RtpExtension nativeParameters;
-
-/** Initialize the object with a native RtpExtension structure. */
-- (instancetype)initWithNativeParameters:(const webrtc::RtpExtension &)nativeParameters;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpHeaderExtension.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpHeaderExtension.mm
deleted file mode 100644
index 3f448d7..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpHeaderExtension.mm
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCRtpHeaderExtension+Private.h"
-
-#import "NSString+StdString.h"
-
-@implementation RTCRtpHeaderExtension
-
-@synthesize uri = _uri;
-@synthesize id = _id;
-@synthesize encrypted = _encrypted;
-
-- (instancetype)init {
- return [super init];
-}
-
-- (instancetype)initWithNativeParameters:(const webrtc::RtpExtension &)nativeParameters {
- if (self = [self init]) {
- _uri = [NSString stringForStdString:nativeParameters.uri];
- _id = nativeParameters.id;
- _encrypted = nativeParameters.encrypt;
- }
- return self;
-}
-
-- (webrtc::RtpExtension)nativeParameters {
- webrtc::RtpExtension extension;
- extension.uri = [NSString stdStringForString:_uri];
- extension.id = _id;
- extension.encrypt = _encrypted;
- return extension;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpParameters+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpParameters+Private.h
deleted file mode 100644
index 41d1b7c..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpParameters+Private.h
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCRtpParameters.h"
-
-#include "api/rtpparameters.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCRtpParameters ()
-
-/** Returns the equivalent native RtpParameters structure. */
-@property(nonatomic, readonly) webrtc::RtpParameters nativeParameters;
-
-/** Initialize the object with a native RtpParameters structure. */
-- (instancetype)initWithNativeParameters:(const webrtc::RtpParameters &)nativeParameters;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpParameters.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpParameters.mm
deleted file mode 100644
index 97800ca..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpParameters.mm
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCRtpParameters+Private.h"
-
-#import "NSString+StdString.h"
-#import "RTCRtcpParameters+Private.h"
-#import "RTCRtpCodecParameters+Private.h"
-#import "RTCRtpEncodingParameters+Private.h"
-#import "RTCRtpHeaderExtension+Private.h"
-
-@implementation RTCRtpParameters
-
-@synthesize transactionId = _transactionId;
-@synthesize rtcp = _rtcp;
-@synthesize headerExtensions = _headerExtensions;
-@synthesize encodings = _encodings;
-@synthesize codecs = _codecs;
-
-- (instancetype)init {
- return [super init];
-}
-
-- (instancetype)initWithNativeParameters:
- (const webrtc::RtpParameters &)nativeParameters {
- if (self = [self init]) {
- _transactionId = [NSString stringForStdString:nativeParameters.transaction_id];
- _rtcp = [[RTCRtcpParameters alloc] initWithNativeParameters:nativeParameters.rtcp];
-
- NSMutableArray *headerExtensions = [[NSMutableArray alloc] init];
- for (const auto &headerExtension : nativeParameters.header_extensions) {
- [headerExtensions
- addObject:[[RTCRtpHeaderExtension alloc] initWithNativeParameters:headerExtension]];
- }
- _headerExtensions = headerExtensions;
-
- NSMutableArray *encodings = [[NSMutableArray alloc] init];
- for (const auto &encoding : nativeParameters.encodings) {
- [encodings addObject:[[RTCRtpEncodingParameters alloc]
- initWithNativeParameters:encoding]];
- }
- _encodings = encodings;
-
- NSMutableArray *codecs = [[NSMutableArray alloc] init];
- for (const auto &codec : nativeParameters.codecs) {
- [codecs addObject:[[RTCRtpCodecParameters alloc]
- initWithNativeParameters:codec]];
- }
- _codecs = codecs;
- }
- return self;
-}
-
-- (webrtc::RtpParameters)nativeParameters {
- webrtc::RtpParameters parameters;
- parameters.transaction_id = [NSString stdStringForString:_transactionId];
- parameters.rtcp = [_rtcp nativeParameters];
- for (RTCRtpHeaderExtension *headerExtension in _headerExtensions) {
- parameters.header_extensions.push_back(headerExtension.nativeParameters);
- }
- for (RTCRtpEncodingParameters *encoding in _encodings) {
- parameters.encodings.push_back(encoding.nativeParameters);
- }
- for (RTCRtpCodecParameters *codec in _codecs) {
- parameters.codecs.push_back(codec.nativeParameters);
- }
- return parameters;
-}
-
-@end
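(RTCRtpParameters above aggregates rtcp, header extension, encoding and codec parameters for a sender or receiver. A hedged sketch of the typical edit cycle, capping the send bitrate through RTCRtpEncodingParameters; `sender` is assumed to be an RTCRtpSender from the peer connection's senders array.)

    // Hedged usage sketch, Objective-C.
    RTCRtpParameters *parameters = sender.parameters;
    for (RTCRtpEncodingParameters *encoding in parameters.encodings) {
      encoding.maxBitrateBps = @(500000);  // cap at 500 kbps
    }
    sender.parameters = parameters;  // pushed to the native layer via nativeParameters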
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpReceiver+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpReceiver+Private.h
deleted file mode 100644
index 6ec49be..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpReceiver+Private.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCRtpReceiver.h"
-
-#include "api/rtpreceiverinterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@class RTCPeerConnectionFactory;
-
-namespace webrtc {
-
-class RtpReceiverDelegateAdapter : public RtpReceiverObserverInterface {
- public:
- RtpReceiverDelegateAdapter(RTCRtpReceiver* receiver);
-
- void OnFirstPacketReceived(cricket::MediaType media_type) override;
-
- private:
- __weak RTCRtpReceiver* receiver_;
-};
-
-} // namespace webrtc
-
-@interface RTCRtpReceiver ()
-
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::RtpReceiverInterface> nativeRtpReceiver;
-
-/** Initialize an RTCRtpReceiver with a native RtpReceiverInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
- nativeRtpReceiver:(rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver
- NS_DESIGNATED_INITIALIZER;
-
-+ (RTCRtpMediaType)mediaTypeForNativeMediaType:(cricket::MediaType)nativeMediaType;
-
-+ (cricket::MediaType)nativeMediaTypeForMediaType:(RTCRtpMediaType)mediaType;
-
-+ (NSString*)stringForMediaType:(RTCRtpMediaType)mediaType;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpReceiver.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpReceiver.mm
deleted file mode 100644
index 895c451..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpReceiver.mm
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCRtpReceiver+Private.h"
-
-#import "NSString+StdString.h"
-#import "RTCMediaStreamTrack+Private.h"
-#import "RTCRtpParameters+Private.h"
-#import "WebRTC/RTCLogging.h"
-
-#include "api/mediastreaminterface.h"
-
-namespace webrtc {
-
-RtpReceiverDelegateAdapter::RtpReceiverDelegateAdapter(
- RTCRtpReceiver *receiver) {
- RTC_CHECK(receiver);
- receiver_ = receiver;
-}
-
-void RtpReceiverDelegateAdapter::OnFirstPacketReceived(
- cricket::MediaType media_type) {
- RTCRtpMediaType packet_media_type =
- [RTCRtpReceiver mediaTypeForNativeMediaType:media_type];
- RTCRtpReceiver *receiver = receiver_;
- [receiver.delegate rtpReceiver:receiver didReceiveFirstPacketForMediaType:packet_media_type];
-}
-
-} // namespace webrtc
-
-@implementation RTCRtpReceiver {
- RTCPeerConnectionFactory *_factory;
- rtc::scoped_refptr<webrtc::RtpReceiverInterface> _nativeRtpReceiver;
- std::unique_ptr<webrtc::RtpReceiverDelegateAdapter> _observer;
-}
-
-@synthesize delegate = _delegate;
-
-- (NSString *)receiverId {
- return [NSString stringForStdString:_nativeRtpReceiver->id()];
-}
-
-- (RTCRtpParameters *)parameters {
- return [[RTCRtpParameters alloc]
- initWithNativeParameters:_nativeRtpReceiver->GetParameters()];
-}
-
-- (void)setParameters:(RTCRtpParameters *)parameters {
- if (!_nativeRtpReceiver->SetParameters(parameters.nativeParameters)) {
- RTCLogError(@"RTCRtpReceiver(%p): Failed to set parameters: %@", self,
- parameters);
- }
-}
-
-- (nullable RTCMediaStreamTrack *)track {
- rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
- _nativeRtpReceiver->track());
- if (nativeTrack) {
- return [RTCMediaStreamTrack mediaTrackForNativeTrack:nativeTrack factory:_factory];
- }
- return nil;
-}
-
-- (NSString *)description {
- return [NSString stringWithFormat:@"RTCRtpReceiver {\n receiverId: %@\n}",
- self.receiverId];
-}
-
-- (void)dealloc {
- if (_nativeRtpReceiver) {
- _nativeRtpReceiver->SetObserver(nullptr);
- }
-}
-
-- (BOOL)isEqual:(id)object {
- if (self == object) {
- return YES;
- }
- if (object == nil) {
- return NO;
- }
- if (![object isMemberOfClass:[self class]]) {
- return NO;
- }
- RTCRtpReceiver *receiver = (RTCRtpReceiver *)object;
- return _nativeRtpReceiver == receiver.nativeRtpReceiver;
-}
-
-- (NSUInteger)hash {
- return (NSUInteger)_nativeRtpReceiver.get();
-}
-
-#pragma mark - Private
-
-- (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
- return _nativeRtpReceiver;
-}
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeRtpReceiver:
- (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
- if (self = [super init]) {
- _factory = factory;
- _nativeRtpReceiver = nativeRtpReceiver;
- RTCLogInfo(
- @"RTCRtpReceiver(%p): created receiver: %@", self, self.description);
- _observer.reset(new webrtc::RtpReceiverDelegateAdapter(self));
- _nativeRtpReceiver->SetObserver(_observer.get());
- }
- return self;
-}
-
-+ (RTCRtpMediaType)mediaTypeForNativeMediaType:
- (cricket::MediaType)nativeMediaType {
- switch (nativeMediaType) {
- case cricket::MEDIA_TYPE_AUDIO:
- return RTCRtpMediaTypeAudio;
- case cricket::MEDIA_TYPE_VIDEO:
- return RTCRtpMediaTypeVideo;
- case cricket::MEDIA_TYPE_DATA:
- return RTCRtpMediaTypeData;
- }
-}
-
-+ (cricket::MediaType)nativeMediaTypeForMediaType:(RTCRtpMediaType)mediaType {
- switch (mediaType) {
- case RTCRtpMediaTypeAudio:
- return cricket::MEDIA_TYPE_AUDIO;
- case RTCRtpMediaTypeVideo:
- return cricket::MEDIA_TYPE_VIDEO;
- case RTCRtpMediaTypeData:
- return cricket::MEDIA_TYPE_DATA;
- }
-}
-
-+ (NSString *)stringForMediaType:(RTCRtpMediaType)mediaType {
- switch (mediaType) {
- case RTCRtpMediaTypeAudio:
- return @"AUDIO";
- case RTCRtpMediaTypeVideo:
- return @"VIDEO";
- case RTCRtpMediaTypeData:
- return @"DATA";
- }
-}
-
-@end
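
For reference, a minimal sketch of how the delegate hook implemented in the deleted RTCRtpReceiver.mm above is consumed by an application (illustrative only, not part of this change; the class name FirstPacketLogger is made up, and the protocol name RTCRtpReceiverDelegate is assumed from the public header, which is not in this diff):

  #import <Foundation/Foundation.h>
  #import "WebRTC/RTCRtpReceiver.h"

  // Logs when the first RTP packet arrives for a receiver handed out by an
  // RTCPeerConnection. Assumes the RTCRtpReceiverDelegate protocol name.
  @interface FirstPacketLogger : NSObject <RTCRtpReceiverDelegate>
  @end

  @implementation FirstPacketLogger
  - (void)rtpReceiver:(RTCRtpReceiver *)rtpReceiver
      didReceiveFirstPacketForMediaType:(RTCRtpMediaType)mediaType {
    NSLog(@"Receiver %@ got its first packet (media type %ld)",
          rtpReceiver.receiverId, (long)mediaType);
  }
  @end

The application would assign an instance of this logger to the receiver's delegate property and keep its own strong reference to it, since delegates are typically held weakly.
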
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpSender+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpSender+Private.h
deleted file mode 100644
index 5b671ae..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpSender+Private.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCRtpSender.h"
-
-#include "api/rtpsenderinterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@class RTCPeerConnectionFactory;
-
-@interface RTCRtpSender ()
-
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeRtpSender;
-
-/** Initialize an RTCRtpSender with a native RtpSenderInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
- nativeRtpSender:(rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender
- NS_DESIGNATED_INITIALIZER;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpSender.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpSender.mm
deleted file mode 100644
index 6a46edf..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpSender.mm
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCRtpSender+Private.h"
-
-#import "NSString+StdString.h"
-#import "RTCDtmfSender+Private.h"
-#import "RTCMediaStreamTrack+Private.h"
-#import "RTCRtpParameters+Private.h"
-#import "WebRTC/RTCLogging.h"
-
-#include "api/mediastreaminterface.h"
-
-@implementation RTCRtpSender {
- RTCPeerConnectionFactory *_factory;
- rtc::scoped_refptr<webrtc::RtpSenderInterface> _nativeRtpSender;
-}
-
-@synthesize dtmfSender = _dtmfSender;
-
-- (NSString *)senderId {
- return [NSString stringForStdString:_nativeRtpSender->id()];
-}
-
-- (RTCRtpParameters *)parameters {
- return [[RTCRtpParameters alloc]
- initWithNativeParameters:_nativeRtpSender->GetParameters()];
-}
-
-- (void)setParameters:(RTCRtpParameters *)parameters {
- if (!_nativeRtpSender->SetParameters(parameters.nativeParameters).ok()) {
- RTCLogError(@"RTCRtpSender(%p): Failed to set parameters: %@", self,
- parameters);
- }
-}
-
-- (RTCMediaStreamTrack *)track {
- rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
- _nativeRtpSender->track());
- if (nativeTrack) {
- return [RTCMediaStreamTrack mediaTrackForNativeTrack:nativeTrack factory:_factory];
- }
- return nil;
-}
-
-- (void)setTrack:(RTCMediaStreamTrack *)track {
- if (!_nativeRtpSender->SetTrack(track.nativeTrack)) {
- RTCLogError(@"RTCRtpSender(%p): Failed to set track %@", self, track);
- }
-}
-
-- (NSString *)description {
- return [NSString stringWithFormat:@"RTCRtpSender {\n senderId: %@\n}",
- self.senderId];
-}
-
-- (BOOL)isEqual:(id)object {
- if (self == object) {
- return YES;
- }
- if (object == nil) {
- return NO;
- }
- if (![object isMemberOfClass:[self class]]) {
- return NO;
- }
- RTCRtpSender *sender = (RTCRtpSender *)object;
- return _nativeRtpSender == sender.nativeRtpSender;
-}
-
-- (NSUInteger)hash {
- return (NSUInteger)_nativeRtpSender.get();
-}
-
-#pragma mark - Private
-
-- (rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
- return _nativeRtpSender;
-}
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeRtpSender:(rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
- NSParameterAssert(factory);
- NSParameterAssert(nativeRtpSender);
- if (self = [super init]) {
- _factory = factory;
- _nativeRtpSender = nativeRtpSender;
- rtc::scoped_refptr<webrtc::DtmfSenderInterface> nativeDtmfSender(
- _nativeRtpSender->GetDtmfSender());
- if (nativeDtmfSender) {
- _dtmfSender = [[RTCDtmfSender alloc] initWithNativeDtmfSender:nativeDtmfSender];
- }
- RTCLogInfo(@"RTCRtpSender(%p): created sender: %@", self, self.description);
- }
- return self;
-}
-
-@end
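
For reference, a minimal usage sketch of the RTCRtpSender wrapper deleted above (illustrative only; the function name is made up, and |sender| and |audioTrack| are assumed to come from an existing RTCPeerConnection, which is outside this diff):

  #import <Foundation/Foundation.h>
  #import "WebRTC/RTCMediaStreamTrack.h"
  #import "WebRTC/RTCRtpParameters.h"
  #import "WebRTC/RTCRtpSender.h"

  // Attaches a track to a sender and reads back its RTP parameters.
  void AttachTrackToSender(RTCRtpSender *sender, RTCMediaStreamTrack *audioTrack) {
    sender.track = audioTrack;  // forwarded to SetTrack() on the native sender
    RTCRtpParameters *parameters = sender.parameters;  // wraps GetParameters()
    NSLog(@"Sender %@ has %lu encodings",
          sender.senderId, (unsigned long)parameters.encodings.count);
  }
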
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpTransceiver+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpTransceiver+Private.h
deleted file mode 100644
index ee361ea..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpTransceiver+Private.h
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCRtpTransceiver.h"
-
-#include "api/rtptransceiverinterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@class RTCPeerConnectionFactory;
-
-@interface RTCRtpTransceiverInit ()
-
-@property(nonatomic, readonly) webrtc::RtpTransceiverInit nativeInit;
-
-@end
-
-@interface RTCRtpTransceiver ()
-
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::RtpTransceiverInterface>
- nativeRtpTransceiver;
-
-/** Initialize an RTCRtpTransceiver with a native RtpTransceiverInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
- nativeRtpTransceiver:
- (rtc::scoped_refptr<webrtc::RtpTransceiverInterface>)nativeRtpTransceiver
- NS_DESIGNATED_INITIALIZER;
-
-+ (webrtc::RtpTransceiverDirection)nativeRtpTransceiverDirectionFromDirection:
- (RTCRtpTransceiverDirection)direction;
-
-+ (RTCRtpTransceiverDirection)rtpTransceiverDirectionFromNativeDirection:
- (webrtc::RtpTransceiverDirection)nativeDirection;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpTransceiver.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpTransceiver.mm
deleted file mode 100644
index cf39d5b..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpTransceiver.mm
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCRtpTransceiver+Private.h"
-
-#import "NSString+StdString.h"
-#import "RTCRtpEncodingParameters+Private.h"
-#import "RTCRtpParameters+Private.h"
-#import "RTCRtpReceiver+Private.h"
-#import "RTCRtpSender+Private.h"
-#import "WebRTC/RTCLogging.h"
-
-@implementation RTCRtpTransceiverInit
-
-@synthesize direction = _direction;
-@synthesize streamIds = _streamIds;
-@synthesize sendEncodings = _sendEncodings;
-
-- (instancetype)init {
- if (self = [super init]) {
- _direction = RTCRtpTransceiverDirectionSendRecv;
- }
- return self;
-}
-
-- (webrtc::RtpTransceiverInit)nativeInit {
- webrtc::RtpTransceiverInit init;
- init.direction = [RTCRtpTransceiver nativeRtpTransceiverDirectionFromDirection:_direction];
- for (NSString *streamId in _streamIds) {
- init.stream_ids.push_back([streamId UTF8String]);
- }
- for (RTCRtpEncodingParameters *sendEncoding in _sendEncodings) {
- init.send_encodings.push_back(sendEncoding.nativeParameters);
- }
- return init;
-}
-
-@end
-
-@implementation RTCRtpTransceiver {
- RTCPeerConnectionFactory *_factory;
- rtc::scoped_refptr<webrtc::RtpTransceiverInterface> _nativeRtpTransceiver;
-}
-
-- (RTCRtpMediaType)mediaType {
- return [RTCRtpReceiver mediaTypeForNativeMediaType:_nativeRtpTransceiver->media_type()];
-}
-
-- (NSString *)mid {
- if (_nativeRtpTransceiver->mid()) {
- return [NSString stringForStdString:*_nativeRtpTransceiver->mid()];
- } else {
- return nil;
- }
-}
-
-@synthesize sender = _sender;
-@synthesize receiver = _receiver;
-
-- (BOOL)isStopped {
- return _nativeRtpTransceiver->stopped();
-}
-
-- (RTCRtpTransceiverDirection)direction {
- return [RTCRtpTransceiver
- rtpTransceiverDirectionFromNativeDirection:_nativeRtpTransceiver->direction()];
-}
-
-- (void)setDirection:(RTCRtpTransceiverDirection)direction {
- _nativeRtpTransceiver->SetDirection(
- [RTCRtpTransceiver nativeRtpTransceiverDirectionFromDirection:direction]);
-}
-
-- (BOOL)currentDirection:(RTCRtpTransceiverDirection *)currentDirectionOut {
- if (_nativeRtpTransceiver->current_direction()) {
- *currentDirectionOut = [RTCRtpTransceiver
- rtpTransceiverDirectionFromNativeDirection:*_nativeRtpTransceiver->current_direction()];
- return YES;
- } else {
- return NO;
- }
-}
-
-- (void)stop {
- _nativeRtpTransceiver->Stop();
-}
-
-- (NSString *)description {
- return [NSString
- stringWithFormat:@"RTCRtpTransceiver {\n sender: %@\n receiver: %@\n}", _sender, _receiver];
-}
-
-- (BOOL)isEqual:(id)object {
- if (self == object) {
- return YES;
- }
- if (object == nil) {
- return NO;
- }
- if (![object isMemberOfClass:[self class]]) {
- return NO;
- }
- RTCRtpTransceiver *transceiver = (RTCRtpTransceiver *)object;
- return _nativeRtpTransceiver == transceiver.nativeRtpTransceiver;
-}
-
-- (NSUInteger)hash {
- return (NSUInteger)_nativeRtpTransceiver.get();
-}
-
-#pragma mark - Private
-
-- (rtc::scoped_refptr<webrtc::RtpTransceiverInterface>)nativeRtpTransceiver {
- return _nativeRtpTransceiver;
-}
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeRtpTransceiver:
- (rtc::scoped_refptr<webrtc::RtpTransceiverInterface>)nativeRtpTransceiver {
- NSParameterAssert(factory);
- NSParameterAssert(nativeRtpTransceiver);
- if (self = [super init]) {
- _factory = factory;
- _nativeRtpTransceiver = nativeRtpTransceiver;
- _sender = [[RTCRtpSender alloc] initWithFactory:_factory
- nativeRtpSender:nativeRtpTransceiver->sender()];
- _receiver = [[RTCRtpReceiver alloc] initWithFactory:_factory
- nativeRtpReceiver:nativeRtpTransceiver->receiver()];
- RTCLogInfo(@"RTCRtpTransceiver(%p): created transceiver: %@", self, self.description);
- }
- return self;
-}
-
-+ (webrtc::RtpTransceiverDirection)nativeRtpTransceiverDirectionFromDirection:
- (RTCRtpTransceiverDirection)direction {
- switch (direction) {
- case RTCRtpTransceiverDirectionSendRecv:
- return webrtc::RtpTransceiverDirection::kSendRecv;
- case RTCRtpTransceiverDirectionSendOnly:
- return webrtc::RtpTransceiverDirection::kSendOnly;
- case RTCRtpTransceiverDirectionRecvOnly:
- return webrtc::RtpTransceiverDirection::kRecvOnly;
- case RTCRtpTransceiverDirectionInactive:
- return webrtc::RtpTransceiverDirection::kInactive;
- }
-}
-
-+ (RTCRtpTransceiverDirection)rtpTransceiverDirectionFromNativeDirection:
- (webrtc::RtpTransceiverDirection)nativeDirection {
- switch (nativeDirection) {
- case webrtc::RtpTransceiverDirection::kSendRecv:
- return RTCRtpTransceiverDirectionSendRecv;
- case webrtc::RtpTransceiverDirection::kSendOnly:
- return RTCRtpTransceiverDirectionSendOnly;
- case webrtc::RtpTransceiverDirection::kRecvOnly:
- return RTCRtpTransceiverDirectionRecvOnly;
- case webrtc::RtpTransceiverDirection::kInactive:
- return RTCRtpTransceiverDirectionInactive;
- }
-}
-
-@end
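
For reference, a minimal sketch of driving the RTCRtpTransceiver wrapper deleted above (illustrative only; the function name is made up, and |transceiver| is assumed to come from a Unified Plan RTCPeerConnection, which is outside this diff):

  #import <Foundation/Foundation.h>
  #import "WebRTC/RTCRtpTransceiver.h"

  // Switches a transceiver to send-only and reads the negotiated direction,
  // which is only available after an offer/answer exchange has completed.
  void MakeSendOnly(RTCRtpTransceiver *transceiver) {
    transceiver.direction = RTCRtpTransceiverDirectionSendOnly;

    RTCRtpTransceiverDirection negotiated;
    if ([transceiver currentDirection:&negotiated]) {
      NSLog(@"Negotiated direction: %ld", (long)negotiated);
    }
  }
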
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCSSLAdapter.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCSSLAdapter.mm
deleted file mode 100644
index dfe54b5..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCSSLAdapter.mm
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCSSLAdapter.h"
-
-#include "rtc_base/checks.h"
-#include "rtc_base/ssladapter.h"
-
-BOOL RTCInitializeSSL(void) {
- BOOL initialized = rtc::InitializeSSL();
- RTC_DCHECK(initialized);
- return initialized;
-}
-
-BOOL RTCCleanupSSL(void) {
- BOOL cleanedUp = rtc::CleanupSSL();
- RTC_DCHECK(cleanedUp);
- return cleanedUp;
-}
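
For reference, the functions in the deleted RTCSSLAdapter.mm are typically used as a single init/cleanup pair around the lifetime of the SDK (illustrative sketch only; the wrapping function names are made up):

  #import "WebRTC/RTCSSLAdapter.h"

  // Call once before creating any peer connection factory.
  static void SetUpWebRTC(void) {
    RTCInitializeSSL();
  }

  // Call once when the application is done with WebRTC.
  static void TearDownWebRTC(void) {
    RTCCleanupSSL();
  }
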
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCSessionDescription+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCSessionDescription+Private.h
deleted file mode 100644
index 08a6f55..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCSessionDescription+Private.h
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCSessionDescription.h"
-
-#include "api/jsep.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCSessionDescription ()
-
-/**
- * The native SessionDescriptionInterface representation of this
- * RTCSessionDescription object. This is needed to pass to the underlying C++
- * APIs.
- */
-@property(nonatomic, readonly, nullable) webrtc::SessionDescriptionInterface *nativeDescription;
-
-/**
- * Initialize an RTCSessionDescription from a native
- * SessionDescriptionInterface. No ownership is taken of the native session
- * description.
- */
-- (instancetype)initWithNativeDescription:
- (const webrtc::SessionDescriptionInterface *)nativeDescription;
-
-+ (std::string)stdStringForType:(RTCSdpType)type;
-
-+ (RTCSdpType)typeForStdString:(const std::string &)string;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCSessionDescription.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCSessionDescription.mm
deleted file mode 100644
index 5748b65..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCSessionDescription.mm
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCSessionDescription+Private.h"
-
-#import "NSString+StdString.h"
-#import "WebRTC/RTCLogging.h"
-
-#include "rtc_base/checks.h"
-
-@implementation RTCSessionDescription
-
-@synthesize type = _type;
-@synthesize sdp = _sdp;
-
-+ (NSString *)stringForType:(RTCSdpType)type {
- std::string string = [[self class] stdStringForType:type];
- return [NSString stringForStdString:string];
-}
-
-+ (RTCSdpType)typeForString:(NSString *)string {
- std::string typeString = string.stdString;
- return [[self class] typeForStdString:typeString];
-}
-
-- (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp {
- NSParameterAssert(sdp.length);
- if (self = [super init]) {
- _type = type;
- _sdp = [sdp copy];
- }
- return self;
-}
-
-- (NSString *)description {
- return [NSString stringWithFormat:@"RTCSessionDescription:\n%@\n%@",
- [[self class] stringForType:_type],
- _sdp];
-}
-
-#pragma mark - Private
-
-- (webrtc::SessionDescriptionInterface *)nativeDescription {
- webrtc::SdpParseError error;
-
- webrtc::SessionDescriptionInterface *description =
- webrtc::CreateSessionDescription([[self class] stdStringForType:_type],
- _sdp.stdString,
- &error);
-
- if (!description) {
- RTCLogError(@"Failed to create session description: %s\nline: %s",
- error.description.c_str(),
- error.line.c_str());
- }
-
- return description;
-}
-
-- (instancetype)initWithNativeDescription:
- (const webrtc::SessionDescriptionInterface *)nativeDescription {
- NSParameterAssert(nativeDescription);
- std::string sdp;
- nativeDescription->ToString(&sdp);
- RTCSdpType type = [[self class] typeForStdString:nativeDescription->type()];
-
- return [self initWithType:type
- sdp:[NSString stringForStdString:sdp]];
-}
-
-+ (std::string)stdStringForType:(RTCSdpType)type {
- switch (type) {
- case RTCSdpTypeOffer:
- return webrtc::SessionDescriptionInterface::kOffer;
- case RTCSdpTypePrAnswer:
- return webrtc::SessionDescriptionInterface::kPrAnswer;
- case RTCSdpTypeAnswer:
- return webrtc::SessionDescriptionInterface::kAnswer;
- }
-}
-
-+ (RTCSdpType)typeForStdString:(const std::string &)string {
- if (string == webrtc::SessionDescriptionInterface::kOffer) {
- return RTCSdpTypeOffer;
- } else if (string == webrtc::SessionDescriptionInterface::kPrAnswer) {
- return RTCSdpTypePrAnswer;
- } else if (string == webrtc::SessionDescriptionInterface::kAnswer) {
- return RTCSdpTypeAnswer;
- } else {
- RTC_NOTREACHED();
- return RTCSdpTypeOffer;
- }
-}
-
-@end
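
For reference, a minimal sketch of constructing the RTCSessionDescription wrapper deleted above from remote SDP (illustrative only; the function name is made up and |sdpString| is assumed to arrive over the application's signaling channel):

  #import "WebRTC/RTCSessionDescription.h"

  // Wraps a remote answer; the native description is created lazily when
  // nativeDescription is read.
  RTCSessionDescription *RemoteAnswerFromSignaling(NSString *sdpString) {
    return [[RTCSessionDescription alloc] initWithType:RTCSdpTypeAnswer
                                                   sdp:sdpString];
  }
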
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCTracing.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCTracing.mm
deleted file mode 100644
index 7bcd649..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCTracing.mm
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright 2016 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCTracing.h"
-
-#include "rtc_base/event_tracer.h"
-
-void RTCSetupInternalTracer(void) {
- rtc::tracing::SetupInternalTracer();
-}
-
-BOOL RTCStartInternalCapture(NSString *filePath) {
- return rtc::tracing::StartInternalCapture(filePath.UTF8String);
-}
-
-void RTCStopInternalCapture(void) {
- rtc::tracing::StopInternalCapture();
-}
-
-void RTCShutdownInternalTracer(void) {
- rtc::tracing::ShutdownInternalTracer();
-}
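
For reference, a minimal sketch of the tracing helpers deleted above (illustrative only; the function name and the file path are made-up examples):

  #import "WebRTC/RTCTracing.h"

  // Captures internal WebRTC trace events to a local file for debugging.
  void CaptureTraceForDebugging(void) {
    RTCSetupInternalTracer();
    if (RTCStartInternalCapture(@"/tmp/webrtc_trace.json")) {
      // ... exercise the code paths of interest ...
      RTCStopInternalCapture();
    }
    RTCShutdownInternalTracer();
  }
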
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCapturer.m b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCapturer.m
deleted file mode 100644
index 3639fa7..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCapturer.m
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCVideoCapturer.h"
-
-@implementation RTCVideoCapturer
-
-@synthesize delegate = _delegate;
-
-- (instancetype)initWithDelegate:(id<RTCVideoCapturerDelegate>)delegate {
- if (self = [super init]) {
- _delegate = delegate;
- }
- return self;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec+Private.h
index 4a2cf52..3233e4e 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec+Private.h
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec+Private.h
@@ -8,51 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "WebRTC/RTCVideoCodec.h"
-
-#import "WebRTC/RTCVideoCodecH264.h"
-
-#include "api/video_codecs/sdp_video_format.h"
-#include "common_video/include/video_frame.h"
-#include "media/base/codec.h"
-#include "modules/video_coding/include/video_codec_interface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-/* Interfaces for converting to/from internal C++ formats. */
-@interface RTCEncodedImage ()
-
-- (instancetype)initWithNativeEncodedImage:(webrtc::EncodedImage)encodedImage;
-- (webrtc::EncodedImage)nativeEncodedImage;
-
-@end
-
-@interface RTCVideoEncoderSettings ()
-
-- (instancetype)initWithNativeVideoCodec:(const webrtc::VideoCodec *__nullable)videoCodec;
-- (webrtc::VideoCodec)nativeVideoCodec;
-
-@end
-
-@interface RTCCodecSpecificInfoH264 ()
-
-- (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo;
-
-@end
-
-@interface RTCRtpFragmentationHeader ()
-
-- (instancetype)initWithNativeFragmentationHeader:
- (const webrtc::RTPFragmentationHeader *__nullable)fragmentationHeader;
-- (std::unique_ptr<webrtc::RTPFragmentationHeader>)createNativeFragmentationHeader;
-
-@end
-
-@interface RTCVideoCodecInfo ()
-
-- (instancetype)initWithNativeSdpVideoFormat:(webrtc::SdpVideoFormat)format;
-- (webrtc::SdpVideoFormat)nativeSdpVideoFormat;
-
-@end
-
-NS_ASSUME_NONNULL_END
+#import "api/peerconnection/RTCEncodedImage+Private.h"
+#import "api/peerconnection/RTCRtpFragmentationHeader+Private.h"
+#import "api/peerconnection/RTCVideoCodecInfo+Private.h"
+#import "api/peerconnection/RTCVideoEncoderSettings+Private.h"
+#import "components/video_codec/RTCCodecSpecificInfoH264+Private.h"
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm
deleted file mode 100644
index d752126..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm
+++ /dev/null
@@ -1,167 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCVideoCodec.h"
-
-#import "NSString+StdString.h"
-#import "RTCVideoCodec+Private.h"
-#if defined(WEBRTC_IOS)
-#import "UIDevice+H264Profile.h"
-#endif
-#import "WebRTC/RTCVideoCodecFactory.h"
-
-#include "media/base/mediaconstants.h"
-
-namespace {
-
-NSString *MaxSupportedProfileLevelConstrainedHigh();
-NSString *MaxSupportedProfileLevelConstrainedBaseline();
-
-} // namespace
-
-NSString *const kRTCVideoCodecVp8Name = @(cricket::kVp8CodecName);
-NSString *const kRTCVideoCodecVp9Name = @(cricket::kVp9CodecName);
-NSString *const kRTCVideoCodecH264Name = @(cricket::kH264CodecName);
-NSString *const kRTCLevel31ConstrainedHigh = @"640c1f";
-NSString *const kRTCLevel31ConstrainedBaseline = @"42e01f";
-NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedHigh =
- MaxSupportedProfileLevelConstrainedHigh();
-NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedBaseline =
- MaxSupportedProfileLevelConstrainedBaseline();
-
-namespace {
-
-#if defined(WEBRTC_IOS)
-
-using namespace webrtc::H264;
-
-NSString *MaxSupportedLevelForProfile(Profile profile) {
- const absl::optional<ProfileLevelId> profileLevelId = [UIDevice maxSupportedH264Profile];
- if (profileLevelId && profileLevelId->profile >= profile) {
- const absl::optional<std::string> profileString =
- ProfileLevelIdToString(ProfileLevelId(profile, profileLevelId->level));
- if (profileString) {
- return [NSString stringForStdString:*profileString];
- }
- }
- return nil;
-}
-#endif
-
-NSString *MaxSupportedProfileLevelConstrainedBaseline() {
-#if defined(WEBRTC_IOS)
- NSString *profile = MaxSupportedLevelForProfile(webrtc::H264::kProfileConstrainedBaseline);
- if (profile != nil) {
- return profile;
- }
-#endif
- return kRTCLevel31ConstrainedBaseline;
-}
-
-NSString *MaxSupportedProfileLevelConstrainedHigh() {
-#if defined(WEBRTC_IOS)
- NSString *profile = MaxSupportedLevelForProfile(webrtc::H264::kProfileConstrainedHigh);
- if (profile != nil) {
- return profile;
- }
-#endif
- return kRTCLevel31ConstrainedHigh;
-}
-
-} // namespace
-
-@implementation RTCVideoCodecInfo
-
-@synthesize name = _name;
-@synthesize parameters = _parameters;
-
-- (instancetype)initWithName:(NSString *)name {
- return [self initWithName:name parameters:nil];
-}
-
-- (instancetype)initWithName:(NSString *)name
- parameters:(nullable NSDictionary<NSString *, NSString *> *)parameters {
- if (self = [super init]) {
- _name = name;
- _parameters = (parameters ? parameters : @{});
- }
-
- return self;
-}
-
-- (instancetype)initWithNativeSdpVideoFormat:(webrtc::SdpVideoFormat)format {
- NSMutableDictionary *params = [NSMutableDictionary dictionary];
- for (auto it = format.parameters.begin(); it != format.parameters.end(); ++it) {
- [params setObject:[NSString stringForStdString:it->second]
- forKey:[NSString stringForStdString:it->first]];
- }
- return [self initWithName:[NSString stringForStdString:format.name] parameters:params];
-}
-
-- (BOOL)isEqualToCodecInfo:(RTCVideoCodecInfo *)info {
- if (!info ||
- ![self.name isEqualToString:info.name] ||
- ![self.parameters isEqualToDictionary:info.parameters]) {
- return NO;
- }
- return YES;
-}
-
-- (BOOL)isEqual:(id)object {
- if (self == object)
- return YES;
- if (![object isKindOfClass:[self class]])
- return NO;
- return [self isEqualToCodecInfo:object];
-}
-
-- (NSUInteger)hash {
- return [self.name hash] ^ [self.parameters hash];
-}
-
-- (webrtc::SdpVideoFormat)nativeSdpVideoFormat {
- std::map<std::string, std::string> parameters;
- for (NSString *paramKey in _parameters.allKeys) {
- std::string key = [NSString stdStringForString:paramKey];
- std::string value = [NSString stdStringForString:_parameters[paramKey]];
- parameters[key] = value;
- }
-
- return webrtc::SdpVideoFormat([NSString stdStringForString:_name], parameters);
-}
-
-#pragma mark - NSCoding
-
-- (instancetype)initWithCoder:(NSCoder *)decoder {
- return [self initWithName:[decoder decodeObjectForKey:@"name"]
- parameters:[decoder decodeObjectForKey:@"parameters"]];
-}
-
-- (void)encodeWithCoder:(NSCoder *)encoder {
- [encoder encodeObject:_name forKey:@"name"];
- [encoder encodeObject:_parameters forKey:@"parameters"];
-}
-
-@end
-
-@implementation RTCVideoEncoderQpThresholds
-
-@synthesize low = _low;
-@synthesize high = _high;
-
-- (instancetype)initWithThresholdsLow:(NSInteger)low high:(NSInteger)high {
- if (self = [super init]) {
- _low = low;
- _high = high;
- }
- return self;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodecH264.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodecH264.mm
deleted file mode 100644
index 51583d0..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodecH264.mm
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCVideoCodecH264.h"
-
-#include <vector>
-
-#import "RTCVideoCodec+Private.h"
-#import "WebRTC/RTCVideoCodec.h"
-
-#include "rtc_base/timeutils.h"
-
-// H264 specific settings.
-@implementation RTCCodecSpecificInfoH264
-
-@synthesize packetizationMode = _packetizationMode;
-
-- (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo {
- webrtc::CodecSpecificInfo codecSpecificInfo;
- codecSpecificInfo.codecType = webrtc::kVideoCodecH264;
- codecSpecificInfo.codec_name = [kRTCVideoCodecH264Name cStringUsingEncoding:NSUTF8StringEncoding];
- codecSpecificInfo.codecSpecific.H264.packetization_mode =
- (webrtc::H264PacketizationMode)_packetizationMode;
-
- return codecSpecificInfo;
-}
-
-@end
-
-// Encoder factory.
-@implementation RTCVideoEncoderFactoryH264
-
-- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
- NSMutableArray<RTCVideoCodecInfo *> *codecs = [NSMutableArray array];
- NSString *codecName = kRTCVideoCodecH264Name;
-
- NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
- @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
- @"level-asymmetry-allowed" : @"1",
- @"packetization-mode" : @"1",
- };
- RTCVideoCodecInfo *constrainedHighInfo =
- [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedHighParams];
- [codecs addObject:constrainedHighInfo];
-
- NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
- @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
- @"level-asymmetry-allowed" : @"1",
- @"packetization-mode" : @"1",
- };
- RTCVideoCodecInfo *constrainedBaselineInfo =
- [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedBaselineParams];
- [codecs addObject:constrainedBaselineInfo];
-
- return [codecs copy];
-}
-
-- (id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info {
- return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info];
-}
-
-@end
-
-// Decoder factory.
-@implementation RTCVideoDecoderFactoryH264
-
-- (id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info {
- return [[RTCVideoDecoderH264 alloc] init];
-}
-
-- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
- NSString *codecName = kRTCVideoCodecH264Name;
- return @[ [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:nil] ];
-}
-
-@end
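
For reference, a minimal sketch of using the H264 factory deleted above (illustrative only; the function name is made up):

  #import "WebRTC/RTCVideoCodec.h"
  #import "WebRTC/RTCVideoCodecH264.h"

  // Picks the first supported H264 variant and creates an encoder for it,
  // roughly the way the SDK's injectable codec factories are driven.
  id<RTCVideoEncoder> CreateDefaultH264Encoder(void) {
    RTCVideoEncoderFactoryH264 *factory = [[RTCVideoEncoderFactoryH264 alloc] init];
    RTCVideoCodecInfo *info = factory.supportedCodecs.firstObject;
    return info ? [factory createEncoder:info] : nil;
  }
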
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodecVP8.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodecVP8.mm
deleted file mode 100644
index fa785f4..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodecVP8.mm
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- *
- */
-
-#import <Foundation/Foundation.h>
-
-#import "RTCWrappedNativeVideoDecoder.h"
-#import "RTCWrappedNativeVideoEncoder.h"
-#import "WebRTC/RTCVideoDecoderVP8.h"
-#import "WebRTC/RTCVideoEncoderVP8.h"
-
-#include "modules/video_coding/codecs/vp8/include/vp8.h"
-
-#pragma mark - Encoder
-
-@implementation RTCVideoEncoderVP8
-
-+ (id<RTCVideoEncoder>)vp8Encoder {
- return [[RTCWrappedNativeVideoEncoder alloc]
- initWithNativeEncoder:std::unique_ptr<webrtc::VideoEncoder>(webrtc::VP8Encoder::Create())];
-}
-
-@end
-
-#pragma mark - Decoder
-
-@implementation RTCVideoDecoderVP8
-
-+ (id<RTCVideoDecoder>)vp8Decoder {
- return [[RTCWrappedNativeVideoDecoder alloc]
- initWithNativeDecoder:std::unique_ptr<webrtc::VideoDecoder>(webrtc::VP8Decoder::Create())];
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodecVP9.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodecVP9.mm
deleted file mode 100644
index 60fe54f..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodecVP9.mm
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- *
- */
-
-#import <Foundation/Foundation.h>
-
-#import "RTCWrappedNativeVideoDecoder.h"
-#import "RTCWrappedNativeVideoEncoder.h"
-#import "WebRTC/RTCVideoDecoderVP9.h"
-#import "WebRTC/RTCVideoEncoderVP9.h"
-
-#include "modules/video_coding/codecs/vp9/include/vp9.h"
-
-#pragma mark - Encoder
-
-@implementation RTCVideoEncoderVP9
-
-+ (id<RTCVideoEncoder>)vp9Encoder {
- return [[RTCWrappedNativeVideoEncoder alloc]
- initWithNativeEncoder:std::unique_ptr<webrtc::VideoEncoder>(webrtc::VP9Encoder::Create())];
-}
-
-@end
-
-#pragma mark - Decoder
-
-@implementation RTCVideoDecoderVP9
-
-+ (id<RTCVideoDecoder>)vp9Decoder {
- return [[RTCWrappedNativeVideoDecoder alloc]
- initWithNativeDecoder:std::unique_ptr<webrtc::VideoDecoder>(webrtc::VP9Decoder::Create())];
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoEncoderSettings.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoEncoderSettings.mm
deleted file mode 100644
index 95233ec..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoEncoderSettings.mm
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCVideoCodec.h"
-
-#import "NSString+StdString.h"
-#import "RTCVideoCodec+Private.h"
-#import "WebRTC/RTCVideoCodecFactory.h"
-
-@implementation RTCVideoEncoderSettings
-
-@synthesize name = _name;
-@synthesize width = _width;
-@synthesize height = _height;
-@synthesize startBitrate = _startBitrate;
-@synthesize maxBitrate = _maxBitrate;
-@synthesize minBitrate = _minBitrate;
-@synthesize targetBitrate = _targetBitrate;
-@synthesize maxFramerate = _maxFramerate;
-@synthesize qpMax = _qpMax;
-@synthesize mode = _mode;
-
-- (instancetype)initWithNativeVideoCodec:(const webrtc::VideoCodec *)videoCodec {
- if (self = [super init]) {
- if (videoCodec) {
- const char *codecName = CodecTypeToPayloadString(videoCodec->codecType);
- _name = [NSString stringWithUTF8String:codecName];
-
- _width = videoCodec->width;
- _height = videoCodec->height;
- _startBitrate = videoCodec->startBitrate;
- _maxBitrate = videoCodec->maxBitrate;
- _minBitrate = videoCodec->minBitrate;
- _targetBitrate = videoCodec->targetBitrate;
- _maxFramerate = videoCodec->maxFramerate;
- _qpMax = videoCodec->qpMax;
- _mode = (RTCVideoCodecMode)videoCodec->mode;
- }
- }
-
- return self;
-}
-
-- (webrtc::VideoCodec)nativeVideoCodec {
- webrtc::VideoCodec videoCodec;
- videoCodec.width = _width;
- videoCodec.height = _height;
- videoCodec.startBitrate = _startBitrate;
- videoCodec.maxBitrate = _maxBitrate;
- videoCodec.minBitrate = _minBitrate;
- videoCodec.targetBitrate = _targetBitrate;
- videoCodec.maxBitrate = _maxBitrate;
- videoCodec.qpMax = _qpMax;
- videoCodec.mode = (webrtc::VideoCodecMode)_mode;
-
- return videoCodec;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm
deleted file mode 100644
index 9a5d306..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCVideoFrame.h"
-
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-@implementation RTCVideoFrame {
- RTCVideoRotation _rotation;
- int64_t _timeStampNs;
-}
-
-@synthesize buffer = _buffer;
-@synthesize timeStamp;
-
-- (int)width {
- return _buffer.width;
-}
-
-- (int)height {
- return _buffer.height;
-}
-
-- (RTCVideoRotation)rotation {
- return _rotation;
-}
-
-- (int64_t)timeStampNs {
- return _timeStampNs;
-}
-
-- (RTCVideoFrame *)newI420VideoFrame {
- return [[RTCVideoFrame alloc] initWithBuffer:[_buffer toI420]
- rotation:_rotation
- timeStampNs:_timeStampNs];
-}
-
-- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
- rotation:(RTCVideoRotation)rotation
- timeStampNs:(int64_t)timeStampNs {
- return [self initWithBuffer:[[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]
- rotation:rotation
- timeStampNs:timeStampNs];
-}
-
-- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
- scaledWidth:(int)scaledWidth
- scaledHeight:(int)scaledHeight
- cropWidth:(int)cropWidth
- cropHeight:(int)cropHeight
- cropX:(int)cropX
- cropY:(int)cropY
- rotation:(RTCVideoRotation)rotation
- timeStampNs:(int64_t)timeStampNs {
- RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer
- adaptedWidth:scaledWidth
- adaptedHeight:scaledHeight
- cropWidth:cropWidth
- cropHeight:cropHeight
- cropX:cropX
- cropY:cropY];
- return [self initWithBuffer:rtcPixelBuffer rotation:rotation timeStampNs:timeStampNs];
-}
-
-- (instancetype)initWithBuffer:(id<RTCVideoFrameBuffer>)buffer
- rotation:(RTCVideoRotation)rotation
- timeStampNs:(int64_t)timeStampNs {
- if (self = [super init]) {
- _buffer = buffer;
- _rotation = rotation;
- _timeStampNs = timeStampNs;
- }
-
- return self;
-}
-
-@end
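
For reference, a minimal sketch of creating the RTCVideoFrame wrapper deleted above from a camera buffer (illustrative only; the function name is made up, and the RTCVideoRotation_0 constant is assumed from the public header, which is not in this diff):

  #import <CoreVideo/CoreVideo.h>
  #import "WebRTC/RTCVideoFrame.h"

  // Wraps a CVPixelBuffer in an RTCVideoFrame; the capture timestamp is
  // assumed to already be expressed in nanoseconds.
  RTCVideoFrame *FrameFromPixelBuffer(CVPixelBufferRef pixelBuffer,
                                      int64_t captureTimeNs) {
    return [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer
                                             rotation:RTCVideoRotation_0
                                          timeStampNs:captureTimeNs];
  }
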
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter+Private.h
deleted file mode 100644
index 80a2140..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter+Private.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCVideoRendererAdapter.h"
-
-#import "WebRTC/RTCVideoRenderer.h"
-
-#include "api/mediastreaminterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCVideoRendererAdapter ()
-
-/**
- * The Objective-C video renderer passed to this adapter during construction.
- * Calls made to the webrtc::VideoRenderInterface will be adapted and passed to
- * this video renderer.
- */
-@property(nonatomic, readonly) id<RTCVideoRenderer> videoRenderer;
-
-/**
- * The native VideoSinkInterface surface exposed by this adapter. Calls made
- * to this interface will be adapted and passed to the RTCVideoRenderer supplied
- * during construction. This pointer is unsafe and owned by this class.
- */
-@property(nonatomic, readonly) rtc::VideoSinkInterface<webrtc::VideoFrame> *nativeVideoRenderer;
-
-/** Initialize an RTCVideoRendererAdapter with an RTCVideoRenderer. */
-- (instancetype)initWithNativeRenderer:(id<RTCVideoRenderer>)videoRenderer
- NS_DESIGNATED_INITIALIZER;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.h b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.h
deleted file mode 100644
index b0b6f04..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.h
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Foundation/Foundation.h>
-
-NS_ASSUME_NONNULL_BEGIN
-
-/*
- * Creates a rtc::VideoSinkInterface surface for an RTCVideoRenderer. The
- * rtc::VideoSinkInterface is used by WebRTC rendering code - this
- * adapter adapts calls made to that interface to the RTCVideoRenderer supplied
- * during construction.
- */
-@interface RTCVideoRendererAdapter : NSObject
-
-- (instancetype)init NS_UNAVAILABLE;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.mm
deleted file mode 100644
index f5c4f01..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.mm
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCI420Buffer+Private.h"
-#import "RTCVideoRendererAdapter+Private.h"
-#import "WebRTC/RTCVideoFrame.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-#include <memory>
-
-#include "sdk/objc/Framework/Native/api/video_frame.h"
-
-namespace webrtc {
-
-class VideoRendererAdapter
- : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
- public:
- VideoRendererAdapter(RTCVideoRendererAdapter* adapter) {
- adapter_ = adapter;
- size_ = CGSizeZero;
- }
-
- void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
- RTCVideoFrame* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame);
-
- CGSize current_size = (videoFrame.rotation % 180 == 0)
- ? CGSizeMake(videoFrame.width, videoFrame.height)
- : CGSizeMake(videoFrame.height, videoFrame.width);
-
- if (!CGSizeEqualToSize(size_, current_size)) {
- size_ = current_size;
- [adapter_.videoRenderer setSize:size_];
- }
- [adapter_.videoRenderer renderFrame:videoFrame];
- }
-
- private:
- __weak RTCVideoRendererAdapter *adapter_;
- CGSize size_;
-};
-}
-
-@implementation RTCVideoRendererAdapter {
- std::unique_ptr<webrtc::VideoRendererAdapter> _adapter;
-}
-
-@synthesize videoRenderer = _videoRenderer;
-
-- (instancetype)initWithNativeRenderer:(id<RTCVideoRenderer>)videoRenderer {
- NSParameterAssert(videoRenderer);
- if (self = [super init]) {
- _videoRenderer = videoRenderer;
- _adapter.reset(new webrtc::VideoRendererAdapter(self));
- }
- return self;
-}
-
-- (rtc::VideoSinkInterface<webrtc::VideoFrame> *)nativeVideoRenderer {
- return _adapter.get();
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoSource+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoSource+Private.h
deleted file mode 100644
index 5eea2f9..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoSource+Private.h
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCVideoSource.h"
-
-#import "RTCMediaSource+Private.h"
-
-#include "api/mediastreaminterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCVideoSource ()
-
-/**
- * The VideoTrackSourceInterface object passed to this RTCVideoSource during
- * construction.
- */
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>
- nativeVideoSource;
-
-/** Initialize an RTCVideoSource from a native VideoTrackSourceInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeVideoSource:
- (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource
- NS_DESIGNATED_INITIALIZER;
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
- type:(RTCMediaSourceType)type NS_UNAVAILABLE;
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- signalingThread:(rtc::Thread *)signalingThread
- workerThread:(rtc::Thread *)workerThread;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoSource.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoSource.mm
deleted file mode 100644
index 63b8014..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoSource.mm
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCVideoSource+Private.h"
-
-#include "api/videosourceproxy.h"
-#include "rtc_base/checks.h"
-#include "sdk/objc/Framework/Native/src/objc_video_track_source.h"
-
-static webrtc::ObjCVideoTrackSource *getObjCVideoSource(
- const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
- webrtc::VideoTrackSourceProxy *proxy_source =
- static_cast<webrtc::VideoTrackSourceProxy *>(nativeSource.get());
- return static_cast<webrtc::ObjCVideoTrackSource *>(proxy_source->internal());
-}
-
-// TODO(magjed): Refactor this class and target ObjCVideoTrackSource only once
-// RTCAVFoundationVideoSource is gone. See http://crbug/webrtc/7177 for more
-// info.
-@implementation RTCVideoSource {
- rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
-}
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeVideoSource:
- (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource {
- RTC_DCHECK(factory);
- RTC_DCHECK(nativeVideoSource);
- if (self = [super initWithFactory:factory
- nativeMediaSource:nativeVideoSource
- type:RTCMediaSourceTypeVideo]) {
- _nativeVideoSource = nativeVideoSource;
- }
- return self;
-}
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
- type:(RTCMediaSourceType)type {
- RTC_NOTREACHED();
- return nil;
-}
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- signalingThread:(rtc::Thread *)signalingThread
- workerThread:(rtc::Thread *)workerThread {
- rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource(
- new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>());
-
- return [self initWithFactory:factory
- nativeVideoSource:webrtc::VideoTrackSourceProxy::Create(
- signalingThread, workerThread, objCVideoTrackSource)];
-}
-
-- (NSString *)description {
- NSString *stateString = [[self class] stringForState:self.state];
- return [NSString stringWithFormat:@"RTCVideoSource( %p ): %@", self, stateString];
-}
-
-- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame {
- getObjCVideoSource(_nativeVideoSource)->OnCapturedFrame(frame);
-}
-
-- (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps {
- getObjCVideoSource(_nativeVideoSource)->OnOutputFormatRequest(width, height, fps);
-}
-
-#pragma mark - Private
-
-- (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource {
- return _nativeVideoSource;
-}
-
-@end
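
For reference, a minimal sketch of feeding the RTCVideoSource wrapper deleted above from a capturer (illustrative only; the function name and resolution are made up, and RTCVideoSource is assumed to declare RTCVideoCapturerDelegate conformance in its public header, since it implements capturer:didCaptureVideoFrame: above):

  #import "WebRTC/RTCVideoCapturer.h"
  #import "WebRTC/RTCVideoSource.h"

  // Requests downscaled output and makes the source the capturer's delegate.
  RTCVideoCapturer *AttachCapturerToSource(RTCVideoSource *source) {
    [source adaptOutputFormatToWidth:640 height:480 fps:30];
    return [[RTCVideoCapturer alloc] initWithDelegate:source];
  }
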
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoTrack+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoTrack+Private.h
deleted file mode 100644
index 14639be..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoTrack+Private.h
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCVideoTrack.h"
-
-#include "api/mediastreaminterface.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCVideoTrack ()
-
-/** VideoTrackInterface created or passed in at construction. */
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::VideoTrackInterface> nativeVideoTrack;
-
-/** Initialize an RTCVideoTrack with its source and an id. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- source:(RTCVideoSource *)source
- trackId:(NSString *)trackId;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoTrack.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoTrack.mm
deleted file mode 100644
index c9eb35c..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoTrack.mm
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCVideoTrack+Private.h"
-
-#import "NSString+StdString.h"
-#import "RTCMediaStreamTrack+Private.h"
-#import "RTCPeerConnectionFactory+Private.h"
-#import "RTCVideoRendererAdapter+Private.h"
-#import "RTCVideoSource+Private.h"
-
-@implementation RTCVideoTrack {
- NSMutableArray *_adapters;
-}
-
-@synthesize source = _source;
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- source:(RTCVideoSource *)source
- trackId:(NSString *)trackId {
- NSParameterAssert(factory);
- NSParameterAssert(source);
- NSParameterAssert(trackId.length);
- std::string nativeId = [NSString stdStringForString:trackId];
- rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
- factory.nativeFactory->CreateVideoTrack(nativeId,
- source.nativeVideoSource);
- if (self = [self initWithFactory:factory nativeTrack:track type:RTCMediaStreamTrackTypeVideo]) {
- _source = source;
- }
- return self;
-}
-
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
- nativeTrack:
- (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeMediaTrack
- type:(RTCMediaStreamTrackType)type {
- NSParameterAssert(factory);
- NSParameterAssert(nativeMediaTrack);
- NSParameterAssert(type == RTCMediaStreamTrackTypeVideo);
- if (self = [super initWithFactory:factory nativeTrack:nativeMediaTrack type:type]) {
- _adapters = [NSMutableArray array];
- }
- return self;
-}
-
-- (void)dealloc {
- for (RTCVideoRendererAdapter *adapter in _adapters) {
- self.nativeVideoTrack->RemoveSink(adapter.nativeVideoRenderer);
- }
-}
-
-- (RTCVideoSource *)source {
- if (!_source) {
- rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
- self.nativeVideoTrack->GetSource();
- if (source) {
- _source =
- [[RTCVideoSource alloc] initWithFactory:self.factory nativeVideoSource:source.get()];
- }
- }
- return _source;
-}
-
-- (void)addRenderer:(id<RTCVideoRenderer>)renderer {
- // Make sure we don't have this renderer yet.
- for (RTCVideoRendererAdapter *adapter in _adapters) {
- if (adapter.videoRenderer == renderer) {
- NSAssert(NO, @"|renderer| is already attached to this track");
- return;
- }
- }
- // Create a wrapper that provides a native pointer for us.
- RTCVideoRendererAdapter* adapter =
- [[RTCVideoRendererAdapter alloc] initWithNativeRenderer:renderer];
- [_adapters addObject:adapter];
- self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer,
- rtc::VideoSinkWants());
-}
-
-- (void)removeRenderer:(id<RTCVideoRenderer>)renderer {
- __block NSUInteger indexToRemove = NSNotFound;
- [_adapters enumerateObjectsUsingBlock:^(RTCVideoRendererAdapter *adapter,
- NSUInteger idx,
- BOOL *stop) {
- if (adapter.videoRenderer == renderer) {
- indexToRemove = idx;
- *stop = YES;
- }
- }];
- if (indexToRemove == NSNotFound) {
- return;
- }
- RTCVideoRendererAdapter *adapterToRemove =
- [_adapters objectAtIndex:indexToRemove];
- self.nativeVideoTrack->RemoveSink(adapterToRemove.nativeVideoRenderer);
- [_adapters removeObjectAtIndex:indexToRemove];
-}
-
-#pragma mark - Private
-
-- (rtc::scoped_refptr<webrtc::VideoTrackInterface>)nativeVideoTrack {
- return static_cast<webrtc::VideoTrackInterface *>(self.nativeTrack.get());
-}
-
-@end
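For reference, a minimal sketch of how the renderer API deleted above is typically driven from client code (assuming the public WebRTC.framework headers; track and remoteView are illustrative names):

  #import <WebRTC/RTCVideoTrack.h>
  #import <WebRTC/RTCEAGLVideoView.h>

  // RTCVideoTrack keeps one RTCVideoRendererAdapter per attached renderer and
  // removes the native sink again in -removeRenderer:.
  RTCEAGLVideoView *remoteView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero];
  [track addRenderer:remoteView];
  // ...later, when tearing down the call:
  [track removeRenderer:remoteView];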
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoDecoder.h b/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoDecoder.h
deleted file mode 100644
index 74cfaeb..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoDecoder.h
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Foundation/Foundation.h>
-
-#import "WebRTC/RTCVideoCodec.h"
-#include "api/video_codecs/video_decoder.h"
-#include "media/base/codec.h"
-
-@interface RTCWrappedNativeVideoDecoder : NSObject <RTCVideoDecoder>
-
-- (instancetype)initWithNativeDecoder:(std::unique_ptr<webrtc::VideoDecoder>)decoder;
-
-/* This moves the ownership of the wrapped decoder to the caller. */
-- (std::unique_ptr<webrtc::VideoDecoder>)releaseWrappedDecoder;
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoDecoder.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoDecoder.mm
deleted file mode 100644
index 2d4d45e..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoDecoder.mm
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Foundation/Foundation.h>
-
-#import "NSString+StdString.h"
-#import "RTCWrappedNativeVideoDecoder.h"
-
-@implementation RTCWrappedNativeVideoDecoder {
- std::unique_ptr<webrtc::VideoDecoder> _wrappedDecoder;
-}
-
-- (instancetype)initWithNativeDecoder:(std::unique_ptr<webrtc::VideoDecoder>)decoder {
- if (self = [super init]) {
- _wrappedDecoder = std::move(decoder);
- }
-
- return self;
-}
-
-- (std::unique_ptr<webrtc::VideoDecoder>)releaseWrappedDecoder {
- return std::move(_wrappedDecoder);
-}
-
-#pragma mark - RTCVideoDecoder
-
-- (void)setCallback:(RTCVideoDecoderCallback)callback {
- RTC_NOTREACHED();
-}
-
-- (NSInteger)startDecodeWithNumberOfCores:(int)numberOfCores {
- RTC_NOTREACHED();
- return 0;
-}
-
-- (NSInteger)startDecodeWithSettings:(RTCVideoEncoderSettings *)settings
- numberOfCores:(int)numberOfCores {
- RTC_NOTREACHED();
- return 0;
-}
-
-- (NSInteger)releaseDecoder {
- RTC_NOTREACHED();
- return 0;
-}
-
-- (NSInteger)decode:(RTCEncodedImage *)encodedImage
- missingFrames:(BOOL)missingFrames
- codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
- renderTimeMs:(int64_t)renderTimeMs {
- RTC_NOTREACHED();
- return 0;
-}
-
-- (NSString *)implementationName {
- RTC_NOTREACHED();
- return nil;
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.h b/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.h
deleted file mode 100644
index 5b95d33..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.h
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Foundation/Foundation.h>
-
-#import "WebRTC/RTCVideoCodec.h"
-#include "api/video_codecs/sdp_video_format.h"
-#include "api/video_codecs/video_encoder.h"
-#include "media/base/codec.h"
-
-@interface RTCWrappedNativeVideoEncoder : NSObject <RTCVideoEncoder>
-
-- (instancetype)initWithNativeEncoder:(std::unique_ptr<webrtc::VideoEncoder>)encoder;
-
-/* This moves the ownership of the wrapped encoder to the caller. */
-- (std::unique_ptr<webrtc::VideoEncoder>)releaseWrappedEncoder;
-
-@end
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.mm
deleted file mode 100644
index 8988e6d..0000000
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.mm
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Foundation/Foundation.h>
-
-#import "NSString+StdString.h"
-#import "RTCWrappedNativeVideoEncoder.h"
-
-@implementation RTCWrappedNativeVideoEncoder {
- std::unique_ptr<webrtc::VideoEncoder> _wrappedEncoder;
-}
-
-- (instancetype)initWithNativeEncoder:(std::unique_ptr<webrtc::VideoEncoder>)encoder {
- if (self = [super init]) {
- _wrappedEncoder = std::move(encoder);
- }
-
- return self;
-}
-
-- (std::unique_ptr<webrtc::VideoEncoder>)releaseWrappedEncoder {
- return std::move(_wrappedEncoder);
-}
-
-#pragma mark - RTCVideoEncoder
-
-- (void)setCallback:(RTCVideoEncoderCallback)callback {
- RTC_NOTREACHED();
-}
-
-- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings
- numberOfCores:(int)numberOfCores {
- RTC_NOTREACHED();
- return 0;
-}
-
-- (NSInteger)releaseEncoder {
- RTC_NOTREACHED();
- return 0;
-}
-
-- (NSInteger)encode:(RTCVideoFrame *)frame
- codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
- frameTypes:(NSArray<NSNumber *> *)frameTypes {
- RTC_NOTREACHED();
- return 0;
-}
-
-- (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate {
- RTC_NOTREACHED();
- return 0;
-}
-
-- (NSString *)implementationName {
- RTC_NOTREACHED();
- return nil;
-}
-
-- (RTCVideoEncoderQpThresholds *)scalingSettings {
- RTC_NOTREACHED();
- return nil;
-}
-
-@end
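A minimal Objective-C++ sketch of the intended round trip for the wrapper deleted above: a native encoder is moved into the wrapper so it can travel through the Objective-C factory API, and ownership is moved back out before use; the RTCVideoEncoder methods themselves are never expected to be called (hence RTC_NOTREACHED). CreateSomeNativeEncoder is a hypothetical helper standing in for whatever produces the native encoder:

  #import "RTCWrappedNativeVideoEncoder.h"

  // Hypothetical helper returning a concrete webrtc::VideoEncoder.
  std::unique_ptr<webrtc::VideoEncoder> nativeEncoder = CreateSomeNativeEncoder();
  RTCWrappedNativeVideoEncoder *wrapped =
      [[RTCWrappedNativeVideoEncoder alloc] initWithNativeEncoder:std::move(nativeEncoder)];
  // On the consuming side, ownership is transferred back to C++:
  std::unique_ptr<webrtc::VideoEncoder> unwrapped = [wrapped releaseWrappedEncoder];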
diff --git a/sdk/objc/Framework/Classes/UI/RTCCameraPreviewView.m b/sdk/objc/Framework/Classes/UI/RTCCameraPreviewView.m
deleted file mode 100644
index 492c210..0000000
--- a/sdk/objc/Framework/Classes/UI/RTCCameraPreviewView.m
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright 2015 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCCameraPreviewView.h"
-
-#import <AVFoundation/AVFoundation.h>
-#import <UIKit/UIKit.h>
-
-#import "RTCDispatcher+Private.h"
-
-@implementation RTCCameraPreviewView
-
-@synthesize captureSession = _captureSession;
-
-+ (Class)layerClass {
- return [AVCaptureVideoPreviewLayer class];
-}
-
-- (instancetype)initWithFrame:(CGRect)aRect {
- self = [super initWithFrame:aRect];
- if (self) {
- [self addOrientationObserver];
- }
- return self;
-}
-
-- (instancetype)initWithCoder:(NSCoder*)aDecoder {
- self = [super initWithCoder:aDecoder];
- if (self) {
- [self addOrientationObserver];
- }
- return self;
-}
-
-- (void)dealloc {
- [self removeOrientationObserver];
-}
-
-- (void)setCaptureSession:(AVCaptureSession *)captureSession {
- if (_captureSession == captureSession) {
- return;
- }
- [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
- block:^{
- _captureSession = captureSession;
- AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
- [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
- block:^{
- previewLayer.session = captureSession;
- [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
- block:^{
- [self setCorrectVideoOrientation];
- }];
- }];
- }];
-}
-
-- (void)layoutSubviews {
- [super layoutSubviews];
-
- // Update the video orientation based on the device orientation.
- [self setCorrectVideoOrientation];
-}
-
--(void)orientationChanged:(NSNotification *)notification {
- [self setCorrectVideoOrientation];
-}
-
-- (void)setCorrectVideoOrientation {
- // Get current device orientation.
- UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation;
- AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
-
- // First check if we are allowed to set the video orientation.
- if (previewLayer.connection.isVideoOrientationSupported) {
- // Set the video orientation based on device orientation.
- if (deviceOrientation == UIInterfaceOrientationPortraitUpsideDown) {
- previewLayer.connection.videoOrientation =
- AVCaptureVideoOrientationPortraitUpsideDown;
- } else if (deviceOrientation == UIInterfaceOrientationLandscapeRight) {
- previewLayer.connection.videoOrientation =
- AVCaptureVideoOrientationLandscapeRight;
- } else if (deviceOrientation == UIInterfaceOrientationLandscapeLeft) {
- previewLayer.connection.videoOrientation =
- AVCaptureVideoOrientationLandscapeLeft;
- } else if (deviceOrientation == UIInterfaceOrientationPortrait) {
- previewLayer.connection.videoOrientation =
- AVCaptureVideoOrientationPortrait;
- }
- // If device orientation switches to FaceUp or FaceDown, don't change video orientation.
- }
-}
-
-#pragma mark - Private
-
-- (void)addOrientationObserver {
- [[NSNotificationCenter defaultCenter] addObserver:self
- selector:@selector(orientationChanged:)
- name:UIDeviceOrientationDidChangeNotification
- object:nil];
-}
-
-- (void)removeOrientationObserver {
- [[NSNotificationCenter defaultCenter] removeObserver:self
- name:UIDeviceOrientationDidChangeNotification
- object:nil];
-}
-
-- (AVCaptureVideoPreviewLayer *)previewLayer {
- return (AVCaptureVideoPreviewLayer *)self.layer;
-}
-
-@end
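For reference, a minimal sketch of wiring the preview view deleted above to a capture session (assuming an RTCCameraVideoCapturer created elsewhere; capturer and self.view are illustrative):

  #import <WebRTC/RTCCameraPreviewView.h>
  #import <WebRTC/RTCCameraVideoCapturer.h>

  RTCCameraPreviewView *previewView =
      [[RTCCameraPreviewView alloc] initWithFrame:self.view.bounds];
  [self.view addSubview:previewView];
  // The view only mirrors an existing AVCaptureSession; the dispatcher hops in
  // -setCaptureSession: take care of thread affinity.
  previewView.captureSession = capturer.captureSession;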
diff --git a/sdk/objc/Framework/Classes/UI/RTCEAGLVideoView.m b/sdk/objc/Framework/Classes/UI/RTCEAGLVideoView.m
deleted file mode 100644
index a267414..0000000
--- a/sdk/objc/Framework/Classes/UI/RTCEAGLVideoView.m
+++ /dev/null
@@ -1,341 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCEAGLVideoView.h"
-
-#import <GLKit/GLKit.h>
-
-#import "RTCDefaultShader.h"
-#import "RTCI420TextureCache.h"
-#import "RTCNV12TextureCache.h"
-#import "WebRTC/RTCLogging.h"
-#import "WebRTC/RTCVideoFrame.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-// RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every other screen
-// refresh, which should be 30 fps on a 60 Hz display. We wrap the display link to
-// avoid a retain cycle, since CADisplayLink takes a strong reference to its target.
-// The timer is paused by default.
-@interface RTCDisplayLinkTimer : NSObject
-
-@property(nonatomic) BOOL isPaused;
-
-- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler;
-- (void)invalidate;
-
-@end
-
-@implementation RTCDisplayLinkTimer {
- CADisplayLink *_displayLink;
- void (^_timerHandler)(void);
-}
-
-- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler {
- NSParameterAssert(timerHandler);
- if (self = [super init]) {
- _timerHandler = timerHandler;
- _displayLink =
- [CADisplayLink displayLinkWithTarget:self
- selector:@selector(displayLinkDidFire:)];
- _displayLink.paused = YES;
-#if __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0
- _displayLink.preferredFramesPerSecond = 30;
-#else
- [_displayLink setFrameInterval:2];
-#endif
- [_displayLink addToRunLoop:[NSRunLoop currentRunLoop]
- forMode:NSRunLoopCommonModes];
- }
- return self;
-}
-
-- (void)dealloc {
- [self invalidate];
-}
-
-- (BOOL)isPaused {
- return _displayLink.paused;
-}
-
-- (void)setIsPaused:(BOOL)isPaused {
- _displayLink.paused = isPaused;
-}
-
-- (void)invalidate {
- [_displayLink invalidate];
-}
-
-- (void)displayLinkDidFire:(CADisplayLink *)displayLink {
- _timerHandler();
-}
-
-@end
-
-// RTCEAGLVideoView wraps a GLKView that is set up with
-// enableSetNeedsDisplay = NO in order to gain control of
-// exactly when -[GLKView display] is called. This extra
-// control is needed to avoid triggering method calls on GLKView
-// that result in attempting to bind the underlying render buffer
-// while the drawable size is empty, which would fail with the
-// error GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. -[GLKView display] is
-// the method that triggers the binding of the render
-// buffer. Because the standard behaviour of -[UIView setNeedsDisplay]
-// is disabled for the reasons above, the RTCEAGLVideoView maintains
-// its own |isDirty| flag.
-
-@interface RTCEAGLVideoView () <GLKViewDelegate>
-// |videoFrame| is set when we receive a frame from a worker thread and is read
-// from the display link callback so atomicity is required.
-@property(atomic, strong) RTCVideoFrame *videoFrame;
-@property(nonatomic, readonly) GLKView *glkView;
-@end
-
-@implementation RTCEAGLVideoView {
- RTCDisplayLinkTimer *_timer;
- EAGLContext *_glContext;
- // This flag should only be set and read on the main thread (e.g. by
- // setNeedsDisplay)
- BOOL _isDirty;
- id<RTCVideoViewShading> _shader;
- RTCNV12TextureCache *_nv12TextureCache;
- RTCI420TextureCache *_i420TextureCache;
- // As timestamps should be unique between frames, we store the last drawn
- // frame's timestamp instead of the whole frame to reduce memory usage.
- int64_t _lastDrawnFrameTimeStampNs;
-}
-
-@synthesize delegate = _delegate;
-@synthesize videoFrame = _videoFrame;
-@synthesize glkView = _glkView;
-
-- (instancetype)initWithFrame:(CGRect)frame {
- return [self initWithFrame:frame shader:[[RTCDefaultShader alloc] init]];
-}
-
-- (instancetype)initWithCoder:(NSCoder *)aDecoder {
- return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]];
-}
-
-- (instancetype)initWithFrame:(CGRect)frame shader:(id<RTCVideoViewShading>)shader {
- if (self = [super initWithFrame:frame]) {
- _shader = shader;
- if (![self configure]) {
- return nil;
- }
- }
- return self;
-}
-
-- (instancetype)initWithCoder:(NSCoder *)aDecoder shader:(id<RTCVideoViewShading>)shader {
- if (self = [super initWithCoder:aDecoder]) {
- _shader = shader;
- if (![self configure]) {
- return nil;
- }
- }
- return self;
-}
-
-- (BOOL)configure {
- EAGLContext *glContext =
- [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
- if (!glContext) {
- glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
- }
- if (!glContext) {
- RTCLogError(@"Failed to create EAGLContext");
- return NO;
- }
- _glContext = glContext;
-
- // GLKView manages a framebuffer for us.
- _glkView = [[GLKView alloc] initWithFrame:CGRectZero
- context:_glContext];
- _glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888;
- _glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone;
- _glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone;
- _glkView.drawableMultisample = GLKViewDrawableMultisampleNone;
- _glkView.delegate = self;
- _glkView.layer.masksToBounds = YES;
- _glkView.enableSetNeedsDisplay = NO;
- [self addSubview:_glkView];
-
- // Listen to application state in order to clean up OpenGL before app goes
- // away.
- NSNotificationCenter *notificationCenter =
- [NSNotificationCenter defaultCenter];
- [notificationCenter addObserver:self
- selector:@selector(willResignActive)
- name:UIApplicationWillResignActiveNotification
- object:nil];
- [notificationCenter addObserver:self
- selector:@selector(didBecomeActive)
- name:UIApplicationDidBecomeActiveNotification
- object:nil];
-
- // Frames are received on a separate thread, so we poll for the current frame
- // using a refresh rate proportional to the screen refresh frequency. This
- // occurs on the main thread.
- __weak RTCEAGLVideoView *weakSelf = self;
- _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{
- RTCEAGLVideoView *strongSelf = weakSelf;
- [strongSelf displayLinkTimerDidFire];
- }];
- if ([[UIApplication sharedApplication] applicationState] == UIApplicationStateActive) {
- [self setupGL];
- }
- return YES;
-}
-
-- (void)dealloc {
- [[NSNotificationCenter defaultCenter] removeObserver:self];
- UIApplicationState appState =
- [UIApplication sharedApplication].applicationState;
- if (appState == UIApplicationStateActive) {
- [self teardownGL];
- }
- [_timer invalidate];
- [self ensureGLContext];
- _shader = nil;
- if (_glContext && [EAGLContext currentContext] == _glContext) {
- [EAGLContext setCurrentContext:nil];
- }
-}
-
-#pragma mark - UIView
-
-- (void)setNeedsDisplay {
- [super setNeedsDisplay];
- _isDirty = YES;
-}
-
-- (void)setNeedsDisplayInRect:(CGRect)rect {
- [super setNeedsDisplayInRect:rect];
- _isDirty = YES;
-}
-
-- (void)layoutSubviews {
- [super layoutSubviews];
- _glkView.frame = self.bounds;
-}
-
-#pragma mark - GLKViewDelegate
-
-// This method is called when the GLKView's content is dirty and needs to be
-// redrawn. This occurs on main thread.
-- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
- // The renderer will draw the frame to the framebuffer corresponding to the
- // one used by |view|.
- RTCVideoFrame *frame = self.videoFrame;
- if (!frame || frame.timeStampNs == _lastDrawnFrameTimeStampNs) {
- return;
- }
- [self ensureGLContext];
- glClear(GL_COLOR_BUFFER_BIT);
- if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
- if (!_nv12TextureCache) {
- _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext];
- }
- if (_nv12TextureCache) {
- [_nv12TextureCache uploadFrameToTextures:frame];
- [_shader applyShadingForFrameWithWidth:frame.width
- height:frame.height
- rotation:frame.rotation
- yPlane:_nv12TextureCache.yTexture
- uvPlane:_nv12TextureCache.uvTexture];
- [_nv12TextureCache releaseTextures];
-
- _lastDrawnFrameTimeStampNs = self.videoFrame.timeStampNs;
- }
- } else {
- if (!_i420TextureCache) {
- _i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:_glContext];
- }
- [_i420TextureCache uploadFrameToTextures:frame];
- [_shader applyShadingForFrameWithWidth:frame.width
- height:frame.height
- rotation:frame.rotation
- yPlane:_i420TextureCache.yTexture
- uPlane:_i420TextureCache.uTexture
- vPlane:_i420TextureCache.vTexture];
-
- _lastDrawnFrameTimeStampNs = self.videoFrame.timeStampNs;
- }
-}
-
-#pragma mark - RTCVideoRenderer
-
-// These methods may be called on non-main thread.
-- (void)setSize:(CGSize)size {
- __weak RTCEAGLVideoView *weakSelf = self;
- dispatch_async(dispatch_get_main_queue(), ^{
- RTCEAGLVideoView *strongSelf = weakSelf;
- [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
- });
-}
-
-- (void)renderFrame:(RTCVideoFrame *)frame {
- self.videoFrame = frame;
-}
-
-#pragma mark - Private
-
-- (void)displayLinkTimerDidFire {
- // Don't render unless the video frame has changed or the view content
- // has explicitly been marked dirty.
- if (!_isDirty && _lastDrawnFrameTimeStampNs == self.videoFrame.timeStampNs) {
- return;
- }
-
- // Always reset isDirty at this point, even if -[GLKView display]
- // won't be called in the case the drawable size is empty.
- _isDirty = NO;
-
- // Only call -[GLKView display] if the drawable size is
- // non-empty. Calling display will make the GLKView setup its
- // render buffer if necessary, but that will fail with error
- // GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT if size is empty.
- if (self.bounds.size.width > 0 && self.bounds.size.height > 0) {
- [_glkView display];
- }
-}
-
-- (void)setupGL {
- self.videoFrame = nil;
- [self ensureGLContext];
- glDisable(GL_DITHER);
- _timer.isPaused = NO;
-}
-
-- (void)teardownGL {
- self.videoFrame = nil;
- _timer.isPaused = YES;
- [_glkView deleteDrawable];
- [self ensureGLContext];
- _nv12TextureCache = nil;
- _i420TextureCache = nil;
-}
-
-- (void)didBecomeActive {
- [self setupGL];
-}
-
-- (void)willResignActive {
- [self teardownGL];
-}
-
-- (void)ensureGLContext {
- NSAssert(_glContext, @"context shouldn't be nil");
- if ([EAGLContext currentContext] != _glContext) {
- [EAGLContext setCurrentContext:_glContext];
- }
-}
-
-@end
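A minimal sketch of the shader injection point shown above: the view falls back to RTCDefaultShader unless a custom RTCVideoViewShading implementation is supplied at init time (MyTintShader is a hypothetical conforming class, and self is assumed to adopt the view's delegate protocol):

  id<RTCVideoViewShading> shader = [[MyTintShader alloc] init];  // hypothetical shader
  RTCEAGLVideoView *videoView =
      [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero shader:shader];
  videoView.delegate = self;  // notified via -videoView:didChangeVideoSize: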
diff --git a/sdk/objc/Framework/Classes/UI/RTCNSGLVideoView.m b/sdk/objc/Framework/Classes/UI/RTCNSGLVideoView.m
deleted file mode 100644
index 9578f82..0000000
--- a/sdk/objc/Framework/Classes/UI/RTCNSGLVideoView.m
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Foundation/Foundation.h>
-
-#if !TARGET_OS_IPHONE
-
-#import "WebRTC/RTCNSGLVideoView.h"
-
-#import <AppKit/NSOpenGL.h>
-#import <CoreVideo/CVDisplayLink.h>
-#import <OpenGL/gl3.h>
-
-#import "RTCDefaultShader.h"
-#import "RTCI420TextureCache.h"
-#import "WebRTC/RTCLogging.h"
-#import "WebRTC/RTCVideoFrame.h"
-
-@interface RTCNSGLVideoView ()
-// |videoFrame| is set when we receive a frame from a worker thread and is read
-// from the display link callback so atomicity is required.
-@property(atomic, strong) RTCVideoFrame *videoFrame;
-@property(atomic, strong) RTCI420TextureCache *i420TextureCache;
-
-- (void)drawFrame;
-@end
-
-static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
- const CVTimeStamp *now,
- const CVTimeStamp *outputTime,
- CVOptionFlags flagsIn,
- CVOptionFlags *flagsOut,
- void *displayLinkContext) {
- RTCNSGLVideoView *view = (__bridge RTCNSGLVideoView *)displayLinkContext;
- [view drawFrame];
- return kCVReturnSuccess;
-}
-
-@implementation RTCNSGLVideoView {
- CVDisplayLinkRef _displayLink;
- RTCVideoFrame *_lastDrawnFrame;
- id<RTCVideoViewShading> _shader;
-}
-
-@synthesize delegate = _delegate;
-@synthesize videoFrame = _videoFrame;
-@synthesize i420TextureCache = _i420TextureCache;
-
-- (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format {
- return [self initWithFrame:frame pixelFormat:format shader:[[RTCDefaultShader alloc] init]];
-}
-
-- (instancetype)initWithFrame:(NSRect)frame
- pixelFormat:(NSOpenGLPixelFormat *)format
- shader:(id<RTCVideoViewShading>)shader {
- if (self = [super initWithFrame:frame pixelFormat:format]) {
- _shader = shader;
- }
- return self;
-}
-
-- (void)dealloc {
- [self teardownDisplayLink];
-}
-
-- (void)drawRect:(NSRect)rect {
- [self drawFrame];
-}
-
-- (void)reshape {
- [super reshape];
- NSRect frame = [self frame];
- [self ensureGLContext];
- CGLLockContext([[self openGLContext] CGLContextObj]);
- glViewport(0, 0, frame.size.width, frame.size.height);
- CGLUnlockContext([[self openGLContext] CGLContextObj]);
-}
-
-- (void)lockFocus {
- NSOpenGLContext *context = [self openGLContext];
- [super lockFocus];
- if ([context view] != self) {
- [context setView:self];
- }
- [context makeCurrentContext];
-}
-
-- (void)prepareOpenGL {
- [super prepareOpenGL];
- [self ensureGLContext];
- glDisable(GL_DITHER);
- [self setupDisplayLink];
-}
-
-- (void)clearGLContext {
- [self ensureGLContext];
- self.i420TextureCache = nil;
- [super clearGLContext];
-}
-
-#pragma mark - RTCVideoRenderer
-
-// These methods may be called on non-main thread.
-- (void)setSize:(CGSize)size {
- dispatch_async(dispatch_get_main_queue(), ^{
- [self.delegate videoView:self didChangeVideoSize:size];
- });
-}
-
-- (void)renderFrame:(RTCVideoFrame *)frame {
- self.videoFrame = frame;
-}
-
-#pragma mark - Private
-
-- (void)drawFrame {
- RTCVideoFrame *frame = self.videoFrame;
- if (!frame || frame == _lastDrawnFrame) {
- return;
- }
- // This method may be called from CVDisplayLink callback which isn't on the
- // main thread so we have to lock the GL context before drawing.
- NSOpenGLContext *context = [self openGLContext];
- CGLLockContext([context CGLContextObj]);
-
- [self ensureGLContext];
- glClear(GL_COLOR_BUFFER_BIT);
-
- // Rendering native CVPixelBuffer is not supported on OS X.
- // TODO(magjed): Add support for NV12 texture cache on OS X.
- frame = [frame newI420VideoFrame];
- if (!self.i420TextureCache) {
- self.i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:context];
- }
- RTCI420TextureCache *i420TextureCache = self.i420TextureCache;
- if (i420TextureCache) {
- [i420TextureCache uploadFrameToTextures:frame];
- [_shader applyShadingForFrameWithWidth:frame.width
- height:frame.height
- rotation:frame.rotation
- yPlane:i420TextureCache.yTexture
- uPlane:i420TextureCache.uTexture
- vPlane:i420TextureCache.vTexture];
- [context flushBuffer];
- _lastDrawnFrame = frame;
- }
- CGLUnlockContext([context CGLContextObj]);
-}
-
-- (void)setupDisplayLink {
- if (_displayLink) {
- return;
- }
- // Synchronize buffer swaps with vertical refresh rate.
- GLint swapInt = 1;
- [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
-
- // Create display link.
- CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
- CVDisplayLinkSetOutputCallback(_displayLink,
- &OnDisplayLinkFired,
- (__bridge void *)self);
- // Set the display link for the current renderer.
- CGLContextObj cglContext = [[self openGLContext] CGLContextObj];
- CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj];
- CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(
- _displayLink, cglContext, cglPixelFormat);
- CVDisplayLinkStart(_displayLink);
-}
-
-- (void)teardownDisplayLink {
- if (!_displayLink) {
- return;
- }
- CVDisplayLinkRelease(_displayLink);
- _displayLink = NULL;
-}
-
-- (void)ensureGLContext {
- NSOpenGLContext* context = [self openGLContext];
- NSAssert(context, @"context shouldn't be nil");
- if ([NSOpenGLContext currentContext] != context) {
- [context makeCurrentContext];
- }
-}
-
-@end
-
-#endif // !TARGET_OS_IPHONE
diff --git a/sdk/objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.h b/sdk/objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.h
deleted file mode 100644
index 32ab687..0000000
--- a/sdk/objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.h
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <AVFoundation/AVFoundation.h>
-#import <CoreMedia/CoreMedia.h>
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface AVCaptureSession (DevicePosition)
-
-// Check the image's EXIF for the camera the image came from.
-+ (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.mm b/sdk/objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.mm
deleted file mode 100644
index 0814ecc..0000000
--- a/sdk/objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.mm
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "AVCaptureSession+DevicePosition.h"
-
-BOOL CFStringContainsString(CFStringRef theString, CFStringRef stringToFind) {
- return CFStringFindWithOptions(theString,
- stringToFind,
- CFRangeMake(0, CFStringGetLength(theString)),
- kCFCompareCaseInsensitive,
- nil);
-}
-
-@implementation AVCaptureSession (DevicePosition)
-
-+ (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer {
- // Check the image's EXIF for the camera the image came from.
- AVCaptureDevicePosition cameraPosition = AVCaptureDevicePositionUnspecified;
- CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(
- kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
- if (attachments) {
- int size = CFDictionaryGetCount(attachments);
- if (size > 0) {
- CFDictionaryRef cfExifDictVal = nil;
- if (CFDictionaryGetValueIfPresent(
- attachments, (const void *)CFSTR("{Exif}"), (const void **)&cfExifDictVal)) {
- CFStringRef cfLensModelStrVal;
- if (CFDictionaryGetValueIfPresent(cfExifDictVal,
- (const void *)CFSTR("LensModel"),
- (const void **)&cfLensModelStrVal)) {
- if (CFStringContainsString(cfLensModelStrVal, CFSTR("front"))) {
- cameraPosition = AVCaptureDevicePositionFront;
- } else if (CFStringContainsString(cfLensModelStrVal, CFSTR("back"))) {
- cameraPosition = AVCaptureDevicePositionBack;
- }
- }
- }
- }
- CFRelease(attachments);
- }
- return cameraPosition;
-}
-
-@end
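A minimal sketch of where the category deleted above is typically called from, i.e. an AVCaptureVideoDataOutput sample buffer callback (the surrounding delegate class is assumed):

  - (void)captureOutput:(AVCaptureOutput *)captureOutput
      didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
             fromConnection:(AVCaptureConnection *)connection {
    // EXIF-based lookup of the camera that produced this frame.
    AVCaptureDevicePosition position =
        [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
    BOOL usingFrontCamera = (position == AVCaptureDevicePositionFront);
    // ... adjust rotation/mirroring handling based on usingFrontCamera.
  }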
diff --git a/sdk/objc/Framework/Classes/Video/RTCCVPixelBuffer.mm b/sdk/objc/Framework/Classes/Video/RTCCVPixelBuffer.mm
deleted file mode 100644
index 9e5ac73..0000000
--- a/sdk/objc/Framework/Classes/Video/RTCCVPixelBuffer.mm
+++ /dev/null
@@ -1,346 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-#include "common_video/libyuv/include/webrtc_libyuv.h"
-#include "rtc_base/checks.h"
-#include "rtc_base/logging.h"
-#include "third_party/libyuv/include/libyuv.h"
-
-#if !defined(NDEBUG) && defined(WEBRTC_IOS)
-#import <UIKit/UIKit.h>
-#import <VideoToolbox/VideoToolbox.h>
-#endif
-
-@implementation RTCCVPixelBuffer {
- int _width;
- int _height;
- int _bufferWidth;
- int _bufferHeight;
- int _cropWidth;
- int _cropHeight;
-}
-
-@synthesize pixelBuffer = _pixelBuffer;
-@synthesize cropX = _cropX;
-@synthesize cropY = _cropY;
-@synthesize cropWidth = _cropWidth;
-@synthesize cropHeight = _cropHeight;
-
-+ (NSSet<NSNumber*>*)supportedPixelFormats {
- return [NSSet setWithObjects:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
- @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
- @(kCVPixelFormatType_32BGRA),
- @(kCVPixelFormatType_32ARGB),
- nil];
-}
-
-- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer {
- return [self initWithPixelBuffer:pixelBuffer
- adaptedWidth:CVPixelBufferGetWidth(pixelBuffer)
- adaptedHeight:CVPixelBufferGetHeight(pixelBuffer)
- cropWidth:CVPixelBufferGetWidth(pixelBuffer)
- cropHeight:CVPixelBufferGetHeight(pixelBuffer)
- cropX:0
- cropY:0];
-}
-
-- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
- adaptedWidth:(int)adaptedWidth
- adaptedHeight:(int)adaptedHeight
- cropWidth:(int)cropWidth
- cropHeight:(int)cropHeight
- cropX:(int)cropX
- cropY:(int)cropY {
- if (self = [super init]) {
- _width = adaptedWidth;
- _height = adaptedHeight;
- _pixelBuffer = pixelBuffer;
- _bufferWidth = CVPixelBufferGetWidth(_pixelBuffer);
- _bufferHeight = CVPixelBufferGetHeight(_pixelBuffer);
- _cropWidth = cropWidth;
- _cropHeight = cropHeight;
- // Can only crop at even pixels.
- _cropX = cropX & ~1;
- _cropY = cropY & ~1;
- CVBufferRetain(_pixelBuffer);
- }
-
- return self;
-}
-
-- (void)dealloc {
- CVBufferRelease(_pixelBuffer);
-}
-
-- (int)width {
- return _width;
-}
-
-- (int)height {
- return _height;
-}
-
-- (BOOL)requiresCropping {
- return _cropWidth != _bufferWidth || _cropHeight != _bufferHeight;
-}
-
-- (BOOL)requiresScalingToWidth:(int)width height:(int)height {
- return _cropWidth != width || _cropHeight != height;
-}
-
-- (int)bufferSizeForCroppingAndScalingToWidth:(int)width height:(int)height {
- const OSType srcPixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
- switch (srcPixelFormat) {
- case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
- case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
- int srcChromaWidth = (_cropWidth + 1) / 2;
- int srcChromaHeight = (_cropHeight + 1) / 2;
- int dstChromaWidth = (width + 1) / 2;
- int dstChromaHeight = (height + 1) / 2;
-
- return srcChromaWidth * srcChromaHeight * 2 + dstChromaWidth * dstChromaHeight * 2;
- }
- case kCVPixelFormatType_32BGRA:
- case kCVPixelFormatType_32ARGB: {
- return 0; // Scaling RGBA frames does not require a temporary buffer.
- }
- }
- RTC_NOTREACHED() << "Unsupported pixel format.";
- return 0;
-}
-
-- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer
- withTempBuffer:(nullable uint8_t*)tmpBuffer {
- const OSType srcPixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
- const OSType dstPixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer);
-
- switch (srcPixelFormat) {
- case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
- case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
- size_t dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
- size_t dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
- if (dstWidth > 0 && dstHeight > 0) {
- RTC_DCHECK(dstPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
- dstPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
- if ([self requiresScalingToWidth:dstWidth height:dstHeight]) {
- RTC_DCHECK(tmpBuffer);
- }
- [self cropAndScaleNV12To:outputPixelBuffer withTempBuffer:tmpBuffer];
- }
- break;
- }
- case kCVPixelFormatType_32BGRA:
- case kCVPixelFormatType_32ARGB: {
- RTC_DCHECK(srcPixelFormat == dstPixelFormat);
- [self cropAndScaleARGBTo:outputPixelBuffer];
- break;
- }
- default: { RTC_NOTREACHED() << "Unsupported pixel format."; }
- }
-
- return YES;
-}
-
-- (id<RTCI420Buffer>)toI420 {
- const OSType pixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
-
- CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
-
- RTCMutableI420Buffer* i420Buffer =
- [[RTCMutableI420Buffer alloc] initWithWidth:[self width] height:[self height]];
-
- switch (pixelFormat) {
- case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
- case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
- const uint8_t* srcY =
- static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
- const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
- const uint8_t* srcUV =
- static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
- const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);
-
- // Crop just by modifying pointers.
- srcY += srcYStride * _cropY + _cropX;
- srcUV += srcUVStride * (_cropY / 2) + _cropX;
-
- // TODO(magjed): Use a frame buffer pool.
- webrtc::NV12ToI420Scaler nv12ToI420Scaler;
- nv12ToI420Scaler.NV12ToI420Scale(srcY,
- srcYStride,
- srcUV,
- srcUVStride,
- _cropWidth,
- _cropHeight,
- i420Buffer.mutableDataY,
- i420Buffer.strideY,
- i420Buffer.mutableDataU,
- i420Buffer.strideU,
- i420Buffer.mutableDataV,
- i420Buffer.strideV,
- i420Buffer.width,
- i420Buffer.height);
- break;
- }
- case kCVPixelFormatType_32BGRA:
- case kCVPixelFormatType_32ARGB: {
- CVPixelBufferRef scaledPixelBuffer = NULL;
- CVPixelBufferRef sourcePixelBuffer = NULL;
- if ([self requiresCropping] ||
- [self requiresScalingToWidth:i420Buffer.width height:i420Buffer.height]) {
- CVPixelBufferCreate(
- NULL, i420Buffer.width, i420Buffer.height, pixelFormat, NULL, &scaledPixelBuffer);
- [self cropAndScaleTo:scaledPixelBuffer withTempBuffer:NULL];
-
- CVPixelBufferLockBaseAddress(scaledPixelBuffer, kCVPixelBufferLock_ReadOnly);
- sourcePixelBuffer = scaledPixelBuffer;
- } else {
- sourcePixelBuffer = _pixelBuffer;
- }
- const uint8_t* src = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(sourcePixelBuffer));
- const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(sourcePixelBuffer);
-
- if (pixelFormat == kCVPixelFormatType_32BGRA) {
- // Corresponds to libyuv::FOURCC_ARGB
- libyuv::ARGBToI420(src,
- bytesPerRow,
- i420Buffer.mutableDataY,
- i420Buffer.strideY,
- i420Buffer.mutableDataU,
- i420Buffer.strideU,
- i420Buffer.mutableDataV,
- i420Buffer.strideV,
- i420Buffer.width,
- i420Buffer.height);
- } else if (pixelFormat == kCVPixelFormatType_32ARGB) {
- // Corresponds to libyuv::FOURCC_BGRA
- libyuv::BGRAToI420(src,
- bytesPerRow,
- i420Buffer.mutableDataY,
- i420Buffer.strideY,
- i420Buffer.mutableDataU,
- i420Buffer.strideU,
- i420Buffer.mutableDataV,
- i420Buffer.strideV,
- i420Buffer.width,
- i420Buffer.height);
- }
-
- if (scaledPixelBuffer) {
- CVPixelBufferUnlockBaseAddress(scaledPixelBuffer, kCVPixelBufferLock_ReadOnly);
- CVBufferRelease(scaledPixelBuffer);
- }
- break;
- }
- default: { RTC_NOTREACHED() << "Unsupported pixel format."; }
- }
-
- CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
-
- return i420Buffer;
-}
-
-#pragma mark - Debugging
-
-#if !defined(NDEBUG) && defined(WEBRTC_IOS)
-- (id)debugQuickLookObject {
- CGImageRef cgImage;
- VTCreateCGImageFromCVPixelBuffer(_pixelBuffer, NULL, &cgImage);
- UIImage *image = [UIImage imageWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationUp];
- CGImageRelease(cgImage);
- return image;
-}
-#endif
-
-#pragma mark - Private
-
-- (void)cropAndScaleNV12To:(CVPixelBufferRef)outputPixelBuffer withTempBuffer:(uint8_t*)tmpBuffer {
- // Prepare output pointers.
- CVReturn cvRet = CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
- if (cvRet != kCVReturnSuccess) {
- RTC_LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
- }
- const int dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
- const int dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
- uint8_t* dstY =
- reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0));
- const int dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0);
- uint8_t* dstUV =
- reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1));
- const int dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1);
-
- // Prepare source pointers.
- CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
- const uint8_t* srcY = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
- const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
- const uint8_t* srcUV = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
- const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);
-
- // Crop just by modifying pointers.
- srcY += srcYStride * _cropY + _cropX;
- srcUV += srcUVStride * (_cropY / 2) + _cropX;
-
- webrtc::NV12Scale(tmpBuffer,
- srcY,
- srcYStride,
- srcUV,
- srcUVStride,
- _cropWidth,
- _cropHeight,
- dstY,
- dstYStride,
- dstUV,
- dstUVStride,
- dstWidth,
- dstHeight);
-
- CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
- CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
-}
-
-- (void)cropAndScaleARGBTo:(CVPixelBufferRef)outputPixelBuffer {
- // Prepare output pointers.
- CVReturn cvRet = CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
- if (cvRet != kCVReturnSuccess) {
- RTC_LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
- }
- const int dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
- const int dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
-
- uint8_t* dst = reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddress(outputPixelBuffer));
- const int dstStride = CVPixelBufferGetBytesPerRow(outputPixelBuffer);
-
- // Prepare source pointers.
- CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
- const uint8_t* src = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(_pixelBuffer));
- const int srcStride = CVPixelBufferGetBytesPerRow(_pixelBuffer);
-
- // Crop just by modifying pointers. We need to ensure that the src pointer points to a byte
- // corresponding to the start of a pixel (the B byte for BGRA) so that libyuv scales correctly.
- const int bytesPerPixel = 4;
- src += srcStride * _cropY + (_cropX * bytesPerPixel);
-
- // kCVPixelFormatType_32BGRA corresponds to libyuv::FOURCC_ARGB
- libyuv::ARGBScale(src,
- srcStride,
- _cropWidth,
- _cropHeight,
- dst,
- dstStride,
- dstWidth,
- dstHeight,
- libyuv::kFilterBox);
-
- CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
- CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
-}
-
-@end
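For reference, a minimal sketch of the zero-copy path the class deleted above enables: a camera CVPixelBufferRef is wrapped without copying and only converted to I420 when a consumer needs planar data (pixelBuffer and timeStampNs are assumed to come from the capture callback, and the RTCVideoFrame initializer from the public headers is assumed):

  RTCCVPixelBuffer *rtcPixelBuffer =
      [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
                                   rotation:RTCVideoRotation_0
                                timeStampNs:timeStampNs];
  // Deferred, potentially expensive conversion to planar I420:
  id<RTCI420Buffer> i420 = [frame.buffer toI420];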
diff --git a/sdk/objc/Framework/Classes/Video/RTCDefaultShader.h b/sdk/objc/Framework/Classes/Video/RTCDefaultShader.h
index ed5125e..136d700 100644
--- a/sdk/objc/Framework/Classes/Video/RTCDefaultShader.h
+++ b/sdk/objc/Framework/Classes/Video/RTCDefaultShader.h
@@ -8,16 +8,4 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "WebRTC/RTCVideoViewShading.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView and
- * RTCEAGLVideoView if no external shader is specified. This shader will render
- * the video in a rectangle without any color or geometric transformations.
- */
-@interface RTCDefaultShader : NSObject<RTCVideoViewShading>
-
-@end
-
-NS_ASSUME_NONNULL_END
+#import "components/renderer/opengl/RTCDefaultShader.h"
diff --git a/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm b/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm
deleted file mode 100644
index c5fbde1..0000000
--- a/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm
+++ /dev/null
@@ -1,207 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCDefaultShader.h"
-
-#if TARGET_OS_IPHONE
-#import <OpenGLES/ES3/gl.h>
-#else
-#import <OpenGL/gl3.h>
-#endif
-
-#import "RTCOpenGLDefines.h"
-#import "RTCShader.h"
-#import "WebRTC/RTCLogging.h"
-
-#include "absl/types/optional.h"
-
-static const int kYTextureUnit = 0;
-static const int kUTextureUnit = 1;
-static const int kVTextureUnit = 2;
-static const int kUvTextureUnit = 1;
-
-// Fragment shader converts YUV values from input textures into a final RGB
-// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
-static const char kI420FragmentShaderSource[] =
- SHADER_VERSION
- "precision highp float;"
- FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
- "uniform lowp sampler2D s_textureY;\n"
- "uniform lowp sampler2D s_textureU;\n"
- "uniform lowp sampler2D s_textureV;\n"
- FRAGMENT_SHADER_OUT
- "void main() {\n"
- " float y, u, v, r, g, b;\n"
- " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
- " u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
- " v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
- " u = u - 0.5;\n"
- " v = v - 0.5;\n"
- " r = y + 1.403 * v;\n"
- " g = y - 0.344 * u - 0.714 * v;\n"
- " b = y + 1.770 * u;\n"
- " " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
- " }\n";
-
-static const char kNV12FragmentShaderSource[] =
- SHADER_VERSION
- "precision mediump float;"
- FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
- "uniform lowp sampler2D s_textureY;\n"
- "uniform lowp sampler2D s_textureUV;\n"
- FRAGMENT_SHADER_OUT
- "void main() {\n"
- " mediump float y;\n"
- " mediump vec2 uv;\n"
- " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
- " uv = " FRAGMENT_SHADER_TEXTURE "(s_textureUV, v_texcoord).ra -\n"
- " vec2(0.5, 0.5);\n"
- " " FRAGMENT_SHADER_COLOR " = vec4(y + 1.403 * uv.y,\n"
- " y - 0.344 * uv.x - 0.714 * uv.y,\n"
- " y + 1.770 * uv.x,\n"
- " 1.0);\n"
- " }\n";
-
-@implementation RTCDefaultShader {
- GLuint _vertexBuffer;
- GLuint _vertexArray;
- // Store current rotation and only upload new vertex data when rotation changes.
- absl::optional<RTCVideoRotation> _currentRotation;
-
- GLuint _i420Program;
- GLuint _nv12Program;
-}
-
-- (void)dealloc {
- glDeleteProgram(_i420Program);
- glDeleteProgram(_nv12Program);
- glDeleteBuffers(1, &_vertexBuffer);
- glDeleteVertexArrays(1, &_vertexArray);
-}
-
-- (BOOL)createAndSetupI420Program {
- NSAssert(!_i420Program, @"I420 program already created");
- _i420Program = RTCCreateProgramFromFragmentSource(kI420FragmentShaderSource);
- if (!_i420Program) {
- return NO;
- }
- GLint ySampler = glGetUniformLocation(_i420Program, "s_textureY");
- GLint uSampler = glGetUniformLocation(_i420Program, "s_textureU");
- GLint vSampler = glGetUniformLocation(_i420Program, "s_textureV");
-
- if (ySampler < 0 || uSampler < 0 || vSampler < 0) {
- RTCLog(@"Failed to get uniform variable locations in I420 shader");
- glDeleteProgram(_i420Program);
- _i420Program = 0;
- return NO;
- }
-
- glUseProgram(_i420Program);
- glUniform1i(ySampler, kYTextureUnit);
- glUniform1i(uSampler, kUTextureUnit);
- glUniform1i(vSampler, kVTextureUnit);
-
- return YES;
-}
-
-- (BOOL)createAndSetupNV12Program {
- NSAssert(!_nv12Program, @"NV12 program already created");
- _nv12Program = RTCCreateProgramFromFragmentSource(kNV12FragmentShaderSource);
- if (!_nv12Program) {
- return NO;
- }
- GLint ySampler = glGetUniformLocation(_nv12Program, "s_textureY");
- GLint uvSampler = glGetUniformLocation(_nv12Program, "s_textureUV");
-
- if (ySampler < 0 || uvSampler < 0) {
- RTCLog(@"Failed to get uniform variable locations in NV12 shader");
- glDeleteProgram(_nv12Program);
- _nv12Program = 0;
- return NO;
- }
-
- glUseProgram(_nv12Program);
- glUniform1i(ySampler, kYTextureUnit);
- glUniform1i(uvSampler, kUvTextureUnit);
-
- return YES;
-}
-
-- (BOOL)prepareVertexBufferWithRotation:(RTCVideoRotation)rotation {
- if (!_vertexBuffer && !RTCCreateVertexBuffer(&_vertexBuffer, &_vertexArray)) {
- RTCLog(@"Failed to setup vertex buffer");
- return NO;
- }
-#if !TARGET_OS_IPHONE
- glBindVertexArray(_vertexArray);
-#endif
- glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
- if (!_currentRotation || rotation != *_currentRotation) {
- _currentRotation = absl::optional<RTCVideoRotation>(rotation);
- RTCSetVertexData(*_currentRotation);
- }
- return YES;
-}
-
-- (void)applyShadingForFrameWithWidth:(int)width
- height:(int)height
- rotation:(RTCVideoRotation)rotation
- yPlane:(GLuint)yPlane
- uPlane:(GLuint)uPlane
- vPlane:(GLuint)vPlane {
- if (![self prepareVertexBufferWithRotation:rotation]) {
- return;
- }
-
- if (!_i420Program && ![self createAndSetupI420Program]) {
- RTCLog(@"Failed to setup I420 program");
- return;
- }
-
- glUseProgram(_i420Program);
-
- glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kYTextureUnit));
- glBindTexture(GL_TEXTURE_2D, yPlane);
-
- glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kUTextureUnit));
- glBindTexture(GL_TEXTURE_2D, uPlane);
-
- glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kVTextureUnit));
- glBindTexture(GL_TEXTURE_2D, vPlane);
-
- glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
-}
-
-- (void)applyShadingForFrameWithWidth:(int)width
- height:(int)height
- rotation:(RTCVideoRotation)rotation
- yPlane:(GLuint)yPlane
- uvPlane:(GLuint)uvPlane {
- if (![self prepareVertexBufferWithRotation:rotation]) {
- return;
- }
-
- if (!_nv12Program && ![self createAndSetupNV12Program]) {
- RTCLog(@"Failed to setup NV12 shader");
- return;
- }
-
- glUseProgram(_nv12Program);
-
- glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kYTextureUnit));
- glBindTexture(GL_TEXTURE_2D, yPlane);
-
- glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kUvTextureUnit));
- glBindTexture(GL_TEXTURE_2D, uvPlane);
-
- glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
-}
-
-@end
diff --git a/sdk/objc/Framework/Classes/Video/RTCI420Buffer+Private.h b/sdk/objc/Framework/Classes/Video/RTCI420Buffer+Private.h
deleted file mode 100644
index d874925..0000000
--- a/sdk/objc/Framework/Classes/Video/RTCI420Buffer+Private.h
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-#include "api/video/i420_buffer.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCI420Buffer ()
-
-/** Initialize an RTCI420Buffer with its backing I420BufferInterface. */
-- (instancetype)initWithFrameBuffer:(rtc::scoped_refptr<webrtc::I420BufferInterface>)i420Buffer;
-- (rtc::scoped_refptr<webrtc::I420BufferInterface>)nativeI420Buffer;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/Video/RTCI420Buffer.mm b/sdk/objc/Framework/Classes/Video/RTCI420Buffer.mm
deleted file mode 100644
index ae40926..0000000
--- a/sdk/objc/Framework/Classes/Video/RTCI420Buffer.mm
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCI420Buffer+Private.h"
-
-#include "api/video/i420_buffer.h"
-
-#if !defined(NDEBUG) && defined(WEBRTC_IOS)
-#import <UIKit/UIKit.h>
-#include "third_party/libyuv/include/libyuv.h"
-#endif
-
-@implementation RTCI420Buffer {
- @protected
- rtc::scoped_refptr<webrtc::I420BufferInterface> _i420Buffer;
-}
-
-- (instancetype)initWithWidth:(int)width height:(int)height {
- if (self = [super init]) {
- _i420Buffer = webrtc::I420Buffer::Create(width, height);
- }
-
- return self;
-}
-
-- (instancetype)initWithWidth:(int)width
- height:(int)height
- dataY:(const uint8_t *)dataY
- dataU:(const uint8_t *)dataU
- dataV:(const uint8_t *)dataV {
- if (self = [super init]) {
- _i420Buffer = webrtc::I420Buffer::Copy(
- width, height, dataY, width, dataU, (width + 1) / 2, dataV, (width + 1) / 2);
- }
- return self;
-}
-
-- (instancetype)initWithWidth:(int)width
- height:(int)height
- strideY:(int)strideY
- strideU:(int)strideU
- strideV:(int)strideV {
- if (self = [super init]) {
- _i420Buffer = webrtc::I420Buffer::Create(width, height, strideY, strideU, strideV);
- }
-
- return self;
-}
-
-- (instancetype)initWithFrameBuffer:(rtc::scoped_refptr<webrtc::I420BufferInterface>)i420Buffer {
- if (self = [super init]) {
- _i420Buffer = i420Buffer;
- }
-
- return self;
-}
-
-- (int)width {
- return _i420Buffer->width();
-}
-
-- (int)height {
- return _i420Buffer->height();
-}
-
-- (int)strideY {
- return _i420Buffer->StrideY();
-}
-
-- (int)strideU {
- return _i420Buffer->StrideU();
-}
-
-- (int)strideV {
- return _i420Buffer->StrideV();
-}
-
-- (int)chromaWidth {
- return _i420Buffer->ChromaWidth();
-}
-
-- (int)chromaHeight {
- return _i420Buffer->ChromaHeight();
-}
-
-- (const uint8_t *)dataY {
- return _i420Buffer->DataY();
-}
-
-- (const uint8_t *)dataU {
- return _i420Buffer->DataU();
-}
-
-- (const uint8_t *)dataV {
- return _i420Buffer->DataV();
-}
-
-- (id<RTCI420Buffer>)toI420 {
- return self;
-}
-
-#pragma mark - Private
-
-- (rtc::scoped_refptr<webrtc::I420BufferInterface>)nativeI420Buffer {
- return _i420Buffer;
-}
-
-#pragma mark - Debugging
-
-#if !defined(NDEBUG) && defined(WEBRTC_IOS)
-- (id)debugQuickLookObject {
- UIGraphicsBeginImageContext(CGSizeMake(_i420Buffer->width(), _i420Buffer->height()));
- CGContextRef c = UIGraphicsGetCurrentContext();
- uint8_t *ctxData = (uint8_t *)CGBitmapContextGetData(c);
-
- libyuv::I420ToARGB(_i420Buffer->DataY(),
- _i420Buffer->StrideY(),
- _i420Buffer->DataU(),
- _i420Buffer->StrideU(),
- _i420Buffer->DataV(),
- _i420Buffer->StrideV(),
- ctxData,
- CGBitmapContextGetBytesPerRow(c),
- CGBitmapContextGetWidth(c),
- CGBitmapContextGetHeight(c));
-
- UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
- UIGraphicsEndImageContext();
-
- return image;
-}
-#endif
-
-@end
-
-#pragma mark -
-
-@implementation RTCMutableI420Buffer
-
-- (uint8_t *)mutableDataY {
- return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataY();
-}
-
-- (uint8_t *)mutableDataU {
- return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataU();
-}
-
-- (uint8_t *)mutableDataV {
- return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataV();
-}
-
-@end
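A minimal sketch of filling the mutable variant defined above from raw planes (srcY/srcU/srcV, width and height are assumed to come from the caller, with source strides matching the buffer's own):

  RTCMutableI420Buffer *buffer =
      [[RTCMutableI420Buffer alloc] initWithWidth:width height:height];
  // Copy one plane at a time; plane sizes follow the buffer's strides.
  memcpy(buffer.mutableDataY, srcY, buffer.strideY * buffer.height);
  memcpy(buffer.mutableDataU, srcU, buffer.strideU * buffer.chromaHeight);
  memcpy(buffer.mutableDataV, srcV, buffer.strideV * buffer.chromaHeight);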
diff --git a/sdk/objc/Framework/Classes/Video/RTCI420TextureCache.h b/sdk/objc/Framework/Classes/Video/RTCI420TextureCache.h
deleted file mode 100644
index 19f1bc9..0000000
--- a/sdk/objc/Framework/Classes/Video/RTCI420TextureCache.h
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCOpenGLDefines.h"
-#import "WebRTC/RTCVideoFrame.h"
-
-@interface RTCI420TextureCache : NSObject
-
-@property(nonatomic, readonly) GLuint yTexture;
-@property(nonatomic, readonly) GLuint uTexture;
-@property(nonatomic, readonly) GLuint vTexture;
-
-- (instancetype)init NS_UNAVAILABLE;
-- (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER;
-
-- (void)uploadFrameToTextures:(RTCVideoFrame *)frame;
-
-@end
diff --git a/sdk/objc/Framework/Classes/Video/RTCI420TextureCache.mm b/sdk/objc/Framework/Classes/Video/RTCI420TextureCache.mm
deleted file mode 100644
index b603130..0000000
--- a/sdk/objc/Framework/Classes/Video/RTCI420TextureCache.mm
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCI420TextureCache.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-#if TARGET_OS_IPHONE
-#import <OpenGLES/ES3/gl.h>
-#else
-#import <OpenGL/gl3.h>
-#endif
-
-#include <vector>
-
-// Two sets of 3 textures are used here, one for each of the Y, U and V planes. Having two sets
-// alleviates CPU blockage in the event that the GPU is asked to render to a texture that is already
-// in use.
-static const GLsizei kNumTextureSets = 2;
-static const GLsizei kNumTexturesPerSet = 3;
-static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets;
-
-@implementation RTCI420TextureCache {
- BOOL _hasUnpackRowLength;
- GLint _currentTextureSet;
- // Handles for OpenGL constructs.
- GLuint _textures[kNumTextures];
- // Used to create a non-padded plane for GPU upload when we receive padded frames.
- std::vector<uint8_t> _planeBuffer;
-}
-
-- (GLuint)yTexture {
- return _textures[_currentTextureSet * kNumTexturesPerSet];
-}
-
-- (GLuint)uTexture {
- return _textures[_currentTextureSet * kNumTexturesPerSet + 1];
-}
-
-- (GLuint)vTexture {
- return _textures[_currentTextureSet * kNumTexturesPerSet + 2];
-}
-
-- (instancetype)initWithContext:(GlContextType *)context {
- if (self = [super init]) {
-#if TARGET_OS_IPHONE
- _hasUnpackRowLength = (context.API == kEAGLRenderingAPIOpenGLES3);
-#else
- _hasUnpackRowLength = YES;
-#endif
- glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
-
- [self setupTextures];
- }
- return self;
-}
-
-- (void)dealloc {
- glDeleteTextures(kNumTextures, _textures);
-}
-
-- (void)setupTextures {
- glGenTextures(kNumTextures, _textures);
- // Set parameters for each of the textures we created.
- for (GLsizei i = 0; i < kNumTextures; i++) {
- glBindTexture(GL_TEXTURE_2D, _textures[i]);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- }
-}
-
-- (void)uploadPlane:(const uint8_t *)plane
- texture:(GLuint)texture
- width:(size_t)width
- height:(size_t)height
- stride:(int32_t)stride {
- glBindTexture(GL_TEXTURE_2D, texture);
-
- const uint8_t *uploadPlane = plane;
- if ((size_t)stride != width) {
- if (_hasUnpackRowLength) {
- // GLES3 allows us to specify stride.
- glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
- glTexImage2D(GL_TEXTURE_2D,
- 0,
- RTC_PIXEL_FORMAT,
- static_cast<GLsizei>(width),
- static_cast<GLsizei>(height),
- 0,
- RTC_PIXEL_FORMAT,
- GL_UNSIGNED_BYTE,
- uploadPlane);
- glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
- return;
- } else {
- // Make an unpadded copy and upload that instead. Quick profiling showed
- // that this is faster than uploading row by row using glTexSubImage2D.
- uint8_t *unpaddedPlane = _planeBuffer.data();
- for (size_t y = 0; y < height; ++y) {
- memcpy(unpaddedPlane + y * width, plane + y * stride, width);
- }
- uploadPlane = unpaddedPlane;
- }
- }
- glTexImage2D(GL_TEXTURE_2D,
- 0,
- RTC_PIXEL_FORMAT,
- static_cast<GLsizei>(width),
- static_cast<GLsizei>(height),
- 0,
- RTC_PIXEL_FORMAT,
- GL_UNSIGNED_BYTE,
- uploadPlane);
-}
-
-- (void)uploadFrameToTextures:(RTCVideoFrame *)frame {
- _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
-
- id<RTCI420Buffer> buffer = [frame.buffer toI420];
-
- const int chromaWidth = buffer.chromaWidth;
- const int chromaHeight = buffer.chromaHeight;
- if (buffer.strideY != frame.width || buffer.strideU != chromaWidth ||
- buffer.strideV != chromaWidth) {
- _planeBuffer.resize(buffer.width * buffer.height);
- }
-
- [self uploadPlane:buffer.dataY
- texture:self.yTexture
- width:buffer.width
- height:buffer.height
- stride:buffer.strideY];
-
- [self uploadPlane:buffer.dataU
- texture:self.uTexture
- width:chromaWidth
- height:chromaHeight
- stride:buffer.strideU];
-
- [self uploadPlane:buffer.dataV
- texture:self.vTexture
- width:chromaWidth
- height:chromaHeight
- stride:buffer.strideV];
-}
-
-@end
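For context, a minimal sketch of how a renderer would typically drive RTCI420TextureCache; the GL context (glContext), the incoming RTCVideoFrame (videoFrame) and the I420 sampling shader are assumptions for the example, not part of the class:

  RTCI420TextureCache *i420Cache = [[RTCI420TextureCache alloc] initWithContext:glContext];

  // Per frame: upload the three planes once, then bind them to texture units
  // 0-2 for the draw call. The cache alternates between its two texture sets
  // internally, so a texture the GPU may still be sampling from is not
  // overwritten by the next upload.
  [i420Cache uploadFrameToTextures:videoFrame];
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_2D, i420Cache.yTexture);
  glActiveTexture(GL_TEXTURE1);
  glBindTexture(GL_TEXTURE_2D, i420Cache.uTexture);
  glActiveTexture(GL_TEXTURE2);
  glBindTexture(GL_TEXTURE_2D, i420Cache.vTexture);
  // ... set the sampler uniforms and issue the draw call ...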
diff --git a/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.h b/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.h
index 9cba823..4ba1caa 100644
--- a/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.h
+++ b/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.h
@@ -8,24 +8,4 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import <GLKit/GLKit.h>
-
-@class RTCVideoFrame;
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCNV12TextureCache : NSObject
-
-@property(nonatomic, readonly) GLuint yTexture;
-@property(nonatomic, readonly) GLuint uvTexture;
-
-- (instancetype)init NS_UNAVAILABLE;
-- (nullable instancetype)initWithContext:(EAGLContext *)context NS_DESIGNATED_INITIALIZER;
-
-- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame;
-
-- (void)releaseTextures;
-
-@end
-
-NS_ASSUME_NONNULL_END
+#import "components/renderer/opengl/RTCNV12TextureCache.h"
diff --git a/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.m b/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.m
deleted file mode 100644
index 20a6082..0000000
--- a/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.m
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCNV12TextureCache.h"
-
-#import "WebRTC/RTCVideoFrame.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-@implementation RTCNV12TextureCache {
- CVOpenGLESTextureCacheRef _textureCache;
- CVOpenGLESTextureRef _yTextureRef;
- CVOpenGLESTextureRef _uvTextureRef;
-}
-
-- (GLuint)yTexture {
- return CVOpenGLESTextureGetName(_yTextureRef);
-}
-
-- (GLuint)uvTexture {
- return CVOpenGLESTextureGetName(_uvTextureRef);
-}
-
-- (instancetype)initWithContext:(EAGLContext *)context {
- if (self = [super init]) {
- CVReturn ret = CVOpenGLESTextureCacheCreate(
- kCFAllocatorDefault, NULL,
-#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
- context,
-#else
- (__bridge void *)context,
-#endif
- NULL, &_textureCache);
- if (ret != kCVReturnSuccess) {
- self = nil;
- }
- }
- return self;
-}
-
-- (BOOL)loadTexture:(CVOpenGLESTextureRef *)textureOut
- pixelBuffer:(CVPixelBufferRef)pixelBuffer
- planeIndex:(int)planeIndex
- pixelFormat:(GLenum)pixelFormat {
- const int width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex);
- const int height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex);
-
- if (*textureOut) {
- CFRelease(*textureOut);
- *textureOut = nil;
- }
- CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage(
- kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, pixelFormat, width,
- height, pixelFormat, GL_UNSIGNED_BYTE, planeIndex, textureOut);
- if (ret != kCVReturnSuccess) {
- CFRelease(*textureOut);
- *textureOut = nil;
- return NO;
- }
- NSAssert(CVOpenGLESTextureGetTarget(*textureOut) == GL_TEXTURE_2D,
- @"Unexpected GLES texture target");
- glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(*textureOut));
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- return YES;
-}
-
-- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame {
- NSAssert([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]],
- @"frame must be CVPixelBuffer backed");
- RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
- CVPixelBufferRef pixelBuffer = rtcPixelBuffer.pixelBuffer;
- return [self loadTexture:&_yTextureRef
- pixelBuffer:pixelBuffer
- planeIndex:0
- pixelFormat:GL_LUMINANCE] &&
- [self loadTexture:&_uvTextureRef
- pixelBuffer:pixelBuffer
- planeIndex:1
- pixelFormat:GL_LUMINANCE_ALPHA];
-}
-
-- (void)releaseTextures {
- if (_uvTextureRef) {
- CFRelease(_uvTextureRef);
- _uvTextureRef = nil;
- }
- if (_yTextureRef) {
- CFRelease(_yTextureRef);
- _yTextureRef = nil;
- }
-}
-
-- (void)dealloc {
- [self releaseTextures];
- if (_textureCache) {
- CFRelease(_textureCache);
- _textureCache = nil;
- }
-}
-
-@end
-
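Unlike the I420 cache, the NV12 cache maps the CVPixelBuffer planes straight into GL textures through CVOpenGLESTextureCache rather than copying plane data. A hedged per-frame sketch, where eaglContext and videoFrame (which must be RTCCVPixelBuffer backed) are assumed to come from the surrounding renderer:

  RTCNV12TextureCache *nv12Cache = [[RTCNV12TextureCache alloc] initWithContext:eaglContext];

  if ([nv12Cache uploadFrameToTextures:videoFrame]) {
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, nv12Cache.yTexture);
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, nv12Cache.uvTexture);
    // ... draw with a shader that reconstructs RGB from the Y and UV planes ...
    // Drop the CV texture refs once the frame has been rendered so the
    // underlying pixel buffers can be recycled.
    [nv12Cache releaseTextures];
  }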
diff --git a/sdk/objc/Framework/Classes/Video/RTCOpenGLDefines.h b/sdk/objc/Framework/Classes/Video/RTCOpenGLDefines.h
deleted file mode 100644
index 4088535..0000000
--- a/sdk/objc/Framework/Classes/Video/RTCOpenGLDefines.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Foundation/Foundation.h>
-
-#if TARGET_OS_IPHONE
-#define RTC_PIXEL_FORMAT GL_LUMINANCE
-#define SHADER_VERSION
-#define VERTEX_SHADER_IN "attribute"
-#define VERTEX_SHADER_OUT "varying"
-#define FRAGMENT_SHADER_IN "varying"
-#define FRAGMENT_SHADER_OUT
-#define FRAGMENT_SHADER_COLOR "gl_FragColor"
-#define FRAGMENT_SHADER_TEXTURE "texture2D"
-
-@class EAGLContext;
-typedef EAGLContext GlContextType;
-#else
-#define RTC_PIXEL_FORMAT GL_RED
-#define SHADER_VERSION "#version 150\n"
-#define VERTEX_SHADER_IN "in"
-#define VERTEX_SHADER_OUT "out"
-#define FRAGMENT_SHADER_IN "in"
-#define FRAGMENT_SHADER_OUT "out vec4 fragColor;\n"
-#define FRAGMENT_SHADER_COLOR "fragColor"
-#define FRAGMENT_SHADER_TEXTURE "texture"
-
-@class NSOpenGLContext;
-typedef NSOpenGLContext GlContextType;
-#endif
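These macros exist so a single shader source string can compile both as GLSL ES on iOS and as desktop GLSL 1.50 on macOS. As an illustration only (not one of the shipped shaders), a pass-through fragment shader assembled from them could look like:

  static const char kExampleFragmentShaderSource[] =
      SHADER_VERSION
      "precision mediump float;\n"
      FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
      "uniform sampler2D s_texture;\n"
      FRAGMENT_SHADER_OUT
      "void main() {\n"
      "  " FRAGMENT_SHADER_COLOR " = " FRAGMENT_SHADER_TEXTURE "(s_texture, v_texcoord);\n"
      "}\n";

On iOS this expands to an ES shader built on varying, gl_FragColor and texture2D; on macOS the same string becomes a #version 150 shader with an in/out interface, a declared fragColor output and texture().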
diff --git a/sdk/objc/Framework/Classes/Video/RTCShader.h b/sdk/objc/Framework/Classes/Video/RTCShader.h
deleted file mode 100644
index 67afabb..0000000
--- a/sdk/objc/Framework/Classes/Video/RTCShader.h
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCVideoFrame.h"
-
-RTC_EXTERN const char kRTCVertexShaderSource[];
-
-RTC_EXTERN GLuint RTCCreateShader(GLenum type, const GLchar* source);
-RTC_EXTERN GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader);
-RTC_EXTERN GLuint
-RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]);
-RTC_EXTERN BOOL RTCCreateVertexBuffer(GLuint* vertexBuffer,
- GLuint* vertexArray);
-RTC_EXTERN void RTCSetVertexData(RTCVideoRotation rotation);
diff --git a/sdk/objc/Framework/Classes/Video/RTCShader.mm b/sdk/objc/Framework/Classes/Video/RTCShader.mm
deleted file mode 100644
index ea4228e..0000000
--- a/sdk/objc/Framework/Classes/Video/RTCShader.mm
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCShader.h"
-
-#if TARGET_OS_IPHONE
-#import <OpenGLES/ES3/gl.h>
-#else
-#import <OpenGL/gl3.h>
-#endif
-
-#include <algorithm>
-#include <array>
-#include <memory>
-
-#import "RTCOpenGLDefines.h"
-
-#include "rtc_base/checks.h"
-#include "rtc_base/logging.h"
-
-// Vertex shader doesn't do anything except pass coordinates through.
-const char kRTCVertexShaderSource[] =
- SHADER_VERSION
- VERTEX_SHADER_IN " vec2 position;\n"
- VERTEX_SHADER_IN " vec2 texcoord;\n"
- VERTEX_SHADER_OUT " vec2 v_texcoord;\n"
- "void main() {\n"
- " gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n"
- " v_texcoord = texcoord;\n"
- "}\n";
-
-// Compiles a shader of the given |type| with GLSL source |source| and returns
-// the shader handle or 0 on error.
-GLuint RTCCreateShader(GLenum type, const GLchar *source) {
- GLuint shader = glCreateShader(type);
- if (!shader) {
- return 0;
- }
- glShaderSource(shader, 1, &source, NULL);
- glCompileShader(shader);
- GLint compileStatus = GL_FALSE;
- glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
- if (compileStatus == GL_FALSE) {
- GLint logLength = 0;
- // The null termination character is included in the returned log length.
- glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLength);
- if (logLength > 0) {
- std::unique_ptr<char[]> compileLog(new char[logLength]);
- // The returned string is null terminated.
- glGetShaderInfoLog(shader, logLength, NULL, compileLog.get());
- RTC_LOG(LS_ERROR) << "Shader compile error: " << compileLog.get();
- }
- glDeleteShader(shader);
- shader = 0;
- }
- return shader;
-}
-
-// Links a shader program with the given vertex and fragment shaders and
-// returns the program handle or 0 on error.
-GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader) {
- if (vertexShader == 0 || fragmentShader == 0) {
- return 0;
- }
- GLuint program = glCreateProgram();
- if (!program) {
- return 0;
- }
- glAttachShader(program, vertexShader);
- glAttachShader(program, fragmentShader);
- glLinkProgram(program);
- GLint linkStatus = GL_FALSE;
- glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
- if (linkStatus == GL_FALSE) {
- glDeleteProgram(program);
- program = 0;
- }
- return program;
-}
-
-// Creates and links a shader program with the given fragment shader source and
-// a plain vertex shader. Returns the program handle or 0 on error.
-GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]) {
- GLuint vertexShader = RTCCreateShader(GL_VERTEX_SHADER, kRTCVertexShaderSource);
- RTC_CHECK(vertexShader) << "failed to create vertex shader";
- GLuint fragmentShader =
- RTCCreateShader(GL_FRAGMENT_SHADER, fragmentShaderSource);
- RTC_CHECK(fragmentShader) << "failed to create fragment shader";
- GLuint program = RTCCreateProgram(vertexShader, fragmentShader);
- // Shaders are created only to generate program.
- if (vertexShader) {
- glDeleteShader(vertexShader);
- }
- if (fragmentShader) {
- glDeleteShader(fragmentShader);
- }
-
- // Set vertex shader variables 'position' and 'texcoord' in program.
- GLint position = glGetAttribLocation(program, "position");
- GLint texcoord = glGetAttribLocation(program, "texcoord");
- if (position < 0 || texcoord < 0) {
- glDeleteProgram(program);
- return 0;
- }
-
- // Read position attribute with size of 2 and stride of 4 beginning at the start of the array. The
- // last argument indicates offset of data within the vertex buffer.
- glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)0);
- glEnableVertexAttribArray(position);
-
- // Read texcoord attribute with size of 2 and stride of 4 beginning at the first texcoord in the
- // array. The last argument indicates offset of data within the vertex buffer.
- glVertexAttribPointer(
- texcoord, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)(2 * sizeof(GLfloat)));
- glEnableVertexAttribArray(texcoord);
-
- return program;
-}
-
-BOOL RTCCreateVertexBuffer(GLuint *vertexBuffer, GLuint *vertexArray) {
-#if !TARGET_OS_IPHONE
- glGenVertexArrays(1, vertexArray);
- if (*vertexArray == 0) {
- return NO;
- }
- glBindVertexArray(*vertexArray);
-#endif
- glGenBuffers(1, vertexBuffer);
- if (*vertexBuffer == 0) {
- glDeleteVertexArrays(1, vertexArray);
- return NO;
- }
- glBindBuffer(GL_ARRAY_BUFFER, *vertexBuffer);
- glBufferData(GL_ARRAY_BUFFER, 4 * 4 * sizeof(GLfloat), NULL, GL_DYNAMIC_DRAW);
- return YES;
-}
-
-// Set vertex data to the currently bound vertex buffer.
-void RTCSetVertexData(RTCVideoRotation rotation) {
- // When the modelview and projection matrices are identity (the default), the
- // world is contained in the square around the origin with side length 2.
- // Drawing to these coordinates is equivalent to drawing to the entire screen.
- // The texture is stretched over that square using texture coordinates (u, v)
- // that range from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped
- // vertically here because the incoming frame has its origin in the upper left
- // corner but OpenGL expects the origin in the bottom left corner.
- std::array<std::array<GLfloat, 2>, 4> UVCoords = {{
- {{0, 1}}, // Lower left.
- {{1, 1}}, // Lower right.
- {{1, 0}}, // Upper right.
- {{0, 0}}, // Upper left.
- }};
-
- // Rotate the UV coordinates.
- int rotation_offset;
- switch (rotation) {
- case RTCVideoRotation_0:
- rotation_offset = 0;
- break;
- case RTCVideoRotation_90:
- rotation_offset = 1;
- break;
- case RTCVideoRotation_180:
- rotation_offset = 2;
- break;
- case RTCVideoRotation_270:
- rotation_offset = 3;
- break;
- }
- std::rotate(UVCoords.begin(), UVCoords.begin() + rotation_offset,
- UVCoords.end());
-
- const GLfloat gVertices[] = {
- // X, Y, U, V.
- -1, -1, UVCoords[0][0], UVCoords[0][1],
- 1, -1, UVCoords[1][0], UVCoords[1][1],
- 1, 1, UVCoords[2][0], UVCoords[2][1],
- -1, 1, UVCoords[3][0], UVCoords[3][1],
- };
-
- glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(gVertices), gVertices);
-}
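The helpers above are order sensitive: RTCCreateProgramFromFragmentSource records the position/texcoord attribute layout against whatever vertex buffer (and, on desktop GL, vertex array object) is currently bound, so the buffer created by RTCCreateVertexBuffer has to be in place first. A rough sketch of a draw path using them, with kExampleFragmentShaderSource standing in for a real fragment shader string; an actual renderer would create the buffer and program once and reuse them:

  static void ExampleDraw(RTCVideoRotation rotation) {
    GLuint vertexBuffer = 0;
    GLuint vertexArray = 0;  // Only populated on desktop GL.
    if (!RTCCreateVertexBuffer(&vertexBuffer, &vertexArray)) {
      return;
    }
    // Write the quad for this frame's rotation into the bound vertex buffer.
    // With RTCVideoRotation_90 the UV coordinates are rotated by one step, so
    // the frame is drawn turned a quarter turn.
    RTCSetVertexData(rotation);

    GLuint program = RTCCreateProgramFromFragmentSource(kExampleFragmentShaderSource);
    if (!program) {
      return;
    }
    glUseProgram(program);
    // ... bind the plane textures and set the sampler uniforms here ...
    glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
  }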
diff --git a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.h b/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.h
deleted file mode 100644
index bb6f6ce..0000000
--- a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.h
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <UIKit/UIKit.h>
-
-#include "media/base/h264_profile_level_id.h"
-
-@interface UIDevice (H264Profile)
-
-+ (absl::optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile;
-
-@end
diff --git a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.mm b/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.mm
deleted file mode 100644
index 196e34e..0000000
--- a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.mm
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "UIDevice+H264Profile.h"
-#import "WebRTC/UIDevice+RTCDevice.h"
-
-#include <algorithm>
-
-namespace {
-
-using namespace webrtc::H264;
-
-struct SupportedH264Profile {
- const RTCDeviceType deviceType;
- const ProfileLevelId profile;
-};
-
-constexpr SupportedH264Profile kH264MaxSupportedProfiles[] = {
- // iPhones with at least iOS 9
- {RTCDeviceTypeIPhoneX, {kProfileHigh, kLevel5_2}}, // https://support.apple.com/kb/SP770
- {RTCDeviceTypeIPhone8, {kProfileHigh, kLevel5_2}}, // https://support.apple.com/kb/SP767
- {RTCDeviceTypeIPhone8Plus, {kProfileHigh, kLevel5_2}}, // https://support.apple.com/kb/SP768
- {RTCDeviceTypeIPhone7, {kProfileHigh, kLevel5_1}}, // https://support.apple.com/kb/SP743
- {RTCDeviceTypeIPhone7Plus, {kProfileHigh, kLevel5_1}}, // https://support.apple.com/kb/SP744
- {RTCDeviceTypeIPhoneSE, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP738
- {RTCDeviceTypeIPhone6S, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP726
- {RTCDeviceTypeIPhone6SPlus, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP727
- {RTCDeviceTypeIPhone6, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP705
- {RTCDeviceTypeIPhone6Plus, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP706
- {RTCDeviceTypeIPhone5SGSM, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP685
- {RTCDeviceTypeIPhone5SGSM_CDMA,
- {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP685
- {RTCDeviceTypeIPhone5GSM, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP655
- {RTCDeviceTypeIPhone5GSM_CDMA,
- {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP655
- {RTCDeviceTypeIPhone5CGSM, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP684
- {RTCDeviceTypeIPhone5CGSM_CDMA,
- {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP684
- {RTCDeviceTypeIPhone4S, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP643
-
- // iPods with at least iOS 9
- {RTCDeviceTypeIPodTouch6G, {kProfileMain, kLevel4_1}}, // https://support.apple.com/kb/SP720
- {RTCDeviceTypeIPodTouch5G, {kProfileMain, kLevel3_1}}, // https://support.apple.com/kb/SP657
-
- // iPads with at least iOS 9
- {RTCDeviceTypeIPad2Wifi, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP622
- {RTCDeviceTypeIPad2GSM, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP622
- {RTCDeviceTypeIPad2CDMA, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP622
- {RTCDeviceTypeIPad2Wifi2, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP622
- {RTCDeviceTypeIPadMiniWifi, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP661
- {RTCDeviceTypeIPadMiniGSM, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP661
- {RTCDeviceTypeIPadMiniGSM_CDMA,
- {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP661
- {RTCDeviceTypeIPad3Wifi, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP647
- {RTCDeviceTypeIPad3GSM_CDMA, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP647
- {RTCDeviceTypeIPad3GSM, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP647
- {RTCDeviceTypeIPad4Wifi, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP662
- {RTCDeviceTypeIPad4GSM, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP662
- {RTCDeviceTypeIPad4GSM_CDMA, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP662
- {RTCDeviceTypeIPad5, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP751
- {RTCDeviceTypeIPad6, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP774
- {RTCDeviceTypeIPadAirWifi, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP692
- {RTCDeviceTypeIPadAirCellular,
- {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP692
- {RTCDeviceTypeIPadAirWifiCellular,
- {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP692
- {RTCDeviceTypeIPadAir2, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP708
- {RTCDeviceTypeIPadMini2GWifi, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP693
- {RTCDeviceTypeIPadMini2GCellular,
- {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP693
- {RTCDeviceTypeIPadMini2GWifiCellular,
- {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP693
- {RTCDeviceTypeIPadMini3, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP709
- {RTCDeviceTypeIPadMini4, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP725
- {RTCDeviceTypeIPadPro9Inch, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP739
- {RTCDeviceTypeIPadPro12Inch, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/sp723
- {RTCDeviceTypeIPadPro12Inch2, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP761
- {RTCDeviceTypeIPadPro10Inch, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP762
-};
-
-absl::optional<ProfileLevelId> FindMaxSupportedProfileForDevice(RTCDeviceType deviceType) {
- const auto* result = std::find_if(std::begin(kH264MaxSupportedProfiles),
- std::end(kH264MaxSupportedProfiles),
- [deviceType](const SupportedH264Profile& supportedProfile) {
- return supportedProfile.deviceType == deviceType;
- });
- if (result != std::end(kH264MaxSupportedProfiles)) {
- return result->profile;
- }
- return absl::nullopt;
-}
-
-} // namespace
-
-@implementation UIDevice (H264Profile)
-
-+ (absl::optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile {
- return FindMaxSupportedProfileForDevice([self deviceType]);
-}
-
-@end
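The table above is keyed purely on the device model, so a caller is expected to query it once and fold the result into the locally advertised H264 capabilities. A hedged sketch of such a caller; the kLevel3_1 fallback is an assumption for the example, not a value mandated by the SDK:

  absl::optional<webrtc::H264::ProfileLevelId> maxProfile = [UIDevice maxSupportedH264Profile];
  // Fall back to a conservative level when the device is not in the table.
  webrtc::H264::Level maxLevel = maxProfile ? maxProfile->level : webrtc::H264::kLevel3_1;
  // maxLevel can now be used when composing the local profile-level-id for SDP.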
diff --git a/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoDecoderH264.mm b/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoDecoderH264.mm
deleted file mode 100644
index 2871b8a..0000000
--- a/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoDecoderH264.mm
+++ /dev/null
@@ -1,290 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- *
- */
-
-#import "WebRTC/RTCVideoCodecH264.h"
-
-#import <VideoToolbox/VideoToolbox.h>
-
-#include "modules/video_coding/include/video_error_codes.h"
-#include "rtc_base/checks.h"
-#include "rtc_base/logging.h"
-#include "rtc_base/timeutils.h"
-#include "sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h"
-
-#import "WebRTC/RTCVideoFrame.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
-#import "helpers.h"
-#import "scoped_cftyperef.h"
-
-#if defined(WEBRTC_IOS)
-#import "Common/RTCUIApplicationStatusObserver.h"
-#import "WebRTC/UIDevice+RTCDevice.h"
-#endif
-
-// Struct that we pass to the decoder per frame to decode. We receive it again
-// in the decoder callback.
-struct RTCFrameDecodeParams {
- RTCFrameDecodeParams(RTCVideoDecoderCallback cb, int64_t ts) : callback(cb), timestamp(ts) {}
- RTCVideoDecoderCallback callback;
- int64_t timestamp;
-};
-
-@interface RTCVideoDecoderH264 ()
-- (void)setError:(OSStatus)error;
-@end
-
-// This is the callback function that VideoToolbox calls when decode is
-// complete.
-void decompressionOutputCallback(void *decoderRef,
- void *params,
- OSStatus status,
- VTDecodeInfoFlags infoFlags,
- CVImageBufferRef imageBuffer,
- CMTime timestamp,
- CMTime duration) {
- std::unique_ptr<RTCFrameDecodeParams> decodeParams(
- reinterpret_cast<RTCFrameDecodeParams *>(params));
- if (status != noErr) {
- RTCVideoDecoderH264 *decoder = (__bridge RTCVideoDecoderH264 *)decoderRef;
- [decoder setError:status];
- RTC_LOG(LS_ERROR) << "Failed to decode frame. Status: " << status;
- return;
- }
- // TODO(tkchin): Handle CVO properly.
- RTCCVPixelBuffer *frameBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:imageBuffer];
- RTCVideoFrame *decodedFrame =
- [[RTCVideoFrame alloc] initWithBuffer:frameBuffer
- rotation:RTCVideoRotation_0
- timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec];
- decodedFrame.timeStamp = decodeParams->timestamp;
- decodeParams->callback(decodedFrame);
-}
-
-// Decoder.
-@implementation RTCVideoDecoderH264 {
- CMVideoFormatDescriptionRef _videoFormat;
- VTDecompressionSessionRef _decompressionSession;
- RTCVideoDecoderCallback _callback;
- OSStatus _error;
-}
-
-- (instancetype)init {
- if (self = [super init]) {
-#if defined(WEBRTC_IOS) && !defined(RTC_APPRTCMOBILE_BROADCAST_EXTENSION)
- [RTCUIApplicationStatusObserver prepareForUse];
- _error = noErr;
-#endif
- }
-
- return self;
-}
-
-- (void)dealloc {
- [self destroyDecompressionSession];
- [self setVideoFormat:nullptr];
-}
-
-- (NSInteger)startDecodeWithNumberOfCores:(int)numberOfCores {
- return WEBRTC_VIDEO_CODEC_OK;
-}
-
-- (NSInteger)startDecodeWithSettings:(RTCVideoEncoderSettings *)settings
- numberOfCores:(int)numberOfCores {
- return WEBRTC_VIDEO_CODEC_OK;
-}
-
-- (NSInteger)decode:(RTCEncodedImage *)inputImage
- missingFrames:(BOOL)missingFrames
- codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
- renderTimeMs:(int64_t)renderTimeMs {
- RTC_DCHECK(inputImage.buffer);
-
- if (_error != noErr) {
- RTC_LOG(LS_WARNING) << "Last frame decode failed.";
- _error = noErr;
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
-
-#if defined(WEBRTC_IOS) && !defined(RTC_APPRTCMOBILE_BROADCAST_EXTENSION)
- if (![[RTCUIApplicationStatusObserver sharedInstance] isApplicationActive]) {
- // Ignore all decode requests when app isn't active. In this state, the
- // hardware decoder has been invalidated by the OS.
- // Reset video format so that we won't process frames until the next
- // keyframe.
- [self setVideoFormat:nullptr];
- return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
- }
-#endif
- rtc::ScopedCFTypeRef<CMVideoFormatDescriptionRef> inputFormat =
- rtc::ScopedCF(webrtc::CreateVideoFormatDescription((uint8_t *)inputImage.buffer.bytes,
- inputImage.buffer.length));
- if (inputFormat) {
- // Check if the video format has changed, and reinitialize decoder if
- // needed.
- if (!CMFormatDescriptionEqual(inputFormat.get(), _videoFormat)) {
- [self setVideoFormat:inputFormat.get()];
- int resetDecompressionSessionError = [self resetDecompressionSession];
- if (resetDecompressionSessionError != WEBRTC_VIDEO_CODEC_OK) {
- return resetDecompressionSessionError;
- }
- }
- }
- if (!_videoFormat) {
- // We received a frame but we don't have format information so we can't
- // decode it.
- // This can happen after backgrounding. We need to wait for the next
- // sps/pps before we can resume so we request a keyframe by returning an
- // error.
- RTC_LOG(LS_WARNING) << "Missing video format. Frame with sps/pps required.";
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
- CMSampleBufferRef sampleBuffer = nullptr;
- if (!webrtc::H264AnnexBBufferToCMSampleBuffer((uint8_t *)inputImage.buffer.bytes,
- inputImage.buffer.length,
- _videoFormat,
- &sampleBuffer)) {
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
- RTC_DCHECK(sampleBuffer);
- VTDecodeFrameFlags decodeFlags = kVTDecodeFrame_EnableAsynchronousDecompression;
- std::unique_ptr<RTCFrameDecodeParams> frameDecodeParams;
- frameDecodeParams.reset(new RTCFrameDecodeParams(_callback, inputImage.timeStamp));
- OSStatus status = VTDecompressionSessionDecodeFrame(
- _decompressionSession, sampleBuffer, decodeFlags, frameDecodeParams.release(), nullptr);
-#if defined(WEBRTC_IOS)
- // Re-initialize the decoder if we have an invalid session while the app is
- // active and retry the decode request.
- if (status == kVTInvalidSessionErr && [self resetDecompressionSession] == WEBRTC_VIDEO_CODEC_OK) {
- frameDecodeParams.reset(new RTCFrameDecodeParams(_callback, inputImage.timeStamp));
- status = VTDecompressionSessionDecodeFrame(
- _decompressionSession, sampleBuffer, decodeFlags, frameDecodeParams.release(), nullptr);
- }
-#endif
- CFRelease(sampleBuffer);
- if (status != noErr) {
- RTC_LOG(LS_ERROR) << "Failed to decode frame with code: " << status;
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
- return WEBRTC_VIDEO_CODEC_OK;
-}
-
-- (void)setCallback:(RTCVideoDecoderCallback)callback {
- _callback = callback;
-}
-
-- (void)setError:(OSStatus)error {
- _error = error;
-}
-
-- (NSInteger)releaseDecoder {
- // Need to invalidate the session so that callbacks no longer occur and it
- // is safe to null out the callback.
- [self destroyDecompressionSession];
- [self setVideoFormat:nullptr];
- _callback = nullptr;
- return WEBRTC_VIDEO_CODEC_OK;
-}
-
-#pragma mark - Private
-
-- (int)resetDecompressionSession {
- [self destroyDecompressionSession];
-
- // Need to wait for the first SPS to initialize decoder.
- if (!_videoFormat) {
- return WEBRTC_VIDEO_CODEC_OK;
- }
-
- // Set keys for OpenGL and IOSurface compatibility, which makes the decoder
- // create pixel buffers with GPU backed memory. The intent here is to pass
- // the pixel buffers directly so we avoid a texture upload later during
- // rendering. This is currently moot because we are converting back to an
- // I420 frame after decode, but eventually we will be able to plumb
- // CVPixelBuffers directly to the renderer.
- // TODO(tkchin): Maybe only set OpenGL/IOSurface keys if we know that we can
- // pass CVPixelBuffers as native handles in decoder output.
- static size_t const attributesSize = 3;
- CFTypeRef keys[attributesSize] = {
-#if defined(WEBRTC_IOS)
- kCVPixelBufferOpenGLESCompatibilityKey,
-#elif defined(WEBRTC_MAC)
- kCVPixelBufferOpenGLCompatibilityKey,
-#endif
- kCVPixelBufferIOSurfacePropertiesKey,
- kCVPixelBufferPixelFormatTypeKey
- };
- CFDictionaryRef ioSurfaceValue = CreateCFTypeDictionary(nullptr, nullptr, 0);
- int64_t nv12type = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
- CFNumberRef pixelFormat = CFNumberCreate(nullptr, kCFNumberLongType, &nv12type);
- CFTypeRef values[attributesSize] = {kCFBooleanTrue, ioSurfaceValue, pixelFormat};
- CFDictionaryRef attributes = CreateCFTypeDictionary(keys, values, attributesSize);
- if (ioSurfaceValue) {
- CFRelease(ioSurfaceValue);
- ioSurfaceValue = nullptr;
- }
- if (pixelFormat) {
- CFRelease(pixelFormat);
- pixelFormat = nullptr;
- }
- VTDecompressionOutputCallbackRecord record = {
- decompressionOutputCallback, (__bridge void *)self,
- };
- OSStatus status = VTDecompressionSessionCreate(
- nullptr, _videoFormat, nullptr, attributes, &record, &_decompressionSession);
- CFRelease(attributes);
- if (status != noErr) {
- RTC_LOG(LS_ERROR) << "Failed to create decompression session: " << status;
- [self destroyDecompressionSession];
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
- [self configureDecompressionSession];
-
- return WEBRTC_VIDEO_CODEC_OK;
-}
-
-- (void)configureDecompressionSession {
- RTC_DCHECK(_decompressionSession);
-#if defined(WEBRTC_IOS)
- VTSessionSetProperty(_decompressionSession, kVTDecompressionPropertyKey_RealTime, kCFBooleanTrue);
-#endif
-}
-
-- (void)destroyDecompressionSession {
- if (_decompressionSession) {
-#if defined(WEBRTC_IOS)
- if ([UIDevice isIOS11OrLater]) {
- VTDecompressionSessionWaitForAsynchronousFrames(_decompressionSession);
- }
-#endif
- VTDecompressionSessionInvalidate(_decompressionSession);
- CFRelease(_decompressionSession);
- _decompressionSession = nullptr;
- }
-}
-
-- (void)setVideoFormat:(CMVideoFormatDescriptionRef)videoFormat {
- if (_videoFormat == videoFormat) {
- return;
- }
- if (_videoFormat) {
- CFRelease(_videoFormat);
- }
- _videoFormat = videoFormat;
- if (_videoFormat) {
- CFRetain(_videoFormat);
- }
-}
-
-- (NSString *)implementationName {
- return @"VideoToolbox";
-}
-
-@end
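One pattern in the decoder above worth spelling out is how per-frame state crosses the C callback boundary: a heap-allocated RTCFrameDecodeParams is handed to VTDecompressionSessionDecodeFrame as a raw void* via unique_ptr::release(), and decompressionOutputCallback reclaims it into a fresh unique_ptr so it is freed exactly once. A stripped-down sketch of the same idiom, with HypotheticalAsyncDecode standing in for the VideoToolbox call:

  #include <cstdint>
  #include <memory>

  struct FrameContext {
    int64_t rtp_timestamp;
  };

  // Assumed asynchronous API taking a plain function pointer and an opaque
  // context; VTDecompressionSessionDecodeFrame plays this role above.
  void HypotheticalAsyncDecode(void (*callback)(void *), void *opaque);

  void OnDecoded(void *opaque) {
    // Reclaim ownership; the context is deleted when this unique_ptr goes out
    // of scope, whether or not the decode succeeded.
    std::unique_ptr<FrameContext> context(static_cast<FrameContext *>(opaque));
    // ... forward context->rtp_timestamp along with the decoded frame ...
  }

  void SubmitFrame(int64_t rtp_timestamp) {
    std::unique_ptr<FrameContext> context(new FrameContext{rtp_timestamp});
    // The async API only takes a raw pointer, so ownership is handed over with
    // release(); the callback above is then responsible for deleting it.
    HypotheticalAsyncDecode(&OnDecoded, context.release());
  }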
diff --git a/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm b/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
deleted file mode 100644
index c167e54..0000000
--- a/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
+++ /dev/null
@@ -1,766 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- *
- */
-
-#import "WebRTC/RTCVideoCodecH264.h"
-
-#import <VideoToolbox/VideoToolbox.h>
-#include <vector>
-
-#if defined(WEBRTC_IOS)
-#import "Common/RTCUIApplicationStatusObserver.h"
-#import "WebRTC/UIDevice+RTCDevice.h"
-#endif
-#import "PeerConnection/RTCVideoCodec+Private.h"
-#import "WebRTC/RTCVideoCodec.h"
-#import "WebRTC/RTCVideoFrame.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
-#include "common_video/h264/h264_bitstream_parser.h"
-#include "common_video/h264/profile_level_id.h"
-#include "common_video/include/bitrate_adjuster.h"
-#import "helpers.h"
-#include "modules/include/module_common_types.h"
-#include "modules/video_coding/include/video_error_codes.h"
-#include "rtc_base/buffer.h"
-#include "rtc_base/logging.h"
-#include "rtc_base/timeutils.h"
-#include "sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h"
-#include "third_party/libyuv/include/libyuv/convert_from.h"
-
-@interface RTCVideoEncoderH264 ()
-
-- (void)frameWasEncoded:(OSStatus)status
- flags:(VTEncodeInfoFlags)infoFlags
- sampleBuffer:(CMSampleBufferRef)sampleBuffer
- codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
- width:(int32_t)width
- height:(int32_t)height
- renderTimeMs:(int64_t)renderTimeMs
- timestamp:(uint32_t)timestamp
- rotation:(RTCVideoRotation)rotation;
-
-@end
-
-namespace { // anonymous namespace
-
-// The ratio between kVTCompressionPropertyKey_DataRateLimits and
-// kVTCompressionPropertyKey_AverageBitRate. The data rate limit is set higher
-// than the average bit rate to avoid undershooting the target.
-const float kLimitToAverageBitRateFactor = 1.5f;
-// These thresholds deviate from the default h264 QP thresholds, as they
-// have been found to work better on devices that support VideoToolbox
-const int kLowH264QpThreshold = 28;
-const int kHighH264QpThreshold = 39;
-
-const OSType kNV12PixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
-
-// Struct that we pass to the encoder per frame to encode. We receive it again
-// in the encoder callback.
-struct RTCFrameEncodeParams {
- RTCFrameEncodeParams(RTCVideoEncoderH264 *e,
- RTCCodecSpecificInfoH264 *csi,
- int32_t w,
- int32_t h,
- int64_t rtms,
- uint32_t ts,
- RTCVideoRotation r)
- : encoder(e), width(w), height(h), render_time_ms(rtms), timestamp(ts), rotation(r) {
- if (csi) {
- codecSpecificInfo = csi;
- } else {
- codecSpecificInfo = [[RTCCodecSpecificInfoH264 alloc] init];
- }
- }
-
- RTCVideoEncoderH264 *encoder;
- RTCCodecSpecificInfoH264 *codecSpecificInfo;
- int32_t width;
- int32_t height;
- int64_t render_time_ms;
- uint32_t timestamp;
- RTCVideoRotation rotation;
-};
-
-// We receive I420Frames as input, but we need to feed CVPixelBuffers into the
-// encoder. This performs the copy and format conversion.
-// TODO(tkchin): See if encoder will accept i420 frames and compare performance.
-bool CopyVideoFrameToNV12PixelBuffer(id<RTCI420Buffer> frameBuffer, CVPixelBufferRef pixelBuffer) {
- RTC_DCHECK(pixelBuffer);
- RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(pixelBuffer), kNV12PixelFormat);
- RTC_DCHECK_EQ(CVPixelBufferGetHeightOfPlane(pixelBuffer, 0), frameBuffer.height);
- RTC_DCHECK_EQ(CVPixelBufferGetWidthOfPlane(pixelBuffer, 0), frameBuffer.width);
-
- CVReturn cvRet = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
- if (cvRet != kCVReturnSuccess) {
- RTC_LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
- return false;
- }
- uint8_t *dstY = reinterpret_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
- int dstStrideY = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
- uint8_t *dstUV = reinterpret_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
- int dstStrideUV = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
- // Convert I420 to NV12.
- int ret = libyuv::I420ToNV12(frameBuffer.dataY,
- frameBuffer.strideY,
- frameBuffer.dataU,
- frameBuffer.strideU,
- frameBuffer.dataV,
- frameBuffer.strideV,
- dstY,
- dstStrideY,
- dstUV,
- dstStrideUV,
- frameBuffer.width,
- frameBuffer.height);
- CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
- if (ret) {
- RTC_LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12 :" << ret;
- return false;
- }
- return true;
-}
-
-CVPixelBufferRef CreatePixelBuffer(CVPixelBufferPoolRef pixel_buffer_pool) {
- if (!pixel_buffer_pool) {
- RTC_LOG(LS_ERROR) << "Failed to get pixel buffer pool.";
- return nullptr;
- }
- CVPixelBufferRef pixel_buffer;
- CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nullptr, pixel_buffer_pool, &pixel_buffer);
- if (ret != kCVReturnSuccess) {
- RTC_LOG(LS_ERROR) << "Failed to create pixel buffer: " << ret;
- // We probably want to drop frames here, since failure probably means
- // that the pool is empty.
- return nullptr;
- }
- return pixel_buffer;
-}
-
-// This is the callback function that VideoToolbox calls when encode is
-// complete. From inspection this happens on its own queue.
-void compressionOutputCallback(void *encoder,
- void *params,
- OSStatus status,
- VTEncodeInfoFlags infoFlags,
- CMSampleBufferRef sampleBuffer) {
- if (!params) {
- // If there are pending callbacks when the encoder is destroyed, this can happen.
- return;
- }
- std::unique_ptr<RTCFrameEncodeParams> encodeParams(
- reinterpret_cast<RTCFrameEncodeParams *>(params));
- [encodeParams->encoder frameWasEncoded:status
- flags:infoFlags
- sampleBuffer:sampleBuffer
- codecSpecificInfo:encodeParams->codecSpecificInfo
- width:encodeParams->width
- height:encodeParams->height
- renderTimeMs:encodeParams->render_time_ms
- timestamp:encodeParams->timestamp
- rotation:encodeParams->rotation];
-}
-
-// Extract VideoToolbox profile out of the webrtc::SdpVideoFormat. If there is
-// no specific VideoToolbox profile for the specified level, AutoLevel will be
-// returned. The user must initialize the encoder with a resolution and
-// framerate conforming to the selected H264 level regardless.
-CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
- const absl::optional<webrtc::H264::ProfileLevelId> profile_level_id =
- webrtc::H264::ParseSdpProfileLevelId(videoFormat.parameters);
- RTC_DCHECK(profile_level_id);
- switch (profile_level_id->profile) {
- case webrtc::H264::kProfileConstrainedBaseline:
- case webrtc::H264::kProfileBaseline:
- switch (profile_level_id->level) {
- case webrtc::H264::kLevel3:
- return kVTProfileLevel_H264_Baseline_3_0;
- case webrtc::H264::kLevel3_1:
- return kVTProfileLevel_H264_Baseline_3_1;
- case webrtc::H264::kLevel3_2:
- return kVTProfileLevel_H264_Baseline_3_2;
- case webrtc::H264::kLevel4:
- return kVTProfileLevel_H264_Baseline_4_0;
- case webrtc::H264::kLevel4_1:
- return kVTProfileLevel_H264_Baseline_4_1;
- case webrtc::H264::kLevel4_2:
- return kVTProfileLevel_H264_Baseline_4_2;
- case webrtc::H264::kLevel5:
- return kVTProfileLevel_H264_Baseline_5_0;
- case webrtc::H264::kLevel5_1:
- return kVTProfileLevel_H264_Baseline_5_1;
- case webrtc::H264::kLevel5_2:
- return kVTProfileLevel_H264_Baseline_5_2;
- case webrtc::H264::kLevel1:
- case webrtc::H264::kLevel1_b:
- case webrtc::H264::kLevel1_1:
- case webrtc::H264::kLevel1_2:
- case webrtc::H264::kLevel1_3:
- case webrtc::H264::kLevel2:
- case webrtc::H264::kLevel2_1:
- case webrtc::H264::kLevel2_2:
- return kVTProfileLevel_H264_Baseline_AutoLevel;
- }
-
- case webrtc::H264::kProfileMain:
- switch (profile_level_id->level) {
- case webrtc::H264::kLevel3:
- return kVTProfileLevel_H264_Main_3_0;
- case webrtc::H264::kLevel3_1:
- return kVTProfileLevel_H264_Main_3_1;
- case webrtc::H264::kLevel3_2:
- return kVTProfileLevel_H264_Main_3_2;
- case webrtc::H264::kLevel4:
- return kVTProfileLevel_H264_Main_4_0;
- case webrtc::H264::kLevel4_1:
- return kVTProfileLevel_H264_Main_4_1;
- case webrtc::H264::kLevel4_2:
- return kVTProfileLevel_H264_Main_4_2;
- case webrtc::H264::kLevel5:
- return kVTProfileLevel_H264_Main_5_0;
- case webrtc::H264::kLevel5_1:
- return kVTProfileLevel_H264_Main_5_1;
- case webrtc::H264::kLevel5_2:
- return kVTProfileLevel_H264_Main_5_2;
- case webrtc::H264::kLevel1:
- case webrtc::H264::kLevel1_b:
- case webrtc::H264::kLevel1_1:
- case webrtc::H264::kLevel1_2:
- case webrtc::H264::kLevel1_3:
- case webrtc::H264::kLevel2:
- case webrtc::H264::kLevel2_1:
- case webrtc::H264::kLevel2_2:
- return kVTProfileLevel_H264_Main_AutoLevel;
- }
-
- case webrtc::H264::kProfileConstrainedHigh:
- case webrtc::H264::kProfileHigh:
- switch (profile_level_id->level) {
- case webrtc::H264::kLevel3:
- return kVTProfileLevel_H264_High_3_0;
- case webrtc::H264::kLevel3_1:
- return kVTProfileLevel_H264_High_3_1;
- case webrtc::H264::kLevel3_2:
- return kVTProfileLevel_H264_High_3_2;
- case webrtc::H264::kLevel4:
- return kVTProfileLevel_H264_High_4_0;
- case webrtc::H264::kLevel4_1:
- return kVTProfileLevel_H264_High_4_1;
- case webrtc::H264::kLevel4_2:
- return kVTProfileLevel_H264_High_4_2;
- case webrtc::H264::kLevel5:
- return kVTProfileLevel_H264_High_5_0;
- case webrtc::H264::kLevel5_1:
- return kVTProfileLevel_H264_High_5_1;
- case webrtc::H264::kLevel5_2:
- return kVTProfileLevel_H264_High_5_2;
- case webrtc::H264::kLevel1:
- case webrtc::H264::kLevel1_b:
- case webrtc::H264::kLevel1_1:
- case webrtc::H264::kLevel1_2:
- case webrtc::H264::kLevel1_3:
- case webrtc::H264::kLevel2:
- case webrtc::H264::kLevel2_1:
- case webrtc::H264::kLevel2_2:
- return kVTProfileLevel_H264_High_AutoLevel;
- }
- }
-}
-} // namespace
-
-@implementation RTCVideoEncoderH264 {
- RTCVideoCodecInfo *_codecInfo;
- std::unique_ptr<webrtc::BitrateAdjuster> _bitrateAdjuster;
- uint32_t _targetBitrateBps;
- uint32_t _encoderBitrateBps;
- RTCH264PacketizationMode _packetizationMode;
- CFStringRef _profile;
- RTCVideoEncoderCallback _callback;
- int32_t _width;
- int32_t _height;
- VTCompressionSessionRef _compressionSession;
- CVPixelBufferPoolRef _pixelBufferPool;
- RTCVideoCodecMode _mode;
-
- webrtc::H264BitstreamParser _h264BitstreamParser;
- std::vector<uint8_t> _frameScaleBuffer;
-}
-
-// .5 is set as a minimum to prevent overcompensating for large temporary
-// overshoots. We don't want to degrade video quality too badly.
-// .95 is set to prevent oscillations. When the encoder is given a lower
-// bitrate than it previously had, its output seems to have a brief period of
-// drastically reduced bitrate, so we want to avoid that. In steady state
-// conditions, 0.95 seems to give us better overall bitrate over long periods
-// of time.
-- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo {
- if (self = [super init]) {
- _codecInfo = codecInfo;
- _bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95));
- _packetizationMode = RTCH264PacketizationModeNonInterleaved;
- _profile = ExtractProfile([codecInfo nativeSdpVideoFormat]);
- RTC_LOG(LS_INFO) << "Using profile " << CFStringToString(_profile);
- RTC_CHECK([codecInfo.name isEqualToString:kRTCVideoCodecH264Name]);
-
-#if defined(WEBRTC_IOS) && !defined(RTC_APPRTCMOBILE_BROADCAST_EXTENSION)
- [RTCUIApplicationStatusObserver prepareForUse];
-#endif
- }
- return self;
-}
-
-- (void)dealloc {
- [self destroyCompressionSession];
-}
-
-- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings
- numberOfCores:(int)numberOfCores {
- RTC_DCHECK(settings);
- RTC_DCHECK([settings.name isEqualToString:kRTCVideoCodecH264Name]);
-
- _width = settings.width;
- _height = settings.height;
- _mode = settings.mode;
-
- // We can only set average bitrate on the HW encoder.
- _targetBitrateBps = settings.startBitrate * 1000; // startBitrate is in kbps.
- _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps);
-
- // TODO(tkchin): Try setting payload size via
- // kVTCompressionPropertyKey_MaxH264SliceBytes.
-
- return [self resetCompressionSessionWithPixelFormat:kNV12PixelFormat];
-}
-
-- (NSInteger)encode:(RTCVideoFrame *)frame
- codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)codecSpecificInfo
- frameTypes:(NSArray<NSNumber *> *)frameTypes {
- RTC_DCHECK_EQ(frame.width, _width);
- RTC_DCHECK_EQ(frame.height, _height);
- if (!_callback || !_compressionSession) {
- return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
- }
-#if defined(WEBRTC_IOS) && !defined(RTC_APPRTCMOBILE_BROADCAST_EXTENSION)
- if (![[RTCUIApplicationStatusObserver sharedInstance] isApplicationActive]) {
- // Ignore all encode requests when app isn't active. In this state, the
- // hardware encoder has been invalidated by the OS.
- return WEBRTC_VIDEO_CODEC_OK;
- }
-#endif
- BOOL isKeyframeRequired = NO;
-
- // Get a pixel buffer from the pool and copy frame data over.
- if ([self resetCompressionSessionIfNeededWithFrame:frame]) {
- isKeyframeRequired = YES;
- }
-
- CVPixelBufferRef pixelBuffer = nullptr;
- if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
- // Native frame buffer
- RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
- if (![rtcPixelBuffer requiresCropping]) {
-      // This pixel buffer might have a higher resolution than what the
-      // compression session is configured for. The compression session can
- // handle that and will output encoded frames in the configured
- // resolution regardless of the input pixel buffer resolution.
- pixelBuffer = rtcPixelBuffer.pixelBuffer;
- CVBufferRetain(pixelBuffer);
- } else {
- // Cropping required, we need to crop and scale to a new pixel buffer.
- pixelBuffer = CreatePixelBuffer(_pixelBufferPool);
- if (!pixelBuffer) {
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
- int dstWidth = CVPixelBufferGetWidth(pixelBuffer);
- int dstHeight = CVPixelBufferGetHeight(pixelBuffer);
- if ([rtcPixelBuffer requiresScalingToWidth:dstWidth height:dstHeight]) {
- int size =
- [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth height:dstHeight];
- _frameScaleBuffer.resize(size);
- } else {
- _frameScaleBuffer.clear();
- }
- _frameScaleBuffer.shrink_to_fit();
- if (![rtcPixelBuffer cropAndScaleTo:pixelBuffer withTempBuffer:_frameScaleBuffer.data()]) {
- CVBufferRelease(pixelBuffer);
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
- }
- }
-
- if (!pixelBuffer) {
- // We did not have a native frame buffer
- pixelBuffer = CreatePixelBuffer(_pixelBufferPool);
- if (!pixelBuffer) {
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
- RTC_DCHECK(pixelBuffer);
- if (!CopyVideoFrameToNV12PixelBuffer([frame.buffer toI420], pixelBuffer)) {
- RTC_LOG(LS_ERROR) << "Failed to copy frame data.";
- CVBufferRelease(pixelBuffer);
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
- }
-
- // Check if we need a keyframe.
- if (!isKeyframeRequired && frameTypes) {
- for (NSNumber *frameType in frameTypes) {
- if ((RTCFrameType)frameType.intValue == RTCFrameTypeVideoFrameKey) {
- isKeyframeRequired = YES;
- break;
- }
- }
- }
-
- CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000);
- CFDictionaryRef frameProperties = nullptr;
- if (isKeyframeRequired) {
- CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame};
- CFTypeRef values[] = {kCFBooleanTrue};
- frameProperties = CreateCFTypeDictionary(keys, values, 1);
- }
-
- std::unique_ptr<RTCFrameEncodeParams> encodeParams;
- encodeParams.reset(new RTCFrameEncodeParams(self,
- codecSpecificInfo,
- _width,
- _height,
- frame.timeStampNs / rtc::kNumNanosecsPerMillisec,
- frame.timeStamp,
- frame.rotation));
- encodeParams->codecSpecificInfo.packetizationMode = _packetizationMode;
-
- // Update the bitrate if needed.
- [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps()];
-
- OSStatus status = VTCompressionSessionEncodeFrame(_compressionSession,
- pixelBuffer,
- presentationTimeStamp,
- kCMTimeInvalid,
- frameProperties,
- encodeParams.release(),
- nullptr);
- if (frameProperties) {
- CFRelease(frameProperties);
- }
- if (pixelBuffer) {
- CVBufferRelease(pixelBuffer);
- }
-
- if (status == kVTInvalidSessionErr) {
- // This error occurs when entering foreground after backgrounding the app.
- RTC_LOG(LS_ERROR) << "Invalid compression session, resetting.";
- [self resetCompressionSessionWithPixelFormat:[self pixelFormatOfFrame:frame]];
-
- return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
- } else if (status != noErr) {
- RTC_LOG(LS_ERROR) << "Failed to encode frame with code: " << status;
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
- return WEBRTC_VIDEO_CODEC_OK;
-}
-
-- (void)setCallback:(RTCVideoEncoderCallback)callback {
- _callback = callback;
-}
-
-- (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate {
- _targetBitrateBps = 1000 * bitrateKbit;
- _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps);
- [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps()];
- return WEBRTC_VIDEO_CODEC_OK;
-}
-
-#pragma mark - Private
-
-- (NSInteger)releaseEncoder {
- // Need to destroy so that the session is invalidated and won't use the
- // callback anymore. Do not remove callback until the session is invalidated
- // since async encoder callbacks can occur until invalidation.
- [self destroyCompressionSession];
- _callback = nullptr;
- return WEBRTC_VIDEO_CODEC_OK;
-}
-
-- (OSType)pixelFormatOfFrame:(RTCVideoFrame *)frame {
- // Use NV12 for non-native frames.
- if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
- RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
- return CVPixelBufferGetPixelFormatType(rtcPixelBuffer.pixelBuffer);
- }
-
- return kNV12PixelFormat;
-}
-
-- (BOOL)resetCompressionSessionIfNeededWithFrame:(RTCVideoFrame *)frame {
- BOOL resetCompressionSession = NO;
-
- // If we're capturing native frames in another pixel format than the compression session is
- // configured with, make sure the compression session is reset using the correct pixel format.
- OSType framePixelFormat = [self pixelFormatOfFrame:frame];
-
- if (_compressionSession) {
- // The pool attribute `kCVPixelBufferPixelFormatTypeKey` can contain either an array of pixel
- // formats or a single pixel format.
- NSDictionary *poolAttributes =
- (__bridge NSDictionary *)CVPixelBufferPoolGetPixelBufferAttributes(_pixelBufferPool);
- id pixelFormats =
- [poolAttributes objectForKey:(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey];
- NSArray<NSNumber *> *compressionSessionPixelFormats = nil;
- if ([pixelFormats isKindOfClass:[NSArray class]]) {
- compressionSessionPixelFormats = (NSArray *)pixelFormats;
- } else if ([pixelFormats isKindOfClass:[NSNumber class]]) {
- compressionSessionPixelFormats = @[ (NSNumber *)pixelFormats ];
- }
-
- if (![compressionSessionPixelFormats
- containsObject:[NSNumber numberWithLong:framePixelFormat]]) {
- resetCompressionSession = YES;
- RTC_LOG(LS_INFO) << "Resetting compression session due to non-matching pixel format.";
- }
- } else {
- resetCompressionSession = YES;
- }
-
- if (resetCompressionSession) {
- [self resetCompressionSessionWithPixelFormat:framePixelFormat];
- }
- return resetCompressionSession;
-}
-
-- (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat {
- [self destroyCompressionSession];
-
- // Set source image buffer attributes. These attributes will be present on
- // buffers retrieved from the encoder's pixel buffer pool.
- const size_t attributesSize = 3;
- CFTypeRef keys[attributesSize] = {
-#if defined(WEBRTC_IOS)
- kCVPixelBufferOpenGLESCompatibilityKey,
-#elif defined(WEBRTC_MAC)
- kCVPixelBufferOpenGLCompatibilityKey,
-#endif
- kCVPixelBufferIOSurfacePropertiesKey,
- kCVPixelBufferPixelFormatTypeKey
- };
- CFDictionaryRef ioSurfaceValue = CreateCFTypeDictionary(nullptr, nullptr, 0);
- int64_t pixelFormatType = framePixelFormat;
- CFNumberRef pixelFormat = CFNumberCreate(nullptr, kCFNumberLongType, &pixelFormatType);
- CFTypeRef values[attributesSize] = {kCFBooleanTrue, ioSurfaceValue, pixelFormat};
- CFDictionaryRef sourceAttributes = CreateCFTypeDictionary(keys, values, attributesSize);
- if (ioSurfaceValue) {
- CFRelease(ioSurfaceValue);
- ioSurfaceValue = nullptr;
- }
- if (pixelFormat) {
- CFRelease(pixelFormat);
- pixelFormat = nullptr;
- }
- CFMutableDictionaryRef encoder_specs = nullptr;
-#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
-  // Currently hw accl is supported above 360p on Mac; below 360p the
-  // compression session will be created with hw accl disabled.
- encoder_specs = CFDictionaryCreateMutable(
- nullptr, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
- CFDictionarySetValue(encoder_specs,
- kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder,
- kCFBooleanTrue);
-#endif
- OSStatus status =
- VTCompressionSessionCreate(nullptr, // use default allocator
- _width,
- _height,
- kCMVideoCodecType_H264,
- encoder_specs, // use hardware accelerated encoder if available
- sourceAttributes,
- nullptr, // use default compressed data allocator
- compressionOutputCallback,
- nullptr,
- &_compressionSession);
- if (sourceAttributes) {
- CFRelease(sourceAttributes);
- sourceAttributes = nullptr;
- }
- if (encoder_specs) {
- CFRelease(encoder_specs);
- encoder_specs = nullptr;
- }
- if (status != noErr) {
- RTC_LOG(LS_ERROR) << "Failed to create compression session: " << status;
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
-#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
- CFBooleanRef hwaccl_enabled = nullptr;
- status = VTSessionCopyProperty(_compressionSession,
- kVTCompressionPropertyKey_UsingHardwareAcceleratedVideoEncoder,
- nullptr,
- &hwaccl_enabled);
- if (status == noErr && (CFBooleanGetValue(hwaccl_enabled))) {
- RTC_LOG(LS_INFO) << "Compression session created with hw accl enabled";
- } else {
- RTC_LOG(LS_INFO) << "Compression session created with hw accl disabled";
- }
-#endif
- [self configureCompressionSession];
-
- // The pixel buffer pool is dependent on the compression session so if the session is reset, the
- // pool should be reset as well.
- _pixelBufferPool = VTCompressionSessionGetPixelBufferPool(_compressionSession);
-
- return WEBRTC_VIDEO_CODEC_OK;
-}
-
-- (void)configureCompressionSession {
- RTC_DCHECK(_compressionSession);
- SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, true);
- SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_ProfileLevel, _profile);
- SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, false);
- [self setEncoderBitrateBps:_targetBitrateBps];
- // TODO(tkchin): Look at entropy mode and colorspace matrices.
- // TODO(tkchin): Investigate to see if there's any way to make this work.
- // May need it to interop with Android. Currently this call just fails.
- // On inspecting encoder output on iOS8, this value is set to 6.
- // internal::SetVTSessionProperty(compression_session_,
- // kVTCompressionPropertyKey_MaxFrameDelayCount,
- // 1);
-
- // Set a relatively large value for keyframe emission (7200 frames or 4 minutes).
- SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, 7200);
- SetVTSessionProperty(
- _compressionSession, kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration, 240);
-}
-
-- (void)destroyCompressionSession {
- if (_compressionSession) {
- VTCompressionSessionInvalidate(_compressionSession);
- CFRelease(_compressionSession);
- _compressionSession = nullptr;
- _pixelBufferPool = nullptr;
- }
-}
-
-- (NSString *)implementationName {
- return @"VideoToolbox";
-}
-
-- (void)setBitrateBps:(uint32_t)bitrateBps {
- if (_encoderBitrateBps != bitrateBps) {
- [self setEncoderBitrateBps:bitrateBps];
- }
-}
-
-- (void)setEncoderBitrateBps:(uint32_t)bitrateBps {
- if (_compressionSession) {
- SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AverageBitRate, bitrateBps);
-
- // TODO(tkchin): Add a helper method to set array value.
- int64_t dataLimitBytesPerSecondValue =
- static_cast<int64_t>(bitrateBps * kLimitToAverageBitRateFactor / 8);
- CFNumberRef bytesPerSecond =
- CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &dataLimitBytesPerSecondValue);
- int64_t oneSecondValue = 1;
- CFNumberRef oneSecond =
- CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &oneSecondValue);
- const void *nums[2] = {bytesPerSecond, oneSecond};
- CFArrayRef dataRateLimits = CFArrayCreate(nullptr, nums, 2, &kCFTypeArrayCallBacks);
- OSStatus status = VTSessionSetProperty(
- _compressionSession, kVTCompressionPropertyKey_DataRateLimits, dataRateLimits);
- if (bytesPerSecond) {
- CFRelease(bytesPerSecond);
- }
- if (oneSecond) {
- CFRelease(oneSecond);
- }
- if (dataRateLimits) {
- CFRelease(dataRateLimits);
- }
- if (status != noErr) {
- RTC_LOG(LS_ERROR) << "Failed to set data rate limit with code: " << status;
- }
-
- _encoderBitrateBps = bitrateBps;
- }
-}
-
-- (void)frameWasEncoded:(OSStatus)status
- flags:(VTEncodeInfoFlags)infoFlags
- sampleBuffer:(CMSampleBufferRef)sampleBuffer
- codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
- width:(int32_t)width
- height:(int32_t)height
- renderTimeMs:(int64_t)renderTimeMs
- timestamp:(uint32_t)timestamp
- rotation:(RTCVideoRotation)rotation {
- if (status != noErr) {
- RTC_LOG(LS_ERROR) << "H264 encode failed with code: " << status;
- return;
- }
- if (infoFlags & kVTEncodeInfo_FrameDropped) {
- RTC_LOG(LS_INFO) << "H264 encode dropped frame.";
- return;
- }
-
- BOOL isKeyframe = NO;
- CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, 0);
- if (attachments != nullptr && CFArrayGetCount(attachments)) {
- CFDictionaryRef attachment =
- static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(attachments, 0));
- isKeyframe = !CFDictionaryContainsKey(attachment, kCMSampleAttachmentKey_NotSync);
- }
-
- if (isKeyframe) {
- RTC_LOG(LS_INFO) << "Generated keyframe";
- }
-
- // Convert the sample buffer into a buffer suitable for RTP packetization.
- // TODO(tkchin): Allocate buffers through a pool.
- std::unique_ptr<rtc::Buffer> buffer(new rtc::Buffer());
- RTCRtpFragmentationHeader *header;
- {
- std::unique_ptr<webrtc::RTPFragmentationHeader> header_cpp;
- bool result =
- H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get(), &header_cpp);
- header = [[RTCRtpFragmentationHeader alloc] initWithNativeFragmentationHeader:header_cpp.get()];
- if (!result) {
- return;
- }
- }
-
- RTCEncodedImage *frame = [[RTCEncodedImage alloc] init];
- frame.buffer = [NSData dataWithBytesNoCopy:buffer->data() length:buffer->size() freeWhenDone:NO];
- frame.encodedWidth = width;
- frame.encodedHeight = height;
- frame.completeFrame = YES;
- frame.frameType = isKeyframe ? RTCFrameTypeVideoFrameKey : RTCFrameTypeVideoFrameDelta;
- frame.captureTimeMs = renderTimeMs;
- frame.timeStamp = timestamp;
- frame.rotation = rotation;
- frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? RTCVideoContentTypeScreenshare :
- RTCVideoContentTypeUnspecified;
- frame.flags = webrtc::VideoSendTiming::kInvalid;
-
- int qp;
- _h264BitstreamParser.ParseBitstream(buffer->data(), buffer->size());
- _h264BitstreamParser.GetLastSliceQp(&qp);
- frame.qp = @(qp);
-
- BOOL res = _callback(frame, codecSpecificInfo, header);
- if (!res) {
- RTC_LOG(LS_ERROR) << "Encode callback failed";
- return;
- }
- _bitrateAdjuster->Update(frame.buffer.length);
-}
-
-- (RTCVideoEncoderQpThresholds *)scalingSettings {
- return [[RTCVideoEncoderQpThresholds alloc] initWithThresholdsLow:kLowH264QpThreshold
- high:kHighH264QpThreshold];
-}
-
-@end
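
Note (not part of the diff): the TODO in -setEncoderBitrateBps: above asks for a helper that sets array-valued VideoToolbox properties. A minimal sketch of such an overload, with an illustrative signature that mirrors the scalar SetVTSessionProperty overloads in the helpers.cc file deleted below:

#include <CoreFoundation/CoreFoundation.h>
#include <VideoToolbox/VideoToolbox.h>

#include "rtc_base/logging.h"

// Illustrative overload, not part of this CL: wraps the
// CFArrayCreate / VTSessionSetProperty / CFRelease sequence that
// -setEncoderBitrateBps: performs by hand for
// kVTCompressionPropertyKey_DataRateLimits.
void SetVTSessionProperty(VTSessionRef session,
                          CFStringRef key,
                          CFTypeRef* values,
                          size_t count) {
  CFArrayRef array =
      CFArrayCreate(kCFAllocatorDefault, values, count, &kCFTypeArrayCallBacks);
  OSStatus status = VTSessionSetProperty(session, key, array);
  CFRelease(array);
  if (status != noErr) {
    RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set array value: "
                      << status;
  }
}
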
diff --git a/sdk/objc/Framework/Classes/VideoToolbox/helpers.cc b/sdk/objc/Framework/Classes/VideoToolbox/helpers.cc
deleted file mode 100644
index ac957f1..0000000
--- a/sdk/objc/Framework/Classes/VideoToolbox/helpers.cc
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- *
- */
-
-#include "helpers.h"
-
-#include <string>
-
-#include "rtc_base/checks.h"
-#include "rtc_base/logging.h"
-
-// Copies characters from a CFStringRef into a std::string.
-std::string CFStringToString(const CFStringRef cf_string) {
- RTC_DCHECK(cf_string);
- std::string std_string;
- // Get the size needed for UTF8 plus terminating character.
- size_t buffer_size =
- CFStringGetMaximumSizeForEncoding(CFStringGetLength(cf_string),
- kCFStringEncodingUTF8) +
- 1;
- std::unique_ptr<char[]> buffer(new char[buffer_size]);
- if (CFStringGetCString(cf_string, buffer.get(), buffer_size,
- kCFStringEncodingUTF8)) {
- // Copy over the characters.
- std_string.assign(buffer.get());
- }
- return std_string;
-}
-
-// Convenience function for setting a VT property.
-void SetVTSessionProperty(VTSessionRef session,
- CFStringRef key,
- int32_t value) {
- CFNumberRef cfNum =
- CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &value);
- OSStatus status = VTSessionSetProperty(session, key, cfNum);
- CFRelease(cfNum);
- if (status != noErr) {
- std::string key_string = CFStringToString(key);
- RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
- << " to " << value << ": " << status;
- }
-}
-
-// Convenience function for setting a VT property.
-void SetVTSessionProperty(VTSessionRef session,
- CFStringRef key,
- uint32_t value) {
- int64_t value_64 = value;
- CFNumberRef cfNum =
- CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &value_64);
- OSStatus status = VTSessionSetProperty(session, key, cfNum);
- CFRelease(cfNum);
- if (status != noErr) {
- std::string key_string = CFStringToString(key);
- RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
- << " to " << value << ": " << status;
- }
-}
-
-// Convenience function for setting a VT property.
-void SetVTSessionProperty(VTSessionRef session, CFStringRef key, bool value) {
- CFBooleanRef cf_bool = (value) ? kCFBooleanTrue : kCFBooleanFalse;
- OSStatus status = VTSessionSetProperty(session, key, cf_bool);
- if (status != noErr) {
- std::string key_string = CFStringToString(key);
- RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
- << " to " << value << ": " << status;
- }
-}
-
-// Convenience function for setting a VT property.
-void SetVTSessionProperty(VTSessionRef session,
- CFStringRef key,
- CFStringRef value) {
- OSStatus status = VTSessionSetProperty(session, key, value);
- if (status != noErr) {
- std::string key_string = CFStringToString(key);
- std::string val_string = CFStringToString(value);
- RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
- << " to " << val_string << ": " << status;
- }
-}
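
Note (not part of the diff): callers pick one of the overloads above purely by the C++ type of the value they pass. A small usage sketch, assuming a valid VTSessionRef and mirroring -configureCompressionSession in the encoder file above (the wrapper name is illustrative):

#include <VideoToolbox/VideoToolbox.h>

#include "helpers.h"

// Illustrative wrapper: each call resolves to the bool, int32_t or
// CFStringRef overload of SetVTSessionProperty declared in helpers.h.
void ConfigureExampleSession(VTSessionRef session) {
  SetVTSessionProperty(session, kVTCompressionPropertyKey_RealTime, true);
  SetVTSessionProperty(session, kVTCompressionPropertyKey_MaxKeyFrameInterval, 7200);
  SetVTSessionProperty(session, kVTCompressionPropertyKey_ProfileLevel,
                       kVTProfileLevel_H264_Baseline_AutoLevel);
}
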
diff --git a/sdk/objc/Framework/Classes/VideoToolbox/helpers.h b/sdk/objc/Framework/Classes/VideoToolbox/helpers.h
deleted file mode 100644
index 0683ea7..0000000
--- a/sdk/objc/Framework/Classes/VideoToolbox/helpers.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- *
- */
-
-#ifndef SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_HELPERS_H_
-#define SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_HELPERS_H_
-
-#include <CoreFoundation/CoreFoundation.h>
-#include <VideoToolbox/VideoToolbox.h>
-#include <string>
-
-// Convenience function for creating a dictionary.
-inline CFDictionaryRef CreateCFTypeDictionary(CFTypeRef* keys,
- CFTypeRef* values,
- size_t size) {
- return CFDictionaryCreate(kCFAllocatorDefault, keys, values, size,
- &kCFTypeDictionaryKeyCallBacks,
- &kCFTypeDictionaryValueCallBacks);
-}
-
-// Copies characters from a CFStringRef into a std::string.
-std::string CFStringToString(const CFStringRef cf_string);
-
-// Convenience function for setting a VT property.
-void SetVTSessionProperty(VTSessionRef session, CFStringRef key, int32_t value);
-
-// Convenience function for setting a VT property.
-void SetVTSessionProperty(VTSessionRef session,
- CFStringRef key,
- uint32_t value);
-
-// Convenience function for setting a VT property.
-void SetVTSessionProperty(VTSessionRef session, CFStringRef key, bool value);
-
-// Convenience function for setting a VT property.
-void SetVTSessionProperty(VTSessionRef session,
- CFStringRef key,
- CFStringRef value);
-
-#endif // SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_HELPERS_H_
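
Note (not part of the diff): CreateCFTypeDictionary follows Core Foundation's "Create" rule, so the caller owns the returned dictionary and must CFRelease it, exactly as the encoder releases its source-image attributes above. A self-contained sketch (the function name and pixel format choice are illustrative):

#include <CoreVideo/CoreVideo.h>

#include "helpers.h"

// Illustrative only: builds a one-entry pixel buffer attributes dictionary.
CFDictionaryRef CreateExamplePixelBufferAttributes() {
  int64_t format = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
  CFNumberRef format_number =
      CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &format);
  CFTypeRef keys[] = {kCVPixelBufferPixelFormatTypeKey};
  CFTypeRef values[] = {format_number};
  CFDictionaryRef attributes = CreateCFTypeDictionary(keys, values, 1);
  CFRelease(format_number);  // The dictionary retains its values.
  return attributes;         // Caller is responsible for CFRelease().
}
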
diff --git a/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.cc b/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.cc
deleted file mode 100644
index 479ef3c..0000000
--- a/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.cc
+++ /dev/null
@@ -1,351 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- *
- */
-
-#include "sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h"
-
-#include <CoreFoundation/CoreFoundation.h>
-#include <memory>
-#include <vector>
-
-#include "rtc_base/checks.h"
-#include "rtc_base/logging.h"
-
-namespace webrtc {
-
-using H264::kAud;
-using H264::kSps;
-using H264::NaluIndex;
-using H264::NaluType;
-using H264::ParseNaluType;
-
-const char kAnnexBHeaderBytes[4] = {0, 0, 0, 1};
-const size_t kAvccHeaderByteSize = sizeof(uint32_t);
-
-bool H264CMSampleBufferToAnnexBBuffer(
- CMSampleBufferRef avcc_sample_buffer,
- bool is_keyframe,
- rtc::Buffer* annexb_buffer,
- std::unique_ptr<RTPFragmentationHeader>* out_header) {
- RTC_DCHECK(avcc_sample_buffer);
- RTC_DCHECK(out_header);
- out_header->reset(nullptr);
-
- // Get format description from the sample buffer.
- CMVideoFormatDescriptionRef description =
- CMSampleBufferGetFormatDescription(avcc_sample_buffer);
- if (description == nullptr) {
- RTC_LOG(LS_ERROR) << "Failed to get sample buffer's description.";
- return false;
- }
-
- // Get parameter set information.
- int nalu_header_size = 0;
- size_t param_set_count = 0;
- OSStatus status = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
- description, 0, nullptr, nullptr, &param_set_count, &nalu_header_size);
- if (status != noErr) {
- RTC_LOG(LS_ERROR) << "Failed to get parameter set.";
- return false;
- }
- RTC_CHECK_EQ(nalu_header_size, kAvccHeaderByteSize);
- RTC_DCHECK_EQ(param_set_count, 2);
-
- // Truncate any previous data in the buffer without changing its capacity.
- annexb_buffer->SetSize(0);
-
- size_t nalu_offset = 0;
- std::vector<size_t> frag_offsets;
- std::vector<size_t> frag_lengths;
-
- // Place all parameter sets at the front of buffer.
- if (is_keyframe) {
- size_t param_set_size = 0;
- const uint8_t* param_set = nullptr;
- for (size_t i = 0; i < param_set_count; ++i) {
- status = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
- description, i, &param_set, &param_set_size, nullptr, nullptr);
- if (status != noErr) {
- RTC_LOG(LS_ERROR) << "Failed to get parameter set.";
- return false;
- }
- // Update buffer.
- annexb_buffer->AppendData(kAnnexBHeaderBytes, sizeof(kAnnexBHeaderBytes));
- annexb_buffer->AppendData(reinterpret_cast<const char*>(param_set),
- param_set_size);
- // Update fragmentation.
- frag_offsets.push_back(nalu_offset + sizeof(kAnnexBHeaderBytes));
- frag_lengths.push_back(param_set_size);
- nalu_offset += sizeof(kAnnexBHeaderBytes) + param_set_size;
- }
- }
-
- // Get block buffer from the sample buffer.
- CMBlockBufferRef block_buffer =
- CMSampleBufferGetDataBuffer(avcc_sample_buffer);
- if (block_buffer == nullptr) {
- RTC_LOG(LS_ERROR) << "Failed to get sample buffer's block buffer.";
- return false;
- }
- CMBlockBufferRef contiguous_buffer = nullptr;
- // Make sure block buffer is contiguous.
- if (!CMBlockBufferIsRangeContiguous(block_buffer, 0, 0)) {
- status = CMBlockBufferCreateContiguous(
- nullptr, block_buffer, nullptr, nullptr, 0, 0, 0, &contiguous_buffer);
- if (status != noErr) {
- RTC_LOG(LS_ERROR) << "Failed to flatten non-contiguous block buffer: "
- << status;
- return false;
- }
- } else {
- contiguous_buffer = block_buffer;
- // Retain to make cleanup easier.
- CFRetain(contiguous_buffer);
- block_buffer = nullptr;
- }
-
- // Now copy the actual data.
- char* data_ptr = nullptr;
- size_t block_buffer_size = CMBlockBufferGetDataLength(contiguous_buffer);
- status = CMBlockBufferGetDataPointer(contiguous_buffer, 0, nullptr, nullptr,
- &data_ptr);
- if (status != noErr) {
- RTC_LOG(LS_ERROR) << "Failed to get block buffer data.";
- CFRelease(contiguous_buffer);
- return false;
- }
- size_t bytes_remaining = block_buffer_size;
- while (bytes_remaining > 0) {
- // The size type here must match |nalu_header_size|, we expect 4 bytes.
- // Read the length of the next packet of data. Must convert from big endian
- // to host endian.
- RTC_DCHECK_GE(bytes_remaining, (size_t)nalu_header_size);
- uint32_t* uint32_data_ptr = reinterpret_cast<uint32_t*>(data_ptr);
- uint32_t packet_size = CFSwapInt32BigToHost(*uint32_data_ptr);
- // Update buffer.
- annexb_buffer->AppendData(kAnnexBHeaderBytes, sizeof(kAnnexBHeaderBytes));
- annexb_buffer->AppendData(data_ptr + nalu_header_size, packet_size);
- // Update fragmentation.
- frag_offsets.push_back(nalu_offset + sizeof(kAnnexBHeaderBytes));
- frag_lengths.push_back(packet_size);
- nalu_offset += sizeof(kAnnexBHeaderBytes) + packet_size;
-
- size_t bytes_written = packet_size + sizeof(kAnnexBHeaderBytes);
- bytes_remaining -= bytes_written;
- data_ptr += bytes_written;
- }
- RTC_DCHECK_EQ(bytes_remaining, (size_t)0);
-
- std::unique_ptr<RTPFragmentationHeader> header(new RTPFragmentationHeader());
- header->VerifyAndAllocateFragmentationHeader(frag_offsets.size());
- RTC_DCHECK_EQ(frag_lengths.size(), frag_offsets.size());
- for (size_t i = 0; i < frag_offsets.size(); ++i) {
- header->fragmentationOffset[i] = frag_offsets[i];
- header->fragmentationLength[i] = frag_lengths[i];
- header->fragmentationPlType[i] = 0;
- header->fragmentationTimeDiff[i] = 0;
- }
- *out_header = std::move(header);
- CFRelease(contiguous_buffer);
- return true;
-}
-
-bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer,
- size_t annexb_buffer_size,
- CMVideoFormatDescriptionRef video_format,
- CMSampleBufferRef* out_sample_buffer) {
- RTC_DCHECK(annexb_buffer);
- RTC_DCHECK(out_sample_buffer);
- RTC_DCHECK(video_format);
- *out_sample_buffer = nullptr;
-
- AnnexBBufferReader reader(annexb_buffer, annexb_buffer_size);
- if (reader.SeekToNextNaluOfType(kSps)) {
- // Buffer contains an SPS NALU - skip it and the following PPS
- const uint8_t* data;
- size_t data_len;
- if (!reader.ReadNalu(&data, &data_len)) {
- RTC_LOG(LS_ERROR) << "Failed to read SPS";
- return false;
- }
- if (!reader.ReadNalu(&data, &data_len)) {
- RTC_LOG(LS_ERROR) << "Failed to read PPS";
- return false;
- }
- } else {
- // No SPS NALU - start reading from the first NALU in the buffer
- reader.SeekToStart();
- }
-
- // Allocate memory as a block buffer.
- // TODO(tkchin): figure out how to use a pool.
- CMBlockBufferRef block_buffer = nullptr;
- OSStatus status = CMBlockBufferCreateWithMemoryBlock(
- nullptr, nullptr, reader.BytesRemaining(), nullptr, nullptr, 0,
- reader.BytesRemaining(), kCMBlockBufferAssureMemoryNowFlag,
- &block_buffer);
- if (status != kCMBlockBufferNoErr) {
- RTC_LOG(LS_ERROR) << "Failed to create block buffer.";
- return false;
- }
-
- // Make sure block buffer is contiguous.
- CMBlockBufferRef contiguous_buffer = nullptr;
- if (!CMBlockBufferIsRangeContiguous(block_buffer, 0, 0)) {
- status = CMBlockBufferCreateContiguous(
- nullptr, block_buffer, nullptr, nullptr, 0, 0, 0, &contiguous_buffer);
- if (status != noErr) {
- RTC_LOG(LS_ERROR) << "Failed to flatten non-contiguous block buffer: "
- << status;
- CFRelease(block_buffer);
- return false;
- }
- } else {
- contiguous_buffer = block_buffer;
- block_buffer = nullptr;
- }
-
- // Get a raw pointer into allocated memory.
- size_t block_buffer_size = 0;
- char* data_ptr = nullptr;
- status = CMBlockBufferGetDataPointer(contiguous_buffer, 0, nullptr,
- &block_buffer_size, &data_ptr);
- if (status != kCMBlockBufferNoErr) {
- RTC_LOG(LS_ERROR) << "Failed to get block buffer data pointer.";
- CFRelease(contiguous_buffer);
- return false;
- }
- RTC_DCHECK(block_buffer_size == reader.BytesRemaining());
-
- // Write Avcc NALUs into block buffer memory.
- AvccBufferWriter writer(reinterpret_cast<uint8_t*>(data_ptr),
- block_buffer_size);
- while (reader.BytesRemaining() > 0) {
- const uint8_t* nalu_data_ptr = nullptr;
- size_t nalu_data_size = 0;
- if (reader.ReadNalu(&nalu_data_ptr, &nalu_data_size)) {
- writer.WriteNalu(nalu_data_ptr, nalu_data_size);
- }
- }
-
- // Create sample buffer.
- status = CMSampleBufferCreate(nullptr, contiguous_buffer, true, nullptr,
- nullptr, video_format, 1, 0, nullptr, 0,
- nullptr, out_sample_buffer);
- if (status != noErr) {
- RTC_LOG(LS_ERROR) << "Failed to create sample buffer.";
- CFRelease(contiguous_buffer);
- return false;
- }
- CFRelease(contiguous_buffer);
- return true;
-}
-
-CMVideoFormatDescriptionRef CreateVideoFormatDescription(
- const uint8_t* annexb_buffer,
- size_t annexb_buffer_size) {
- const uint8_t* param_set_ptrs[2] = {};
- size_t param_set_sizes[2] = {};
- AnnexBBufferReader reader(annexb_buffer, annexb_buffer_size);
- // Skip everything before the SPS, then read the SPS and PPS
- if (!reader.SeekToNextNaluOfType(kSps)) {
- return nullptr;
- }
- if (!reader.ReadNalu(&param_set_ptrs[0], &param_set_sizes[0])) {
- RTC_LOG(LS_ERROR) << "Failed to read SPS";
- return nullptr;
- }
- if (!reader.ReadNalu(&param_set_ptrs[1], &param_set_sizes[1])) {
- RTC_LOG(LS_ERROR) << "Failed to read PPS";
- return nullptr;
- }
-
- // Parse the SPS and PPS into a CMVideoFormatDescription.
- CMVideoFormatDescriptionRef description = nullptr;
- OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(
- kCFAllocatorDefault, 2, param_set_ptrs, param_set_sizes, 4, &description);
- if (status != noErr) {
- RTC_LOG(LS_ERROR) << "Failed to create video format description.";
- return nullptr;
- }
- return description;
-}
-
-AnnexBBufferReader::AnnexBBufferReader(const uint8_t* annexb_buffer,
- size_t length)
- : start_(annexb_buffer), length_(length) {
- RTC_DCHECK(annexb_buffer);
- offsets_ = H264::FindNaluIndices(annexb_buffer, length);
- offset_ = offsets_.begin();
-}
-
-AnnexBBufferReader::~AnnexBBufferReader() = default;
-
-bool AnnexBBufferReader::ReadNalu(const uint8_t** out_nalu,
- size_t* out_length) {
- RTC_DCHECK(out_nalu);
- RTC_DCHECK(out_length);
- *out_nalu = nullptr;
- *out_length = 0;
-
- if (offset_ == offsets_.end()) {
- return false;
- }
- *out_nalu = start_ + offset_->payload_start_offset;
- *out_length = offset_->payload_size;
- ++offset_;
- return true;
-}
-
-size_t AnnexBBufferReader::BytesRemaining() const {
- if (offset_ == offsets_.end()) {
- return 0;
- }
- return length_ - offset_->start_offset;
-}
-
-void AnnexBBufferReader::SeekToStart() {
- offset_ = offsets_.begin();
-}
-
-bool AnnexBBufferReader::SeekToNextNaluOfType(NaluType type) {
- for (; offset_ != offsets_.end(); ++offset_) {
- if (offset_->payload_size < 1)
- continue;
- if (ParseNaluType(*(start_ + offset_->payload_start_offset)) == type)
- return true;
- }
- return false;
-}
-AvccBufferWriter::AvccBufferWriter(uint8_t* const avcc_buffer, size_t length)
- : start_(avcc_buffer), offset_(0), length_(length) {
- RTC_DCHECK(avcc_buffer);
-}
-
-bool AvccBufferWriter::WriteNalu(const uint8_t* data, size_t data_size) {
- // Check if we can write this length of data.
- if (data_size + kAvccHeaderByteSize > BytesRemaining()) {
- return false;
- }
- // Write length header, which needs to be big endian.
- uint32_t big_endian_length = CFSwapInt32HostToBig(data_size);
- memcpy(start_ + offset_, &big_endian_length, sizeof(big_endian_length));
- offset_ += sizeof(big_endian_length);
- // Write data.
- memcpy(start_ + offset_, data, data_size);
- offset_ += data_size;
- return true;
-}
-
-size_t AvccBufferWriter::BytesRemaining() const {
- return length_ - offset_;
-}
-
-} // namespace webrtc
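
Note (not part of the diff): a short usage sketch for AnnexBBufferReader, doing the same SPS lookup that CreateVideoFormatDescription above performs (the helper name is illustrative):

#include <cstddef>
#include <cstdint>

#include "common_video/h264/h264_common.h"
#include "sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h"

// Illustrative helper: returns true and fills |sps|/|sps_len| with the SPS
// payload (start code stripped) if the buffer contains an SPS NALU.
bool FindSps(const uint8_t* annexb_buffer,
             size_t annexb_buffer_size,
             const uint8_t** sps,
             size_t* sps_len) {
  webrtc::AnnexBBufferReader reader(annexb_buffer, annexb_buffer_size);
  if (!reader.SeekToNextNaluOfType(webrtc::H264::kSps)) {
    return false;  // No SPS in the buffer.
  }
  return reader.ReadNalu(sps, sps_len);
}
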
diff --git a/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h b/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h
index cd5a510..21281f3 100644
--- a/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h
+++ b/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h
@@ -6,111 +6,6 @@
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
- *
*/
-#ifndef SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_NALU_REWRITER_H_
-#define SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_NALU_REWRITER_H_
-
-#include "modules/video_coding/codecs/h264/include/h264.h"
-
-#include <CoreMedia/CoreMedia.h>
-#include <vector>
-
-#include "common_video/h264/h264_common.h"
-#include "modules/include/module_common_types.h"
-#include "rtc_base/buffer.h"
-
-using webrtc::H264::NaluIndex;
-
-namespace webrtc {
-
-// Converts a sample buffer emitted from the VideoToolbox encoder into a buffer
-// suitable for RTP. The sample buffer is in avcc format whereas the rtp buffer
-// needs to be in Annex B format. Data is written directly to |annexb_buffer|
-// and a new RTPFragmentationHeader is returned in |out_header|.
-bool H264CMSampleBufferToAnnexBBuffer(
- CMSampleBufferRef avcc_sample_buffer,
- bool is_keyframe,
- rtc::Buffer* annexb_buffer,
- std::unique_ptr<RTPFragmentationHeader>* out_header);
-
-// Converts a buffer received from RTP into a sample buffer suitable for the
-// VideoToolbox decoder. The RTP buffer is in annex b format whereas the sample
-// buffer is in avcc format.
-// If |is_keyframe| is true then |video_format| is ignored since the format will
-// be read from the buffer. Otherwise |video_format| must be provided.
-// Caller is responsible for releasing the created sample buffer.
-bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer,
- size_t annexb_buffer_size,
- CMVideoFormatDescriptionRef video_format,
- CMSampleBufferRef* out_sample_buffer);
-
-// Returns a video format description created from the sps/pps information in
-// the Annex B buffer. If there is no such information, nullptr is returned.
-// The caller is responsible for releasing the description.
-CMVideoFormatDescriptionRef CreateVideoFormatDescription(
- const uint8_t* annexb_buffer,
- size_t annexb_buffer_size);
-
-// Helper class for reading NALUs from an RTP Annex B buffer.
-class AnnexBBufferReader final {
- public:
- AnnexBBufferReader(const uint8_t* annexb_buffer, size_t length);
- ~AnnexBBufferReader();
- AnnexBBufferReader(const AnnexBBufferReader& other) = delete;
- void operator=(const AnnexBBufferReader& other) = delete;
-
- // Returns a pointer to the beginning of the next NALU slice without the
- // header bytes and its length. Returns false if no more slices remain.
- bool ReadNalu(const uint8_t** out_nalu, size_t* out_length);
-
- // Returns the number of unread NALU bytes, including the size of the header.
- // If the buffer has no remaining NALUs this will return zero.
- size_t BytesRemaining() const;
-
- // Reset the reader to start reading from the first NALU
- void SeekToStart();
-
- // Seek to the next position that holds a NALU of the desired type,
- // or the end if no such NALU is found.
- // Return true if a NALU of the desired type is found, false if we
- // reached the end instead
- bool SeekToNextNaluOfType(H264::NaluType type);
-
- private:
- // Returns the next offset that contains NALU data.
- size_t FindNextNaluHeader(const uint8_t* start,
- size_t length,
- size_t offset) const;
-
- const uint8_t* const start_;
- std::vector<NaluIndex> offsets_;
- std::vector<NaluIndex>::iterator offset_;
- const size_t length_;
-};
-
-// Helper class for writing NALUs using avcc format into a buffer.
-class AvccBufferWriter final {
- public:
- AvccBufferWriter(uint8_t* const avcc_buffer, size_t length);
- ~AvccBufferWriter() {}
- AvccBufferWriter(const AvccBufferWriter& other) = delete;
- void operator=(const AvccBufferWriter& other) = delete;
-
- // Writes the data slice into the buffer. Returns false if there isn't
- // enough space left.
- bool WriteNalu(const uint8_t* data, size_t data_size);
-
- // Returns the unused bytes in the buffer.
- size_t BytesRemaining() const;
-
- private:
- uint8_t* const start_;
- size_t offset_;
- const size_t length_;
-};
-
-} // namespace webrtc
-
-#endif // SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_NALU_REWRITER_H_
+#import "components/video_codec/nalu_rewriter.h"
diff --git a/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter_unittest.cc b/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter_unittest.cc
deleted file mode 100644
index d907ef8..0000000
--- a/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter_unittest.cc
+++ /dev/null
@@ -1,233 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- *
- */
-
-#include <memory>
-
-#include "common_video/h264/h264_common.h"
-#include "rtc_base/arraysize.h"
-#include "sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h"
-#include "test/gtest.h"
-
-namespace webrtc {
-
-using H264::kSps;
-
-static const uint8_t NALU_TEST_DATA_0[] = {0xAA, 0xBB, 0xCC};
-static const uint8_t NALU_TEST_DATA_1[] = {0xDE, 0xAD, 0xBE, 0xEF};
-
-TEST(H264VideoToolboxNaluTest, TestCreateVideoFormatDescription) {
- const uint8_t sps_pps_buffer[] = {
- // SPS nalu.
- 0x00, 0x00, 0x00, 0x01, 0x27, 0x42, 0x00, 0x1E, 0xAB, 0x40, 0xF0, 0x28,
- 0xD3, 0x70, 0x20, 0x20, 0x20, 0x20,
- // PPS nalu.
- 0x00, 0x00, 0x00, 0x01, 0x28, 0xCE, 0x3C, 0x30};
- CMVideoFormatDescriptionRef description =
- CreateVideoFormatDescription(sps_pps_buffer, arraysize(sps_pps_buffer));
- EXPECT_TRUE(description);
- if (description) {
- CFRelease(description);
- description = nullptr;
- }
-
- const uint8_t sps_pps_not_at_start_buffer[] = {
- // Add some non-SPS/PPS NALUs at the beginning
- 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x01, 0xFF, 0x00, 0x00, 0x00, 0x01,
- 0xAB, 0x33, 0x21,
- // SPS nalu.
- 0x00, 0x00, 0x01, 0x27, 0x42, 0x00, 0x1E, 0xAB, 0x40, 0xF0, 0x28, 0xD3,
- 0x70, 0x20, 0x20, 0x20, 0x20,
- // PPS nalu.
- 0x00, 0x00, 0x01, 0x28, 0xCE, 0x3C, 0x30};
- description = CreateVideoFormatDescription(
- sps_pps_not_at_start_buffer, arraysize(sps_pps_not_at_start_buffer));
- EXPECT_TRUE(description);
- if (description) {
- CFRelease(description);
- description = nullptr;
- }
-
- const uint8_t other_buffer[] = {0x00, 0x00, 0x00, 0x01, 0x28};
- EXPECT_FALSE(
- CreateVideoFormatDescription(other_buffer, arraysize(other_buffer)));
-}
-
-TEST(AnnexBBufferReaderTest, TestReadEmptyInput) {
- const uint8_t annex_b_test_data[] = {0x00};
- AnnexBBufferReader reader(annex_b_test_data, 0);
- const uint8_t* nalu = nullptr;
- size_t nalu_length = 0;
- EXPECT_EQ(0u, reader.BytesRemaining());
- EXPECT_FALSE(reader.ReadNalu(&nalu, &nalu_length));
- EXPECT_EQ(nullptr, nalu);
- EXPECT_EQ(0u, nalu_length);
-}
-
-TEST(AnnexBBufferReaderTest, TestReadSingleNalu) {
- const uint8_t annex_b_test_data[] = {0x00, 0x00, 0x00, 0x01, 0xAA};
- AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
- const uint8_t* nalu = nullptr;
- size_t nalu_length = 0;
- EXPECT_EQ(arraysize(annex_b_test_data), reader.BytesRemaining());
- EXPECT_TRUE(reader.ReadNalu(&nalu, &nalu_length));
- EXPECT_EQ(annex_b_test_data + 4, nalu);
- EXPECT_EQ(1u, nalu_length);
- EXPECT_EQ(0u, reader.BytesRemaining());
- EXPECT_FALSE(reader.ReadNalu(&nalu, &nalu_length));
- EXPECT_EQ(nullptr, nalu);
- EXPECT_EQ(0u, nalu_length);
-}
-
-TEST(AnnexBBufferReaderTest, TestReadSingleNalu3ByteHeader) {
- const uint8_t annex_b_test_data[] = {0x00, 0x00, 0x01, 0xAA};
- AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
- const uint8_t* nalu = nullptr;
- size_t nalu_length = 0;
- EXPECT_EQ(arraysize(annex_b_test_data), reader.BytesRemaining());
- EXPECT_TRUE(reader.ReadNalu(&nalu, &nalu_length));
- EXPECT_EQ(annex_b_test_data + 3, nalu);
- EXPECT_EQ(1u, nalu_length);
- EXPECT_EQ(0u, reader.BytesRemaining());
- EXPECT_FALSE(reader.ReadNalu(&nalu, &nalu_length));
- EXPECT_EQ(nullptr, nalu);
- EXPECT_EQ(0u, nalu_length);
-}
-
-TEST(AnnexBBufferReaderTest, TestReadMissingNalu) {
- // clang-format off
- const uint8_t annex_b_test_data[] = {0x01,
- 0x00, 0x01,
- 0x00, 0x00, 0x00, 0xFF};
- // clang-format on
- AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
- const uint8_t* nalu = nullptr;
- size_t nalu_length = 0;
- EXPECT_EQ(0u, reader.BytesRemaining());
- EXPECT_FALSE(reader.ReadNalu(&nalu, &nalu_length));
- EXPECT_EQ(nullptr, nalu);
- EXPECT_EQ(0u, nalu_length);
-}
-
-TEST(AnnexBBufferReaderTest, TestReadMultipleNalus) {
- // clang-format off
- const uint8_t annex_b_test_data[] = {0x00, 0x00, 0x00, 0x01, 0xFF,
- 0x01,
- 0x00, 0x01,
- 0x00, 0x00, 0x00, 0xFF,
- 0x00, 0x00, 0x01, 0xAA, 0xBB};
- // clang-format on
- AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
- const uint8_t* nalu = nullptr;
- size_t nalu_length = 0;
- EXPECT_EQ(arraysize(annex_b_test_data), reader.BytesRemaining());
- EXPECT_TRUE(reader.ReadNalu(&nalu, &nalu_length));
- EXPECT_EQ(annex_b_test_data + 4, nalu);
- EXPECT_EQ(8u, nalu_length);
- EXPECT_EQ(6u, reader.BytesRemaining());
- EXPECT_TRUE(reader.ReadNalu(&nalu, &nalu_length));
- EXPECT_EQ(annex_b_test_data + 16, nalu);
- EXPECT_EQ(2u, nalu_length);
- EXPECT_EQ(0u, reader.BytesRemaining());
- EXPECT_FALSE(reader.ReadNalu(&nalu, &nalu_length));
- EXPECT_EQ(nullptr, nalu);
- EXPECT_EQ(0u, nalu_length);
-}
-
-TEST(AnnexBBufferReaderTest, TestFindNextNaluOfType) {
- const uint8_t notSps = 0x1F;
- const uint8_t annex_b_test_data[] = {
- 0x00, 0x00, 0x00, 0x01, kSps, 0x00, 0x00, 0x01, notSps,
- 0x00, 0x00, 0x01, notSps, 0xDD, 0x00, 0x00, 0x01, notSps,
- 0xEE, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00, 0x00,
- 0x01, 0x00, 0x00, 0x00, 0x01, kSps, 0xBB, 0x00, 0x00,
- 0x01, notSps, 0x00, 0x00, 0x01, notSps, 0xDD, 0x00, 0x00,
- 0x01, notSps, 0xEE, 0xFF, 0x00, 0x00, 0x00, 0x01};
-
- AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
- const uint8_t* nalu = nullptr;
- size_t nalu_length = 0;
- EXPECT_EQ(arraysize(annex_b_test_data), reader.BytesRemaining());
- EXPECT_TRUE(reader.SeekToNextNaluOfType(kSps));
- EXPECT_TRUE(reader.ReadNalu(&nalu, &nalu_length));
- EXPECT_EQ(annex_b_test_data + 4, nalu);
- EXPECT_EQ(1u, nalu_length);
-
- EXPECT_TRUE(reader.SeekToNextNaluOfType(kSps));
- EXPECT_TRUE(reader.ReadNalu(&nalu, &nalu_length));
- EXPECT_EQ(annex_b_test_data + 32, nalu);
- EXPECT_EQ(2u, nalu_length);
-
- EXPECT_FALSE(reader.SeekToNextNaluOfType(kSps));
- EXPECT_FALSE(reader.ReadNalu(&nalu, &nalu_length));
-}
-
-TEST(AvccBufferWriterTest, TestEmptyOutputBuffer) {
- const uint8_t expected_buffer[] = {0x00};
- const size_t buffer_size = 1;
- std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
- memset(buffer.get(), 0, buffer_size);
- AvccBufferWriter writer(buffer.get(), 0);
- EXPECT_EQ(0u, writer.BytesRemaining());
- EXPECT_FALSE(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
- EXPECT_EQ(0,
- memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
-}
-
-TEST(AvccBufferWriterTest, TestWriteSingleNalu) {
- const uint8_t expected_buffer[] = {
- 0x00, 0x00, 0x00, 0x03, 0xAA, 0xBB, 0xCC,
- };
- const size_t buffer_size = arraysize(NALU_TEST_DATA_0) + 4;
- std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
- AvccBufferWriter writer(buffer.get(), buffer_size);
- EXPECT_EQ(buffer_size, writer.BytesRemaining());
- EXPECT_TRUE(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
- EXPECT_EQ(0u, writer.BytesRemaining());
- EXPECT_FALSE(writer.WriteNalu(NALU_TEST_DATA_1, arraysize(NALU_TEST_DATA_1)));
- EXPECT_EQ(0,
- memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
-}
-
-TEST(AvccBufferWriterTest, TestWriteMultipleNalus) {
- // clang-format off
- const uint8_t expected_buffer[] = {
- 0x00, 0x00, 0x00, 0x03, 0xAA, 0xBB, 0xCC,
- 0x00, 0x00, 0x00, 0x04, 0xDE, 0xAD, 0xBE, 0xEF
- };
- // clang-format on
- const size_t buffer_size =
- arraysize(NALU_TEST_DATA_0) + arraysize(NALU_TEST_DATA_1) + 8;
- std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
- AvccBufferWriter writer(buffer.get(), buffer_size);
- EXPECT_EQ(buffer_size, writer.BytesRemaining());
- EXPECT_TRUE(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
- EXPECT_EQ(buffer_size - (arraysize(NALU_TEST_DATA_0) + 4),
- writer.BytesRemaining());
- EXPECT_TRUE(writer.WriteNalu(NALU_TEST_DATA_1, arraysize(NALU_TEST_DATA_1)));
- EXPECT_EQ(0u, writer.BytesRemaining());
- EXPECT_EQ(0,
- memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
-}
-
-TEST(AvccBufferWriterTest, TestOverflow) {
- const uint8_t expected_buffer[] = {0x00, 0x00, 0x00};
- const size_t buffer_size = arraysize(NALU_TEST_DATA_0);
- std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
- memset(buffer.get(), 0, buffer_size);
- AvccBufferWriter writer(buffer.get(), buffer_size);
- EXPECT_EQ(buffer_size, writer.BytesRemaining());
- EXPECT_FALSE(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
- EXPECT_EQ(buffer_size, writer.BytesRemaining());
- EXPECT_EQ(0,
- memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
-}
-
-} // namespace webrtc
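
Note (not part of the diff): as TestReadSingleNalu and TestWriteSingleNalu above show, the avcc and Annex B representations differ only in how each NALU is prefixed. For the 3-byte payload AA BB CC:

  avcc:    00 00 00 03 AA BB CC   (4-byte big-endian length, then payload)
  Annex B: 00 00 00 01 AA BB CC   (start code, then payload)

The rewriters copy payload bytes unchanged and only swap one prefix for the other.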