Build dynamic iOS SDK.

- Moves most of the ObjC code into webrtc/sdk/objc.
- Adds new gyp targets that build the dylib, strip it, and export its symbols.
- Removes the old script that was used to generate the dylib.

BUG=

Review URL: https://codereview.webrtc.org/1903663002

Cr-Commit-Position: refs/heads/master@{#12524}
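
Once the dylib is built by the new targets, an app links WebRTC.framework and imports the public headers under the WebRTC/ prefix used throughout this patch. A minimal consumer sketch (the helper name is illustrative):

  #import <UIKit/UIKit.h>
  #import <WebRTC/RTCEAGLVideoView.h>

  // Creates the view an app would use to render remote video frames.
  static UIView *CreateRemoteVideoView(CGRect frame) {
    return [[RTCEAGLVideoView alloc] initWithFrame:frame];
  }
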
diff --git a/webrtc/sdk/objc/Framework/Classes/NSString+StdString.h b/webrtc/sdk/objc/Framework/Classes/NSString+StdString.h
new file mode 100644
index 0000000..8bf6cc9
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/NSString+StdString.h
@@ -0,0 +1,26 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#include <string>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface NSString (StdString)
+
+@property(nonatomic, readonly) std::string stdString;
+
++ (std::string)stdStringForString:(NSString *)nsString;
++ (NSString *)stringForStdString:(const std::string&)stdString;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/NSString+StdString.mm b/webrtc/sdk/objc/Framework/Classes/NSString+StdString.mm
new file mode 100644
index 0000000..3210ff0
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/NSString+StdString.mm
@@ -0,0 +1,33 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "NSString+StdString.h"
+
+@implementation NSString (StdString)
+
+- (std::string)stdString {
+  return [NSString stdStringForString:self];
+}
+
++ (std::string)stdStringForString:(NSString *)nsString {
+  NSData *charData = [nsString dataUsingEncoding:NSUTF8StringEncoding];
+  return std::string(reinterpret_cast<const char *>(charData.bytes),
+                     charData.length);
+}
+
++ (NSString *)stringForStdString:(const std::string&)stdString {
+  // std::string may contain embedded null characters, so construct the
+  // NSString using the explicit byte length.
+  return [[NSString alloc] initWithBytes:stdString.data()
+                                  length:stdString.length()
+                                encoding:NSUTF8StringEncoding];
+}
+
+@end
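
A minimal round-trip sketch for the category above, for use in an Objective-C++ (.mm) file (the function name is illustrative):

  #import "NSString+StdString.h"

  static NSString *RoundTripExample() {
    // NSString -> std::string -> NSString, both via the category above.
    std::string native = [NSString stdStringForString:@"WebRTC"];
    return [NSString stringForStdString:native];  // => @"WebRTC"
  }
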
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h
new file mode 100644
index 0000000..7a4de08
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h
@@ -0,0 +1,27 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCAVFoundationVideoSource.h"
+
+#include "avfoundationvideocapturer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCAVFoundationVideoSource ()
+
+@property(nonatomic, readonly) webrtc::AVFoundationVideoCapturer *capturer;
+
+/** Initialize an RTCAVFoundationVideoSource with constraints. */
+- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+                    constraints:(nullable RTCMediaConstraints *)constraints;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCAVFoundationVideoSource.mm b/webrtc/sdk/objc/Framework/Classes/RTCAVFoundationVideoSource.mm
new file mode 100644
index 0000000..528e8cb
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCAVFoundationVideoSource.mm
@@ -0,0 +1,55 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAVFoundationVideoSource+Private.h"
+
+#import "RTCMediaConstraints+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCVideoSource+Private.h"
+
+@implementation RTCAVFoundationVideoSource {
+  webrtc::AVFoundationVideoCapturer *_capturer;
+}
+
+- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+                    constraints:(RTCMediaConstraints *)constraints {
+  NSParameterAssert(factory);
+  // We pass ownership of the capturer to the source, but since we own
+  // the source, it should be ok to keep a raw pointer to the
+  // capturer.
+  _capturer = new webrtc::AVFoundationVideoCapturer();
+  rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
+      factory.nativeFactory->CreateVideoSource(
+          _capturer, constraints.nativeConstraints.get());
+
+  return [super initWithNativeVideoSource:source];
+}
+
+- (BOOL)canUseBackCamera {
+  return self.capturer->CanUseBackCamera();
+}
+
+- (BOOL)useBackCamera {
+  return self.capturer->GetUseBackCamera();
+}
+
+- (void)setUseBackCamera:(BOOL)useBackCamera {
+  self.capturer->SetUseBackCamera(useBackCamera);
+}
+
+- (AVCaptureSession *)captureSession {
+  return self.capturer->GetCaptureSession();
+}
+
+- (webrtc::AVFoundationVideoCapturer *)capturer {
+  return _capturer;
+}
+
+@end
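
A short sketch of the camera-switching accessors above (the function name is illustrative):

  #import "WebRTC/RTCAVFoundationVideoSource.h"

  static void SwitchToBackCameraIfAvailable(RTCAVFoundationVideoSource *source) {
    // canUseBackCamera is NO on devices without a back-facing camera.
    if (source.canUseBackCamera) {
      source.useBackCamera = YES;
    }
  }
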
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCAudioTrack+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCAudioTrack+Private.h
new file mode 100644
index 0000000..cb5f186
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCAudioTrack+Private.h
@@ -0,0 +1,30 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCAudioTrack.h"
+
+#include "webrtc/api/mediastreaminterface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTCPeerConnectionFactory;
+@interface RTCAudioTrack ()
+
+/** AudioTrackInterface created or passed in at construction. */
+@property(nonatomic, readonly)
+    rtc::scoped_refptr<webrtc::AudioTrackInterface> nativeAudioTrack;
+
+/** Initialize an RTCAudioTrack with an id. */
+- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+                        trackId:(NSString *)trackId;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCAudioTrack.mm b/webrtc/sdk/objc/Framework/Classes/RTCAudioTrack.mm
new file mode 100644
index 0000000..42542b8
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCAudioTrack.mm
@@ -0,0 +1,43 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioTrack+Private.h"
+
+#import "NSString+StdString.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+
+@implementation RTCAudioTrack
+
+- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+                        trackId:(NSString *)trackId {
+  NSParameterAssert(factory);
+  NSParameterAssert(trackId.length);
+  std::string nativeId = [NSString stdStringForString:trackId];
+  rtc::scoped_refptr<webrtc::AudioTrackInterface> track =
+      factory.nativeFactory->CreateAudioTrack(nativeId, nullptr);
+  return [self initWithNativeTrack:track type:RTCMediaStreamTrackTypeAudio];
+}
+
+- (instancetype)initWithNativeTrack:
+    (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+                               type:(RTCMediaStreamTrackType)type {
+  NSParameterAssert(nativeTrack);
+  NSParameterAssert(type == RTCMediaStreamTrackTypeAudio);
+  return [super initWithNativeTrack:nativeTrack type:type];
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::AudioTrackInterface>)nativeAudioTrack {
+  return static_cast<webrtc::AudioTrackInterface *>(self.nativeTrack.get());
+}
+
+@end
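
A creation sketch using the private factory initializer above, in an Objective-C++ (.mm) file; the track id is an example value and |factory| is assumed to exist already:

  #import "RTCAudioTrack+Private.h"
  #import "WebRTC/RTCPeerConnectionFactory.h"

  static RTCAudioTrack *CreateLocalAudioTrack(RTCPeerConnectionFactory *factory) {
    return [[RTCAudioTrack alloc] initWithFactory:factory trackId:@"audio0"];
  }
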
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCCameraPreviewView.m b/webrtc/sdk/objc/Framework/Classes/RTCCameraPreviewView.m
new file mode 100644
index 0000000..659973f
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCCameraPreviewView.m
@@ -0,0 +1,43 @@
+/*
+ *  Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCCameraPreviewView.h"
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCDispatcher+Private.h"
+
+@implementation RTCCameraPreviewView
+
+@synthesize captureSession = _captureSession;
+
++ (Class)layerClass {
+  return [AVCaptureVideoPreviewLayer class];
+}
+
+- (void)setCaptureSession:(AVCaptureSession *)captureSession {
+  if (_captureSession == captureSession) {
+    return;
+  }
+  _captureSession = captureSession;
+  AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
+    previewLayer.session = captureSession;
+  }];
+}
+
+#pragma mark - Private
+
+- (AVCaptureVideoPreviewLayer *)previewLayer {
+  return (AVCaptureVideoPreviewLayer *)self.layer;
+}
+
+@end
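
A sketch wiring a capturer's session into the preview view above, using the private RTCAVFoundationVideoSource initializer from this patch (|factory| is assumed to exist already):

  #import "RTCAVFoundationVideoSource+Private.h"
  #import "WebRTC/RTCCameraPreviewView.h"
  #import "WebRTC/RTCPeerConnectionFactory.h"

  static void StartLocalPreview(RTCPeerConnectionFactory *factory,
                                RTCCameraPreviewView *previewView) {
    RTCAVFoundationVideoSource *source =
        [[RTCAVFoundationVideoSource alloc] initWithFactory:factory
                                                constraints:nil];
    // The setter hops onto the capture-session queue via RTCDispatcher.
    previewView.captureSession = source.captureSession;
  }
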
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCConfiguration+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCConfiguration+Private.h
new file mode 100644
index 0000000..5a1663b
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCConfiguration+Private.h
@@ -0,0 +1,60 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCConfiguration.h"
+
+#include "webrtc/api/peerconnectioninterface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCConfiguration ()
+
+/**
+ * The native RTCConfiguration struct representation of this RTCConfiguration
+ * object. This is needed to pass to the underlying C++ APIs.
+ */
+@property(nonatomic, readonly)
+    webrtc::PeerConnectionInterface::RTCConfiguration nativeConfiguration;
+
++ (webrtc::PeerConnectionInterface::IceTransportsType)
+    nativeTransportsTypeForTransportPolicy:(RTCIceTransportPolicy)policy;
+
++ (RTCIceTransportPolicy)transportPolicyForTransportsType:
+    (webrtc::PeerConnectionInterface::IceTransportsType)nativeType;
+
++ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy;
+
++ (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy:
+    (RTCBundlePolicy)policy;
+
++ (RTCBundlePolicy)bundlePolicyForNativePolicy:
+    (webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy;
+
++ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy;
+
++ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy:
+    (RTCRtcpMuxPolicy)policy;
+
++ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy:
+    (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy;
+
++ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy;
+
++ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)
+    nativeTcpCandidatePolicyForPolicy:(RTCTcpCandidatePolicy)policy;
+
++ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy:
+    (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy;
+
++ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCConfiguration.mm b/webrtc/sdk/objc/Framework/Classes/RTCConfiguration.mm
new file mode 100644
index 0000000..b006319
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCConfiguration.mm
@@ -0,0 +1,237 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCConfiguration+Private.h"
+
+#include <memory>
+
+#import "RTCIceServer+Private.h"
+#import "WebRTC/RTCLogging.h"
+
+#include "webrtc/base/sslidentity.h"
+
+@implementation RTCConfiguration
+
+@synthesize iceServers = _iceServers;
+@synthesize iceTransportPolicy = _iceTransportPolicy;
+@synthesize bundlePolicy = _bundlePolicy;
+@synthesize rtcpMuxPolicy = _rtcpMuxPolicy;
+@synthesize tcpCandidatePolicy = _tcpCandidatePolicy;
+@synthesize audioJitterBufferMaxPackets = _audioJitterBufferMaxPackets;
+@synthesize iceConnectionReceivingTimeout = _iceConnectionReceivingTimeout;
+@synthesize iceBackupCandidatePairPingInterval =
+    _iceBackupCandidatePairPingInterval;
+@synthesize keyType = _keyType;
+
+- (instancetype)init {
+  if (self = [super init]) {
+    _iceServers = [NSMutableArray array];
+    // Copy defaults.
+    webrtc::PeerConnectionInterface::RTCConfiguration config;
+    _iceTransportPolicy =
+        [[self class] transportPolicyForTransportsType:config.type];
+    _bundlePolicy =
+        [[self class] bundlePolicyForNativePolicy:config.bundle_policy];
+    _rtcpMuxPolicy =
+        [[self class] rtcpMuxPolicyForNativePolicy:config.rtcp_mux_policy];
+    _tcpCandidatePolicy = [[self class] tcpCandidatePolicyForNativePolicy:
+        config.tcp_candidate_policy];
+    _audioJitterBufferMaxPackets = config.audio_jitter_buffer_max_packets;
+    _iceConnectionReceivingTimeout = config.ice_connection_receiving_timeout;
+    _iceBackupCandidatePairPingInterval =
+        config.ice_backup_candidate_pair_ping_interval;
+    _keyType = RTCEncryptionKeyTypeECDSA;
+  }
+  return self;
+}
+
+- (NSString *)description {
+  return [NSString stringWithFormat:
+      @"RTCConfiguration: {\n%@\n%@\n%@\n%@\n%@\n%d\n%d\n%d\n}\n",
+      _iceServers,
+      [[self class] stringForTransportPolicy:_iceTransportPolicy],
+      [[self class] stringForBundlePolicy:_bundlePolicy],
+      [[self class] stringForRtcpMuxPolicy:_rtcpMuxPolicy],
+      [[self class] stringForTcpCandidatePolicy:_tcpCandidatePolicy],
+      _audioJitterBufferMaxPackets,
+      _iceConnectionReceivingTimeout,
+      _iceBackupCandidatePairPingInterval];
+}
+
+#pragma mark - Private
+
+- (webrtc::PeerConnectionInterface::RTCConfiguration)nativeConfiguration {
+  webrtc::PeerConnectionInterface::RTCConfiguration nativeConfig;
+
+  for (RTCIceServer *iceServer in _iceServers) {
+    nativeConfig.servers.push_back(iceServer.nativeServer);
+  }
+  nativeConfig.type =
+      [[self class] nativeTransportsTypeForTransportPolicy:_iceTransportPolicy];
+  nativeConfig.bundle_policy =
+      [[self class] nativeBundlePolicyForPolicy:_bundlePolicy];
+  nativeConfig.rtcp_mux_policy =
+      [[self class] nativeRtcpMuxPolicyForPolicy:_rtcpMuxPolicy];
+  nativeConfig.tcp_candidate_policy =
+      [[self class] nativeTcpCandidatePolicyForPolicy:_tcpCandidatePolicy];
+  nativeConfig.audio_jitter_buffer_max_packets = _audioJitterBufferMaxPackets;
+  nativeConfig.ice_connection_receiving_timeout =
+      _iceConnectionReceivingTimeout;
+  nativeConfig.ice_backup_candidate_pair_ping_interval =
+      _iceBackupCandidatePairPingInterval;
+  if (_keyType == RTCEncryptionKeyTypeECDSA) {
+    std::unique_ptr<rtc::SSLIdentity> identity(
+        rtc::SSLIdentity::Generate(webrtc::kIdentityName, rtc::KT_ECDSA));
+    if (identity) {
+      nativeConfig.certificates.push_back(
+          rtc::RTCCertificate::Create(std::move(identity)));
+    } else {
+      RTCLogWarning(@"Failed to generate ECDSA identity. RSA will be used.");
+    }
+  }
+
+  return nativeConfig;
+}
+
++ (webrtc::PeerConnectionInterface::IceTransportsType)
+    nativeTransportsTypeForTransportPolicy:(RTCIceTransportPolicy)policy {
+  switch (policy) {
+    case RTCIceTransportPolicyNone:
+      return webrtc::PeerConnectionInterface::kNone;
+    case RTCIceTransportPolicyRelay:
+      return webrtc::PeerConnectionInterface::kRelay;
+    case RTCIceTransportPolicyNoHost:
+      return webrtc::PeerConnectionInterface::kNoHost;
+    case RTCIceTransportPolicyAll:
+      return webrtc::PeerConnectionInterface::kAll;
+  }
+}
+
++ (RTCIceTransportPolicy)transportPolicyForTransportsType:
+    (webrtc::PeerConnectionInterface::IceTransportsType)nativeType {
+  switch (nativeType) {
+    case webrtc::PeerConnectionInterface::kNone:
+      return RTCIceTransportPolicyNone;
+    case webrtc::PeerConnectionInterface::kRelay:
+      return RTCIceTransportPolicyRelay;
+    case webrtc::PeerConnectionInterface::kNoHost:
+      return RTCIceTransportPolicyNoHost;
+    case webrtc::PeerConnectionInterface::kAll:
+      return RTCIceTransportPolicyAll;
+  }
+}
+
++ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy {
+  switch (policy) {
+    case RTCIceTransportPolicyNone:
+      return @"NONE";
+    case RTCIceTransportPolicyRelay:
+      return @"RELAY";
+    case RTCIceTransportPolicyNoHost:
+      return @"NO_HOST";
+    case RTCIceTransportPolicyAll:
+      return @"ALL";
+  }
+}
+
++ (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy:
+    (RTCBundlePolicy)policy {
+  switch (policy) {
+    case RTCBundlePolicyBalanced:
+      return webrtc::PeerConnectionInterface::kBundlePolicyBalanced;
+    case RTCBundlePolicyMaxCompat:
+      return webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat;
+    case RTCBundlePolicyMaxBundle:
+      return webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle;
+  }
+}
+
++ (RTCBundlePolicy)bundlePolicyForNativePolicy:
+    (webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy {
+  switch (nativePolicy) {
+    case webrtc::PeerConnectionInterface::kBundlePolicyBalanced:
+      return RTCBundlePolicyBalanced;
+    case webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat:
+      return RTCBundlePolicyMaxCompat;
+    case webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle:
+      return RTCBundlePolicyMaxBundle;
+  }
+}
+
++ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy {
+  switch (policy) {
+    case RTCBundlePolicyBalanced:
+      return @"BALANCED";
+    case RTCBundlePolicyMaxCompat:
+      return @"MAX_COMPAT";
+    case RTCBundlePolicyMaxBundle:
+      return @"MAX_BUNDLE";
+  }
+}
+
++ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy:
+    (RTCRtcpMuxPolicy)policy {
+  switch (policy) {
+    case RTCRtcpMuxPolicyNegotiate:
+      return webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+    case RTCRtcpMuxPolicyRequire:
+      return webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire;
+  }
+}
+
++ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy:
+    (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy {
+  switch (nativePolicy) {
+    case webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate:
+      return RTCRtcpMuxPolicyNegotiate;
+    case webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire:
+      return RTCRtcpMuxPolicyRequire;
+  }
+}
+
++ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy {
+  switch (policy) {
+    case RTCRtcpMuxPolicyNegotiate:
+      return @"NEGOTIATE";
+    case RTCRtcpMuxPolicyRequire:
+      return @"REQUIRE";
+  }
+}
+
++ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)
+    nativeTcpCandidatePolicyForPolicy:(RTCTcpCandidatePolicy)policy {
+  switch (policy) {
+    case RTCTcpCandidatePolicyEnabled:
+      return webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+    case RTCTcpCandidatePolicyDisabled:
+      return webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled;
+  }
+}
+
++ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy:
+    (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy {
+  switch (nativePolicy) {
+    case webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled:
+      return RTCTcpCandidatePolicyEnabled;
+    case webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled:
+      return RTCTcpCandidatePolicyDisabled;
+  }
+}
+
++ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy {
+  switch (policy) {
+    case RTCTcpCandidatePolicyEnabled:
+      return @"TCP_ENABLED";
+    case RTCTcpCandidatePolicyDisabled:
+      return @"TCP_DISABLED";
+  }
+}
+
+@end
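
A configuration sketch built on the defaults above; the STUN URL is only an example, and the properties are assumed to be readwrite as declared in the public header:

  #import "WebRTC/RTCConfiguration.h"
  #import "WebRTC/RTCIceServer.h"

  static RTCConfiguration *MakeExampleConfiguration() {
    RTCConfiguration *config = [[RTCConfiguration alloc] init];
    RTCIceServer *stun =
        [[RTCIceServer alloc] initWithURLStrings:@[ @"stun:stun.example.org" ]];
    config.iceServers = @[ stun ];
    config.iceTransportPolicy = RTCIceTransportPolicyAll;
    config.bundlePolicy = RTCBundlePolicyMaxBundle;
    return config;
  }
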
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCDataChannel+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCDataChannel+Private.h
new file mode 100644
index 0000000..82e132f
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCDataChannel+Private.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCDataChannel.h"
+
+#include "webrtc/api/datachannelinterface.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCDataBuffer ()
+
+/**
+ * The native DataBuffer representation of this RTCDataBuffer object. This is
+ * needed to pass to the underlying C++ APIs.
+ */
+@property(nonatomic, readonly) const webrtc::DataBuffer *nativeDataBuffer;
+
+/** Initialize an RTCDataBuffer from a native DataBuffer. */
+- (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer&)nativeBuffer;
+
+@end
+
+
+@interface RTCDataChannel ()
+
+/** Initialize an RTCDataChannel from a native DataChannelInterface. */
+- (instancetype)initWithNativeDataChannel:
+    (rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel
+    NS_DESIGNATED_INITIALIZER;
+
++ (webrtc::DataChannelInterface::DataState)
+    nativeDataChannelStateForState:(RTCDataChannelState)state;
+
++ (RTCDataChannelState)dataChannelStateForNativeState:
+    (webrtc::DataChannelInterface::DataState)nativeState;
+
++ (NSString *)stringForState:(RTCDataChannelState)state;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCDataChannel.mm b/webrtc/sdk/objc/Framework/Classes/RTCDataChannel.mm
new file mode 100644
index 0000000..cdc7e98
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCDataChannel.mm
@@ -0,0 +1,234 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDataChannel+Private.h"
+
+#import "NSString+StdString.h"
+
+#include "webrtc/base/scoped_ptr.h"
+
+namespace webrtc {
+
+class DataChannelDelegateAdapter : public DataChannelObserver {
+ public:
+  DataChannelDelegateAdapter(RTCDataChannel *channel) { channel_ = channel; }
+
+  void OnStateChange() override {
+    [channel_.delegate dataChannelDidChangeState:channel_];
+  }
+
+  void OnMessage(const DataBuffer& buffer) override {
+    RTCDataBuffer *data_buffer =
+        [[RTCDataBuffer alloc] initWithNativeBuffer:buffer];
+    [channel_.delegate dataChannel:channel_
+       didReceiveMessageWithBuffer:data_buffer];
+  }
+
+  void OnBufferedAmountChange(uint64_t previousAmount) override {
+    id<RTCDataChannelDelegate> delegate = channel_.delegate;
+    SEL sel = @selector(dataChannel:didChangeBufferedAmount:);
+    if ([delegate respondsToSelector:sel]) {
+      [delegate dataChannel:channel_ didChangeBufferedAmount:previousAmount];
+    }
+  }
+
+ private:
+  __weak RTCDataChannel *channel_;
+};
+}  // namespace webrtc
+
+
+@implementation RTCDataBuffer {
+  rtc::scoped_ptr<webrtc::DataBuffer> _dataBuffer;
+}
+
+- (instancetype)initWithData:(NSData *)data isBinary:(BOOL)isBinary {
+  NSParameterAssert(data);
+  if (self = [super init]) {
+    rtc::CopyOnWriteBuffer buffer(
+        reinterpret_cast<const uint8_t*>(data.bytes), data.length);
+    _dataBuffer.reset(new webrtc::DataBuffer(buffer, isBinary));
+  }
+  return self;
+}
+
+- (NSData *)data {
+  return [NSData dataWithBytes:_dataBuffer->data.data()
+                        length:_dataBuffer->data.size()];
+}
+
+- (BOOL)isBinary {
+  return _dataBuffer->binary;
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer&)nativeBuffer {
+  if (self = [super init]) {
+    _dataBuffer.reset(new webrtc::DataBuffer(nativeBuffer));
+  }
+  return self;
+}
+
+- (const webrtc::DataBuffer *)nativeDataBuffer {
+  return _dataBuffer.get();
+}
+
+@end
+
+
+@implementation RTCDataChannel {
+  rtc::scoped_refptr<webrtc::DataChannelInterface> _nativeDataChannel;
+  rtc::scoped_ptr<webrtc::DataChannelDelegateAdapter> _observer;
+  BOOL _isObserverRegistered;
+}
+
+@synthesize delegate = _delegate;
+
+- (void)dealloc {
+  // Handles unregistering the observer properly. We need to do this because
+  // there may still be other references to the underlying data channel.
+  self.delegate = nil;
+}
+
+- (NSString *)label {
+  return [NSString stringForStdString:_nativeDataChannel->label()];
+}
+
+- (BOOL)isReliable {
+  return _nativeDataChannel->reliable();
+}
+
+- (BOOL)isOrdered {
+  return _nativeDataChannel->ordered();
+}
+
+- (NSUInteger)maxRetransmitTime {
+  return self.maxPacketLifeTime;
+}
+
+- (uint16_t)maxPacketLifeTime {
+  return _nativeDataChannel->maxRetransmitTime();
+}
+
+- (uint16_t)maxRetransmits {
+  return _nativeDataChannel->maxRetransmits();
+}
+
+- (NSString *)protocol {
+  return [NSString stringForStdString:_nativeDataChannel->protocol()];
+}
+
+- (BOOL)isNegotiated {
+  return _nativeDataChannel->negotiated();
+}
+
+- (NSInteger)streamId {
+  return self.channelId;
+}
+
+- (int)channelId {
+  return _nativeDataChannel->id();
+}
+
+- (RTCDataChannelState)readyState {
+  return [[self class] dataChannelStateForNativeState:
+      _nativeDataChannel->state()];
+}
+
+- (uint64_t)bufferedAmount {
+  return _nativeDataChannel->buffered_amount();
+}
+
+- (void)setDelegate:(id<RTCDataChannelDelegate>)delegate {
+  if (_delegate == delegate) {
+    return;
+  }
+  if (_isObserverRegistered) {
+    _nativeDataChannel->UnregisterObserver();
+    _isObserverRegistered = NO;
+  }
+  _delegate = delegate;
+  if (_delegate) {
+    _nativeDataChannel->RegisterObserver(_observer.get());
+    _isObserverRegistered = YES;
+  }
+}
+
+- (void)close {
+  _nativeDataChannel->Close();
+}
+
+- (BOOL)sendData:(RTCDataBuffer *)data {
+  return _nativeDataChannel->Send(*data.nativeDataBuffer);
+}
+
+- (NSString *)description {
+  return [NSString stringWithFormat:@"RTCDataChannel:\n%ld\n%@\n%@",
+                                    (long)self.channelId,
+                                    self.label,
+                                    [[self class]
+                                        stringForState:self.readyState]];
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithNativeDataChannel:
+    (rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel {
+  NSParameterAssert(nativeDataChannel);
+  if (self = [super init]) {
+    _nativeDataChannel = nativeDataChannel;
+    _observer.reset(new webrtc::DataChannelDelegateAdapter(self));
+  }
+  return self;
+}
+
++ (webrtc::DataChannelInterface::DataState)
+    nativeDataChannelStateForState:(RTCDataChannelState)state {
+  switch (state) {
+    case RTCDataChannelStateConnecting:
+      return webrtc::DataChannelInterface::DataState::kConnecting;
+    case RTCDataChannelStateOpen:
+      return webrtc::DataChannelInterface::DataState::kOpen;
+    case RTCDataChannelStateClosing:
+      return webrtc::DataChannelInterface::DataState::kClosing;
+    case RTCDataChannelStateClosed:
+      return webrtc::DataChannelInterface::DataState::kClosed;
+  }
+}
+
++ (RTCDataChannelState)dataChannelStateForNativeState:
+    (webrtc::DataChannelInterface::DataState)nativeState {
+  switch (nativeState) {
+    case webrtc::DataChannelInterface::DataState::kConnecting:
+      return RTCDataChannelStateConnecting;
+    case webrtc::DataChannelInterface::DataState::kOpen:
+      return RTCDataChannelStateOpen;
+    case webrtc::DataChannelInterface::DataState::kClosing:
+      return RTCDataChannelStateClosing;
+    case webrtc::DataChannelInterface::DataState::kClosed:
+      return RTCDataChannelStateClosed;
+  }
+}
+
++ (NSString *)stringForState:(RTCDataChannelState)state {
+  switch (state) {
+    case RTCDataChannelStateConnecting:
+      return @"Connecting";
+    case RTCDataChannelStateOpen:
+      return @"Open";
+    case RTCDataChannelStateClosing:
+      return @"Closing";
+    case RTCDataChannelStateClosed:
+      return @"Closed";
+  }
+}
+
+@end
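
A send-path sketch for the wrapper above; |channel| is assumed to come from an RTCPeerConnection, and the function name is illustrative:

  #import "WebRTC/RTCDataChannel.h"

  static BOOL SendTextMessage(RTCDataChannel *channel, NSString *text) {
    if (channel.readyState != RTCDataChannelStateOpen) {
      return NO;
    }
    NSData *payload = [text dataUsingEncoding:NSUTF8StringEncoding];
    RTCDataBuffer *buffer =
        [[RTCDataBuffer alloc] initWithData:payload isBinary:NO];
    return [channel sendData:buffer];
  }
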
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCDataChannelConfiguration+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCDataChannelConfiguration+Private.h
new file mode 100644
index 0000000..e9ea747
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCDataChannelConfiguration+Private.h
@@ -0,0 +1,23 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCDataChannelConfiguration.h"
+
+#include "webrtc/api/datachannelinterface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCDataChannelConfiguration ()
+
+@property(nonatomic, readonly) webrtc::DataChannelInit nativeDataChannelInit;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCDataChannelConfiguration.mm b/webrtc/sdk/objc/Framework/Classes/RTCDataChannelConfiguration.mm
new file mode 100644
index 0000000..89c56de
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCDataChannelConfiguration.mm
@@ -0,0 +1,83 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDataChannelConfiguration+Private.h"
+
+#import "NSString+StdString.h"
+
+@implementation RTCDataChannelConfiguration
+
+@synthesize nativeDataChannelInit = _nativeDataChannelInit;
+
+- (BOOL)isOrdered {
+  return _nativeDataChannelInit.ordered;
+}
+
+- (void)setIsOrdered:(BOOL)isOrdered {
+  _nativeDataChannelInit.ordered = isOrdered;
+}
+
+- (NSInteger)maxRetransmitTimeMs {
+  return self.maxPacketLifeTime;
+}
+
+- (void)setMaxRetransmitTimeMs:(NSInteger)maxRetransmitTimeMs {
+  self.maxPacketLifeTime = maxRetransmitTimeMs;
+}
+
+- (int)maxPacketLifeTime {
+  return _nativeDataChannelInit.maxRetransmitTime;
+}
+
+- (void)setMaxPacketLifeTime:(int)maxPacketLifeTime {
+  _nativeDataChannelInit.maxRetransmitTime = maxPacketLifeTime;
+}
+
+- (int)maxRetransmits {
+  return _nativeDataChannelInit.maxRetransmits;
+}
+
+- (void)setMaxRetransmits:(int)maxRetransmits {
+  _nativeDataChannelInit.maxRetransmits = maxRetransmits;
+}
+
+- (NSString *)protocol {
+  return [NSString stringForStdString:_nativeDataChannelInit.protocol];
+}
+
+- (void)setProtocol:(NSString *)protocol {
+  _nativeDataChannelInit.protocol = [NSString stdStringForString:protocol];
+}
+
+- (BOOL)isNegotiated {
+  return _nativeDataChannelInit.negotiated;
+}
+
+- (void)setIsNegotiated:(BOOL)isNegotiated {
+  _nativeDataChannelInit.negotiated = isNegotiated;
+}
+
+- (int)streamId {
+  return self.channelId;
+}
+
+- (void)setStreamId:(int)streamId {
+  self.channelId = streamId;
+}
+
+- (int)channelId {
+  return _nativeDataChannelInit.id;
+}
+
+- (void)setChannelId:(int)channelId {
+  _nativeDataChannelInit.id = channelId;
+}
+
+@end
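
A sketch of filling in the configuration above; the values are illustrative:

  #import "WebRTC/RTCDataChannelConfiguration.h"

  static RTCDataChannelConfiguration *MakeUnreliableChannelConfig() {
    RTCDataChannelConfiguration *config =
        [[RTCDataChannelConfiguration alloc] init];
    config.isOrdered = NO;
    config.maxRetransmits = 0;  // Drop rather than retransmit.
    return config;
  }
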
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCDispatcher+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCDispatcher+Private.h
new file mode 100644
index 0000000..3c114e5
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCDispatcher+Private.h
@@ -0,0 +1,17 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCDispatcher.h"
+
+@interface RTCDispatcher ()
+
++ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType;
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCDispatcher.m b/webrtc/sdk/objc/Framework/Classes/RTCDispatcher.m
new file mode 100644
index 0000000..94176ac
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCDispatcher.m
@@ -0,0 +1,50 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDispatcher+Private.h"
+
+static dispatch_queue_t kAudioSessionQueue = nil;
+static dispatch_queue_t kCaptureSessionQueue = nil;
+
+@implementation RTCDispatcher
+
++ (void)initialize {
+  static dispatch_once_t onceToken;
+  dispatch_once(&onceToken, ^{
+    kAudioSessionQueue = dispatch_queue_create(
+        "org.webrtc.RTCDispatcherAudioSession",
+        DISPATCH_QUEUE_SERIAL);
+    kCaptureSessionQueue = dispatch_queue_create(
+        "org.webrtc.RTCDispatcherCaptureSession",
+        DISPATCH_QUEUE_SERIAL);
+  });
+}
+
++ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
+                      block:(dispatch_block_t)block {
+  dispatch_queue_t queue = [self dispatchQueueForType:dispatchType];
+  dispatch_async(queue, block);
+}
+
+#pragma mark - Private
+
++ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType {
+  switch (dispatchType) {
+    case RTCDispatcherTypeMain:
+      return dispatch_get_main_queue();
+    case RTCDispatcherTypeCaptureSession:
+      return kCaptureSessionQueue;
+    case RTCDispatcherTypeAudioSession:
+      return kAudioSessionQueue;
+  }
+}
+
+@end
+
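
A dispatch sketch for the helper above; the session reconfiguration is only an example of work that belongs on the capture-session queue:

  #import <AVFoundation/AVFoundation.h>
  #import "WebRTC/RTCDispatcher.h"

  static void ReconfigureCaptureSessionAsync(AVCaptureSession *session) {
    [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                 block:^{
      // AVCaptureSession work stays on its dedicated serial queue.
      [session beginConfiguration];
      [session commitConfiguration];
    }];
  }
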
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m b/webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m
new file mode 100644
index 0000000..d215265
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m
@@ -0,0 +1,263 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCEAGLVideoView.h"
+
+#import <GLKit/GLKit.h>
+
+#import "RTCOpenGLVideoRenderer.h"
+#import "WebRTC//RTCVideoFrame.h"
+
+// RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
+// refreshes, which should be 30fps. We wrap the display link in order to avoid
+// a retain cycle since CADisplayLink takes a strong reference onto its target.
+// The timer is paused by default.
+@interface RTCDisplayLinkTimer : NSObject
+
+@property(nonatomic) BOOL isPaused;
+
+- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler;
+- (void)invalidate;
+
+@end
+
+@implementation RTCDisplayLinkTimer {
+  CADisplayLink *_displayLink;
+  void (^_timerHandler)(void);
+}
+
+- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler {
+  NSParameterAssert(timerHandler);
+  if (self = [super init]) {
+    _timerHandler = timerHandler;
+    _displayLink =
+        [CADisplayLink displayLinkWithTarget:self
+                                    selector:@selector(displayLinkDidFire:)];
+    _displayLink.paused = YES;
+    // Fire at half the screen refresh rate, which should be 30 fps.
+    [_displayLink setFrameInterval:2];
+    [_displayLink addToRunLoop:[NSRunLoop currentRunLoop]
+                       forMode:NSRunLoopCommonModes];
+  }
+  return self;
+}
+
+- (void)dealloc {
+  [self invalidate];
+}
+
+- (BOOL)isPaused {
+  return _displayLink.paused;
+}
+
+- (void)setIsPaused:(BOOL)isPaused {
+  _displayLink.paused = isPaused;
+}
+
+- (void)invalidate {
+  [_displayLink invalidate];
+}
+
+- (void)displayLinkDidFire:(CADisplayLink *)displayLink {
+  _timerHandler();
+}
+
+@end
+
+// RTCEAGLVideoView wraps a GLKView which is set up with
+// enableSetNeedsDisplay = NO for the purpose of gaining control of
+// exactly when to call -[GLKView display]. This extra control is
+// needed to avoid triggering method calls on GLKView that result in
+// attempting to bind the underlying render buffer when the drawable
+// size is empty, which would result in the error
+// GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. -[GLKView display] is the
+// method that triggers the binding of the render buffer. Because the
+// standard behavior of -[UIView setNeedsDisplay] is disabled for the
+// reasons above, RTCEAGLVideoView maintains its own |isDirty| flag
+// to track when it needs to redraw.
+
+@interface RTCEAGLVideoView () <GLKViewDelegate>
+// |videoFrame| is set when we receive a frame from a worker thread and is read
+// from the display link callback so atomicity is required.
+@property(atomic, strong) RTCVideoFrame *videoFrame;
+@property(nonatomic, readonly) GLKView *glkView;
+@property(nonatomic, readonly) RTCOpenGLVideoRenderer *glRenderer;
+@end
+
+@implementation RTCEAGLVideoView {
+  RTCDisplayLinkTimer *_timer;
+  // This flag should only be set and read on the main thread (e.g. by
+  // -setNeedsDisplay).
+  BOOL _isDirty;
+}
+
+@synthesize delegate = _delegate;
+@synthesize videoFrame = _videoFrame;
+@synthesize glkView = _glkView;
+@synthesize glRenderer = _glRenderer;
+
+- (instancetype)initWithFrame:(CGRect)frame {
+  if (self = [super initWithFrame:frame]) {
+    [self configure];
+  }
+  return self;
+}
+
+- (instancetype)initWithCoder:(NSCoder *)aDecoder {
+  if (self = [super initWithCoder:aDecoder]) {
+    [self configure];
+  }
+  return self;
+}
+
+- (void)configure {
+  EAGLContext *glContext =
+    [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
+  if (!glContext) {
+    glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+  }
+  _glRenderer = [[RTCOpenGLVideoRenderer alloc] initWithContext:glContext];
+
+  // GLKView manages a framebuffer for us.
+  _glkView = [[GLKView alloc] initWithFrame:CGRectZero
+                                    context:glContext];
+  _glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888;
+  _glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone;
+  _glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone;
+  _glkView.drawableMultisample = GLKViewDrawableMultisampleNone;
+  _glkView.delegate = self;
+  _glkView.layer.masksToBounds = YES;
+  _glkView.enableSetNeedsDisplay = NO;
+  [self addSubview:_glkView];
+
+  // Listen to application state in order to clean up OpenGL before app goes
+  // away.
+  NSNotificationCenter *notificationCenter =
+    [NSNotificationCenter defaultCenter];
+  [notificationCenter addObserver:self
+                         selector:@selector(willResignActive)
+                             name:UIApplicationWillResignActiveNotification
+                           object:nil];
+  [notificationCenter addObserver:self
+                         selector:@selector(didBecomeActive)
+                             name:UIApplicationDidBecomeActiveNotification
+                           object:nil];
+
+  // Frames are received on a separate thread, so we poll for the current
+  // frame at a rate proportional to the screen refresh frequency. This
+  // occurs on the main thread.
+  __weak RTCEAGLVideoView *weakSelf = self;
+  _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{
+      RTCEAGLVideoView *strongSelf = weakSelf;
+      [strongSelf displayLinkTimerDidFire];
+    }];
+  [self setupGL];
+}
+
+- (void)dealloc {
+  [[NSNotificationCenter defaultCenter] removeObserver:self];
+  UIApplicationState appState =
+      [UIApplication sharedApplication].applicationState;
+  if (appState == UIApplicationStateActive) {
+    [self teardownGL];
+  }
+  [_timer invalidate];
+}
+
+#pragma mark - UIView
+
+- (void)setNeedsDisplay {
+  [super setNeedsDisplay];
+  _isDirty = YES;
+}
+
+- (void)setNeedsDisplayInRect:(CGRect)rect {
+  [super setNeedsDisplayInRect:rect];
+  _isDirty = YES;
+}
+
+- (void)layoutSubviews {
+  [super layoutSubviews];
+  _glkView.frame = self.bounds;
+}
+
+#pragma mark - GLKViewDelegate
+
+// This method is called when the GLKView's content is dirty and needs to be
+// redrawn. This occurs on main thread.
+- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
+  // The renderer will draw the frame to the framebuffer corresponding to the
+  // one used by |view|.
+  [_glRenderer drawFrame:self.videoFrame];
+}
+
+#pragma mark - RTCVideoRenderer
+
+// These methods may be called on non-main thread.
+- (void)setSize:(CGSize)size {
+  __weak RTCEAGLVideoView *weakSelf = self;
+  dispatch_async(dispatch_get_main_queue(), ^{
+    RTCEAGLVideoView *strongSelf = weakSelf;
+    [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
+  });
+}
+
+- (void)renderFrame:(RTCVideoFrame *)frame {
+  // Generate the i420 frame on video send thread instead of main thread.
+  // TODO(tkchin): Remove this once RTCEAGLVideoView supports uploading
+  // CVPixelBuffer textures.
+  [frame convertBufferIfNeeded];
+  self.videoFrame = frame;
+}
+
+#pragma mark - Private
+
+- (void)displayLinkTimerDidFire {
+  // Don't render unless the video frame has changed or the view content
+  // has explicitly been marked dirty.
+  if (!_isDirty && _glRenderer.lastDrawnFrame == self.videoFrame) {
+    return;
+  }
+
+  // Always reset isDirty at this point, even if -[GLKView display]
+  // won't be called because the drawable size is empty.
+  _isDirty = NO;
+
+  // Only call -[GLKView display] if the drawable size is
+  // non-empty. Calling display will make the GLKView setup its
+  // render buffer if necessary, but that will fail with error
+  // GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT if size is empty.
+  if (self.bounds.size.width > 0 && self.bounds.size.height > 0) {
+    [_glkView display];
+  }
+}
+
+- (void)setupGL {
+  self.videoFrame = nil;
+  [_glRenderer setupGL];
+  _timer.isPaused = NO;
+}
+
+- (void)teardownGL {
+  self.videoFrame = nil;
+  _timer.isPaused = YES;
+  [_glkView deleteDrawable];
+  [_glRenderer teardownGL];
+}
+
+- (void)didBecomeActive {
+  [self setupGL];
+}
+
+- (void)willResignActive {
+  [self teardownGL];
+}
+
+@end
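
A delegate sketch for the size callback above, assuming the RTCEAGLVideoViewDelegate protocol declared in the public header; the controller class is illustrative:

  #import <UIKit/UIKit.h>
  #import "WebRTC/RTCEAGLVideoView.h"

  @interface ExampleRenderController : NSObject <RTCEAGLVideoViewDelegate>
  @end

  @implementation ExampleRenderController

  - (void)videoView:(RTCEAGLVideoView *)videoView
      didChangeVideoSize:(CGSize)size {
    // Resize the view to match the incoming video dimensions.
    CGRect frame = videoView.frame;
    frame.size = size;
    videoView.frame = frame;
  }

  @end
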
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCFieldTrials.mm b/webrtc/sdk/objc/Framework/Classes/RTCFieldTrials.mm
new file mode 100644
index 0000000..38c293f
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCFieldTrials.mm
@@ -0,0 +1,37 @@
+/*
+ *  Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCFieldTrials.h"
+
+#include <memory>
+
+#import "WebRTC/RTCLogging.h"
+
+#include "webrtc/system_wrappers/include/field_trial_default.h"
+
+static NSString * const kRTCEnableSendSideBweString =
+    @"WebRTC-SendSideBwe/Enabled/";
+static std::unique_ptr<char[]> gFieldTrialInitString;
+
+void RTCInitFieldTrials(RTCFieldTrialOptions options) {
+  NSMutableString *fieldTrialInitString = [NSMutableString string];
+  if (options & RTCFieldTrialOptionsSendSideBwe) {
+    [fieldTrialInitString appendString:kRTCEnableSendSideBweString];
+  }
+  size_t len = fieldTrialInitString.length + 1;
+  gFieldTrialInitString.reset(new char[len]);
+  if (![fieldTrialInitString getCString:gFieldTrialInitString.get()
+                              maxLength:len
+                               encoding:NSUTF8StringEncoding]) {
+    RTCLogError(@"Failed to convert field trial string.");
+    return;
+  }
+  webrtc::field_trial::InitFieldTrialsFromString(gFieldTrialInitString.get());
+}
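
A sketch of enabling the flag above; call it early, before the first peer connection factory is created, so the trial string is in place when the native stack reads it:

  #import "WebRTC/RTCFieldTrials.h"

  static void EnableSendSideBweFieldTrial() {
    RTCInitFieldTrials(RTCFieldTrialOptionsSendSideBwe);
  }
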
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCFileLogger.mm b/webrtc/sdk/objc/Framework/Classes/RTCFileLogger.mm
new file mode 100644
index 0000000..73335f3
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCFileLogger.mm
@@ -0,0 +1,175 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCFileLogger.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/filerotatingstream.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/logsinks.h"
+#include "webrtc/base/scoped_ptr.h"
+
+NSString *const kDefaultLogDirName = @"webrtc_logs";
+NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB.
+const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log";
+
+@implementation RTCFileLogger {
+  BOOL _hasStarted;
+  NSString *_dirPath;
+  NSUInteger _maxFileSize;
+  rtc::scoped_ptr<rtc::FileRotatingLogSink> _logSink;
+}
+
+@synthesize severity = _severity;
+@synthesize rotationType = _rotationType;
+@synthesize shouldDisableBuffering = _shouldDisableBuffering;
+
+- (instancetype)init {
+  NSArray *paths = NSSearchPathForDirectoriesInDomains(
+      NSDocumentDirectory, NSUserDomainMask, YES);
+  NSString *documentsDirPath = [paths firstObject];
+  NSString *defaultDirPath =
+      [documentsDirPath stringByAppendingPathComponent:kDefaultLogDirName];
+  return [self initWithDirPath:defaultDirPath
+                   maxFileSize:kDefaultMaxFileSize];
+}
+
+- (instancetype)initWithDirPath:(NSString *)dirPath
+                    maxFileSize:(NSUInteger)maxFileSize {
+  return [self initWithDirPath:dirPath
+                   maxFileSize:maxFileSize
+                  rotationType:RTCFileLoggerTypeCall];
+}
+
+- (instancetype)initWithDirPath:(NSString *)dirPath
+                    maxFileSize:(NSUInteger)maxFileSize
+                   rotationType:(RTCFileLoggerRotationType)rotationType {
+  NSParameterAssert(dirPath.length);
+  NSParameterAssert(maxFileSize);
+  if (self = [super init]) {
+    BOOL isDir = NO;
+    NSFileManager *fileManager = [NSFileManager defaultManager];
+    if ([fileManager fileExistsAtPath:dirPath isDirectory:&isDir]) {
+      if (!isDir) {
+        // Bail if something already exists there.
+        return nil;
+      }
+    } else {
+      if (![fileManager createDirectoryAtPath:dirPath
+                  withIntermediateDirectories:NO
+                                   attributes:nil
+                                        error:nil]) {
+        // Bail if we failed to create a directory.
+        return nil;
+      }
+    }
+    _dirPath = dirPath;
+    _maxFileSize = maxFileSize;
+    _severity = RTCFileLoggerSeverityInfo;
+  }
+  return self;
+}
+
+- (void)dealloc {
+  [self stop];
+}
+
+- (void)start {
+  if (_hasStarted) {
+    return;
+  }
+  switch (_rotationType) {
+    case RTCFileLoggerTypeApp:
+      _logSink.reset(
+          new rtc::FileRotatingLogSink(_dirPath.UTF8String,
+                                       kRTCFileLoggerRotatingLogPrefix,
+                                       _maxFileSize,
+                                       _maxFileSize / 10));
+      break;
+    case RTCFileLoggerTypeCall:
+      _logSink.reset(
+          new rtc::CallSessionFileRotatingLogSink(_dirPath.UTF8String,
+                                                  _maxFileSize));
+      break;
+  }
+  if (!_logSink->Init()) {
+    LOG(LS_ERROR) << "Failed to open log files at path: "
+                  << _dirPath.UTF8String;
+    _logSink.reset();
+    return;
+  }
+  if (_shouldDisableBuffering) {
+    _logSink->DisableBuffering();
+  }
+  rtc::LogMessage::LogThreads(true);
+  rtc::LogMessage::LogTimestamps(true);
+  rtc::LogMessage::AddLogToStream(_logSink.get(), [self rtcSeverity]);
+  _hasStarted = YES;
+}
+
+- (void)stop {
+  if (!_hasStarted) {
+    return;
+  }
+  RTC_DCHECK(_logSink);
+  rtc::LogMessage::RemoveLogToStream(_logSink.get());
+  _hasStarted = NO;
+  _logSink.reset();
+}
+
+- (NSData *)logData {
+  if (_hasStarted) {
+    return nil;
+  }
+  NSMutableData* logData = [NSMutableData data];
+  rtc::scoped_ptr<rtc::FileRotatingStream> stream;
+  switch (_rotationType) {
+    case RTCFileLoggerTypeApp:
+      stream.reset(
+          new rtc::FileRotatingStream(_dirPath.UTF8String,
+                                      kRTCFileLoggerRotatingLogPrefix));
+      break;
+    case RTCFileLoggerTypeCall:
+      stream.reset(new rtc::CallSessionFileRotatingStream(_dirPath.UTF8String));
+      break;
+  }
+  if (!stream->Open()) {
+    return logData;
+  }
+  size_t bufferSize = 0;
+  if (!stream->GetSize(&bufferSize) || bufferSize == 0) {
+    return logData;
+  }
+  size_t read = 0;
+  // Allocate memory using malloc so we can pass it directly to NSData without
+  // copying.
+  rtc::scoped_ptr<uint8_t[]> buffer(static_cast<uint8_t*>(malloc(bufferSize)));
+  stream->ReadAll(buffer.get(), bufferSize, &read, nullptr);
+  logData = [[NSMutableData alloc] initWithBytesNoCopy:buffer.release()
+                                                length:read];
+  return logData;
+}
+
+#pragma mark - Private
+
+- (rtc::LoggingSeverity)rtcSeverity {
+  switch (_severity) {
+    case RTCFileLoggerSeverityVerbose:
+      return rtc::LS_VERBOSE;
+    case RTCFileLoggerSeverityInfo:
+      return rtc::LS_INFO;
+    case RTCFileLoggerSeverityWarning:
+      return rtc::LS_WARNING;
+    case RTCFileLoggerSeverityError:
+      return rtc::LS_ERROR;
+  }
+}
+
+@end
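
A capture-and-collect sketch for the logger above (the severity value is illustrative):

  #import "WebRTC/RTCFileLogger.h"

  static NSData *CollectCallLogs() {
    RTCFileLogger *logger = [[RTCFileLogger alloc] init];
    logger.severity = RTCFileLoggerSeverityInfo;
    [logger start];
    // ... run the call ...
    [logger stop];
    // -logData returns nil while the logger is still running.
    return [logger logData];
  }
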
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCIceCandidate+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCIceCandidate+Private.h
new file mode 100644
index 0000000..ba3ffb6
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCIceCandidate+Private.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCIceCandidate.h"
+
+#include "webrtc/api/jsep.h"
+#include "webrtc/base/scoped_ptr.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCIceCandidate ()
+
+/**
+ * The native IceCandidateInterface representation of this RTCIceCandidate
+ * object. This is needed to pass to the underlying C++ APIs.
+ */
+@property(nonatomic, readonly)
+    rtc::scoped_ptr<webrtc::IceCandidateInterface> nativeCandidate;
+
+/**
+ * Initialize an RTCIceCandidate from a native IceCandidateInterface. No
+ * ownership is taken of the native candidate.
+ */
+- (instancetype)initWithNativeCandidate:
+    (const webrtc::IceCandidateInterface *)candidate;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCIceCandidate.mm b/webrtc/sdk/objc/Framework/Classes/RTCIceCandidate.mm
new file mode 100644
index 0000000..7b1e655
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCIceCandidate.mm
@@ -0,0 +1,69 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceCandidate+Private.h"
+
+#import "NSString+StdString.h"
+#import "WebRTC/RTCLogging.h"
+
+@implementation RTCIceCandidate
+
+@synthesize sdpMid = _sdpMid;
+@synthesize sdpMLineIndex = _sdpMLineIndex;
+@synthesize sdp = _sdp;
+
+- (instancetype)initWithSdp:(NSString *)sdp
+              sdpMLineIndex:(int)sdpMLineIndex
+                     sdpMid:(NSString *)sdpMid {
+  NSParameterAssert(sdp.length);
+  if (self = [super init]) {
+    _sdpMid = [sdpMid copy];
+    _sdpMLineIndex = sdpMLineIndex;
+    _sdp = [sdp copy];
+  }
+  return self;
+}
+
+- (NSString *)description {
+  return [NSString stringWithFormat:@"RTCIceCandidate:\n%@\n%d\n%@",
+                                    _sdpMid,
+                                    _sdpMLineIndex,
+                                    _sdp];
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithNativeCandidate:
+    (const webrtc::IceCandidateInterface *)candidate {
+  NSParameterAssert(candidate);
+  std::string sdp;
+  candidate->ToString(&sdp);
+
+  return [self initWithSdp:[NSString stringForStdString:sdp]
+             sdpMLineIndex:candidate->sdp_mline_index()
+                    sdpMid:[NSString stringForStdString:candidate->sdp_mid()]];
+}
+
+- (rtc::scoped_ptr<webrtc::IceCandidateInterface>)nativeCandidate {
+  webrtc::SdpParseError error;
+
+  webrtc::IceCandidateInterface *candidate = webrtc::CreateIceCandidate(
+      _sdpMid.stdString, _sdpMLineIndex, _sdp.stdString, &error);
+
+  if (!candidate) {
+    RTCLog(@"Failed to create ICE candidate: %s\nline: %s",
+           error.description.c_str(),
+           error.line.c_str());
+  }
+
+  return rtc::scoped_ptr<webrtc::IceCandidateInterface>(candidate);
+}
+
+@end
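
A construction sketch for the initializer above; sdpMid and the m-line index normally arrive with the candidate SDP in the remote signaling message:

  #import "WebRTC/RTCIceCandidate.h"

  static RTCIceCandidate *CandidateFromSignaling(NSString *sdp,
                                                 int mLineIndex,
                                                 NSString *sdpMid) {
    return [[RTCIceCandidate alloc] initWithSdp:sdp
                                  sdpMLineIndex:mLineIndex
                                         sdpMid:sdpMid];
  }
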
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCIceServer+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCIceServer+Private.h
new file mode 100644
index 0000000..7efeda4
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCIceServer+Private.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCIceServer.h"
+
+#include "webrtc/api/peerconnectioninterface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCIceServer ()
+
+/**
+ * IceServer struct representation of this RTCIceServer object's data.
+ * This is needed to pass to the underlying C++ APIs.
+ */
+@property(nonatomic, readonly)
+    webrtc::PeerConnectionInterface::IceServer nativeServer;
+
+/** Initialize an RTCIceServer from a native IceServer. */
+- (instancetype)initWithNativeServer:
+    (webrtc::PeerConnectionInterface::IceServer)nativeServer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCIceServer.mm b/webrtc/sdk/objc/Framework/Classes/RTCIceServer.mm
new file mode 100644
index 0000000..41084b9
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCIceServer.mm
@@ -0,0 +1,78 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceServer+Private.h"
+
+#import "NSString+StdString.h"
+
+@implementation RTCIceServer
+
+@synthesize urlStrings = _urlStrings;
+@synthesize username = _username;
+@synthesize credential = _credential;
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings {
+  NSParameterAssert(urlStrings.count);
+  return [self initWithURLStrings:urlStrings
+                         username:nil
+                       credential:nil];
+}
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+                          username:(NSString *)username
+                        credential:(NSString *)credential {
+  NSParameterAssert(urlStrings.count);
+  if (self = [super init]) {
+    _urlStrings = [[NSArray alloc] initWithArray:urlStrings copyItems:YES];
+    _username = [username copy];
+    _credential = [credential copy];
+  }
+  return self;
+}
+
+- (NSString *)description {
+  return [NSString stringWithFormat:@"RTCIceServer:\n%@\n%@\n%@",
+                                    _urlStrings,
+                                    _username,
+                                    _credential];
+}
+
+#pragma mark - Private
+
+- (webrtc::PeerConnectionInterface::IceServer)nativeServer {
+  __block webrtc::PeerConnectionInterface::IceServer iceServer;
+
+  iceServer.username = [NSString stdStringForString:_username];
+  iceServer.password = [NSString stdStringForString:_credential];
+
+  [_urlStrings enumerateObjectsUsingBlock:^(NSString *url,
+                                            NSUInteger idx,
+                                            BOOL *stop) {
+    iceServer.urls.push_back(url.stdString);
+  }];
+  return iceServer;
+}
+
+- (instancetype)initWithNativeServer:
+    (webrtc::PeerConnectionInterface::IceServer)nativeServer {
+  NSMutableArray *urls =
+      [NSMutableArray arrayWithCapacity:nativeServer.urls.size()];
+  for (auto const &url : nativeServer.urls) {
+    [urls addObject:[NSString stringForStdString:url]];
+  }
+  NSString *username = [NSString stringForStdString:nativeServer.username];
+  NSString *credential = [NSString stringForStdString:nativeServer.password];
+  self = [self initWithURLStrings:urls
+                         username:username
+                       credential:credential];
+  return self;
+}
+
+@end
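A minimal usage sketch of RTCIceServer; the URLs and credentials are placeholder values:

  RTCIceServer *stun =
      [[RTCIceServer alloc] initWithURLStrings:@[ @"stun:stun.example.org" ]];
  RTCIceServer *turn =
      [[RTCIceServer alloc] initWithURLStrings:@[ @"turn:turn.example.org" ]
                                      username:@"user"
                                    credential:@"secret"];
  // nativeServer copies the strings into the struct consumed by the C++ API.
  webrtc::PeerConnectionInterface::IceServer native = turn.nativeServer;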
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCLogging.mm b/webrtc/sdk/objc/Framework/Classes/RTCLogging.mm
new file mode 100644
index 0000000..ef62d1f
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCLogging.mm
@@ -0,0 +1,47 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCLogging.h"
+
+#include "webrtc/base/logging.h"
+
+rtc::LoggingSeverity RTCGetNativeLoggingSeverity(RTCLoggingSeverity severity) {
+  switch (severity) {
+    case RTCLoggingSeverityVerbose:
+      return rtc::LS_VERBOSE;
+    case RTCLoggingSeverityInfo:
+      return rtc::LS_INFO;
+    case RTCLoggingSeverityWarning:
+      return rtc::LS_WARNING;
+    case RTCLoggingSeverityError:
+      return rtc::LS_ERROR;
+  }
+}
+
+void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string) {
+  if (log_string.length) {
+    const char* utf8_string = log_string.UTF8String;
+    LOG_V(RTCGetNativeLoggingSeverity(severity)) << utf8_string;
+  }
+}
+
+void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity) {
+  rtc::LogMessage::LogToDebug(RTCGetNativeLoggingSeverity(severity));
+}
+
+NSString* RTCFileName(const char* file_path) {
+  NSString* ns_file_path =
+      [[NSString alloc] initWithBytesNoCopy:const_cast<char*>(file_path)
+                                     length:strlen(file_path)
+                                   encoding:NSUTF8StringEncoding
+                               freeWhenDone:NO];
+  return ns_file_path.lastPathComponent;
+}
+
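A minimal usage sketch of the logging entry points above; the file path is a placeholder:

  // Route messages at or above Info severity to the native logging backend.
  RTCSetMinDebugLogLevel(RTCLoggingSeverityInfo);
  RTCLogEx(RTCLoggingSeverityWarning, @"example warning message");
  // RTCFileName trims a __FILE__-style path down to its final component.
  NSString *fileName = RTCFileName("/path/to/Foo.mm");  // => @"Foo.mm"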
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCMediaConstraints+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCMediaConstraints+Private.h
new file mode 100644
index 0000000..3662c44
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCMediaConstraints+Private.h
@@ -0,0 +1,53 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCMediaConstraints.h"
+
+#include "webrtc/api/mediaconstraintsinterface.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace webrtc {
+
+class MediaConstraints : public MediaConstraintsInterface {
+ public:
+  virtual ~MediaConstraints();
+  MediaConstraints();
+  MediaConstraints(
+      const MediaConstraintsInterface::Constraints& mandatory,
+      const MediaConstraintsInterface::Constraints& optional);
+  virtual const Constraints& GetMandatory() const;
+  virtual const Constraints& GetOptional() const;
+
+ private:
+  MediaConstraintsInterface::Constraints mandatory_;
+  MediaConstraintsInterface::Constraints optional_;
+};
+
+}  // namespace webrtc
+
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCMediaConstraints ()
+
+/**
+ * A MediaConstraints representation of this RTCMediaConstraints object. This is
+ * needed to pass to the underlying C++ APIs.
+ */
+- (rtc::scoped_ptr<webrtc::MediaConstraints>)nativeConstraints;
+
+/** Return a native Constraints object representing these constraints. */
++ (webrtc::MediaConstraintsInterface::Constraints)
+    nativeConstraintsForConstraints:
+        (NSDictionary<NSString *, NSString *> *)constraints;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCMediaConstraints.mm b/webrtc/sdk/objc/Framework/Classes/RTCMediaConstraints.mm
new file mode 100644
index 0000000..7a7cdf1
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCMediaConstraints.mm
@@ -0,0 +1,92 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaConstraints+Private.h"
+
+#import "NSString+StdString.h"
+
+namespace webrtc {
+
+MediaConstraints::~MediaConstraints() {}
+
+MediaConstraints::MediaConstraints() {}
+
+MediaConstraints::MediaConstraints(
+    const MediaConstraintsInterface::Constraints& mandatory,
+    const MediaConstraintsInterface::Constraints& optional)
+    : mandatory_(mandatory), optional_(optional) {}
+
+const MediaConstraintsInterface::Constraints&
+MediaConstraints::GetMandatory() const {
+  return mandatory_;
+}
+
+const MediaConstraintsInterface::Constraints&
+MediaConstraints::GetOptional() const {
+  return optional_;
+}
+
+}  // namespace webrtc
+
+
+@implementation RTCMediaConstraints {
+  NSDictionary<NSString *, NSString *> *_mandatory;
+  NSDictionary<NSString *, NSString *> *_optional;
+}
+
+- (instancetype)initWithMandatoryConstraints:
+    (NSDictionary<NSString *, NSString *> *)mandatory
+                         optionalConstraints:
+    (NSDictionary<NSString *, NSString *> *)optional {
+  if (self = [super init]) {
+    _mandatory = [[NSDictionary alloc] initWithDictionary:mandatory
+                                                copyItems:YES];
+    _optional = [[NSDictionary alloc] initWithDictionary:optional
+                                               copyItems:YES];
+  }
+  return self;
+}
+
+- (NSString *)description {
+  return [NSString stringWithFormat:@"RTCMediaConstraints:\n%@\n%@",
+                                    _mandatory,
+                                    _optional];
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_ptr<webrtc::MediaConstraints>)nativeConstraints {
+  webrtc::MediaConstraintsInterface::Constraints mandatory =
+      [[self class] nativeConstraintsForConstraints:_mandatory];
+  webrtc::MediaConstraintsInterface::Constraints optional =
+      [[self class] nativeConstraintsForConstraints:_optional];
+
+  webrtc::MediaConstraints *nativeConstraints =
+      new webrtc::MediaConstraints(mandatory, optional);
+  return rtc::scoped_ptr<webrtc::MediaConstraints>(nativeConstraints);
+}
+
++ (webrtc::MediaConstraintsInterface::Constraints)
+    nativeConstraintsForConstraints:
+        (NSDictionary<NSString *, NSString *> *)constraints {
+  webrtc::MediaConstraintsInterface::Constraints nativeConstraints;
+  for (NSString *key in constraints) {
+    NSAssert([key isKindOfClass:[NSString class]],
+             @"%@ is not an NSString.", key);
+    NSString *value = [constraints objectForKey:key];
+    NSAssert([value isKindOfClass:[NSString class]],
+             @"%@ is not an NSString.", value);
+    nativeConstraints.push_back(webrtc::MediaConstraintsInterface::Constraint(
+        key.stdString, value.stdString));
+  }
+  return nativeConstraints;
+}
+
+@end
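A minimal usage sketch of RTCMediaConstraints; the constraint keys shown are illustrative only:

  RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc]
      initWithMandatoryConstraints:@{ @"OfferToReceiveAudio" : @"true" }
               optionalConstraints:@{ @"DtlsSrtpKeyAgreement" : @"true" }];
  // The native constraints are what the underlying C++ APIs consume.
  rtc::scoped_ptr<webrtc::MediaConstraints> native =
      constraints.nativeConstraints;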
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCMediaStream+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCMediaStream+Private.h
new file mode 100644
index 0000000..7f87072
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCMediaStream+Private.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCMediaStream.h"
+
+#include "webrtc/api/mediastreaminterface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCMediaStream ()
+
+/**
+ * MediaStreamInterface representation of this RTCMediaStream object. This is
+ * needed to pass to the underlying C++ APIs.
+ */
+@property(nonatomic, readonly)
+    rtc::scoped_refptr<webrtc::MediaStreamInterface> nativeMediaStream;
+
+/** Initialize an RTCMediaStream with a factory and a stream id. */
+- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+                       streamId:(NSString *)streamId;
+
+/** Initialize an RTCMediaStream from a native MediaStreamInterface. */
+- (instancetype)initWithNativeMediaStream:
+    (rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCMediaStream.mm b/webrtc/sdk/objc/Framework/Classes/RTCMediaStream.mm
new file mode 100644
index 0000000..0737dee
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCMediaStream.mm
@@ -0,0 +1,122 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaStream+Private.h"
+
+#include <vector>
+
+#import "NSString+StdString.h"
+#import "RTCAudioTrack+Private.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCVideoTrack+Private.h"
+
+@implementation RTCMediaStream {
+  NSMutableArray *_audioTracks;
+  NSMutableArray *_videoTracks;
+  rtc::scoped_refptr<webrtc::MediaStreamInterface> _nativeMediaStream;
+}
+
+- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+                       streamId:(NSString *)streamId {
+  NSParameterAssert(factory);
+  NSParameterAssert(streamId.length);
+  std::string nativeId = [NSString stdStringForString:streamId];
+  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+      factory.nativeFactory->CreateLocalMediaStream(nativeId);
+  return [self initWithNativeMediaStream:stream];
+}
+
+- (NSArray<RTCAudioTrack *> *)audioTracks {
+  return [_audioTracks copy];
+}
+
+- (NSArray<RTCVideoTrack *> *)videoTracks {
+  return [_videoTracks copy];
+}
+
+- (NSString *)streamId {
+  return [NSString stringForStdString:_nativeMediaStream->label()];
+}
+
+- (void)addAudioTrack:(RTCAudioTrack *)audioTrack {
+  if (_nativeMediaStream->AddTrack(audioTrack.nativeAudioTrack)) {
+    [_audioTracks addObject:audioTrack];
+  }
+}
+
+- (void)addVideoTrack:(RTCVideoTrack *)videoTrack {
+  if (_nativeMediaStream->AddTrack(videoTrack.nativeVideoTrack)) {
+    [_videoTracks addObject:videoTrack];
+  }
+}
+
+- (void)removeAudioTrack:(RTCAudioTrack *)audioTrack {
+  NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:audioTrack];
+  NSAssert(index != NSNotFound,
+           @"|removeAudioTrack| called on unexpected RTCAudioTrack");
+  if (index != NSNotFound &&
+      _nativeMediaStream->RemoveTrack(audioTrack.nativeAudioTrack)) {
+    [_audioTracks removeObjectAtIndex:index];
+  }
+}
+
+- (void)removeVideoTrack:(RTCVideoTrack *)videoTrack {
+  NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:videoTrack];
+  NSAssert(index != NSNotFound,
+           @"|removeVideoTrack| called on unexpected RTCVideoTrack");
+  if (index != NSNotFound &&
+      _nativeMediaStream->RemoveTrack(videoTrack.nativeVideoTrack)) {
+    [_videoTracks removeObjectAtIndex:index];
+  }
+}
+
+- (NSString *)description {
+  return [NSString stringWithFormat:@"RTCMediaStream:\n%@\nA=%lu\nV=%lu",
+                                    self.streamId,
+                                    (unsigned long)self.audioTracks.count,
+                                    (unsigned long)self.videoTracks.count];
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
+  return _nativeMediaStream;
+}
+
+- (instancetype)initWithNativeMediaStream:
+    (rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
+  NSParameterAssert(nativeMediaStream);
+  if (self = [super init]) {
+    webrtc::AudioTrackVector audioTracks = nativeMediaStream->GetAudioTracks();
+    webrtc::VideoTrackVector videoTracks = nativeMediaStream->GetVideoTracks();
+
+    _audioTracks = [NSMutableArray arrayWithCapacity:audioTracks.size()];
+    _videoTracks = [NSMutableArray arrayWithCapacity:videoTracks.size()];
+    _nativeMediaStream = nativeMediaStream;
+
+    for (auto &track : audioTracks) {
+      RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeAudio;
+      RTCAudioTrack *audioTrack =
+          [[RTCAudioTrack alloc] initWithNativeTrack:track type:type];
+      [_audioTracks addObject:audioTrack];
+    }
+
+    for (auto &track : videoTracks) {
+      RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeVideo;
+      RTCVideoTrack *videoTrack =
+          [[RTCVideoTrack alloc] initWithNativeTrack:track type:type];
+      [_videoTracks addObject:videoTrack];
+    }
+  }
+  return self;
+}
+
+@end
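A minimal usage sketch; |factory|, |audioTrack| and |videoTrack| are assumed to have been created elsewhere, and the stream id is a placeholder:

  RTCMediaStream *stream =
      [[RTCMediaStream alloc] initWithFactory:factory streamId:@"stream0"];
  [stream addAudioTrack:audioTrack];
  [stream addVideoTrack:videoTrack];
  // -description prints the stream id plus the audio/video track counts.
  NSLog(@"%@", stream);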
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCMediaStreamTrack+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCMediaStreamTrack+Private.h
new file mode 100644
index 0000000..fd98cb6
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCMediaStreamTrack+Private.h
@@ -0,0 +1,55 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCMediaStreamTrack.h"
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/base/scoped_ptr.h"
+
+typedef NS_ENUM(NSInteger, RTCMediaStreamTrackType) {
+  RTCMediaStreamTrackTypeAudio,
+  RTCMediaStreamTrackTypeVideo,
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCMediaStreamTrack ()
+
+/**
+ * The native MediaStreamTrackInterface passed in or created during
+ * construction.
+ */
+@property(nonatomic, readonly)
+    rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack;
+
+/**
+ * Initialize an RTCMediaStreamTrack from a native MediaStreamTrackInterface.
+ */
+- (instancetype)initWithNativeTrack:
+    (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+                               type:(RTCMediaStreamTrackType)type
+    NS_DESIGNATED_INITIALIZER;
+
+- (instancetype)initWithNativeTrack:
+    (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack;
+
+- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track;
+
++ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
+    (RTCMediaStreamTrackState)state;
+
++ (RTCMediaStreamTrackState)trackStateForNativeState:
+    (webrtc::MediaStreamTrackInterface::TrackState)nativeState;
+
++ (NSString *)stringForState:(RTCMediaStreamTrackState)state;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCMediaStreamTrack.mm b/webrtc/sdk/objc/Framework/Classes/RTCMediaStreamTrack.mm
new file mode 100644
index 0000000..3d307be
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCMediaStreamTrack.mm
@@ -0,0 +1,132 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaStreamTrack+Private.h"
+
+#import "NSString+StdString.h"
+
+@implementation RTCMediaStreamTrack {
+  rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> _nativeTrack;
+  RTCMediaStreamTrackType _type;
+}
+
+- (NSString *)kind {
+  return [NSString stringForStdString:_nativeTrack->kind()];
+}
+
+- (NSString *)trackId {
+  return [NSString stringForStdString:_nativeTrack->id()];
+}
+
+- (BOOL)isEnabled {
+  return _nativeTrack->enabled();
+}
+
+- (void)setIsEnabled:(BOOL)isEnabled {
+  _nativeTrack->set_enabled(isEnabled);
+}
+
+- (RTCMediaStreamTrackState)readyState {
+  return [[self class] trackStateForNativeState:_nativeTrack->state()];
+}
+
+- (NSString *)description {
+  NSString *readyState = [[self class] stringForState:self.readyState];
+  return [NSString stringWithFormat:@"RTCMediaStreamTrack:\n%@\n%@\n%@\n%@",
+                                    self.kind,
+                                    self.trackId,
+                                    self.isEnabled ? @"enabled" : @"disabled",
+                                    readyState];
+}
+
+- (BOOL)isEqual:(id)object {
+  if (self == object) {
+    return YES;
+  }
+  if (![object isMemberOfClass:[self class]]) {
+    return NO;
+  }
+  return [self isEqualToTrack:(RTCMediaStreamTrack *)object];
+}
+
+- (NSUInteger)hash {
+  return (NSUInteger)_nativeTrack.get();
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
+  return _nativeTrack;
+}
+
+- (instancetype)initWithNativeTrack:
+    (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+                               type:(RTCMediaStreamTrackType)type {
+  NSParameterAssert(nativeTrack);
+  if (self = [super init]) {
+    _nativeTrack = nativeTrack;
+    _type = type;
+  }
+  return self;
+}
+
+- (instancetype)initWithNativeTrack:
+    (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
+  NSParameterAssert(nativeTrack);
+  if (nativeTrack->kind() ==
+      std::string(webrtc::MediaStreamTrackInterface::kAudioKind)) {
+    return [self initWithNativeTrack:nativeTrack
+                                type:RTCMediaStreamTrackTypeAudio];
+  }
+  if (nativeTrack->kind() ==
+      std::string(webrtc::MediaStreamTrackInterface::kVideoKind)) {
+    return [self initWithNativeTrack:nativeTrack
+                                type:RTCMediaStreamTrackTypeVideo];
+  }
+  return nil;
+}
+
+- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track {
+  if (!track) {
+    return NO;
+  }
+  return _nativeTrack == track.nativeTrack;
+}
+
++ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
+    (RTCMediaStreamTrackState)state {
+  switch (state) {
+    case RTCMediaStreamTrackStateLive:
+      return webrtc::MediaStreamTrackInterface::kLive;
+    case RTCMediaStreamTrackStateEnded:
+      return webrtc::MediaStreamTrackInterface::kEnded;
+  }
+}
+
++ (RTCMediaStreamTrackState)trackStateForNativeState:
+    (webrtc::MediaStreamTrackInterface::TrackState)nativeState {
+  switch (nativeState) {
+    case webrtc::MediaStreamTrackInterface::kLive:
+      return RTCMediaStreamTrackStateLive;
+    case webrtc::MediaStreamTrackInterface::kEnded:
+      return RTCMediaStreamTrackStateEnded;
+  }
+}
+
++ (NSString *)stringForState:(RTCMediaStreamTrackState)state {
+  switch (state) {
+    case RTCMediaStreamTrackStateLive:
+      return @"Live";
+    case RTCMediaStreamTrackStateEnded:
+      return @"Ended";
+  }
+}
+
+@end
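A minimal usage sketch, assuming the public RTCMediaStreamTrack header exposes isEnabled as a readwrite property and that |track| was obtained from a stream:

  track.isEnabled = NO;  // Disables the underlying native track.
  if (track.readyState == RTCMediaStreamTrackStateLive) {
    NSLog(@"%@", track);  // -description includes kind, id, enabled and state.
  }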
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCNSGLVideoView.m b/webrtc/sdk/objc/Framework/Classes/RTCNSGLVideoView.m
new file mode 100644
index 0000000..415efe8
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCNSGLVideoView.m
@@ -0,0 +1,146 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !TARGET_OS_IPHONE
+
+#import "WebRTC/RTCNSGLVideoView.h"
+
+#import <CoreVideo/CVDisplayLink.h>
+#import <OpenGL/gl3.h>
+
+#import "RTCOpenGLVideoRenderer.h"
+#import "WebRTC/RTCVideoFrame.h"
+
+@interface RTCNSGLVideoView ()
+// |videoFrame| is set when we receive a frame from a worker thread and is read
+// from the display link callback, so atomicity is required.
+@property(atomic, strong) RTCVideoFrame *videoFrame;
+@property(atomic, strong) RTCOpenGLVideoRenderer *glRenderer;
+- (void)drawFrame;
+@end
+
+static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
+                                   const CVTimeStamp *now,
+                                   const CVTimeStamp *outputTime,
+                                   CVOptionFlags flagsIn,
+                                   CVOptionFlags *flagsOut,
+                                   void *displayLinkContext) {
+  RTCNSGLVideoView *view = (__bridge RTCNSGLVideoView *)displayLinkContext;
+  [view drawFrame];
+  return kCVReturnSuccess;
+}
+
+@implementation RTCNSGLVideoView {
+  CVDisplayLinkRef _displayLink;
+}
+
+@synthesize delegate = _delegate;
+@synthesize videoFrame = _videoFrame;
+@synthesize glRenderer = _glRenderer;
+
+- (void)dealloc {
+  [self teardownDisplayLink];
+}
+
+- (void)drawRect:(NSRect)rect {
+  [self drawFrame];
+}
+
+- (void)reshape {
+  [super reshape];
+  NSRect frame = [self frame];
+  CGLLockContext([[self openGLContext] CGLContextObj]);
+  glViewport(0, 0, frame.size.width, frame.size.height);
+  CGLUnlockContext([[self openGLContext] CGLContextObj]);
+}
+
+- (void)lockFocus {
+  NSOpenGLContext *context = [self openGLContext];
+  [super lockFocus];
+  if ([context view] != self) {
+    [context setView:self];
+  }
+  [context makeCurrentContext];
+}
+
+- (void)prepareOpenGL {
+  [super prepareOpenGL];
+  if (!self.glRenderer) {
+    self.glRenderer =
+        [[RTCOpenGLVideoRenderer alloc] initWithContext:[self openGLContext]];
+  }
+  [self.glRenderer setupGL];
+  [self setupDisplayLink];
+}
+
+- (void)clearGLContext {
+  [self.glRenderer teardownGL];
+  self.glRenderer = nil;
+  [super clearGLContext];
+}
+
+#pragma mark - RTCVideoRenderer
+
+// These methods may be called on a non-main thread.
+- (void)setSize:(CGSize)size {
+  dispatch_async(dispatch_get_main_queue(), ^{
+    [self.delegate videoView:self didChangeVideoSize:size];
+  });
+}
+
+- (void)renderFrame:(RTCVideoFrame *)frame {
+  self.videoFrame = frame;
+}
+
+#pragma mark - Private
+
+- (void)drawFrame {
+  RTCVideoFrame *videoFrame = self.videoFrame;
+  if (self.glRenderer.lastDrawnFrame != videoFrame) {
+    // This method may be called from CVDisplayLink callback which isn't on the
+    // main thread so we have to lock the GL context before drawing.
+    CGLLockContext([[self openGLContext] CGLContextObj]);
+    [self.glRenderer drawFrame:videoFrame];
+    CGLUnlockContext([[self openGLContext] CGLContextObj]);
+  }
+}
+
+- (void)setupDisplayLink {
+  if (_displayLink) {
+    return;
+  }
+  // Synchronize buffer swaps with vertical refresh rate.
+  GLint swapInt = 1;
+  [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
+
+  // Create display link.
+  CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
+  CVDisplayLinkSetOutputCallback(_displayLink,
+                                 &OnDisplayLinkFired,
+                                 (__bridge void *)self);
+  // Set the display link for the current renderer.
+  CGLContextObj cglContext = [[self openGLContext] CGLContextObj];
+  CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj];
+  CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(
+      _displayLink, cglContext, cglPixelFormat);
+  CVDisplayLinkStart(_displayLink);
+}
+
+- (void)teardownDisplayLink {
+  if (!_displayLink) {
+    return;
+  }
+  CVDisplayLinkRelease(_displayLink);
+  _displayLink = NULL;
+}
+
+@end
+
+#endif  // !TARGET_OS_IPHONE
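A minimal OS X usage sketch, assuming RTCNSGLVideoView subclasses NSOpenGLView as in the public header, that RTCVideoTrack exposes -addRenderer:, and that |remoteVideoTrack| exists:

  NSOpenGLPixelFormatAttribute attributes[] = {
    NSOpenGLPFAOpenGLProfile, NSOpenGLProfileVersion3_2Core, 0
  };
  NSOpenGLPixelFormat *pixelFormat =
      [[NSOpenGLPixelFormat alloc] initWithAttributes:attributes];
  RTCNSGLVideoView *videoView =
      [[RTCNSGLVideoView alloc] initWithFrame:NSMakeRect(0, 0, 640, 480)
                                  pixelFormat:pixelFormat];
  [remoteVideoTrack addRenderer:videoView];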
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.h b/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.h
new file mode 100644
index 0000000..7041861
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.h
@@ -0,0 +1,61 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#if TARGET_OS_IPHONE
+#import <GLKit/GLKit.h>
+#else
+#import <AppKit/NSOpenGL.h>
+#endif
+
+#import "WebRTC/RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTCVideoFrame;
+
+// RTCOpenGLVideoRenderer issues appropriate OpenGL commands to draw a frame to
+// the currently bound framebuffer. Supports OpenGL 3.2 and OpenGLES 2.0. OpenGL
+// framebuffer creation and management should be handled elsewhere using the
+// same context used to initialize this class.
+RTC_EXPORT
+@interface RTCOpenGLVideoRenderer : NSObject
+
+// The last successfully drawn frame. Used to avoid drawing frames
+// unnecessarily, which reduces load and saves battery life.
+@property(nonatomic, readonly) RTCVideoFrame *lastDrawnFrame;
+
+#if TARGET_OS_IPHONE
+- (instancetype)initWithContext:(EAGLContext *)context
+    NS_DESIGNATED_INITIALIZER;
+#else
+- (instancetype)initWithContext:(NSOpenGLContext *)context
+    NS_DESIGNATED_INITIALIZER;
+#endif
+
+// Draws |frame| onto the currently bound OpenGL framebuffer. |setupGL| must be
+// called before this function will succeed.
+- (BOOL)drawFrame:(RTCVideoFrame *)frame;
+
+// The following methods are used to manage OpenGL resources. On iOS,
+// applications should release these resources when moving to the background
+// so they are available to the foreground application; attempting to issue
+// OpenGL ES commands while in the background will terminate the application.
+
+// Sets up the OpenGL state needed for rendering.
+- (void)setupGL;
+// Tears down the OpenGL state created by |setupGL|.
+- (void)teardownGL;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
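A minimal iOS lifecycle sketch; |frame| is an RTCVideoFrame obtained elsewhere, and the target framebuffer is assumed to be bound by the caller:

  EAGLContext *context =
      [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
  RTCOpenGLVideoRenderer *renderer =
      [[RTCOpenGLVideoRenderer alloc] initWithContext:context];
  [renderer setupGL];          // Once, while the app is in the foreground.
  [renderer drawFrame:frame];  // Per frame, with the framebuffer bound.
  [renderer teardownGL];       // Before the app enters the background.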
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm b/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm
new file mode 100644
index 0000000..ab45ca4
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm
@@ -0,0 +1,484 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCOpenGLVideoRenderer.h"
+
+#if TARGET_OS_IPHONE
+#import <OpenGLES/ES3/gl.h>
+#else
+#import <OpenGL/gl3.h>
+#endif
+#include <string.h>
+
+#import "WebRTC/RTCVideoFrame.h"
+
+#include "webrtc/base/scoped_ptr.h"
+
+// TODO(tkchin): check and log openGL errors. Methods here return BOOLs in
+// anticipation of that happening in the future.
+
+#if TARGET_OS_IPHONE
+#define RTC_PIXEL_FORMAT GL_LUMINANCE
+#define SHADER_VERSION
+#define VERTEX_SHADER_IN "attribute"
+#define VERTEX_SHADER_OUT "varying"
+#define FRAGMENT_SHADER_IN "varying"
+#define FRAGMENT_SHADER_OUT
+#define FRAGMENT_SHADER_COLOR "gl_FragColor"
+#define FRAGMENT_SHADER_TEXTURE "texture2D"
+#else
+#define RTC_PIXEL_FORMAT GL_RED
+#define SHADER_VERSION "#version 150\n"
+#define VERTEX_SHADER_IN "in"
+#define VERTEX_SHADER_OUT "out"
+#define FRAGMENT_SHADER_IN "in"
+#define FRAGMENT_SHADER_OUT "out vec4 fragColor;\n"
+#define FRAGMENT_SHADER_COLOR "fragColor"
+#define FRAGMENT_SHADER_TEXTURE "texture"
+#endif
+
+// Vertex shader doesn't do anything except pass coordinates through.
+static const char kVertexShaderSource[] =
+  SHADER_VERSION
+  VERTEX_SHADER_IN " vec2 position;\n"
+  VERTEX_SHADER_IN " vec2 texcoord;\n"
+  VERTEX_SHADER_OUT " vec2 v_texcoord;\n"
+  "void main() {\n"
+  "    gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n"
+  "    v_texcoord = texcoord;\n"
+  "}\n";
+
+// Fragment shader converts YUV values from input textures into a final RGB
+// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
+static const char kFragmentShaderSource[] =
+  SHADER_VERSION
+  "precision highp float;"
+  FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
+  "uniform lowp sampler2D s_textureY;\n"
+  "uniform lowp sampler2D s_textureU;\n"
+  "uniform lowp sampler2D s_textureV;\n"
+  FRAGMENT_SHADER_OUT
+  "void main() {\n"
+  "    float y, u, v, r, g, b;\n"
+  "    y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
+  "    u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
+  "    v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
+  "    u = u - 0.5;\n"
+  "    v = v - 0.5;\n"
+  "    r = y + 1.403 * v;\n"
+  "    g = y - 0.344 * u - 0.714 * v;\n"
+  "    b = y + 1.770 * u;\n"
+  "    " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
+  "  }\n";
+
+// Compiles a shader of the given |type| with GLSL source |source| and returns
+// the shader handle or 0 on error.
+GLuint CreateShader(GLenum type, const GLchar *source) {
+  GLuint shader = glCreateShader(type);
+  if (!shader) {
+    return 0;
+  }
+  glShaderSource(shader, 1, &source, NULL);
+  glCompileShader(shader);
+  GLint compileStatus = GL_FALSE;
+  glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
+  if (compileStatus == GL_FALSE) {
+    glDeleteShader(shader);
+    shader = 0;
+  }
+  return shader;
+}
+
+// Links a shader program with the given vertex and fragment shaders and
+// returns the program handle or 0 on error.
+GLuint CreateProgram(GLuint vertexShader, GLuint fragmentShader) {
+  if (vertexShader == 0 || fragmentShader == 0) {
+    return 0;
+  }
+  GLuint program = glCreateProgram();
+  if (!program) {
+    return 0;
+  }
+  glAttachShader(program, vertexShader);
+  glAttachShader(program, fragmentShader);
+  glLinkProgram(program);
+  GLint linkStatus = GL_FALSE;
+  glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+  if (linkStatus == GL_FALSE) {
+    glDeleteProgram(program);
+    program = 0;
+  }
+  return program;
+}
+
+// When the modelview and projection matrices are identity (the default), the
+// world is contained in the square around the origin with side length 2.
+// Drawing to these coordinates is equivalent to drawing to the entire screen.
+// The texture is stretched over that square using texture coordinates (u, v)
+// that range from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped
+// vertically here because the incoming frame has its origin in the upper-left
+// corner, while OpenGL expects the origin in the bottom-left corner.
+const GLfloat gVertices[] = {
+  // X, Y, U, V.
+  -1, -1, 0, 1,  // Bottom left.
+   1, -1, 1, 1,  // Bottom right.
+   1,  1, 1, 0,  // Top right.
+  -1,  1, 0, 0,  // Top left.
+};
+
+// |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets
+// of 3 textures are used here, one for each of the Y, U and V planes. Having
+// two sets avoids blocking the CPU in the event that the GPU is asked to
+// render to a texture that is already in use.
+static const GLsizei kNumTextureSets = 2;
+static const GLsizei kNumTextures = 3 * kNumTextureSets;
+
+@implementation RTCOpenGLVideoRenderer {
+#if TARGET_OS_IPHONE
+  EAGLContext *_context;
+#else
+  NSOpenGLContext *_context;
+#endif
+  BOOL _isInitialized;
+  GLint _currentTextureSet;
+  // Handles for OpenGL constructs.
+  GLuint _textures[kNumTextures];
+  GLuint _program;
+#if !TARGET_OS_IPHONE
+  GLuint _vertexArray;
+#endif
+  GLuint _vertexBuffer;
+  GLint _position;
+  GLint _texcoord;
+  GLint _ySampler;
+  GLint _uSampler;
+  GLint _vSampler;
+  // Used to create a non-padded plane for GPU upload when we receive padded
+  // frames.
+  rtc::scoped_ptr<uint8_t[]> _planeBuffer;
+}
+
+@synthesize lastDrawnFrame = _lastDrawnFrame;
+
++ (void)initialize {
+  // Disable dithering for performance.
+  glDisable(GL_DITHER);
+}
+
+#if TARGET_OS_IPHONE
+- (instancetype)initWithContext:(EAGLContext *)context {
+#else
+- (instancetype)initWithContext:(NSOpenGLContext *)context {
+#endif
+  NSAssert(context != nil, @"context cannot be nil");
+  if (self = [super init]) {
+    _context = context;
+  }
+  return self;
+}
+
+- (BOOL)drawFrame:(RTCVideoFrame *)frame {
+  if (!_isInitialized) {
+    return NO;
+  }
+  if (_lastDrawnFrame == frame) {
+    return NO;
+  }
+  [self ensureGLContext];
+  glClear(GL_COLOR_BUFFER_BIT);
+  if (frame) {
+    if (![self updateTextureSizesForFrame:frame] ||
+        ![self updateTextureDataForFrame:frame]) {
+      return NO;
+    }
+#if !TARGET_OS_IPHONE
+    glBindVertexArray(_vertexArray);
+#endif
+    glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
+    glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+  }
+#if !TARGET_OS_IPHONE
+  [_context flushBuffer];
+#endif
+  _lastDrawnFrame = frame;
+  return YES;
+}
+
+- (void)setupGL {
+  if (_isInitialized) {
+    return;
+  }
+  [self ensureGLContext];
+  if (![self setupProgram]) {
+    return;
+  }
+  if (![self setupTextures]) {
+    return;
+  }
+  if (![self setupVertices]) {
+    return;
+  }
+  glUseProgram(_program);
+  glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+  _isInitialized = YES;
+}
+
+- (void)teardownGL {
+  if (!_isInitialized) {
+    return;
+  }
+  [self ensureGLContext];
+  glDeleteProgram(_program);
+  _program = 0;
+  glDeleteTextures(kNumTextures, _textures);
+  glDeleteBuffers(1, &_vertexBuffer);
+  _vertexBuffer = 0;
+#if !TARGET_OS_IPHONE
+  glDeleteVertexArrays(1, &_vertexArray);
+#endif
+  _isInitialized = NO;
+}
+
+#pragma mark - Private
+
+- (void)ensureGLContext {
+  NSAssert(_context, @"context shouldn't be nil");
+#if TARGET_OS_IPHONE
+  if ([EAGLContext currentContext] != _context) {
+    [EAGLContext setCurrentContext:_context];
+  }
+#else
+  if ([NSOpenGLContext currentContext] != _context) {
+    [_context makeCurrentContext];
+  }
+#endif
+}
+
+- (BOOL)setupProgram {
+  NSAssert(!_program, @"program already set up");
+  GLuint vertexShader = CreateShader(GL_VERTEX_SHADER, kVertexShaderSource);
+  NSAssert(vertexShader, @"failed to create vertex shader");
+  GLuint fragmentShader =
+      CreateShader(GL_FRAGMENT_SHADER, kFragmentShaderSource);
+  NSAssert(fragmentShader, @"failed to create fragment shader");
+  _program = CreateProgram(vertexShader, fragmentShader);
+  // Shaders are created only to generate program.
+  if (vertexShader) {
+    glDeleteShader(vertexShader);
+  }
+  if (fragmentShader) {
+    glDeleteShader(fragmentShader);
+  }
+  if (!_program) {
+    return NO;
+  }
+  _position = glGetAttribLocation(_program, "position");
+  _texcoord = glGetAttribLocation(_program, "texcoord");
+  _ySampler = glGetUniformLocation(_program, "s_textureY");
+  _uSampler = glGetUniformLocation(_program, "s_textureU");
+  _vSampler = glGetUniformLocation(_program, "s_textureV");
+  if (_position < 0 || _texcoord < 0 || _ySampler < 0 || _uSampler < 0 ||
+      _vSampler < 0) {
+    return NO;
+  }
+  return YES;
+}
+
+- (BOOL)setupTextures {
+  glGenTextures(kNumTextures, _textures);
+  // Set parameters for each of the textures we created.
+  for (GLsizei i = 0; i < kNumTextures; i++) {
+    glActiveTexture(GL_TEXTURE0 + i);
+    glBindTexture(GL_TEXTURE_2D, _textures[i]);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+  }
+  return YES;
+}
+
+- (BOOL)updateTextureSizesForFrame:(RTCVideoFrame *)frame {
+  if (frame.height == _lastDrawnFrame.height &&
+      frame.width == _lastDrawnFrame.width &&
+      frame.chromaWidth == _lastDrawnFrame.chromaWidth &&
+      frame.chromaHeight == _lastDrawnFrame.chromaHeight) {
+    return YES;
+  }
+  GLsizei lumaWidth = static_cast<GLsizei>(frame.width);
+  GLsizei lumaHeight = static_cast<GLsizei>(frame.height);
+  GLsizei chromaWidth = static_cast<GLsizei>(frame.chromaWidth);
+  GLsizei chromaHeight = static_cast<GLsizei>(frame.chromaHeight);
+  for (GLint i = 0; i < kNumTextureSets; i++) {
+    glActiveTexture(GL_TEXTURE0 + i * 3);
+    glTexImage2D(GL_TEXTURE_2D,
+                 0,
+                 RTC_PIXEL_FORMAT,
+                 lumaWidth,
+                 lumaHeight,
+                 0,
+                 RTC_PIXEL_FORMAT,
+                 GL_UNSIGNED_BYTE,
+                 0);
+    glActiveTexture(GL_TEXTURE0 + i * 3 + 1);
+    glTexImage2D(GL_TEXTURE_2D,
+                 0,
+                 RTC_PIXEL_FORMAT,
+                 chromaWidth,
+                 chromaHeight,
+                 0,
+                 RTC_PIXEL_FORMAT,
+                 GL_UNSIGNED_BYTE,
+                 0);
+    glActiveTexture(GL_TEXTURE0 + i * 3 + 2);
+    glTexImage2D(GL_TEXTURE_2D,
+                 0,
+                 RTC_PIXEL_FORMAT,
+                 chromaWidth,
+                 chromaHeight,
+                 0,
+                 RTC_PIXEL_FORMAT,
+                 GL_UNSIGNED_BYTE,
+                 0);
+  }
+  if ((NSUInteger)frame.yPitch != frame.width ||
+      (NSUInteger)frame.uPitch != frame.chromaWidth ||
+      (NSUInteger)frame.vPitch != frame.chromaWidth) {
+    _planeBuffer.reset(new uint8_t[frame.width * frame.height]);
+  } else {
+    _planeBuffer.reset();
+  }
+  return YES;
+}
+
+- (void)uploadPlane:(const uint8_t *)plane
+            sampler:(GLint)sampler
+             offset:(GLint)offset
+              width:(size_t)width
+             height:(size_t)height
+             stride:(int32_t)stride {
+  glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + offset));
+  // When setting texture sampler uniforms, the texture index is used, not
+  // the texture handle.
+  glUniform1i(sampler, offset);
+#if TARGET_OS_IPHONE
+  BOOL hasUnpackRowLength = _context.API == kEAGLRenderingAPIOpenGLES3;
+#else
+  BOOL hasUnpackRowLength = YES;
+#endif
+  const uint8_t *uploadPlane = plane;
+  if ((size_t)stride != width) {
+    if (hasUnpackRowLength) {
+      // GLES3 allows us to specify stride.
+      glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
+      glTexImage2D(GL_TEXTURE_2D,
+                   0,
+                   RTC_PIXEL_FORMAT,
+                   static_cast<GLsizei>(width),
+                   static_cast<GLsizei>(height),
+                   0,
+                   RTC_PIXEL_FORMAT,
+                   GL_UNSIGNED_BYTE,
+                   uploadPlane);
+      glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
+      return;
+    } else {
+      // Make an unpadded copy and upload that instead. Quick profiling showed
+      // that this is faster than uploading row by row using glTexSubImage2D.
+      uint8_t *unpaddedPlane = _planeBuffer.get();
+      for (size_t y = 0; y < height; ++y) {
+        memcpy(unpaddedPlane + y * width, plane + y * stride, width);
+      }
+      uploadPlane = unpaddedPlane;
+    }
+  }
+  glTexImage2D(GL_TEXTURE_2D,
+               0,
+               RTC_PIXEL_FORMAT,
+               static_cast<GLsizei>(width),
+               static_cast<GLsizei>(height),
+               0,
+               RTC_PIXEL_FORMAT,
+               GL_UNSIGNED_BYTE,
+               uploadPlane);
+}
+
+- (BOOL)updateTextureDataForFrame:(RTCVideoFrame *)frame {
+  GLint textureOffset = _currentTextureSet * 3;
+  NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset");
+
+  [self uploadPlane:frame.yPlane
+            sampler:_ySampler
+             offset:textureOffset
+              width:frame.width
+             height:frame.height
+             stride:frame.yPitch];
+
+  [self uploadPlane:frame.uPlane
+            sampler:_uSampler
+             offset:textureOffset + 1
+              width:frame.chromaWidth
+             height:frame.chromaHeight
+             stride:frame.uPitch];
+
+  [self uploadPlane:frame.vPlane
+            sampler:_vSampler
+             offset:textureOffset + 2
+              width:frame.chromaWidth
+             height:frame.chromaHeight
+             stride:frame.vPitch];
+
+  _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
+  return YES;
+}
+
+- (BOOL)setupVertices {
+#if !TARGET_OS_IPHONE
+  NSAssert(!_vertexArray, @"vertex array already set up");
+  glGenVertexArrays(1, &_vertexArray);
+  if (!_vertexArray) {
+    return NO;
+  }
+  glBindVertexArray(_vertexArray);
+#endif
+  NSAssert(!_vertexBuffer, @"vertex buffer already set up");
+  glGenBuffers(1, &_vertexBuffer);
+  if (!_vertexBuffer) {
+#if !TARGET_OS_IPHONE
+    glDeleteVertexArrays(1, &_vertexArray);
+    _vertexArray = 0;
+#endif
+    return NO;
+  }
+  glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
+  glBufferData(GL_ARRAY_BUFFER, sizeof(gVertices), gVertices, GL_DYNAMIC_DRAW);
+
+  // Read position attribute from |gVertices| with size of 2 and stride of 4
+  // beginning at the start of the array. The last argument indicates offset
+  // of data within |gVertices| as supplied to the vertex buffer.
+  glVertexAttribPointer(
+      _position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)0);
+  glEnableVertexAttribArray(_position);
+
+  // Read texcoord attribute from |gVertices| with size of 2 and stride of 4
+  // beginning at the first texcoord in the array. The last argument indicates
+  // offset of data within |gVertices| as supplied to the vertex buffer.
+  glVertexAttribPointer(_texcoord,
+                        2,
+                        GL_FLOAT,
+                        GL_FALSE,
+                        4 * sizeof(GLfloat),
+                        (void *)(2 * sizeof(GLfloat)));
+  glEnableVertexAttribArray(_texcoord);
+
+  return YES;
+}
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+DataChannel.mm b/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+DataChannel.mm
new file mode 100644
index 0000000..b3825d4
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+DataChannel.mm
@@ -0,0 +1,31 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnection+Private.h"
+
+#import "NSString+StdString.h"
+#import "RTCDataChannel+Private.h"
+#import "RTCDataChannelConfiguration+Private.h"
+
+@implementation RTCPeerConnection (DataChannel)
+
+- (RTCDataChannel *)dataChannelForLabel:(NSString *)label
+                          configuration:
+    (RTCDataChannelConfiguration *)configuration {
+  std::string labelString = [NSString stdStringForString:label];
+  const webrtc::DataChannelInit nativeInit =
+      configuration.nativeDataChannelInit;
+  rtc::scoped_refptr<webrtc::DataChannelInterface> dataChannel =
+      self.nativePeerConnection->CreateDataChannel(labelString,
+                                                   &nativeInit);
+  return [[RTCDataChannel alloc] initWithNativeDataChannel:dataChannel];
+}
+
+@end
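A minimal usage sketch, assuming RTCDataChannelConfiguration can be default-initialized; |peerConnection| is an existing RTCPeerConnection and the label is a placeholder:

  RTCDataChannelConfiguration *config =
      [[RTCDataChannelConfiguration alloc] init];
  RTCDataChannel *channel =
      [peerConnection dataChannelForLabel:@"chat" configuration:config];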
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+Private.h
new file mode 100644
index 0000000..cbae360
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+Private.h
@@ -0,0 +1,103 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCPeerConnection.h"
+
+#include "webrtc/api/peerconnectioninterface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+namespace webrtc {
+
+/**
+ * These objects are created by RTCPeerConnectionFactory to wrap an
+ * id<RTCPeerConnectionDelegate> and call methods on that interface.
+ */
+class PeerConnectionDelegateAdapter : public PeerConnectionObserver {
+
+ public:
+  PeerConnectionDelegateAdapter(RTCPeerConnection *peerConnection);
+  virtual ~PeerConnectionDelegateAdapter();
+
+  void OnSignalingChange(
+      PeerConnectionInterface::SignalingState new_state) override;
+
+  void OnAddStream(MediaStreamInterface *stream) override;
+
+  void OnRemoveStream(MediaStreamInterface *stream) override;
+
+  void OnDataChannel(DataChannelInterface *data_channel) override;
+
+  void OnRenegotiationNeeded() override;
+
+  void OnIceConnectionChange(
+      PeerConnectionInterface::IceConnectionState new_state) override;
+
+  void OnIceGatheringChange(
+      PeerConnectionInterface::IceGatheringState new_state) override;
+
+  void OnIceCandidate(const IceCandidateInterface *candidate) override;
+
+ private:
+  __weak RTCPeerConnection *peer_connection_;
+};
+
+}  // namespace webrtc
+
+
+@interface RTCPeerConnection ()
+
+/** The native PeerConnectionInterface created during construction. */
+@property(nonatomic, readonly)
+    rtc::scoped_refptr<webrtc::PeerConnectionInterface> nativePeerConnection;
+
+/** Initialize an RTCPeerConnection with a configuration, constraints, and
+ *  delegate.
+ */
+- (instancetype)initWithFactory:
+    (RTCPeerConnectionFactory *)factory
+                  configuration:
+    (RTCConfiguration *)configuration
+                    constraints:
+    (RTCMediaConstraints *)constraints
+                       delegate:
+    (nullable id<RTCPeerConnectionDelegate>)delegate
+    NS_DESIGNATED_INITIALIZER;
+
++ (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
+    (RTCSignalingState)state;
+
++ (RTCSignalingState)signalingStateForNativeState:
+    (webrtc::PeerConnectionInterface::SignalingState)nativeState;
+
++ (NSString *)stringForSignalingState:(RTCSignalingState)state;
+
++ (webrtc::PeerConnectionInterface::IceConnectionState)
+    nativeIceConnectionStateForState:(RTCIceConnectionState)state;
+
++ (RTCIceConnectionState)iceConnectionStateForNativeState:
+    (webrtc::PeerConnectionInterface::IceConnectionState)nativeState;
+
++ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state;
+
++ (webrtc::PeerConnectionInterface::IceGatheringState)
+    nativeIceGatheringStateForState:(RTCIceGatheringState)state;
+
++ (RTCIceGatheringState)iceGatheringStateForNativeState:
+    (webrtc::PeerConnectionInterface::IceGatheringState)nativeState;
+
++ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state;
+
++ (webrtc::PeerConnectionInterface::StatsOutputLevel)
+    nativeStatsOutputLevelForLevel:(RTCStatsOutputLevel)level;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+Stats.mm b/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+Stats.mm
new file mode 100644
index 0000000..ccbd58f
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+Stats.mm
@@ -0,0 +1,64 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnection+Private.h"
+
+#import "NSString+StdString.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCStatsReport+Private.h"
+
+#include "webrtc/base/checks.h"
+
+namespace webrtc {
+
+class StatsObserverAdapter : public StatsObserver {
+ public:
+  StatsObserverAdapter(void (^completionHandler)
+      (NSArray<RTCStatsReport *> *stats)) {
+    completion_handler_ = completionHandler;
+  }
+
+  ~StatsObserverAdapter() {
+    completion_handler_ = nil;
+  }
+
+  void OnComplete(const StatsReports& reports) override {
+    RTC_DCHECK(completion_handler_);
+    NSMutableArray *stats = [NSMutableArray arrayWithCapacity:reports.size()];
+    for (const auto* report : reports) {
+      RTCStatsReport *statsReport =
+          [[RTCStatsReport alloc] initWithNativeReport:*report];
+      [stats addObject:statsReport];
+    }
+    completion_handler_(stats);
+    completion_handler_ = nil;
+  }
+
+ private:
+  void (^completion_handler_)(NSArray<RTCStatsReport *> *stats);
+};
+}  // namespace webrtc
+
+@implementation RTCPeerConnection (Stats)
+
+- (void)statsForTrack:(RTCMediaStreamTrack *)mediaStreamTrack
+     statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel
+    completionHandler:
+    (void (^)(NSArray<RTCStatsReport *> *stats))completionHandler {
+  rtc::scoped_refptr<webrtc::StatsObserverAdapter> observer(
+      new rtc::RefCountedObject<webrtc::StatsObserverAdapter>
+          (completionHandler));
+  webrtc::PeerConnectionInterface::StatsOutputLevel nativeOutputLevel =
+      [[self class] nativeStatsOutputLevelForLevel:statsOutputLevel];
+  self.nativePeerConnection->GetStats(
+      observer, mediaStreamTrack.nativeTrack, nativeOutputLevel);
+}
+
+@end
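A minimal usage sketch; passing nil for the track is assumed to map to a null native track (messaging nil returns nil), which requests stats for the whole connection, and RTCStatsOutputLevelStandard is assumed from the public RTCPeerConnection header:

  [peerConnection statsForTrack:nil
               statsOutputLevel:RTCStatsOutputLevelStandard
              completionHandler:^(NSArray<RTCStatsReport *> *stats) {
    for (RTCStatsReport *report in stats) {
      NSLog(@"%@", report);
    }
  }];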
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection.mm b/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection.mm
new file mode 100644
index 0000000..e8fa4c9
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection.mm
@@ -0,0 +1,495 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnection+Private.h"
+
+#import "NSString+StdString.h"
+#import "RTCConfiguration+Private.h"
+#import "RTCDataChannel+Private.h"
+#import "RTCIceCandidate+Private.h"
+#import "RTCMediaConstraints+Private.h"
+#import "RTCMediaStream+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCRtpSender+Private.h"
+#import "RTCSessionDescription+Private.h"
+#import "RTCStatsReport+Private.h"
+#import "WebRTC/RTCLogging.h"
+
+#include "webrtc/base/checks.h"
+
+NSString * const kRTCPeerConnectionErrorDomain =
+    @"org.webrtc.RTCPeerConnection";
+int const kRTCPeerConnnectionSessionDescriptionError = -1;
+
+namespace webrtc {
+
+class CreateSessionDescriptionObserverAdapter
+    : public CreateSessionDescriptionObserver {
+ public:
+  CreateSessionDescriptionObserverAdapter(
+      void (^completionHandler)(RTCSessionDescription *sessionDescription,
+                                NSError *error)) {
+    completion_handler_ = completionHandler;
+  }
+
+  ~CreateSessionDescriptionObserverAdapter() {
+    completion_handler_ = nil;
+  }
+
+  void OnSuccess(SessionDescriptionInterface *desc) override {
+    RTC_DCHECK(completion_handler_);
+    rtc::scoped_ptr<webrtc::SessionDescriptionInterface> description =
+        rtc::scoped_ptr<webrtc::SessionDescriptionInterface>(desc);
+    RTCSessionDescription* session =
+        [[RTCSessionDescription alloc] initWithNativeDescription:
+            description.get()];
+    completion_handler_(session, nil);
+    completion_handler_ = nil;
+  }
+
+  void OnFailure(const std::string& error) override {
+    RTC_DCHECK(completion_handler_);
+    NSString* str = [NSString stringForStdString:error];
+    NSError* err =
+        [NSError errorWithDomain:kRTCPeerConnectionErrorDomain
+                            code:kRTCPeerConnnectionSessionDescriptionError
+                        userInfo:@{ NSLocalizedDescriptionKey : str }];
+    completion_handler_(nil, err);
+    completion_handler_ = nil;
+  }
+
+ private:
+  void (^completion_handler_)
+      (RTCSessionDescription *sessionDescription, NSError *error);
+};
+
+class SetSessionDescriptionObserverAdapter :
+    public SetSessionDescriptionObserver {
+ public:
+  SetSessionDescriptionObserverAdapter(void (^completionHandler)
+      (NSError *error)) {
+    completion_handler_ = completionHandler;
+  }
+
+  ~SetSessionDescriptionObserverAdapter() {
+    completion_handler_ = nil;
+  }
+
+  void OnSuccess() override {
+    RTC_DCHECK(completion_handler_);
+    completion_handler_(nil);
+    completion_handler_ = nil;
+  }
+
+  void OnFailure(const std::string& error) override {
+    RTC_DCHECK(completion_handler_);
+    NSString* str = [NSString stringForStdString:error];
+    NSError* err =
+        [NSError errorWithDomain:kRTCPeerConnectionErrorDomain
+                            code:kRTCPeerConnnectionSessionDescriptionError
+                        userInfo:@{ NSLocalizedDescriptionKey : str }];
+    completion_handler_(err);
+    completion_handler_ = nil;
+  }
+
+ private:
+  void (^completion_handler_)(NSError *error);
+};
+
+PeerConnectionDelegateAdapter::PeerConnectionDelegateAdapter(
+    RTCPeerConnection *peerConnection) {
+  peer_connection_ = peerConnection;
+}
+
+PeerConnectionDelegateAdapter::~PeerConnectionDelegateAdapter() {
+  peer_connection_ = nil;
+}
+
+void PeerConnectionDelegateAdapter::OnSignalingChange(
+    PeerConnectionInterface::SignalingState new_state) {
+  RTCSignalingState state =
+      [[RTCPeerConnection class] signalingStateForNativeState:new_state];
+  RTCPeerConnection *peer_connection = peer_connection_;
+  [peer_connection.delegate peerConnection:peer_connection
+                   didChangeSignalingState:state];
+}
+
+void PeerConnectionDelegateAdapter::OnAddStream(
+    MediaStreamInterface *stream) {
+  RTCMediaStream *mediaStream =
+      [[RTCMediaStream alloc] initWithNativeMediaStream:stream];
+  RTCPeerConnection *peer_connection = peer_connection_;
+  [peer_connection.delegate peerConnection:peer_connection
+                              didAddStream:mediaStream];
+}
+
+void PeerConnectionDelegateAdapter::OnRemoveStream(
+    MediaStreamInterface *stream) {
+  RTCMediaStream *mediaStream =
+      [[RTCMediaStream alloc] initWithNativeMediaStream:stream];
+  RTCPeerConnection *peer_connection = peer_connection_;
+  [peer_connection.delegate peerConnection:peer_connection
+                           didRemoveStream:mediaStream];
+}
+
+void PeerConnectionDelegateAdapter::OnDataChannel(
+    DataChannelInterface *data_channel) {
+  RTCDataChannel *dataChannel =
+      [[RTCDataChannel alloc] initWithNativeDataChannel:data_channel];
+  RTCPeerConnection *peer_connection = peer_connection_;
+  [peer_connection.delegate peerConnection:peer_connection
+                        didOpenDataChannel:dataChannel];
+}
+
+void PeerConnectionDelegateAdapter::OnRenegotiationNeeded() {
+  RTCPeerConnection *peer_connection = peer_connection_;
+  [peer_connection.delegate peerConnectionShouldNegotiate:peer_connection];
+}
+
+void PeerConnectionDelegateAdapter::OnIceConnectionChange(
+    PeerConnectionInterface::IceConnectionState new_state) {
+  RTCIceConnectionState state =
+      [[RTCPeerConnection class] iceConnectionStateForNativeState:new_state];
+  RTCPeerConnection *peer_connection = peer_connection_;
+  [peer_connection.delegate peerConnection:peer_connection
+               didChangeIceConnectionState:state];
+}
+
+void PeerConnectionDelegateAdapter::OnIceGatheringChange(
+    PeerConnectionInterface::IceGatheringState new_state) {
+  RTCIceGatheringState state =
+      [[RTCPeerConnection class] iceGatheringStateForNativeState:new_state];
+  RTCPeerConnection *peer_connection = peer_connection_;
+  [peer_connection.delegate peerConnection:peer_connection
+                didChangeIceGatheringState:state];
+}
+
+void PeerConnectionDelegateAdapter::OnIceCandidate(
+    const IceCandidateInterface *candidate) {
+  RTCIceCandidate *iceCandidate =
+      [[RTCIceCandidate alloc] initWithNativeCandidate:candidate];
+  RTCPeerConnection *peer_connection = peer_connection_;
+  [peer_connection.delegate peerConnection:peer_connection
+                   didGenerateIceCandidate:iceCandidate];
+}
+}  // namespace webrtc
+
+@implementation RTCPeerConnection {
+  NSMutableArray *_localStreams;
+  rtc::scoped_ptr<webrtc::PeerConnectionDelegateAdapter> _observer;
+  rtc::scoped_refptr<webrtc::PeerConnectionInterface> _peerConnection;
+}
+
+@synthesize delegate = _delegate;
+
+- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+                  configuration:(RTCConfiguration *)configuration
+                    constraints:(RTCMediaConstraints *)constraints
+                       delegate:(id<RTCPeerConnectionDelegate>)delegate {
+  NSParameterAssert(factory);
+  if (self = [super init]) {
+    _observer.reset(new webrtc::PeerConnectionDelegateAdapter(self));
+    webrtc::PeerConnectionInterface::RTCConfiguration config =
+        configuration.nativeConfiguration;
+    rtc::scoped_ptr<webrtc::MediaConstraints> nativeConstraints =
+        constraints.nativeConstraints;
+    _peerConnection =
+        factory.nativeFactory->CreatePeerConnection(config,
+                                                    nativeConstraints.get(),
+                                                    nullptr,
+                                                    nullptr,
+                                                    _observer.get());
+    _localStreams = [[NSMutableArray alloc] init];
+    _delegate = delegate;
+  }
+  return self;
+}
+
+- (NSArray *)localStreams {
+  return [_localStreams copy];
+}
+
+- (RTCSessionDescription *)localDescription {
+  const webrtc::SessionDescriptionInterface *description =
+      _peerConnection->local_description();
+  return description ?
+      [[RTCSessionDescription alloc] initWithNativeDescription:description]
+          : nil;
+}
+
+- (RTCSessionDescription *)remoteDescription {
+  const webrtc::SessionDescriptionInterface *description =
+      _peerConnection->remote_description();
+  return description ?
+      [[RTCSessionDescription alloc] initWithNativeDescription:description]
+          : nil;
+}
+
+- (RTCSignalingState)signalingState {
+  return [[self class]
+      signalingStateForNativeState:_peerConnection->signaling_state()];
+}
+
+- (RTCIceConnectionState)iceConnectionState {
+  return [[self class] iceConnectionStateForNativeState:
+      _peerConnection->ice_connection_state()];
+}
+
+- (RTCIceGatheringState)iceGatheringState {
+  return [[self class] iceGatheringStateForNativeState:
+      _peerConnection->ice_gathering_state()];
+}
+
+- (BOOL)setConfiguration:(RTCConfiguration *)configuration {
+  return _peerConnection->SetConfiguration(configuration.nativeConfiguration);
+}
+
+- (void)close {
+  _peerConnection->Close();
+}
+
+- (void)addIceCandidate:(RTCIceCandidate *)candidate {
+  rtc::scoped_ptr<const webrtc::IceCandidateInterface> iceCandidate(
+      candidate.nativeCandidate);
+  _peerConnection->AddIceCandidate(iceCandidate.get());
+}
+
+- (void)addStream:(RTCMediaStream *)stream {
+  if (!_peerConnection->AddStream(stream.nativeMediaStream)) {
+    RTCLogError(@"Failed to add stream: %@", stream);
+    return;
+  }
+  [_localStreams addObject:stream];
+}
+
+- (void)removeStream:(RTCMediaStream *)stream {
+  _peerConnection->RemoveStream(stream.nativeMediaStream);
+  [_localStreams removeObject:stream];
+}
+
+- (void)offerForConstraints:(RTCMediaConstraints *)constraints
+          completionHandler:
+    (void (^)(RTCSessionDescription *sessionDescription,
+              NSError *error))completionHandler {
+  rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
+      observer(new rtc::RefCountedObject
+          <webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
+  _peerConnection->CreateOffer(observer, constraints.nativeConstraints.get());
+}
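+
+// Sketch of the expected call pattern from application code (not part of this
+// file); |peerConnection| and |constraints| are assumed to already exist:
+//
+//   [peerConnection offerForConstraints:constraints
+//                     completionHandler:^(RTCSessionDescription *sdp,
+//                                         NSError *error) {
+//     if (error) {
+//       return;
+//     }
+//     [peerConnection setLocalDescription:sdp
+//                       completionHandler:^(NSError *sdpError) {
+//       // Hand the local SDP to the signaling channel here.
+//     }];
+//   }];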
+
+- (void)answerForConstraints:(RTCMediaConstraints *)constraints
+           completionHandler:
+    (void (^)(RTCSessionDescription *sessionDescription,
+              NSError *error))completionHandler {
+  rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
+      observer(new rtc::RefCountedObject
+          <webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
+  _peerConnection->CreateAnswer(observer, constraints.nativeConstraints.get());
+}
+
+- (void)setLocalDescription:(RTCSessionDescription *)sdp
+          completionHandler:(void (^)(NSError *error))completionHandler {
+  rtc::scoped_refptr<webrtc::SetSessionDescriptionObserverAdapter> observer(
+      new rtc::RefCountedObject<webrtc::SetSessionDescriptionObserverAdapter>(
+          completionHandler));
+  _peerConnection->SetLocalDescription(observer, sdp.nativeDescription);
+}
+
+- (void)setRemoteDescription:(RTCSessionDescription *)sdp
+           completionHandler:(void (^)(NSError *error))completionHandler {
+  rtc::scoped_refptr<webrtc::SetSessionDescriptionObserverAdapter> observer(
+      new rtc::RefCountedObject<webrtc::SetSessionDescriptionObserverAdapter>(
+          completionHandler));
+  _peerConnection->SetRemoteDescription(observer, sdp.nativeDescription);
+}
+
+- (NSArray<RTCRtpSender *> *)senders {
+  std::vector<rtc::scoped_refptr<webrtc::RtpSenderInterface>> nativeSenders(
+      _peerConnection->GetSenders());
+  NSMutableArray *senders = [[NSMutableArray alloc] init];
+  for (const auto &nativeSender : nativeSenders) {
+    RTCRtpSender *sender =
+        [[RTCRtpSender alloc] initWithNativeRtpSender:nativeSender];
+    [senders addObject:sender];
+  }
+  return senders;
+}
+
+#pragma mark - Private
+
++ (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
+    (RTCSignalingState)state {
+  switch (state) {
+    case RTCSignalingStateStable:
+      return webrtc::PeerConnectionInterface::kStable;
+    case RTCSignalingStateHaveLocalOffer:
+      return webrtc::PeerConnectionInterface::kHaveLocalOffer;
+    case RTCSignalingStateHaveLocalPrAnswer:
+      return webrtc::PeerConnectionInterface::kHaveLocalPrAnswer;
+    case RTCSignalingStateHaveRemoteOffer:
+      return webrtc::PeerConnectionInterface::kHaveRemoteOffer;
+    case RTCSignalingStateHaveRemotePrAnswer:
+      return webrtc::PeerConnectionInterface::kHaveRemotePrAnswer;
+    case RTCSignalingStateClosed:
+      return webrtc::PeerConnectionInterface::kClosed;
+  }
+}
+
++ (RTCSignalingState)signalingStateForNativeState:
+    (webrtc::PeerConnectionInterface::SignalingState)nativeState {
+  switch (nativeState) {
+    case webrtc::PeerConnectionInterface::kStable:
+      return RTCSignalingStateStable;
+    case webrtc::PeerConnectionInterface::kHaveLocalOffer:
+      return RTCSignalingStateHaveLocalOffer;
+    case webrtc::PeerConnectionInterface::kHaveLocalPrAnswer:
+      return RTCSignalingStateHaveLocalPrAnswer;
+    case webrtc::PeerConnectionInterface::kHaveRemoteOffer:
+      return RTCSignalingStateHaveRemoteOffer;
+    case webrtc::PeerConnectionInterface::kHaveRemotePrAnswer:
+      return RTCSignalingStateHaveRemotePrAnswer;
+    case webrtc::PeerConnectionInterface::kClosed:
+      return RTCSignalingStateClosed;
+  }
+}
+
++ (NSString *)stringForSignalingState:(RTCSignalingState)state {
+  switch (state) {
+    case RTCSignalingStateStable:
+      return @"STABLE";
+    case RTCSignalingStateHaveLocalOffer:
+      return @"HAVE_LOCAL_OFFER";
+    case RTCSignalingStateHaveLocalPrAnswer:
+      return @"HAVE_LOCAL_PRANSWER";
+    case RTCSignalingStateHaveRemoteOffer:
+      return @"HAVE_REMOTE_OFFER";
+    case RTCSignalingStateHaveRemotePrAnswer:
+      return @"HAVE_REMOTE_PRANSWER";
+    case RTCSignalingStateClosed:
+      return @"CLOSED";
+  }
+}
+
++ (webrtc::PeerConnectionInterface::IceConnectionState)
+    nativeIceConnectionStateForState:(RTCIceConnectionState)state {
+  switch (state) {
+    case RTCIceConnectionStateNew:
+      return webrtc::PeerConnectionInterface::kIceConnectionNew;
+    case RTCIceConnectionStateChecking:
+      return webrtc::PeerConnectionInterface::kIceConnectionChecking;
+    case RTCIceConnectionStateConnected:
+      return webrtc::PeerConnectionInterface::kIceConnectionConnected;
+    case RTCIceConnectionStateCompleted:
+      return webrtc::PeerConnectionInterface::kIceConnectionCompleted;
+    case RTCIceConnectionStateFailed:
+      return webrtc::PeerConnectionInterface::kIceConnectionFailed;
+    case RTCIceConnectionStateDisconnected:
+      return webrtc::PeerConnectionInterface::kIceConnectionDisconnected;
+    case RTCIceConnectionStateClosed:
+      return webrtc::PeerConnectionInterface::kIceConnectionClosed;
+    case RTCIceConnectionStateCount:
+      return webrtc::PeerConnectionInterface::kIceConnectionMax;
+  }
+}
+
++ (RTCIceConnectionState)iceConnectionStateForNativeState:
+    (webrtc::PeerConnectionInterface::IceConnectionState)nativeState {
+  switch (nativeState) {
+    case webrtc::PeerConnectionInterface::kIceConnectionNew:
+      return RTCIceConnectionStateNew;
+    case webrtc::PeerConnectionInterface::kIceConnectionChecking:
+      return RTCIceConnectionStateChecking;
+    case webrtc::PeerConnectionInterface::kIceConnectionConnected:
+      return RTCIceConnectionStateConnected;
+    case webrtc::PeerConnectionInterface::kIceConnectionCompleted:
+      return RTCIceConnectionStateCompleted;
+    case webrtc::PeerConnectionInterface::kIceConnectionFailed:
+      return RTCIceConnectionStateFailed;
+    case webrtc::PeerConnectionInterface::kIceConnectionDisconnected:
+      return RTCIceConnectionStateDisconnected;
+    case webrtc::PeerConnectionInterface::kIceConnectionClosed:
+      return RTCIceConnectionStateClosed;
+    case webrtc::PeerConnectionInterface::kIceConnectionMax:
+      return RTCIceConnectionStateCount;
+  }
+}
+
++ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state {
+  switch (state) {
+    case RTCIceConnectionStateNew:
+      return @"NEW";
+    case RTCIceConnectionStateChecking:
+      return @"CHECKING";
+    case RTCIceConnectionStateConnected:
+      return @"CONNECTED";
+    case RTCIceConnectionStateCompleted:
+      return @"COMPLETED";
+    case RTCIceConnectionStateFailed:
+      return @"FAILED";
+    case RTCIceConnectionStateDisconnected:
+      return @"DISCONNECTED";
+    case RTCIceConnectionStateClosed:
+      return @"CLOSED";
+    case RTCIceConnectionStateCount:
+      return @"COUNT";
+  }
+}
+
++ (webrtc::PeerConnectionInterface::IceGatheringState)
+    nativeIceGatheringStateForState:(RTCIceGatheringState)state {
+  switch (state) {
+    case RTCIceGatheringStateNew:
+      return webrtc::PeerConnectionInterface::kIceGatheringNew;
+    case RTCIceGatheringStateGathering:
+      return webrtc::PeerConnectionInterface::kIceGatheringGathering;
+    case RTCIceGatheringStateComplete:
+      return webrtc::PeerConnectionInterface::kIceGatheringComplete;
+  }
+}
+
++ (RTCIceGatheringState)iceGatheringStateForNativeState:
+    (webrtc::PeerConnectionInterface::IceGatheringState)nativeState {
+  switch (nativeState) {
+    case webrtc::PeerConnectionInterface::kIceGatheringNew:
+      return RTCIceGatheringStateNew;
+    case webrtc::PeerConnectionInterface::kIceGatheringGathering:
+      return RTCIceGatheringStateGathering;
+    case webrtc::PeerConnectionInterface::kIceGatheringComplete:
+      return RTCIceGatheringStateComplete;
+  }
+}
+
++ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state {
+  switch (state) {
+    case RTCIceGatheringStateNew:
+      return @"NEW";
+    case RTCIceGatheringStateGathering:
+      return @"GATHERING";
+    case RTCIceGatheringStateComplete:
+      return @"COMPLETE";
+  }
+}
+
++ (webrtc::PeerConnectionInterface::StatsOutputLevel)
+    nativeStatsOutputLevelForLevel:(RTCStatsOutputLevel)level {
+  switch (level) {
+    case RTCStatsOutputLevelStandard:
+      return webrtc::PeerConnectionInterface::kStatsOutputLevelStandard;
+    case RTCStatsOutputLevelDebug:
+      return webrtc::PeerConnectionInterface::kStatsOutputLevelDebug;
+  }
+}
+
+- (rtc::scoped_refptr<webrtc::PeerConnectionInterface>)nativePeerConnection {
+  return _peerConnection;
+}
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory+Private.h
new file mode 100644
index 0000000..a7e453c
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory+Private.h
@@ -0,0 +1,30 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCPeerConnectionFactory.h"
+
+#include "webrtc/api/peerconnectionfactory.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCPeerConnectionFactory ()
+
+/**
+ * PeerConnectionFactoryInterface created and held by this
+ * RTCPeerConnectionFactory object. This is needed to pass to the underlying
+ * C++ APIs.
+ */
+@property(nonatomic, readonly)
+    rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> nativeFactory;
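+// RTCPeerConnection.mm shows the intended use: it calls CreatePeerConnection()
+// on this factory, passing the native configuration, constraints and observer.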
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory.mm b/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory.mm
new file mode 100644
index 0000000..04aa121
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory.mm
@@ -0,0 +1,85 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactory+Private.h"
+
+#import "NSString+StdString.h"
+#if defined(WEBRTC_IOS)
+#import "RTCAVFoundationVideoSource+Private.h"
+#endif
+#import "RTCAudioTrack+Private.h"
+#import "RTCMediaStream+Private.h"
+#import "RTCPeerConnection+Private.h"
+#import "RTCVideoSource+Private.h"
+#import "RTCVideoTrack+Private.h"
+
+@implementation RTCPeerConnectionFactory {
+  rtc::scoped_ptr<rtc::Thread> _signalingThread;
+  rtc::scoped_ptr<rtc::Thread> _workerThread;
+}
+
+@synthesize nativeFactory = _nativeFactory;
+
+- (instancetype)init {
+  if ((self = [super init])) {
+    _signalingThread.reset(new rtc::Thread());
+    BOOL result = _signalingThread->Start();
+    NSAssert(result, @"Failed to start signaling thread.");
+    _workerThread.reset(new rtc::Thread());
+    result = _workerThread->Start();
+    NSAssert(result, @"Failed to start worker thread.");
+
+    _nativeFactory = webrtc::CreatePeerConnectionFactory(
+        _workerThread.get(), _signalingThread.get(), nullptr, nullptr, nullptr);
+    NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
+  }
+  return self;
+}
+
+- (RTCAVFoundationVideoSource *)avFoundationVideoSourceWithConstraints:
+    (nullable RTCMediaConstraints *)constraints {
+#if defined(WEBRTC_IOS)
+  return [[RTCAVFoundationVideoSource alloc] initWithFactory:self
+                                                 constraints:constraints];
+#else
+  return nil;
+#endif
+}
+
+- (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId {
+  return [[RTCAudioTrack alloc] initWithFactory:self
+                                        trackId:trackId];
+}
+
+- (RTCVideoTrack *)videoTrackWithSource:(RTCVideoSource *)source
+                                trackId:(NSString *)trackId {
+  return [[RTCVideoTrack alloc] initWithFactory:self
+                                         source:source
+                                        trackId:trackId];
+}
+
+- (RTCMediaStream *)mediaStreamWithStreamId:(NSString *)streamId {
+  return [[RTCMediaStream alloc] initWithFactory:self
+                                        streamId:streamId];
+}
+
+- (RTCPeerConnection *)peerConnectionWithConfiguration:
+    (RTCConfiguration *)configuration
+                                           constraints:
+    (RTCMediaConstraints *)constraints
+                                              delegate:
+    (nullable id<RTCPeerConnectionDelegate>)delegate {
+  return [[RTCPeerConnection alloc] initWithFactory:self
+                                      configuration:configuration
+                                        constraints:constraints
+                                           delegate:delegate];
+}
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCRtpCodecParameters+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCRtpCodecParameters+Private.h
new file mode 100644
index 0000000..fe33e9e
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCRtpCodecParameters+Private.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCRtpCodecParameters.h"
+
+#include "webrtc/api/rtpparameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCRtpCodecParameters ()
+
+/** Returns the equivalent native RtpCodecParameters structure. */
+@property(nonatomic, readonly) webrtc::RtpCodecParameters nativeParameters;
+
+/** Initialize the object with a native RtpCodecParameters structure. */
+- (instancetype)initWithNativeParameters:
+    (const webrtc::RtpCodecParameters &)nativeParameters;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCRtpCodecParameters.mm b/webrtc/sdk/objc/Framework/Classes/RTCRtpCodecParameters.mm
new file mode 100644
index 0000000..7704769
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCRtpCodecParameters.mm
@@ -0,0 +1,65 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpCodecParameters+Private.h"
+
+#import "NSString+StdString.h"
+
+#include "webrtc/media/base/mediaconstants.h"
+
+const NSString * const kRTCRtxCodecMimeType = @(cricket::kRtxCodecName);
+const NSString * const kRTCRedCodecMimeType = @(cricket::kRedCodecName);
+const NSString * const kRTCUlpfecCodecMimeType = @(cricket::kUlpfecCodecName);
+const NSString * const kRTCOpusCodecMimeType = @(cricket::kOpusCodecName);
+const NSString * const kRTCIsacCodecMimeType = @(cricket::kIsacCodecName);
+const NSString * const kRTCL16CodecMimeType = @(cricket::kL16CodecName);
+const NSString * const kRTCG722CodecMimeType = @(cricket::kG722CodecName);
+const NSString * const kRTCIlbcCodecMimeType = @(cricket::kIlbcCodecName);
+const NSString * const kRTCPcmuCodecMimeType = @(cricket::kPcmuCodecName);
+const NSString * const kRTCPcmaCodecMimeType = @(cricket::kPcmaCodecName);
+const NSString * const kRTCDtmfCodecMimeType = @(cricket::kDtmfCodecName);
+const NSString * const kRTCComfortNoiseCodecMimeType =
+    @(cricket::kComfortNoiseCodecName);
+const NSString * const kVp8CodecMimeType = @(cricket::kVp8CodecName);
+const NSString * const kVp9CodecMimeType = @(cricket::kVp9CodecName);
+const NSString * const kH264CodecMimeType = @(cricket::kH264CodecName);
+
+@implementation RTCRtpCodecParameters
+
+@synthesize payloadType = _payloadType;
+@synthesize mimeType = _mimeType;
+@synthesize clockRate = _clockRate;
+@synthesize channels = _channels;
+
+- (instancetype)init {
+  return [super init];
+}
+
+- (instancetype)initWithNativeParameters:
+    (const webrtc::RtpCodecParameters &)nativeParameters {
+  if (self = [self init]) {
+    _payloadType = nativeParameters.payload_type;
+    _mimeType = [NSString stringForStdString:nativeParameters.mime_type];
+    _clockRate = nativeParameters.clock_rate;
+    _channels = nativeParameters.channels;
+  }
+  return self;
+}
+
+- (webrtc::RtpCodecParameters)nativeParameters {
+  webrtc::RtpCodecParameters parameters;
+  parameters.payload_type = _payloadType;
+  parameters.mime_type = [NSString stdStringForString:_mimeType];
+  parameters.clock_rate = _clockRate;
+  parameters.channels = _channels;
+  return parameters;
+}
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCRtpEncodingParameters+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCRtpEncodingParameters+Private.h
new file mode 100644
index 0000000..1d75268
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCRtpEncodingParameters+Private.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCRtpEncodingParameters.h"
+
+#include "webrtc/api/rtpparameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCRtpEncodingParameters ()
+
+/** Returns the equivalent native RtpEncodingParameters structure. */
+@property(nonatomic, readonly) webrtc::RtpEncodingParameters nativeParameters;
+
+/** Initialize the object with a native RtpEncodingParameters structure. */
+- (instancetype)initWithNativeParameters:
+    (const webrtc::RtpEncodingParameters &)nativeParameters;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCRtpEncodingParameters.mm b/webrtc/sdk/objc/Framework/Classes/RTCRtpEncodingParameters.mm
new file mode 100644
index 0000000..af07a04
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCRtpEncodingParameters.mm
@@ -0,0 +1,46 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpEncodingParameters+Private.h"
+
+@implementation RTCRtpEncodingParameters
+
+@synthesize isActive = _isActive;
+@synthesize maxBitrateBps = _maxBitrateBps;
+
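+// Value used by the native RtpEncodingParameters to mean "no bitrate cap";
+// it maps to a nil |maxBitrateBps| on the Objective-C side.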
+static const int kBitrateUnlimited = -1;
+
+- (instancetype)init {
+  return [super init];
+}
+
+- (instancetype)initWithNativeParameters:
+    (const webrtc::RtpEncodingParameters &)nativeParameters {
+  if (self = [self init]) {
+    _isActive = nativeParameters.active;
+    // TODO(skvlad): Replace with rtc::Optional once the C++ code is updated.
+    if (nativeParameters.max_bitrate_bps != kBitrateUnlimited) {
+      _maxBitrateBps =
+          [NSNumber numberWithInt:nativeParameters.max_bitrate_bps];
+    }
+  }
+  return self;
+}
+
+- (webrtc::RtpEncodingParameters)nativeParameters {
+  webrtc::RtpEncodingParameters parameters;
+  parameters.active = _isActive;
+  if (_maxBitrateBps != nil) {
+    parameters.max_bitrate_bps = _maxBitrateBps.intValue;
+  }
+  return parameters;
+}
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCRtpParameters+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCRtpParameters+Private.h
new file mode 100644
index 0000000..0cb1ffe
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCRtpParameters+Private.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCRtpParameters.h"
+
+#include "webrtc/api/rtpparameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCRtpParameters ()
+
+/** Returns the equivalent native RtpParameters structure. */
+@property(nonatomic, readonly) webrtc::RtpParameters nativeParameters;
+
+/** Initialize the object with a native RtpParameters structure. */
+- (instancetype)initWithNativeParameters:
+    (const webrtc::RtpParameters &)nativeParameters;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCRtpParameters.mm b/webrtc/sdk/objc/Framework/Classes/RTCRtpParameters.mm
new file mode 100644
index 0000000..5e79106
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCRtpParameters.mm
@@ -0,0 +1,56 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpParameters+Private.h"
+
+#import "RTCRtpCodecParameters+Private.h"
+#import "RTCRtpEncodingParameters+Private.h"
+
+@implementation RTCRtpParameters
+
+@synthesize encodings = _encodings;
+@synthesize codecs = _codecs;
+
+- (instancetype)init {
+  return [super init];
+}
+
+- (instancetype)initWithNativeParameters:
+    (const webrtc::RtpParameters &)nativeParameters {
+  if (self = [self init]) {
+    NSMutableArray *encodings = [[NSMutableArray alloc] init];
+    for (const auto &encoding : nativeParameters.encodings) {
+      [encodings addObject:[[RTCRtpEncodingParameters alloc]
+                               initWithNativeParameters:encoding]];
+    }
+    _encodings = encodings;
+
+    NSMutableArray *codecs = [[NSMutableArray alloc] init];
+    for (const auto &codec : nativeParameters.codecs) {
+      [codecs addObject:[[RTCRtpCodecParameters alloc]
+                            initWithNativeParameters:codec]];
+    }
+    _codecs = codecs;
+  }
+  return self;
+}
+
+- (webrtc::RtpParameters)nativeParameters {
+  webrtc::RtpParameters parameters;
+  for (RTCRtpEncodingParameters *encoding in _encodings) {
+    parameters.encodings.push_back(encoding.nativeParameters);
+  }
+  for (RTCRtpCodecParameters *codec in _codecs) {
+    parameters.codecs.push_back(codec.nativeParameters);
+  }
+  return parameters;
+}
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCRtpSender+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCRtpSender+Private.h
new file mode 100644
index 0000000..e871673
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCRtpSender+Private.h
@@ -0,0 +1,26 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCRtpSender.h"
+
+#include "webrtc/api/rtpsenderinterface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCRtpSender ()
+
+/** Initialize an RTCRtpSender with a native RtpSenderInterface. */
+- (instancetype)initWithNativeRtpSender:
+    (rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender
+    NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCRtpSender.mm b/webrtc/sdk/objc/Framework/Classes/RTCRtpSender.mm
new file mode 100644
index 0000000..7103002
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCRtpSender.mm
@@ -0,0 +1,48 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpSender+Private.h"
+
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCRtpParameters+Private.h"
+
+#include "webrtc/api/mediastreaminterface.h"
+
+@implementation RTCRtpSender {
+  rtc::scoped_refptr<webrtc::RtpSenderInterface> _nativeRtpSender;
+}
+
+- (instancetype)initWithNativeRtpSender:
+    (rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
+  if (self = [super init]) {
+    _nativeRtpSender = nativeRtpSender;
+  }
+  return self;
+}
+
+- (RTCRtpParameters *)parameters {
+  return [[RTCRtpParameters alloc]
+      initWithNativeParameters:_nativeRtpSender->GetParameters()];
+}
+
+- (BOOL)setParameters:(RTCRtpParameters *)parameters {
+  return _nativeRtpSender->SetParameters(parameters.nativeParameters);
+}
+
+- (RTCMediaStreamTrack *)track {
+  rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
+    _nativeRtpSender->track());
+  if (nativeTrack) {
+    return [[RTCMediaStreamTrack alloc] initWithNativeTrack:nativeTrack];
+  }
+  return nil;
+}
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCSSLAdapter.mm b/webrtc/sdk/objc/Framework/Classes/RTCSSLAdapter.mm
new file mode 100644
index 0000000..a0da105
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCSSLAdapter.mm
@@ -0,0 +1,26 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCSSLAdapter.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/ssladapter.h"
+
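+// Thin C wrappers over rtc::InitializeSSL()/rtc::CleanupSSL(). The DCHECKs
+// only fire in debug builds, so callers should still check the returned BOOL.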
+BOOL RTCInitializeSSL() {
+  BOOL initialized = rtc::InitializeSSL();
+  RTC_DCHECK(initialized);
+  return initialized;
+}
+
+BOOL RTCCleanupSSL() {
+  BOOL cleanedUp = rtc::CleanupSSL();
+  RTC_DCHECK(cleanedUp);
+  return cleanedUp;
+}
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCSessionDescription+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCSessionDescription+Private.h
new file mode 100644
index 0000000..04b6fbe
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCSessionDescription+Private.h
@@ -0,0 +1,41 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCSessionDescription.h"
+
+#include "webrtc/api/jsep.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCSessionDescription ()
+
+/**
+ * The native SessionDescriptionInterface representation of this
+ * RTCSessionDescription object. This is needed to pass to the underlying C++
+ * APIs.
+ */
+@property(nonatomic, readonly)
+    webrtc::SessionDescriptionInterface *nativeDescription;
+
+/**
+ * Initialize an RTCSessionDescription from a native
+ * SessionDescriptionInterface. No ownership is taken of the native session
+ * description.
+ */
+- (instancetype)initWithNativeDescription:
+    (const webrtc::SessionDescriptionInterface *)nativeDescription;
+
++ (std::string)stdStringForType:(RTCSdpType)type;
+
++ (RTCSdpType)typeForStdString:(const std::string &)string;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCSessionDescription.mm b/webrtc/sdk/objc/Framework/Classes/RTCSessionDescription.mm
new file mode 100644
index 0000000..417ff7d
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCSessionDescription.mm
@@ -0,0 +1,102 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCSessionDescription+Private.h"
+
+#import "NSString+StdString.h"
+#import "WebRTC/RTCLogging.h"
+
+#include "webrtc/base/checks.h"
+
+@implementation RTCSessionDescription
+
+@synthesize type = _type;
+@synthesize sdp = _sdp;
+
++ (NSString *)stringForType:(RTCSdpType)type {
+  std::string string = [[self class] stdStringForType:type];
+  return [NSString stringForStdString:string];
+}
+
++ (RTCSdpType)typeForString:(NSString *)string {
+  std::string typeString = string.stdString;
+  return [[self class] typeForStdString:typeString];
+}
+
+- (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp {
+  NSParameterAssert(sdp.length);
+  if (self = [super init]) {
+    _type = type;
+    _sdp = [sdp copy];
+  }
+  return self;
+}
+
+- (NSString *)description {
+  return [NSString stringWithFormat:@"RTCSessionDescription:\n%@\n%@",
+                                    [[self class] stringForType:_type],
+                                    _sdp];
+}
+
+#pragma mark - Private
+
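+// Builds a new native description by re-parsing the SDP string; the native
+// APIs it is handed to (e.g. SetLocalDescription) take ownership of it.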
+- (webrtc::SessionDescriptionInterface *)nativeDescription {
+  webrtc::SdpParseError error;
+
+  webrtc::SessionDescriptionInterface *description =
+      webrtc::CreateSessionDescription([[self class] stdStringForType:_type],
+                                       _sdp.stdString,
+                                       &error);
+
+  if (!description) {
+    RTCLogError(@"Failed to create session description: %s\nline: %s",
+                error.description.c_str(),
+                error.line.c_str());
+  }
+
+  return description;
+}
+
+- (instancetype)initWithNativeDescription:
+    (const webrtc::SessionDescriptionInterface *)nativeDescription {
+  NSParameterAssert(nativeDescription);
+  std::string sdp;
+  nativeDescription->ToString(&sdp);
+  RTCSdpType type = [[self class] typeForStdString:nativeDescription->type()];
+
+  return [self initWithType:type
+                        sdp:[NSString stringForStdString:sdp]];
+}
+
++ (std::string)stdStringForType:(RTCSdpType)type {
+  switch (type) {
+    case RTCSdpTypeOffer:
+      return webrtc::SessionDescriptionInterface::kOffer;
+    case RTCSdpTypePrAnswer:
+      return webrtc::SessionDescriptionInterface::kPrAnswer;
+    case RTCSdpTypeAnswer:
+      return webrtc::SessionDescriptionInterface::kAnswer;
+  }
+}
+
++ (RTCSdpType)typeForStdString:(const std::string &)string {
+  if (string == webrtc::SessionDescriptionInterface::kOffer) {
+    return RTCSdpTypeOffer;
+  } else if (string == webrtc::SessionDescriptionInterface::kPrAnswer) {
+    return RTCSdpTypePrAnswer;
+  } else if (string == webrtc::SessionDescriptionInterface::kAnswer) {
+    return RTCSdpTypeAnswer;
+  } else {
+    RTC_NOTREACHED();
+    return RTCSdpTypeOffer;
+  }
+}
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCStatsReport+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCStatsReport+Private.h
new file mode 100644
index 0000000..e1af6f7
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCStatsReport+Private.h
@@ -0,0 +1,24 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCStatsReport.h"
+
+#include "webrtc/api/statstypes.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCStatsReport ()
+
+/** Initialize an RTCStatsReport object from a native StatsReport. */
+- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCStatsReport.mm b/webrtc/sdk/objc/Framework/Classes/RTCStatsReport.mm
new file mode 100644
index 0000000..1bc3eb9
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCStatsReport.mm
@@ -0,0 +1,60 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCStatsReport+Private.h"
+
+#import "NSString+StdString.h"
+#import "WebRTC/RTCLogging.h"
+
+#include "webrtc/base/checks.h"
+
+@implementation RTCStatsReport
+
+@synthesize timestamp = _timestamp;
+@synthesize type = _type;
+@synthesize reportId = _reportId;
+@synthesize values = _values;
+
+- (NSString *)description {
+  return [NSString stringWithFormat:@"RTCStatsReport:\n%@\n%@\n%f\n%@",
+                                    _reportId,
+                                    _type,
+                                    _timestamp,
+                                    _values];
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport {
+  if (self = [super init]) {
+    _timestamp = nativeReport.timestamp();
+    _type = [NSString stringForStdString:nativeReport.TypeToString()];
+    _reportId = [NSString stringForStdString:
+        nativeReport.id()->ToString()];
+
+    NSUInteger capacity = nativeReport.values().size();
+    NSMutableDictionary *values =
+        [NSMutableDictionary dictionaryWithCapacity:capacity];
+    for (auto const &valuePair : nativeReport.values()) {
+      NSString *key = [NSString stringForStdString:
+          valuePair.second->display_name()];
+      NSString *value = [NSString stringForStdString:
+          valuePair.second->ToString()];
+
+      // Not expecting duplicate keys.
+      RTC_DCHECK(![values objectForKey:key]);
+      [values setObject:value forKey:key];
+    }
+    _values = values;
+  }
+  return self;
+}
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCTracing.mm b/webrtc/sdk/objc/Framework/Classes/RTCTracing.mm
new file mode 100644
index 0000000..37755a6
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCTracing.mm
@@ -0,0 +1,29 @@
+/*
+ *  Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCTracing.h"
+
+#include "webrtc/base/event_tracer.h"
+
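+// Usage sketch (application code; |filePath| is an app-chosen file path):
+//
+//   RTCSetupInternalTracer();
+//   if (RTCStartInternalCapture(filePath)) {
+//     // ... run the scenario being traced ...
+//     RTCStopInternalCapture();
+//   }
+//   RTCShutdownInternalTracer();
+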
+void RTCSetupInternalTracer() {
+  rtc::tracing::SetupInternalTracer();
+}
+
+BOOL RTCStartInternalCapture(NSString *filePath) {
+  return rtc::tracing::StartInternalCapture(filePath.UTF8String);
+}
+
+void RTCStopInternalCapture() {
+  rtc::tracing::StopInternalCapture();
+}
+
+void RTCShutdownInternalTracer() {
+  rtc::tracing::ShutdownInternalTracer();
+}
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCUIApplication.h b/webrtc/sdk/objc/Framework/Classes/RTCUIApplication.h
new file mode 100644
index 0000000..fb11ede
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCUIApplication.h
@@ -0,0 +1,21 @@
+/*
+ *  Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_OBJC_RTC_UI_APPLICATION_H_
+#define WEBRTC_BASE_OBJC_RTC_UI_APPLICATION_H_
+
+#include "WebRTC/RTCMacros.h"
+
+#if defined(WEBRTC_IOS)
+/** Convenience function to get UIApplicationState from C++. */
+RTC_EXTERN bool RTCIsUIApplicationActive();
+#endif  // WEBRTC_IOS
+
+#endif  // WEBRTC_BASE_OBJC_RTC_UI_APPLICATION_H_
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCUIApplication.mm b/webrtc/sdk/objc/Framework/Classes/RTCUIApplication.mm
new file mode 100644
index 0000000..7e8aea6
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCUIApplication.mm
@@ -0,0 +1,22 @@
+/*
+ *  Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "RTCUIApplication.h"
+
+#if defined(WEBRTC_IOS)
+
+#import <UIKit/UIKit.h>
+
+bool RTCIsUIApplicationActive() {
+  UIApplicationState state = [UIApplication sharedApplication].applicationState;
+  return state == UIApplicationStateActive;
+}
+
+#endif  // WEBRTC_IOS
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame+Private.h
new file mode 100644
index 0000000..a480712
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame+Private.h
@@ -0,0 +1,27 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCVideoFrame.h"
+
+#include "webrtc/media/base/videoframe.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCVideoFrame ()
+
+@property(nonatomic, readonly)
+    rtc::scoped_refptr<webrtc::VideoFrameBuffer> i420Buffer;
+
+- (instancetype)initWithNativeFrame:(const cricket::VideoFrame *)nativeFrame
+    NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame.mm b/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame.mm
new file mode 100644
index 0000000..8a99d4e
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame.mm
@@ -0,0 +1,117 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoFrame+Private.h"
+
+#include "webrtc/base/scoped_ptr.h"
+
+@implementation RTCVideoFrame {
+  rtc::scoped_ptr<cricket::VideoFrame> _videoFrame;
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> _i420Buffer;
+}
+
+- (size_t)width {
+  return _videoFrame->width();
+}
+
+- (size_t)height {
+  return _videoFrame->height();
+}
+
+// TODO(nisse): chromaWidth and chromaHeight are used only in
+// RTCOpenGLVideoRenderer.mm. Update, and then delete these
+// properties.
+- (size_t)chromaWidth {
+  return (self.width + 1) / 2;
+}
+
+- (size_t)chromaHeight {
+  return (self.height + 1) / 2;
+}
+
+- (const uint8_t *)yPlane {
+  if (!self.i420Buffer) {
+    return nullptr;
+  }
+  return self.i420Buffer->data(webrtc::kYPlane);
+}
+
+- (const uint8_t *)uPlane {
+  if (!self.i420Buffer) {
+    return nullptr;
+  }
+  return self.i420Buffer->data(webrtc::kUPlane);
+}
+
+- (const uint8_t *)vPlane {
+  if (!self.i420Buffer) {
+    return nullptr;
+  }
+  return self.i420Buffer->data(webrtc::kVPlane);
+}
+
+- (int32_t)yPitch {
+  if (!self.i420Buffer) {
+    return 0;
+  }
+  return self.i420Buffer->stride(webrtc::kYPlane);
+}
+
+- (int32_t)uPitch {
+  if (!self.i420Buffer) {
+    return 0;
+  }
+  return self.i420Buffer->stride(webrtc::kUPlane);
+}
+
+- (int32_t)vPitch {
+  if (!self.i420Buffer) {
+    return 0;
+  }
+  return self.i420Buffer->stride(webrtc::kVPlane);
+}
+
+- (int64_t)timeStamp {
+  return _videoFrame->GetTimeStamp();
+}
+
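+// Non-null only for frames backed by a CVPixelBuffer (e.g. frames delivered
+// by the AVFoundation capturer); frames already in I420 return null.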
+- (CVPixelBufferRef)nativeHandle {
+  return static_cast<CVPixelBufferRef>(_videoFrame->GetNativeHandle());
+}
+
+- (void)convertBufferIfNeeded {
+  if (!_i420Buffer) {
+    if (_videoFrame->GetNativeHandle()) {
+      // Convert to I420.
+      _i420Buffer = _videoFrame->video_frame_buffer()->NativeToI420Buffer();
+    } else {
+      // Should already be I420.
+      _i420Buffer = _videoFrame->video_frame_buffer();
+    }
+  }
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithNativeFrame:(const cricket::VideoFrame *)nativeFrame {
+  if (self = [super init]) {
+    // Keep a shallow copy of the video frame. The underlying frame buffer is
+    // not copied.
+    _videoFrame.reset(nativeFrame->Copy());
+  }
+  return self;
+}
+
+- (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)i420Buffer {
+  [self convertBufferIfNeeded];
+  return _i420Buffer;
+}
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter+Private.h
new file mode 100644
index 0000000..b413f7e
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter+Private.h
@@ -0,0 +1,42 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoRendererAdapter.h"
+
+#import "WebRTC/RTCVideoRenderer.h"
+
+#include "webrtc/api/mediastreaminterface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCVideoRendererAdapter ()
+
+/**
+ * The Objective-C video renderer passed to this adapter during construction.
+ * Calls made to the rtc::VideoSinkInterface will be adapted and passed to
+ * this video renderer.
+ */
+@property(nonatomic, readonly) id<RTCVideoRenderer> videoRenderer;
+
+/**
+ * The native VideoSinkInterface surface exposed by this adapter. Calls made
+ * to this interface will be adapted and passed to the RTCVideoRenderer supplied
+ * during construction. This pointer is unsafe and owned by this class.
+ */
+@property(nonatomic, readonly)
+    rtc::VideoSinkInterface<cricket::VideoFrame> *nativeVideoRenderer;
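+
+/**
+ * Usage sketch: |renderer| (an id<RTCVideoRenderer>) and |track| (a native
+ * webrtc::VideoTrackInterface) are assumed to exist, and the track is assumed
+ * to expose AddOrUpdateSink() for sink attachment:
+ *
+ *   RTCVideoRendererAdapter *adapter =
+ *       [[RTCVideoRendererAdapter alloc] initWithNativeRenderer:renderer];
+ *   track->AddOrUpdateSink(adapter.nativeVideoRenderer, rtc::VideoSinkWants());
+ */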
+
+/** Initialize an RTCVideoRendererAdapter with an RTCVideoRenderer. */
+- (instancetype)initWithNativeRenderer:(id<RTCVideoRenderer>)videoRenderer
+    NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.h b/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.h
new file mode 100644
index 0000000..b0b6f04
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.h
@@ -0,0 +1,27 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/*
+ * Creates an rtc::VideoSinkInterface surface for an RTCVideoRenderer. The
+ * rtc::VideoSinkInterface is used by WebRTC rendering code; this adapter
+ * forwards calls made to that interface to the RTCVideoRenderer supplied
+ * during construction.
+ */
+@interface RTCVideoRendererAdapter : NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.mm b/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.mm
new file mode 100644
index 0000000..1d64cd8
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.mm
@@ -0,0 +1,81 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoRendererAdapter+Private.h"
+
+#import "RTCVideoFrame+Private.h"
+
+#include "webrtc/media/engine/webrtcvideoframe.h"
+
+namespace webrtc {
+
+class VideoRendererAdapter
+    : public rtc::VideoSinkInterface<cricket::VideoFrame> {
+ public:
+  VideoRendererAdapter(RTCVideoRendererAdapter* adapter) {
+    adapter_ = adapter;
+    size_ = CGSizeZero;
+  }
+
+  void OnFrame(const cricket::VideoFrame& nativeVideoFrame) override {
+    RTCVideoFrame *videoFrame = nil;
+    // Rotation of native handles is unsupported right now. Convert to a CPU
+    // I420 buffer and apply the rotation to that copy; calling the rotation
+    // method on a native-handle frame would hit a DCHECK.
+    if (nativeVideoFrame.rotation() != webrtc::kVideoRotation_0 &&
+        nativeVideoFrame.GetNativeHandle()) {
+      rtc::scoped_refptr<webrtc::VideoFrameBuffer> i420Buffer =
+          nativeVideoFrame.video_frame_buffer()->NativeToI420Buffer();
+      std::unique_ptr<cricket::VideoFrame> cpuFrame(
+          new cricket::WebRtcVideoFrame(i420Buffer,
+                                        nativeVideoFrame.rotation(),
+                                        nativeVideoFrame.timestamp_us()));
+      const cricket::VideoFrame *rotatedFrame =
+          cpuFrame->GetCopyWithRotationApplied();
+      videoFrame = [[RTCVideoFrame alloc] initWithNativeFrame:rotatedFrame];
+    } else {
+      const cricket::VideoFrame *rotatedFrame =
+          nativeVideoFrame.GetCopyWithRotationApplied();
+      videoFrame = [[RTCVideoFrame alloc] initWithNativeFrame:rotatedFrame];
+    }
+    CGSize current_size = CGSizeMake(videoFrame.width, videoFrame.height);
+    if (!CGSizeEqualToSize(size_, current_size)) {
+      size_ = current_size;
+      [adapter_.videoRenderer setSize:size_];
+    }
+    [adapter_.videoRenderer renderFrame:videoFrame];
+  }
+
+ private:
+  __weak RTCVideoRendererAdapter *adapter_;
+  CGSize size_;
+};
+}  // namespace webrtc
+
+@implementation RTCVideoRendererAdapter {
+  rtc::scoped_ptr<webrtc::VideoRendererAdapter> _adapter;
+}
+
+@synthesize videoRenderer = _videoRenderer;
+
+- (instancetype)initWithNativeRenderer:(id<RTCVideoRenderer>)videoRenderer {
+  NSParameterAssert(videoRenderer);
+  if (self = [super init]) {
+    _videoRenderer = videoRenderer;
+    _adapter.reset(new webrtc::VideoRendererAdapter(self));
+  }
+  return self;
+}
+
+- (rtc::VideoSinkInterface<cricket::VideoFrame> *)nativeVideoRenderer {
+  return _adapter.get();
+}
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCVideoSource+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCVideoSource+Private.h
new file mode 100644
index 0000000..757c174
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCVideoSource+Private.h
@@ -0,0 +1,42 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCVideoSource.h"
+
+#include "webrtc/api/mediastreaminterface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCVideoSource ()
+
+/**
+ * The VideoTrackSourceInterface object passed to this RTCVideoSource during
+ * construction.
+ */
+@property(nonatomic, readonly)
+    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>
+        nativeVideoSource;
+
+/** Initialize an RTCVideoSource from a native VideoTrackSourceInterface. */
+- (instancetype)initWithNativeVideoSource:
+    (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource
+    NS_DESIGNATED_INITIALIZER;
+
++ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:
+    (RTCSourceState)state;
+
++ (RTCSourceState)sourceStateForNativeState:
+    (webrtc::MediaSourceInterface::SourceState)nativeState;
+
++ (NSString *)stringForState:(RTCSourceState)state;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCVideoSource.mm b/webrtc/sdk/objc/Framework/Classes/RTCVideoSource.mm
new file mode 100644
index 0000000..eddf5e0
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCVideoSource.mm
@@ -0,0 +1,82 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoSource+Private.h"
+
+@implementation RTCVideoSource {
+  rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
+}
+
+- (RTCSourceState)state {
+  return [[self class] sourceStateForNativeState:_nativeVideoSource->state()];
+}
+
+- (NSString *)description {
+  return [NSString stringWithFormat:@"RTCVideoSource:\n%@",
+                                    [[self class] stringForState:self.state]];
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource {
+  return _nativeVideoSource;
+}
+
+- (instancetype)initWithNativeVideoSource:
+    (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource {
+  NSParameterAssert(nativeVideoSource);
+  if (self = [super init]) {
+    _nativeVideoSource = nativeVideoSource;
+  }
+  return self;
+}
+
++ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:
+    (RTCSourceState)state {
+  switch (state) {
+    case RTCSourceStateInitializing:
+      return webrtc::MediaSourceInterface::kInitializing;
+    case RTCSourceStateLive:
+      return webrtc::MediaSourceInterface::kLive;
+    case RTCSourceStateEnded:
+      return webrtc::MediaSourceInterface::kEnded;
+    case RTCSourceStateMuted:
+      return webrtc::MediaSourceInterface::kMuted;
+  }
+}
+
++ (RTCSourceState)sourceStateForNativeState:
+    (webrtc::MediaSourceInterface::SourceState)nativeState {
+  switch (nativeState) {
+    case webrtc::MediaSourceInterface::kInitializing:
+      return RTCSourceStateInitializing;
+    case webrtc::MediaSourceInterface::kLive:
+      return RTCSourceStateLive;
+    case webrtc::MediaSourceInterface::kEnded:
+      return RTCSourceStateEnded;
+    case webrtc::MediaSourceInterface::kMuted:
+      return RTCSourceStateMuted;
+  }
+}
+
++ (NSString *)stringForState:(RTCSourceState)state {
+  switch (state) {
+    case RTCSourceStateInitializing:
+      return @"Initializing";
+    case RTCSourceStateLive:
+      return @"Live";
+    case RTCSourceStateEnded:
+      return @"Ended";
+    case RTCSourceStateMuted:
+      return @"Muted";
+  }
+}
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCVideoTrack+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCVideoTrack+Private.h
new file mode 100644
index 0000000..5199be3
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCVideoTrack+Private.h
@@ -0,0 +1,30 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCVideoTrack.h"
+
+#include "webrtc/api/mediastreaminterface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCVideoTrack ()
+
+/** VideoTrackInterface created or passed in at construction. */
+@property(nonatomic, readonly)
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> nativeVideoTrack;
+
+/** Initialize an RTCVideoTrack with its source and an id. */
+- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+                         source:(RTCVideoSource *)source
+                        trackId:(NSString *)trackId;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCVideoTrack.mm b/webrtc/sdk/objc/Framework/Classes/RTCVideoTrack.mm
new file mode 100644
index 0000000..6691375
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCVideoTrack.mm
@@ -0,0 +1,110 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoTrack+Private.h"
+
+#import "NSString+StdString.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCVideoRendererAdapter+Private.h"
+#import "RTCVideoSource+Private.h"
+
+@implementation RTCVideoTrack {
+  NSMutableArray *_adapters;
+}
+
+@synthesize source = _source;
+
+- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+                         source:(RTCVideoSource *)source
+                        trackId:(NSString *)trackId {
+  NSParameterAssert(factory);
+  NSParameterAssert(source);
+  NSParameterAssert(trackId.length);
+  std::string nativeId = [NSString stdStringForString:trackId];
+  rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
+      factory.nativeFactory->CreateVideoTrack(nativeId,
+                                              source.nativeVideoSource);
+  self = [self initWithNativeTrack:track type:RTCMediaStreamTrackTypeVideo];
+  if (self) {
+    _source = source;
+  }
+  return self;
+}
+
+- (instancetype)initWithNativeTrack:
+    (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeMediaTrack
+                               type:(RTCMediaStreamTrackType)type {
+  NSParameterAssert(nativeMediaTrack);
+  NSParameterAssert(type == RTCMediaStreamTrackTypeVideo);
+  if (self = [super initWithNativeTrack:nativeMediaTrack type:type]) {
+    _adapters = [NSMutableArray array];
+  }
+  return self;
+}
+
+- (void)dealloc {
+  for (RTCVideoRendererAdapter *adapter in _adapters) {
+    self.nativeVideoTrack->RemoveSink(adapter.nativeVideoRenderer);
+  }
+}
+
+- (RTCVideoSource *)source {
+  if (!_source) {
+    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
+        self.nativeVideoTrack->GetSource();
+    if (source) {
+      _source = [[RTCVideoSource alloc] initWithNativeVideoSource:source.get()];
+    }
+  }
+  return _source;
+}
+
+- (void)addRenderer:(id<RTCVideoRenderer>)renderer {
+  // Make sure we don't have this renderer yet.
+  for (RTCVideoRendererAdapter *adapter in _adapters) {
+    NSAssert(adapter.videoRenderer != renderer,
+             @"|renderer| is already attached to this track");
+  }
+  // Create a wrapper that provides a native pointer for us.
+  RTCVideoRendererAdapter *adapter =
+      [[RTCVideoRendererAdapter alloc] initWithNativeRenderer:renderer];
+  [_adapters addObject:adapter];
+  self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer,
+                                         rtc::VideoSinkWants());
+}
+
+- (void)removeRenderer:(id<RTCVideoRenderer>)renderer {
+  __block NSUInteger indexToRemove = NSNotFound;
+  [_adapters enumerateObjectsUsingBlock:^(RTCVideoRendererAdapter *adapter,
+                                          NSUInteger idx,
+                                          BOOL *stop) {
+    if (adapter.videoRenderer == renderer) {
+      indexToRemove = idx;
+      *stop = YES;
+    }
+  }];
+  if (indexToRemove == NSNotFound) {
+    return;
+  }
+  RTCVideoRendererAdapter *adapterToRemove =
+      [_adapters objectAtIndex:indexToRemove];
+  self.nativeVideoTrack->RemoveSink(adapterToRemove.nativeVideoRenderer);
+  [_adapters removeObjectAtIndex:indexToRemove];
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::VideoTrackInterface>)nativeVideoTrack {
+  return static_cast<webrtc::VideoTrackInterface *>(self.nativeTrack.get());
+}
+
+@end
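+
+// Illustrative usage (a sketch, not part of the SDK sources): renderers are
+// attached and detached through the adapter list above. `videoTrack` and
+// `renderer` are assumed to already exist.
+//
+//   RTCVideoTrack *videoTrack = ...;       // e.g. from a remote media stream
+//   id<RTCVideoRenderer> renderer = ...;   // e.g. an RTCEAGLVideoView
+//   [videoTrack addRenderer:renderer];     // wraps it in an adapter + sink
+//   ...
+//   [videoTrack removeRenderer:renderer];  // removes the matching sink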
diff --git a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.h b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.h
new file mode 100644
index 0000000..5a70238
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.h
@@ -0,0 +1,73 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_API_OBJC_AVFOUNDATION_VIDEO_CAPTURER_H_
+#define WEBRTC_API_OBJC_AVFOUNDATION_VIDEO_CAPTURER_H_
+
+#import <AVFoundation/AVFoundation.h>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/media/base/videocapturer.h"
+#include "webrtc/video_frame.h"
+
+@class RTCAVFoundationVideoCapturerInternal;
+
+namespace rtc {
+class Thread;
+}  // namespace rtc
+
+namespace webrtc {
+
+class AVFoundationVideoCapturer : public cricket::VideoCapturer,
+                                  public rtc::MessageHandler {
+ public:
+  AVFoundationVideoCapturer();
+  ~AVFoundationVideoCapturer();
+
+  cricket::CaptureState Start(const cricket::VideoFormat& format) override;
+  void Stop() override;
+  bool IsRunning() override;
+  bool IsScreencast() const override {
+    return false;
+  }
+  bool GetPreferredFourccs(std::vector<uint32_t> *fourccs) override {
+    fourccs->push_back(cricket::FOURCC_NV12);
+    return true;
+  }
+
+  // Returns the active capture session. Calls to the capture session should
+  // occur on the RTCDispatcherTypeCaptureSession queue in RTCDispatcher.
+  AVCaptureSession* GetCaptureSession();
+
+  // Returns whether the rear-facing camera can be used (e.g. it cannot be
+  // used if the device has no rear-facing camera).
+  bool CanUseBackCamera() const;
+
+  // Switches the camera being used (either front or back).
+  void SetUseBackCamera(bool useBackCamera);
+  bool GetUseBackCamera() const;
+
+  // Converts the sample buffer into a cricket::CapturedFrame and signals the
+  // frame for capture.
+  void CaptureSampleBuffer(CMSampleBufferRef sampleBuffer);
+
+  // rtc::MessageHandler implementation; handles the frame messages posted by
+  // CaptureSampleBuffer().
+  void OnMessage(rtc::Message *msg) override;
+
+ private:
+  void OnFrameMessage(CVImageBufferRef image_buffer, int64_t capture_time);
+
+  RTCAVFoundationVideoCapturerInternal *_capturer;
+  rtc::Thread *_startThread;  // Set in Start(), unset in Stop().
+};  // AVFoundationVideoCapturer
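+
+// Illustrative use of GetCaptureSession() from the Objective-C layer (a
+// sketch, not part of this header; `capturer` is assumed to be a pointer to
+// an AVFoundationVideoCapturer instance). The returned session should only be
+// touched on the capture session queue:
+//
+//   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+//                                block:^{
+//     AVCaptureSession *session = capturer->GetCaptureSession();
+//     // e.g. attach the session to an AVCaptureVideoPreviewLayer.
+//   }];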
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_OBJC_AVFOUNDATION_VIDEO_CAPTURER_H_
diff --git a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
new file mode 100644
index 0000000..cb945f4
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
@@ -0,0 +1,545 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "avfoundationvideocapturer.h"
+
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+#import "RTCDispatcher+Private.h"
+#import "WebRTC/RTCLogging.h"
+
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/thread.h"
+
+// TODO(tkchin): support other formats.
+static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
+static cricket::VideoFormat const kDefaultFormat =
+    cricket::VideoFormat(640,
+                         480,
+                         cricket::VideoFormat::FpsToInterval(30),
+                         cricket::FOURCC_NV12);
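+// Note: cricket::VideoFormat expresses the frame interval in nanoseconds, so
+// FpsToInterval(30) corresponds to roughly a 33.3 ms frame interval.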
+
+// This class is used to capture frames using AVFoundation APIs on iOS. It is
+// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
+// for this is that other webrtc objects own the cricket::VideoCapturer, which
+// is not reference counted. To prevent bad behavior we do not expose this
+// class directly.
+@interface RTCAVFoundationVideoCapturerInternal : NSObject
+    <AVCaptureVideoDataOutputSampleBufferDelegate>
+
+@property(nonatomic, readonly) AVCaptureSession *captureSession;
+@property(nonatomic, readonly) BOOL isRunning;
+@property(nonatomic, readonly) BOOL canUseBackCamera;
+@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
+
+// We keep a raw pointer back to the AVFoundationVideoCapturer so we can
+// forward captured frames to it. This is safe because the capturer owns this
+// object.
+- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
+
+// Starts and stops the capture session asynchronously. We cannot do this
+// synchronously without blocking a WebRTC thread.
+- (void)start;
+- (void)stop;
+
+@end
+
+@implementation RTCAVFoundationVideoCapturerInternal {
+  // Keep pointers to inputs for convenience.
+  AVCaptureDeviceInput *_frontCameraInput;
+  AVCaptureDeviceInput *_backCameraInput;
+  AVCaptureVideoDataOutput *_videoDataOutput;
+  // The cricket::VideoCapturer that owns this class. Should never be NULL.
+  webrtc::AVFoundationVideoCapturer *_capturer;
+  BOOL _orientationHasChanged;
+}
+
+@synthesize captureSession = _captureSession;
+@synthesize isRunning = _isRunning;
+@synthesize useBackCamera = _useBackCamera;
+
+// This is called from the thread that creates the video source, which is likely
+// the main thread.
+- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
+  RTC_DCHECK(capturer);
+  if (self = [super init]) {
+    _capturer = capturer;
+    // Create the capture session and all relevant inputs and outputs. We need
+    // to do this in init because the application may want the capture session
+    // before we start the capturer (e.g. to attach an
+    // AVCaptureVideoPreviewLayer). All objects created here are retained until
+    // dealloc and never recreated.
+    if (![self setupCaptureSession]) {
+      return nil;
+    }
+    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+    [center addObserver:self
+               selector:@selector(deviceOrientationDidChange:)
+                   name:UIDeviceOrientationDidChangeNotification
+                 object:nil];
+    [center addObserverForName:AVCaptureSessionRuntimeErrorNotification
+                        object:nil
+                         queue:nil
+                    usingBlock:^(NSNotification *notification) {
+      RTCLogError(@"Capture session error: %@", notification.userInfo);
+    }];
+  }
+  return self;
+}
+
+- (void)dealloc {
+  RTC_DCHECK(!_isRunning);
+  [[NSNotificationCenter defaultCenter] removeObserver:self];
+  _capturer = nullptr;
+}
+
+- (AVCaptureSession *)captureSession {
+  return _captureSession;
+}
+
+// Called from any thread (likely main thread).
+- (BOOL)canUseBackCamera {
+  return _backCameraInput != nil;
+}
+
+// Called from any thread (likely main thread).
+- (BOOL)useBackCamera {
+  @synchronized(self) {
+    return _useBackCamera;
+  }
+}
+
+// Called from any thread (likely main thread).
+- (void)setUseBackCamera:(BOOL)useBackCamera {
+  if (!self.canUseBackCamera) {
+    if (useBackCamera) {
+      RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
+                    "not switching.");
+    }
+    return;
+  }
+  @synchronized(self) {
+    if (_useBackCamera == useBackCamera) {
+      return;
+    }
+    _useBackCamera = useBackCamera;
+    [self updateSessionInputForUseBackCamera:useBackCamera];
+  }
+}
+
+// Called from WebRTC thread.
+- (void)start {
+  if (_isRunning) {
+    return;
+  }
+  _isRunning = YES;
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
+    _orientationHasChanged = NO;
+    [self updateOrientation];
+    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
+    AVCaptureSession *captureSession = self.captureSession;
+    [captureSession startRunning];
+  }];
+}
+
+// Called from same thread as start.
+- (void)stop {
+  if (!_isRunning) {
+    return;
+  }
+  _isRunning = NO;
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
+    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
+    [_captureSession stopRunning];
+    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
+  }];
+}
+
+#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+           fromConnection:(AVCaptureConnection *)connection {
+  NSParameterAssert(captureOutput == _videoDataOutput);
+  if (!_isRunning) {
+    return;
+  }
+  _capturer->CaptureSampleBuffer(sampleBuffer);
+}
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
+         fromConnection:(AVCaptureConnection *)connection {
+  RTCLogError(@"Dropped sample buffer.");
+}
+
+#pragma mark - Private
+
+- (BOOL)setupCaptureSession {
+  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
+#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
+  NSString *version = [[UIDevice currentDevice] systemVersion];
+  if ([version integerValue] >= 7) {
+    captureSession.usesApplicationAudioSession = NO;
+  }
+#endif
+  if (![captureSession canSetSessionPreset:kDefaultPreset]) {
+    RTCLogError(@"Session preset unsupported.");
+    return NO;
+  }
+  captureSession.sessionPreset = kDefaultPreset;
+
+  // Add the output.
+  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
+  if (![captureSession canAddOutput:videoDataOutput]) {
+    RTCLogError(@"Video data output unsupported.");
+    return NO;
+  }
+  [captureSession addOutput:videoDataOutput];
+
+  // Get the front and back cameras. If there isn't a front camera
+  // give up.
+  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
+  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
+  if (!frontCameraInput) {
+    RTCLogError(@"No front camera for capture session.");
+    return NO;
+  }
+
+  // Add the inputs.
+  if (![captureSession canAddInput:frontCameraInput] ||
+      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
+    RTCLogError(@"Session does not support capture inputs.");
+    return NO;
+  }
+  AVCaptureDeviceInput *input = self.useBackCamera ?
+      backCameraInput : frontCameraInput;
+  [captureSession addInput:input];
+  _captureSession = captureSession;
+  return YES;
+}
+
+- (AVCaptureVideoDataOutput *)videoDataOutput {
+  if (!_videoDataOutput) {
+    // Make the capturer output NV12. Ideally we want I420 but that's not
+    // currently supported on iPhone / iPad.
+    AVCaptureVideoDataOutput *videoDataOutput =
+        [[AVCaptureVideoDataOutput alloc] init];
+    videoDataOutput.videoSettings = @{
+      (NSString *)kCVPixelBufferPixelFormatTypeKey :
+        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
+    };
+    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
+    dispatch_queue_t queue =
+        [RTCDispatcher dispatchQueueForType:RTCDispatcherTypeCaptureSession];
+    [videoDataOutput setSampleBufferDelegate:self queue:queue];
+    _videoDataOutput = videoDataOutput;
+  }
+  return _videoDataOutput;
+}
+
+- (AVCaptureDevice *)videoCaptureDeviceForPosition:
+    (AVCaptureDevicePosition)position {
+  for (AVCaptureDevice *captureDevice in
+       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
+    if (captureDevice.position == position) {
+      return captureDevice;
+    }
+  }
+  return nil;
+}
+
+- (AVCaptureDeviceInput *)frontCameraInput {
+  if (!_frontCameraInput) {
+    AVCaptureDevice *frontCameraDevice =
+        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
+    if (!frontCameraDevice) {
+      RTCLogWarning(@"Failed to find front capture device.");
+      return nil;
+    }
+    NSError *error = nil;
+    AVCaptureDeviceInput *frontCameraInput =
+        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
+                                              error:&error];
+    if (!frontCameraInput) {
+      RTCLogError(@"Failed to create front camera input: %@",
+                  error.localizedDescription);
+      return nil;
+    }
+    _frontCameraInput = frontCameraInput;
+  }
+  return _frontCameraInput;
+}
+
+- (AVCaptureDeviceInput *)backCameraInput {
+  if (!_backCameraInput) {
+    AVCaptureDevice *backCameraDevice =
+        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
+    if (!backCameraDevice) {
+      RTCLogWarning(@"Failed to find front capture device.");
+      return nil;
+    }
+    NSError *error = nil;
+    AVCaptureDeviceInput *backCameraInput =
+        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
+                                              error:&error];
+    if (!backCameraInput) {
+      RTCLogError(@"Failed to create front camera input: %@",
+                  error.localizedDescription);
+      return nil;
+    }
+    _backCameraInput = backCameraInput;
+  }
+  return _backCameraInput;
+}
+
+- (void)deviceOrientationDidChange:(NSNotification *)notification {
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
+    _orientationHasChanged = YES;
+    [self updateOrientation];
+  }];
+}
+
+// Called from capture session queue.
+- (void)updateOrientation {
+  AVCaptureConnection *connection =
+      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
+  if (!connection.supportsVideoOrientation) {
+    // TODO(tkchin): set rotation bit on frames.
+    return;
+  }
+  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
+  switch ([UIDevice currentDevice].orientation) {
+    case UIDeviceOrientationPortrait:
+      orientation = AVCaptureVideoOrientationPortrait;
+      break;
+    case UIDeviceOrientationPortraitUpsideDown:
+      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
+      break;
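+    // Note: UIDevice and AVCapture landscape orientations are mirrored, so a
+    // landscape-left device orientation maps to a landscape-right video
+    // orientation and vice versa.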
+    case UIDeviceOrientationLandscapeLeft:
+      orientation = AVCaptureVideoOrientationLandscapeRight;
+      break;
+    case UIDeviceOrientationLandscapeRight:
+      orientation = AVCaptureVideoOrientationLandscapeLeft;
+      break;
+    case UIDeviceOrientationFaceUp:
+    case UIDeviceOrientationFaceDown:
+    case UIDeviceOrientationUnknown:
+      if (!_orientationHasChanged) {
+        connection.videoOrientation = orientation;
+      }
+      return;
+  }
+  connection.videoOrientation = orientation;
+}
+
+// Update the current session input to match what's stored in _useBackCamera.
+- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
+    [_captureSession beginConfiguration];
+    AVCaptureDeviceInput *oldInput = _backCameraInput;
+    AVCaptureDeviceInput *newInput = _frontCameraInput;
+    if (useBackCamera) {
+      oldInput = _frontCameraInput;
+      newInput = _backCameraInput;
+    }
+    if (oldInput) {
+      // It's OK to remove this input even if it's not attached; the call is
+      // then a no-op.
+      [_captureSession removeInput:oldInput];
+    }
+    if (newInput) {
+      [_captureSession addInput:newInput];
+    }
+    [self updateOrientation];
+    [_captureSession commitConfiguration];
+  }];
+}
+
+@end
+
+namespace webrtc {
+
+enum AVFoundationVideoCapturerMessageType : uint32_t {
+  kMessageTypeFrame,
+};
+
+struct AVFoundationFrame {
+  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
+    : image_buffer(buffer), capture_time(time) {}
+  CVImageBufferRef image_buffer;
+  int64_t capture_time;
+};
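+// Note: the image buffer is retained in CaptureSampleBuffer() before the
+// frame is posted, and released in OnFrameMessage() once the frame has been
+// signaled.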
+
+AVFoundationVideoCapturer::AVFoundationVideoCapturer()
+    : _capturer(nil), _startThread(nullptr) {
+  // Set our supported formats. This matches kDefaultPreset.
+  std::vector<cricket::VideoFormat> supportedFormats;
+  supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
+  SetSupportedFormats(supportedFormats);
+  _capturer =
+      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
+}
+
+AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
+  _capturer = nil;
+}
+
+cricket::CaptureState AVFoundationVideoCapturer::Start(
+    const cricket::VideoFormat& format) {
+  if (!_capturer) {
+    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
+    return cricket::CaptureState::CS_FAILED;
+  }
+  if (_capturer.isRunning) {
+    LOG(LS_ERROR) << "The capturer is already running.";
+    return cricket::CaptureState::CS_FAILED;
+  }
+  if (format != kDefaultFormat) {
+    LOG(LS_ERROR) << "Unsupported format provided.";
+    return cricket::CaptureState::CS_FAILED;
+  }
+
+  // Keep track of which thread capture started on. This is the thread that
+  // frames need to be sent to.
+  RTC_DCHECK(!_startThread);
+  _startThread = rtc::Thread::Current();
+
+  SetCaptureFormat(&format);
+  // This isn't entirely accurate: the AVCaptureSession takes a while to spin
+  // up, and the start call below returns asynchronously, so capture may not
+  // actually be running yet when we report CS_RUNNING.
+  // TODO(tkchin): make this better.
+  [_capturer start];
+  SetCaptureState(cricket::CaptureState::CS_RUNNING);
+
+  return cricket::CaptureState::CS_STARTING;
+}
+
+void AVFoundationVideoCapturer::Stop() {
+  [_capturer stop];
+  SetCaptureFormat(NULL);
+  _startThread = nullptr;
+}
+
+bool AVFoundationVideoCapturer::IsRunning() {
+  return _capturer.isRunning;
+}
+
+AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
+  return _capturer.captureSession;
+}
+
+bool AVFoundationVideoCapturer::CanUseBackCamera() const {
+  return _capturer.canUseBackCamera;
+}
+
+void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
+  _capturer.useBackCamera = useBackCamera;
+}
+
+bool AVFoundationVideoCapturer::GetUseBackCamera() const {
+  return _capturer.useBackCamera;
+}
+
+void AVFoundationVideoCapturer::CaptureSampleBuffer(
+    CMSampleBufferRef sampleBuffer) {
+  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
+      !CMSampleBufferIsValid(sampleBuffer) ||
+      !CMSampleBufferDataIsReady(sampleBuffer)) {
+    return;
+  }
+
+  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+  if (image_buffer == NULL) {
+    return;
+  }
+
+  // Retain the buffer and post it to the webrtc thread. It will be released
+  // after it has successfully been signaled.
+  CVBufferRetain(image_buffer);
+  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
+  _startThread->Post(this, kMessageTypeFrame,
+                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
+}
+
+void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
+  switch (msg->message_id) {
+    case kMessageTypeFrame: {
+      rtc::TypedMessageData<AVFoundationFrame>* data =
+        static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
+      const AVFoundationFrame& frame = data->data();
+      OnFrameMessage(frame.image_buffer, frame.capture_time);
+      delete data;
+      break;
+    }
+  }
+}
+
+void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
+                                               int64_t capture_time) {
+  RTC_DCHECK(_startThread->IsCurrent());
+
+  // The base address must be locked in order to access the frame data.
+  CVOptionFlags lock_flags = kCVPixelBufferLock_ReadOnly;
+  CVReturn ret = CVPixelBufferLockBaseAddress(image_buffer, lock_flags);
+  if (ret != kCVReturnSuccess) {
+    return;
+  }
+
+  static size_t const kYPlaneIndex = 0;
+  static size_t const kUVPlaneIndex = 1;
+  uint8_t* y_plane_address =
+      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
+                                                               kYPlaneIndex));
+  size_t y_plane_height =
+      CVPixelBufferGetHeightOfPlane(image_buffer, kYPlaneIndex);
+  size_t y_plane_width =
+      CVPixelBufferGetWidthOfPlane(image_buffer, kYPlaneIndex);
+  size_t y_plane_bytes_per_row =
+      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kYPlaneIndex);
+  size_t uv_plane_height =
+      CVPixelBufferGetHeightOfPlane(image_buffer, kUVPlaneIndex);
+  size_t uv_plane_bytes_per_row =
+      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kUVPlaneIndex);
+  size_t frame_size = y_plane_bytes_per_row * y_plane_height +
+      uv_plane_bytes_per_row * uv_plane_height;
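+  // For the default 640x480 NV12 format with tightly packed rows this is
+  // 640 * 480 + 640 * 240 = 460800 bytes; row padding makes it larger.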
+
+  // Sanity check assumption that planar bytes are contiguous.
+  uint8_t* uv_plane_address =
+      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
+                                                               kUVPlaneIndex));
+  RTC_DCHECK(uv_plane_address ==
+             y_plane_address + y_plane_height * y_plane_bytes_per_row);
+
+  // Stuff data into a cricket::CapturedFrame.
+  cricket::CapturedFrame frame;
+  frame.width = y_plane_width;
+  frame.height = y_plane_height;
+  frame.pixel_width = 1;
+  frame.pixel_height = 1;
+  frame.fourcc = static_cast<uint32_t>(cricket::FOURCC_NV12);
+  frame.time_stamp = capture_time;
+  frame.data = y_plane_address;
+  frame.data_size = frame_size;
+
+  // This will call a superclass method that will perform the frame conversion
+  // to I420.
+  SignalFrameCaptured(this, &frame);
+
+  CVPixelBufferUnlockBaseAddress(image_buffer, lock_flags);
+  CVBufferRelease(image_buffer);
+}
+
+}  // namespace webrtc