Move talk/app/webrtc to webrtc/api

The previously disabled warnings that were inherited from
talk/build/common.gypi are now replaced by target-specific disabling
of only the failing warnings. Some additional disabling was needed
because of the stricter compilation warnings that apply to code in
webrtc/.
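
As a rough illustration only (hypothetical target name and warning flags,
not the actual suppressions from this CL), a target-specific suppression
in a .gyp file looks like:

  {
    'target_name': 'example_api_target',
    'type': 'static_library',
    'cflags': [
      '-Wno-sign-compare',  # Disable only the warning this target trips.
    ],
    'msvs_disabled_warnings': [ 4389 ],  # Example MSVC-side suppression.
  }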

License headers will be updated in a follow-up CL.

Other modifications:
* Updated the header guards.
* Sorted the includes using chromium/src/tools/sort-headers.py
  (see the example invocation after this list), except for these files:
  talk/app/webrtc/peerconnectionendtoend_unittest.cc
  talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
  talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
  webrtc/media/devices/win32devicemanager.cc
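
A typical invocation of the sorting script on a single file looks roughly
like this (file path shown only as an example):

  python chromium/src/tools/sort-headers.py webrtc/api/java/jni/androidmediacodeccommon.h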

The HAVE_SCTP define was added for the peerconnection_unittests target
in api_tests.gyp.
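
In gyp terms this boils down to adding the define to that target, roughly
(surrounding target structure abbreviated):

  'target_name': 'peerconnection_unittests',
  'defines': [
    'HAVE_SCTP',
  ],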

I also checked that none of
SRTP_RELATIVE_PATH
HAVE_SRTP
HAVE_WEBRTC_VIDEO
HAVE_WEBRTC_VOICE
were used by the talk/app/webrtc code.
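
One way to double-check this is a repository search such as:

  git grep -n -e SRTP_RELATIVE_PATH -e HAVE_SRTP -e HAVE_WEBRTC_VIDEO \
      -e HAVE_WEBRTC_VOICE -- talk/app/webrtc

which should return no matches.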

For Chromium, the following changes will need to be applied to the roll CL that updates the
DEPS for WebRTC and libjingle:
https://codereview.chromium.org/1615433002

BUG=webrtc:5418
NOPRESUBMIT=True
R=deadbeef@webrtc.org, pthatcher@webrtc.org, tommi@webrtc.org

Review URL: https://codereview.webrtc.org/1610243002 .

Cr-Commit-Position: refs/heads/master@{#11545}
diff --git a/webrtc/api/java/jni/OWNERS b/webrtc/api/java/jni/OWNERS
new file mode 100644
index 0000000..4d31ffb
--- /dev/null
+++ b/webrtc/api/java/jni/OWNERS
@@ -0,0 +1 @@
+magjed@webrtc.org
diff --git a/webrtc/api/java/jni/androidmediacodeccommon.h b/webrtc/api/java/jni/androidmediacodeccommon.h
new file mode 100644
index 0000000..7044fb4
--- /dev/null
+++ b/webrtc/api/java/jni/androidmediacodeccommon.h
@@ -0,0 +1,112 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
+
+#include <android/log.h>
+#include <string>
+
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+namespace webrtc_jni {
+
+// Uncomment this define to enable verbose logging for every encoded/decoded
+// video frame.
+//#define TRACK_BUFFER_TIMING
+
+#define TAG_COMMON "MediaCodecVideo"
+
+// Color formats supported by encoder - should mirror supportedColorList
+// from MediaCodecVideoEncoder.java
+enum COLOR_FORMATTYPE {
+  COLOR_FormatYUV420Planar = 0x13,
+  COLOR_FormatYUV420SemiPlanar = 0x15,
+  COLOR_QCOM_FormatYUV420SemiPlanar = 0x7FA30C00,
+  // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+  // This format is presumably similar to COLOR_FormatYUV420SemiPlanar,
+  // but requires some (16, 32?) byte alignment.
+  COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04
+};
+
+// Arbitrary interval to poll the codec for new outputs.
+enum { kMediaCodecPollMs = 10 };
+// Media codec maximum output buffer ready timeout.
+enum { kMediaCodecTimeoutMs = 1000 };
+// Interval to print codec statistics (bitrate, fps, encoding/decoding time).
+enum { kMediaCodecStatisticsIntervalMs = 3000 };
+// Maximum amount of pending frames for VP8 decoder.
+enum { kMaxPendingFramesVp8 = 1 };
+// Maximum amount of pending frames for VP9 decoder.
+enum { kMaxPendingFramesVp9 = 1 };
+// Maximum amount of pending frames for H.264 decoder.
+enum { kMaxPendingFramesH264 = 8 };
+// Maximum amount of decoded frames for which per-frame logging is enabled.
+enum { kMaxDecodedLogFrames = 10 };
+// Maximum amount of encoded frames for which per-frame logging is enabled.
+enum { kMaxEncodedLogFrames = 10 };
+
+static inline int64_t GetCurrentTimeMs() {
+  return webrtc::TickTime::Now().Ticks() / 1000000LL;
+}
+
+static inline void AllowBlockingCalls() {
+  rtc::Thread* current_thread = rtc::Thread::Current();
+  if (current_thread != NULL)
+    current_thread->SetAllowBlockingCalls(true);
+}
+
+// Return the (singleton) Java Enum object corresponding to |index|;
+// |state_class_fragment| is something like "MediaSource$State".
+static inline jobject JavaEnumFromIndexAndClassName(
+    JNIEnv* jni, const std::string& state_class_fragment, int index) {
+  const std::string state_class = "org/webrtc/" + state_class_fragment;
+  return JavaEnumFromIndex(jni, FindClass(jni, state_class.c_str()),
+                           state_class, index);
+}
+
+// Checks for any Java exception, prints stack backtrace and clears
+// currently thrown exception.
+static inline bool CheckException(JNIEnv* jni) {
+  if (jni->ExceptionCheck()) {
+    LOG_TAG(rtc::LS_ERROR, TAG_COMMON) << "Java JNI exception.";
+    jni->ExceptionDescribe();
+    jni->ExceptionClear();
+    return true;
+  }
+  return false;
+}
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
diff --git a/webrtc/api/java/jni/androidmediadecoder_jni.cc b/webrtc/api/java/jni/androidmediadecoder_jni.cc
new file mode 100644
index 0000000..b9973be
--- /dev/null
+++ b/webrtc/api/java/jni/androidmediadecoder_jni.cc
@@ -0,0 +1,945 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <algorithm>
+#include <vector>
+
+// NOTICE: androidmediadecoder_jni.h must be included before
+// androidmediacodeccommon.h to avoid build errors.
+#include "webrtc/api/java/jni/androidmediadecoder_jni.h"
+
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+#include "third_party/libyuv/include/libyuv/video_common.h"
+#include "webrtc/api/java/jni/androidmediacodeccommon.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/api/java/jni/surfacetexturehelper_jni.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/system_wrappers/include/logcat_trace_context.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+using rtc::Bind;
+using rtc::Thread;
+using rtc::ThreadManager;
+using rtc::scoped_ptr;
+
+using webrtc::CodecSpecificInfo;
+using webrtc::DecodedImageCallback;
+using webrtc::EncodedImage;
+using webrtc::VideoFrame;
+using webrtc::RTPFragmentationHeader;
+using webrtc::TickTime;
+using webrtc::VideoCodec;
+using webrtc::VideoCodecType;
+using webrtc::kVideoCodecH264;
+using webrtc::kVideoCodecVP8;
+using webrtc::kVideoCodecVP9;
+
+namespace webrtc_jni {
+
+// Logging macros.
+#define TAG_DECODER "MediaCodecVideoDecoder"
+#ifdef TRACK_BUFFER_TIMING
+#define ALOGV(...) \
+  __android_log_print(ANDROID_LOG_VERBOSE, TAG_DECODER, __VA_ARGS__)
+#else
+#define ALOGV(...)
+#endif
+#define ALOGD LOG_TAG(rtc::LS_INFO, TAG_DECODER)
+#define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_DECODER)
+#define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_DECODER)
+
+enum { kMaxWarningLogFrames = 2 };
+
+class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
+                               public rtc::MessageHandler {
+ public:
+  explicit MediaCodecVideoDecoder(
+      JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context);
+  virtual ~MediaCodecVideoDecoder();
+
+  int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores)
+      override;
+
+  int32_t Decode(
+      const EncodedImage& inputImage, bool missingFrames,
+      const RTPFragmentationHeader* fragmentation,
+      const CodecSpecificInfo* codecSpecificInfo = NULL,
+      int64_t renderTimeMs = -1) override;
+
+  int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback)
+      override;
+
+  int32_t Release() override;
+
+  bool PrefersLateDecoding() const override { return true; }
+
+  // rtc::MessageHandler implementation.
+  void OnMessage(rtc::Message* msg) override;
+
+  const char* ImplementationName() const override;
+
+ private:
+  // CHECK-fail if not running on |codec_thread_|.
+  void CheckOnCodecThread();
+
+  int32_t InitDecodeOnCodecThread();
+  int32_t ReleaseOnCodecThread();
+  int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
+  // Deliver any outputs pending in the MediaCodec to our |callback_| and return
+  // true on success.
+  bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_ms);
+  int32_t ProcessHWErrorOnCodecThread();
+  void EnableFrameLogOnWarning();
+
+  // Type of video codec.
+  VideoCodecType codecType_;
+
+  // Render EGL context - owned by factory, should not be allocated/destroyed
+  // by VideoDecoder.
+  jobject render_egl_context_;
+
+  bool key_frame_required_;
+  bool inited_;
+  bool sw_fallback_required_;
+  bool use_surface_;
+  VideoCodec codec_;
+  webrtc::I420BufferPool decoded_frame_pool_;
+  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
+  DecodedImageCallback* callback_;
+  int frames_received_;  // Number of frames received by decoder.
+  int frames_decoded_;  // Number of frames decoded by decoder.
+  // Number of decoded frames for which log information is displayed.
+  int frames_decoded_logged_;
+  int64_t start_time_ms_;  // Start time for statistics.
+  int current_frames_;  // Number of frames in the current statistics interval.
+  int current_bytes_;  // Encoded bytes in the current statistics interval.
+  int current_decoding_time_ms_;  // Overall decoding time in the current second
+  int current_delay_time_ms_;  // Overall delay time in the current second.
+  uint32_t max_pending_frames_;  // Maximum number of pending input frames.
+
+  // State that is constant for the lifetime of this object once the ctor
+  // returns.
+  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
+  ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
+  ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
+  jmethodID j_init_decode_method_;
+  jmethodID j_release_method_;
+  jmethodID j_dequeue_input_buffer_method_;
+  jmethodID j_queue_input_buffer_method_;
+  jmethodID j_dequeue_byte_buffer_method_;
+  jmethodID j_dequeue_texture_buffer_method_;
+  jmethodID j_return_decoded_byte_buffer_method_;
+  // MediaCodecVideoDecoder fields.
+  jfieldID j_input_buffers_field_;
+  jfieldID j_output_buffers_field_;
+  jfieldID j_color_format_field_;
+  jfieldID j_width_field_;
+  jfieldID j_height_field_;
+  jfieldID j_stride_field_;
+  jfieldID j_slice_height_field_;
+  // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
+  jfieldID j_texture_id_field_;
+  jfieldID j_transform_matrix_field_;
+  jfieldID j_texture_presentation_timestamp_ms_field_;
+  jfieldID j_texture_timestamp_ms_field_;
+  jfieldID j_texture_ntp_timestamp_ms_field_;
+  jfieldID j_texture_decode_time_ms_field_;
+  jfieldID j_texture_frame_delay_ms_field_;
+  // MediaCodecVideoDecoder.DecodedOutputBuffer fields.
+  jfieldID j_info_index_field_;
+  jfieldID j_info_offset_field_;
+  jfieldID j_info_size_field_;
+  jfieldID j_presentation_timestamp_ms_field_;
+  jfieldID j_timestamp_ms_field_;
+  jfieldID j_ntp_timestamp_ms_field_;
+  jfieldID j_byte_buffer_decode_time_ms_field_;
+
+  // Global references; must be deleted in Release().
+  std::vector<jobject> input_buffers_;
+};
+
+MediaCodecVideoDecoder::MediaCodecVideoDecoder(
+    JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) :
+    codecType_(codecType),
+    render_egl_context_(render_egl_context),
+    key_frame_required_(true),
+    inited_(false),
+    sw_fallback_required_(false),
+    codec_thread_(new Thread()),
+    j_media_codec_video_decoder_class_(
+        jni,
+        FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
+    j_media_codec_video_decoder_(
+        jni,
+        jni->NewObject(*j_media_codec_video_decoder_class_,
+                       GetMethodID(jni,
+                                   *j_media_codec_video_decoder_class_,
+                                   "<init>",
+                                   "()V"))) {
+  ScopedLocalRefFrame local_ref_frame(jni);
+  codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
+  RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";
+
+  j_init_decode_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "initDecode",
+      "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
+      "IILorg/webrtc/SurfaceTextureHelper;)Z");
+  j_release_method_ =
+      GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
+  j_dequeue_input_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
+  j_queue_input_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJJJ)Z");
+  j_dequeue_byte_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
+      "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;");
+  j_dequeue_texture_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer",
+      "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;");
+  j_return_decoded_byte_buffer_method_ =
+      GetMethodID(jni, *j_media_codec_video_decoder_class_,
+                  "returnDecodedOutputBuffer", "(I)V");
+
+  j_input_buffers_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_,
+      "inputBuffers", "[Ljava/nio/ByteBuffer;");
+  j_output_buffers_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_,
+      "outputBuffers", "[Ljava/nio/ByteBuffer;");
+  j_color_format_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
+  j_width_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "width", "I");
+  j_height_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "height", "I");
+  j_stride_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "stride", "I");
+  j_slice_height_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
+
+  jclass j_decoded_texture_buffer_class = FindClass(jni,
+      "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
+  j_texture_id_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "textureID", "I");
+  j_transform_matrix_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "transformMatrix", "[F");
+  j_texture_presentation_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "presentationTimeStampMs", "J");
+  j_texture_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "timeStampMs", "J");
+  j_texture_ntp_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "ntpTimeStampMs", "J");
+  j_texture_decode_time_ms_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J");
+  j_texture_frame_delay_ms_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "frameDelayMs", "J");
+
+  jclass j_decoded_output_buffer_class = FindClass(jni,
+      "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
+  j_info_index_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "index", "I");
+  j_info_offset_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "offset", "I");
+  j_info_size_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "size", "I");
+  j_presentation_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "presentationTimeStampMs", "J");
+  j_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "timeStampMs", "J");
+  j_ntp_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "ntpTimeStampMs", "J");
+  j_byte_buffer_decode_time_ms_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "decodeTimeMs", "J");
+
+  CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
+  use_surface_ = (render_egl_context_ != NULL);
+  ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_;
+  memset(&codec_, 0, sizeof(codec_));
+  AllowBlockingCalls();
+}
+
+MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
+  // Call Release() to ensure no more callbacks to us after we are deleted.
+  Release();
+}
+
+int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
+    int32_t numberOfCores) {
+  ALOGD << "InitDecode.";
+  if (inst == NULL) {
+    ALOGE << "NULL VideoCodec instance";
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  // Factory should guard against other codecs being used with us.
+  RTC_CHECK(inst->codecType == codecType_)
+      << "Unsupported codec " << inst->codecType << " for " << codecType_;
+
+  if (sw_fallback_required_) {
+    ALOGE << "InitDecode() - fallback to SW decoder";
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+  // Save VideoCodec instance for later.
+  if (&codec_ != inst) {
+    codec_ = *inst;
+  }
+  // If maxFramerate is not set then assume 30 fps.
+  codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 30;
+
+  // Call Java init.
+  return codec_thread_->Invoke<int32_t>(
+      Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
+}
+
+int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  ALOGD << "InitDecodeOnCodecThread Type: " << (int)codecType_ << ". "
+      << codec_.width << " x " << codec_.height << ". Fps: " <<
+      (int)codec_.maxFramerate;
+
+  // Release previous codec first if it was allocated before.
+  int ret_val = ReleaseOnCodecThread();
+  if (ret_val < 0) {
+    ALOGE << "Release failure: " << ret_val << " - fallback to SW codec";
+    sw_fallback_required_ = true;
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  // Always start with a complete key frame.
+  key_frame_required_ = true;
+  frames_received_ = 0;
+  frames_decoded_ = 0;
+  frames_decoded_logged_ = kMaxDecodedLogFrames;
+
+  jobject java_surface_texture_helper_ = nullptr;
+  if (use_surface_) {
+    java_surface_texture_helper_ = jni->CallStaticObjectMethod(
+        FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+        GetStaticMethodID(jni,
+                          FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+                          "create",
+                          "(Lorg/webrtc/EglBase$Context;)"
+                          "Lorg/webrtc/SurfaceTextureHelper;"),
+        render_egl_context_);
+    RTC_CHECK(java_surface_texture_helper_ != nullptr);
+    surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
+        jni, java_surface_texture_helper_);
+  }
+
+  jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
+      jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
+  bool success = jni->CallBooleanMethod(
+      *j_media_codec_video_decoder_,
+      j_init_decode_method_,
+      j_video_codec_enum,
+      codec_.width,
+      codec_.height,
+      java_surface_texture_helper_);
+  if (CheckException(jni) || !success) {
+    ALOGE << "Codec initialization error - fallback to SW codec.";
+    sw_fallback_required_ = true;
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  inited_ = true;
+
+  switch (codecType_) {
+    case kVideoCodecVP8:
+      max_pending_frames_ = kMaxPendingFramesVp8;
+      break;
+    case kVideoCodecVP9:
+      max_pending_frames_ = kMaxPendingFramesVp9;
+      break;
+    case kVideoCodecH264:
+      max_pending_frames_ = kMaxPendingFramesH264;
+      break;
+    default:
+      max_pending_frames_ = 0;
+  }
+  start_time_ms_ = GetCurrentTimeMs();
+  current_frames_ = 0;
+  current_bytes_ = 0;
+  current_decoding_time_ms_ = 0;
+  current_delay_time_ms_ = 0;
+
+  jobjectArray input_buffers = (jobjectArray)GetObjectField(
+      jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
+  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
+  ALOGD << "Maximum amount of pending frames: " << max_pending_frames_;
+  input_buffers_.resize(num_input_buffers);
+  for (size_t i = 0; i < num_input_buffers; ++i) {
+    input_buffers_[i] =
+        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
+    if (CheckException(jni)) {
+      ALOGE << "NewGlobalRef error - fallback to SW codec.";
+      sw_fallback_required_ = true;
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+  }
+
+  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoDecoder::Release() {
+  ALOGD << "DecoderRelease request";
+  return codec_thread_->Invoke<int32_t>(
+        Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
+}
+
+int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ALOGD << "DecoderReleaseOnCodecThread: Frames received: " <<
+      frames_received_ << ". Frames decoded: " << frames_decoded_;
+  ScopedLocalRefFrame local_ref_frame(jni);
+  for (size_t i = 0; i < input_buffers_.size(); i++) {
+    jni->DeleteGlobalRef(input_buffers_[i]);
+  }
+  input_buffers_.clear();
+  jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
+  surface_texture_helper_ = nullptr;
+  inited_ = false;
+  rtc::MessageQueueManager::Clear(this);
+  if (CheckException(jni)) {
+    ALOGE << "Decoder release exception";
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  ALOGD << "DecoderReleaseOnCodecThread done";
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+void MediaCodecVideoDecoder::CheckOnCodecThread() {
+  RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
+      << "Running on wrong thread!";
+}
+
+void MediaCodecVideoDecoder::EnableFrameLogOnWarning() {
+  // Log next 2 output frames.
+  frames_decoded_logged_ = std::max(
+      frames_decoded_logged_, frames_decoded_ + kMaxWarningLogFrames);
+}
+
+int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() {
+  CheckOnCodecThread();
+  int ret_val = ReleaseOnCodecThread();
+  if (ret_val < 0) {
+    ALOGE << "ProcessHWError: Release failure";
+  }
+  if (codecType_ == kVideoCodecH264) {
+    // For now there is no SW H.264 which can be used as fallback codec.
+    // So try to restart hw codec for now.
+    ret_val = InitDecodeOnCodecThread();
+    ALOGE << "Reset H.264 codec done. Status: " << ret_val;
+    if (ret_val == WEBRTC_VIDEO_CODEC_OK) {
+      // H.264 codec was successfully reset - return regular error code.
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    } else {
+      // Fail to restart H.264 codec - return error code which should stop the
+      // call.
+      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+    }
+  } else {
+    sw_fallback_required_ = true;
+    ALOGE << "Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE";
+    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+  }
+}
+
+int32_t MediaCodecVideoDecoder::Decode(
+    const EncodedImage& inputImage,
+    bool missingFrames,
+    const RTPFragmentationHeader* fragmentation,
+    const CodecSpecificInfo* codecSpecificInfo,
+    int64_t renderTimeMs) {
+  if (sw_fallback_required_) {
+    ALOGE << "Decode() - fallback to SW codec";
+    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+  }
+  if (callback_ == NULL) {
+    ALOGE << "Decode() - callback_ is NULL";
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (inputImage._buffer == NULL && inputImage._length > 0) {
+    ALOGE << "Decode() - inputImage is incorrect";
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (!inited_) {
+    ALOGE << "Decode() - decoder is not initialized";
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+
+  // Check if encoded frame dimension has changed.
+  if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
+      (inputImage._encodedWidth != codec_.width ||
+      inputImage._encodedHeight != codec_.height)) {
+    codec_.width = inputImage._encodedWidth;
+    codec_.height = inputImage._encodedHeight;
+    int32_t ret = InitDecode(&codec_, 1);
+    if (ret < 0) {
+      ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec";
+      sw_fallback_required_ = true;
+      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+    }
+  }
+
+  // Always start with a complete key frame.
+  if (key_frame_required_) {
+    if (inputImage._frameType != webrtc::kVideoFrameKey) {
+      ALOGE << "Decode() - key frame is required";
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    if (!inputImage._completeFrame) {
+      ALOGE << "Decode() - complete frame is required";
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    key_frame_required_ = false;
+  }
+  if (inputImage._length == 0) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  return codec_thread_->Invoke<int32_t>(Bind(
+      &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
+}
+
+int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
+    const EncodedImage& inputImage) {
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+
+  // Try to drain the decoder and wait until output is not too
+  // much behind the input.
+  if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+    ALOGW << "Decoder is too far behind. Try to drain. Received: " <<
+        frames_received_ << ". Decoded: " << frames_decoded_;
+    EnableFrameLogOnWarning();
+  }
+  const int64_t drain_start = GetCurrentTimeMs();
+  while ((frames_received_ > frames_decoded_ + max_pending_frames_) &&
+         (GetCurrentTimeMs() - drain_start) < kMediaCodecTimeoutMs) {
+    if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) {
+      ALOGE << "DeliverPendingOutputs error. Frames received: " <<
+          frames_received_ << ". Frames decoded: " << frames_decoded_;
+      return ProcessHWErrorOnCodecThread();
+    }
+  }
+  if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+    ALOGE << "Output buffer dequeue timeout. Frames received: " <<
+        frames_received_ << ". Frames decoded: " << frames_decoded_;
+    return ProcessHWErrorOnCodecThread();
+  }
+
+  // Get input buffer.
+  int j_input_buffer_index = jni->CallIntMethod(
+      *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_);
+  if (CheckException(jni) || j_input_buffer_index < 0) {
+    ALOGE << "dequeueInputBuffer error: " << j_input_buffer_index <<
+        ". Retry DeliverPendingOutputs.";
+    EnableFrameLogOnWarning();
+    // Try to drain the decoder.
+    if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) {
+      ALOGE << "DeliverPendingOutputs error. Frames received: " <<
+          frames_received_ << ". Frames decoded: " << frames_decoded_;
+      return ProcessHWErrorOnCodecThread();
+    }
+    // Try dequeue input buffer one last time.
+    j_input_buffer_index = jni->CallIntMethod(
+        *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_);
+    if (CheckException(jni) || j_input_buffer_index < 0) {
+      ALOGE << "dequeueInputBuffer critical error: " << j_input_buffer_index;
+      return ProcessHWErrorOnCodecThread();
+    }
+  }
+
+  // Copy encoded data to Java ByteBuffer.
+  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
+  uint8_t* buffer =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
+  RTC_CHECK(buffer) << "Indirect buffer??";
+  int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
+  if (CheckException(jni) || buffer_capacity < inputImage._length) {
+    ALOGE << "Input frame size " << inputImage._length <<
+        " is bigger than buffer size " << buffer_capacity;
+    return ProcessHWErrorOnCodecThread();
+  }
+  jlong presentation_timestamp_us = static_cast<jlong>(
+      static_cast<int64_t>(frames_received_) * 1000000 / codec_.maxFramerate);
+  memcpy(buffer, inputImage._buffer, inputImage._length);
+
+  if (frames_decoded_ < frames_decoded_logged_) {
+    ALOGD << "Decoder frame in # " << frames_received_ <<
+        ". Type: " << inputImage._frameType <<
+        ". Buffer # " << j_input_buffer_index <<
+        ". TS: " << presentation_timestamp_us / 1000 <<
+        ". Size: " << inputImage._length;
+  }
+
+  // Save input image timestamps for later output.
+  frames_received_++;
+  current_bytes_ += inputImage._length;
+
+  // Feed input to decoder.
+  bool success = jni->CallBooleanMethod(
+      *j_media_codec_video_decoder_,
+      j_queue_input_buffer_method_,
+      j_input_buffer_index,
+      inputImage._length,
+      presentation_timestamp_us,
+      static_cast<int64_t>(inputImage._timeStamp),
+      inputImage.ntp_time_ms_);
+  if (CheckException(jni) || !success) {
+    ALOGE << "queueInputBuffer error";
+    return ProcessHWErrorOnCodecThread();
+  }
+
+  // Try to drain the decoder
+  if (!DeliverPendingOutputs(jni, 0)) {
+    ALOGE << "DeliverPendingOutputs error";
+    return ProcessHWErrorOnCodecThread();
+  }
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoDecoder::DeliverPendingOutputs(
+    JNIEnv* jni, int dequeue_timeout_ms) {
+  if (frames_received_ <= frames_decoded_) {
+    // No need to query for output buffers - decoder is drained.
+    return true;
+  }
+  // Get decoder output.
+  jobject j_decoder_output_buffer =
+      jni->CallObjectMethod(*j_media_codec_video_decoder_,
+          use_surface_ ? j_dequeue_texture_buffer_method_
+                       : j_dequeue_byte_buffer_method_,
+          dequeue_timeout_ms);
+
+  if (CheckException(jni)) {
+    ALOGE << "dequeueOutputBuffer() error";
+    return false;
+  }
+  if (IsNull(jni, j_decoder_output_buffer)) {
+    // No decoded frame ready.
+    return true;
+  }
+
+  // Get decoded video frame properties.
+  int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
+      j_color_format_field_);
+  int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
+  int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
+  int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
+  int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
+      j_slice_height_field_);
+
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
+  int64_t presentation_timestamps_ms = 0;
+  int64_t output_timestamps_ms = 0;
+  int64_t output_ntp_timestamps_ms = 0;
+  int decode_time_ms = 0;
+  int64_t frame_delayed_ms = 0;
+  if (use_surface_) {
+    // Extract data from Java DecodedTextureBuffer.
+    presentation_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer,
+        j_texture_presentation_timestamp_ms_field_);
+    output_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_);
+    output_ntp_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_texture_ntp_timestamp_ms_field_);
+    decode_time_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_texture_decode_time_ms_field_);
+
+    const int texture_id =
+        GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_);
+    if (texture_id != 0) {  // |texture_id| == 0 represents a dropped frame.
+      const jfloatArray j_transform_matrix =
+          reinterpret_cast<jfloatArray>(GetObjectField(
+              jni, j_decoder_output_buffer, j_transform_matrix_field_));
+      frame_delayed_ms = GetLongField(
+          jni, j_decoder_output_buffer, j_texture_frame_delay_ms_field_);
+
+      // Create webrtc::VideoFrameBuffer with native texture handle.
+      frame_buffer = surface_texture_helper_->CreateTextureFrame(
+          width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
+    } else {
+      EnableFrameLogOnWarning();
+    }
+  } else {
+    // Extract data from Java ByteBuffer and create output yuv420 frame -
+    // for non surface decoding only.
+    const int output_buffer_index = GetIntField(
+        jni, j_decoder_output_buffer, j_info_index_field_);
+    const int output_buffer_offset = GetIntField(
+        jni, j_decoder_output_buffer, j_info_offset_field_);
+    const int output_buffer_size = GetIntField(
+        jni, j_decoder_output_buffer, j_info_size_field_);
+    presentation_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_presentation_timestamp_ms_field_);
+    output_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_timestamp_ms_field_);
+    output_ntp_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_ntp_timestamp_ms_field_);
+
+    decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
+                                  j_byte_buffer_decode_time_ms_field_);
+
+    if (output_buffer_size < width * height * 3 / 2) {
+      ALOGE << "Insufficient output buffer size: " << output_buffer_size;
+      return false;
+    }
+    jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
+        jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
+    jobject output_buffer =
+        jni->GetObjectArrayElement(output_buffers, output_buffer_index);
+    uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
+        output_buffer));
+    if (CheckException(jni)) {
+      return false;
+    }
+    payload += output_buffer_offset;
+
+    // Create yuv420 frame.
+    frame_buffer = decoded_frame_pool_.CreateBuffer(width, height);
+    if (color_format == COLOR_FormatYUV420Planar) {
+      RTC_CHECK_EQ(0, stride % 2);
+      RTC_CHECK_EQ(0, slice_height % 2);
+      const int uv_stride = stride / 2;
+      const int u_slice_height = slice_height / 2;
+      const uint8_t* y_ptr = payload;
+      const uint8_t* u_ptr = y_ptr + stride * slice_height;
+      const uint8_t* v_ptr = u_ptr + uv_stride * u_slice_height;
+      libyuv::I420Copy(y_ptr, stride,
+                       u_ptr, uv_stride,
+                       v_ptr, uv_stride,
+                       frame_buffer->MutableData(webrtc::kYPlane),
+                       frame_buffer->stride(webrtc::kYPlane),
+                       frame_buffer->MutableData(webrtc::kUPlane),
+                       frame_buffer->stride(webrtc::kUPlane),
+                       frame_buffer->MutableData(webrtc::kVPlane),
+                       frame_buffer->stride(webrtc::kVPlane),
+                       width, height);
+    } else {
+      // All other supported formats are nv12.
+      const uint8_t* y_ptr = payload;
+      const uint8_t* uv_ptr = y_ptr + stride * slice_height;
+      libyuv::NV12ToI420(
+          y_ptr, stride,
+          uv_ptr, stride,
+          frame_buffer->MutableData(webrtc::kYPlane),
+          frame_buffer->stride(webrtc::kYPlane),
+          frame_buffer->MutableData(webrtc::kUPlane),
+          frame_buffer->stride(webrtc::kUPlane),
+          frame_buffer->MutableData(webrtc::kVPlane),
+          frame_buffer->stride(webrtc::kVPlane),
+          width, height);
+    }
+    // Return output byte buffer back to codec.
+    jni->CallVoidMethod(
+        *j_media_codec_video_decoder_,
+        j_return_decoded_byte_buffer_method_,
+        output_buffer_index);
+    if (CheckException(jni)) {
+      ALOGE << "returnDecodedOutputBuffer error";
+      return false;
+    }
+  }
+  VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0);
+  decoded_frame.set_timestamp(output_timestamps_ms);
+  decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms);
+
+  if (frames_decoded_ < frames_decoded_logged_) {
+    ALOGD << "Decoder frame out # " << frames_decoded_ <<
+        ". " << width << " x " << height <<
+        ". " << stride << " x " <<  slice_height <<
+        ". Color: " << color_format <<
+        ". TS: " << presentation_timestamps_ms <<
+        ". DecTime: " << (int)decode_time_ms <<
+        ". DelayTime: " << (int)frame_delayed_ms;
+  }
+
+  // Calculate and print decoding statistics - every 3 seconds.
+  frames_decoded_++;
+  current_frames_++;
+  current_decoding_time_ms_ += decode_time_ms;
+  current_delay_time_ms_ += frame_delayed_ms;
+  int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
+  if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
+      current_frames_ > 0) {
+    int current_bitrate = current_bytes_ * 8 / statistic_time_ms;
+    int current_fps =
+        (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
+    ALOGD << "Frames decoded: " << frames_decoded_ <<
+        ". Received: " <<  frames_received_ <<
+        ". Bitrate: " << current_bitrate << " kbps" <<
+        ". Fps: " << current_fps <<
+        ". DecTime: " << (current_decoding_time_ms_ / current_frames_) <<
+        ". DelayTime: " << (current_delay_time_ms_ / current_frames_) <<
+        " for last " << statistic_time_ms << " ms.";
+    start_time_ms_ = GetCurrentTimeMs();
+    current_frames_ = 0;
+    current_bytes_ = 0;
+    current_decoding_time_ms_ = 0;
+    current_delay_time_ms_ = 0;
+  }
+
+  // |decoded_frame.IsZeroSize()| returns true when a frame has been dropped.
+  if (!decoded_frame.IsZeroSize()) {
+    // Callback - output decoded frame.
+    const int32_t callback_status =
+        callback_->Decoded(decoded_frame, decode_time_ms);
+    if (callback_status > 0) {
+      ALOGE << "callback error";
+    }
+  }
+  return true;
+}
+
+int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
+    DecodedImageCallback* callback) {
+  callback_ = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  if (!inited_) {
+    return;
+  }
+  // We only ever send one message to |this| directly (not through a Bind()'d
+  // functor), so expect no ID/data.
+  RTC_CHECK(!msg->message_id) << "Unexpected message!";
+  RTC_CHECK(!msg->pdata) << "Unexpected message!";
+  CheckOnCodecThread();
+
+  if (!DeliverPendingOutputs(jni, 0)) {
+    ALOGE << "OnMessage: DeliverPendingOutputs error";
+    ProcessHWErrorOnCodecThread();
+    return;
+  }
+  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+}
+
+MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() {
+  ALOGD << "MediaCodecVideoDecoderFactory ctor";
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
+  supported_codec_types_.clear();
+
+  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
+      j_decoder_class,
+      GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z"));
+  if (CheckException(jni)) {
+    is_vp8_hw_supported = false;
+  }
+  if (is_vp8_hw_supported) {
+    ALOGD << "VP8 HW Decoder supported.";
+    supported_codec_types_.push_back(kVideoCodecVP8);
+  }
+
+  bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
+      j_decoder_class,
+      GetStaticMethodID(jni, j_decoder_class, "isVp9HwSupported", "()Z"));
+  if (CheckException(jni)) {
+    is_vp9_hw_supported = false;
+  }
+  if (is_vp9_hw_supported) {
+    ALOGD << "VP9 HW Decoder supported.";
+    supported_codec_types_.push_back(kVideoCodecVP9);
+  }
+
+  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
+      j_decoder_class,
+      GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z"));
+  if (CheckException(jni)) {
+    is_h264_hw_supported = false;
+  }
+  if (is_h264_hw_supported) {
+    ALOGD << "H264 HW Decoder supported.";
+    supported_codec_types_.push_back(kVideoCodecH264);
+  }
+}
+
+MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {
+  ALOGD << "MediaCodecVideoDecoderFactory dtor";
+}
+
+void MediaCodecVideoDecoderFactory::SetEGLContext(
+    JNIEnv* jni, jobject render_egl_context) {
+  ALOGD << "MediaCodecVideoDecoderFactory::SetEGLContext";
+  if (!egl_.CreateEglBase(jni, render_egl_context)) {
+    ALOGW << "Invalid EGL context - HW surface decoding is disabled.";
+  }
+}
+
+webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
+    VideoCodecType type) {
+  if (supported_codec_types_.empty()) {
+    ALOGW << "No HW video decoder for type " << (int)type;
+    return nullptr;
+  }
+  for (VideoCodecType codec_type : supported_codec_types_) {
+    if (codec_type == type) {
+      ALOGD << "Create HW video decoder for type " << (int)type;
+      return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded(), type,
+                                        egl_.egl_base_context());
+    }
+  }
+  ALOGW << "Can not find HW video decoder for type " << (int)type;
+  return nullptr;
+}
+
+void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
+    webrtc::VideoDecoder* decoder) {
+  ALOGD << "Destroy video decoder.";
+  delete decoder;
+}
+
+const char* MediaCodecVideoDecoder::ImplementationName() const {
+  return "MediaCodec";
+}
+
+}  // namespace webrtc_jni
+
diff --git a/webrtc/api/java/jni/androidmediadecoder_jni.h b/webrtc/api/java/jni/androidmediadecoder_jni.h
new file mode 100644
index 0000000..c79490e
--- /dev/null
+++ b/webrtc/api/java/jni/androidmediadecoder_jni.h
@@ -0,0 +1,59 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
+
+#include "webrtc/api/java/jni/eglbase_jni.h"
+#include "webrtc/media/webrtc/webrtcvideodecoderfactory.h"
+
+namespace webrtc_jni {
+
+// Implementation of Android MediaCodec based decoder factory.
+class MediaCodecVideoDecoderFactory
+    : public cricket::WebRtcVideoDecoderFactory {
+ public:
+  MediaCodecVideoDecoderFactory();
+  virtual ~MediaCodecVideoDecoderFactory();
+
+  void SetEGLContext(JNIEnv* jni, jobject render_egl_context);
+
+  // WebRtcVideoDecoderFactory implementation.
+  webrtc::VideoDecoder* CreateVideoDecoder(webrtc::VideoCodecType type)
+      override;
+
+  void DestroyVideoDecoder(webrtc::VideoDecoder* decoder) override;
+
+ private:
+  EglBase egl_;
+  std::vector<webrtc::VideoCodecType> supported_codec_types_;
+};
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
diff --git a/webrtc/api/java/jni/androidmediaencoder_jni.cc b/webrtc/api/java/jni/androidmediaencoder_jni.cc
new file mode 100644
index 0000000..a06b026
--- /dev/null
+++ b/webrtc/api/java/jni/androidmediaencoder_jni.cc
@@ -0,0 +1,1265 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// NOTICE: androidmediaencoder_jni.h must be included before
+// androidmediacodeccommon.h to avoid build errors.
+#include "webrtc/api/java/jni/androidmediaencoder_jni.h"
+
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+#include "third_party/libyuv/include/libyuv/video_common.h"
+#include "webrtc/api/java/jni/androidmediacodeccommon.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/utility/quality_scaler.h"
+#include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
+#include "webrtc/system_wrappers/include/field_trial.h"
+#include "webrtc/system_wrappers/include/logcat_trace_context.h"
+
+using rtc::Bind;
+using rtc::Thread;
+using rtc::ThreadManager;
+using rtc::scoped_ptr;
+
+using webrtc::CodecSpecificInfo;
+using webrtc::EncodedImage;
+using webrtc::VideoFrame;
+using webrtc::RTPFragmentationHeader;
+using webrtc::VideoCodec;
+using webrtc::VideoCodecType;
+using webrtc::kVideoCodecH264;
+using webrtc::kVideoCodecVP8;
+using webrtc::kVideoCodecVP9;
+
+namespace webrtc_jni {
+
+// H.264 start code length.
+#define H264_SC_LENGTH 4
+// Maximum allowed NALUs in one output frame.
+#define MAX_NALUS_PERFRAME 32
+// Maximum supported HW video encoder resolution.
+#define MAX_VIDEO_WIDTH 1280
+#define MAX_VIDEO_HEIGHT 1280
+// Maximum supported HW video encoder fps.
+#define MAX_VIDEO_FPS 30
+// Maximum allowed fps value in SetRates() call.
+#define MAX_ALLOWED_VIDEO_FPS 60
+// Maximum allowed frames in encoder input queue.
+#define MAX_ENCODER_Q_SIZE 2
+// Maximum allowed latency in ms.
+#define MAX_ENCODER_LATENCY_MS 70
+// Maximum amount of dropped frames caused by full encoder queue - exceeding
+// this threshold means the encoder probably got stuck and needs to be reset.
+#define ENCODER_STALL_FRAMEDROP_THRESHOLD 60
+
+// Logging macros.
+#define TAG_ENCODER "MediaCodecVideoEncoder"
+#ifdef TRACK_BUFFER_TIMING
+#define ALOGV(...) \
+  __android_log_print(ANDROID_LOG_VERBOSE, TAG_ENCODER, __VA_ARGS__)
+#else
+#define ALOGV(...)
+#endif
+#define ALOGD LOG_TAG(rtc::LS_INFO, TAG_ENCODER)
+#define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_ENCODER)
+#define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_ENCODER)
+
+namespace {
+// Maximum time limit between incoming frames before requesting a key frame.
+const size_t kFrameDiffThresholdMs = 1100;
+const int kMinKeyFrameInterval = 2;
+}  // namespace
+
+// MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
+// Android's MediaCodec SDK API behind the scenes to implement (hopefully)
+// HW-backed video encode.  This C++ class is implemented as a very thin shim,
+// delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
+// MediaCodecVideoEncoder is created, operated, and destroyed on a single
+// thread, currently the libjingle Worker thread.
+class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
+                               public rtc::MessageHandler {
+ public:
+  virtual ~MediaCodecVideoEncoder();
+  MediaCodecVideoEncoder(JNIEnv* jni,
+                         VideoCodecType codecType,
+                         jobject egl_context);
+
+  // webrtc::VideoEncoder implementation.  Everything trampolines to
+  // |codec_thread_| for execution.
+  int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
+                     int32_t /* number_of_cores */,
+                     size_t /* max_payload_size */) override;
+  int32_t Encode(const webrtc::VideoFrame& input_image,
+                 const webrtc::CodecSpecificInfo* /* codec_specific_info */,
+                 const std::vector<webrtc::FrameType>* frame_types) override;
+  int32_t RegisterEncodeCompleteCallback(
+      webrtc::EncodedImageCallback* callback) override;
+  int32_t Release() override;
+  int32_t SetChannelParameters(uint32_t /* packet_loss */,
+                               int64_t /* rtt */) override;
+  int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override;
+
+  // rtc::MessageHandler implementation.
+  void OnMessage(rtc::Message* msg) override;
+
+  void OnDroppedFrame() override;
+
+  int GetTargetFramerate() override;
+
+  bool SupportsNativeHandle() const override { return egl_context_ != nullptr; }
+  const char* ImplementationName() const override;
+
+ private:
+  // CHECK-fail if not running on |codec_thread_|.
+  void CheckOnCodecThread();
+
+ private:
+  // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and
+  // InitEncodeOnCodecThread() in an attempt to restore the codec to an
+  // operable state.  Necessary after all manner of OMX-layer errors.
+  bool ResetCodecOnCodecThread();
+
+  // Implementation of webrtc::VideoEncoder methods above, all running on the
+  // codec thread exclusively.
+  //
+  // If width==0 then this is assumed to be a re-initialization and the
+  // previously-current values are reused instead of the passed parameters
+  // (makes it easier to reason about thread-safety).
+  int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps,
+      bool use_surface);
+  // Reconfigure to match |frame| in width, height. Also reconfigures the
+  // encoder if |frame| is a texture/byte buffer and the encoder is initialized
+  // for byte buffer/texture. Returns false if reconfiguring fails.
+  bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame);
+  int32_t EncodeOnCodecThread(
+      const webrtc::VideoFrame& input_image,
+      const std::vector<webrtc::FrameType>* frame_types);
+  bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
+      bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
+  bool EncodeTextureOnCodecThread(JNIEnv* jni,
+      bool key_frame, const webrtc::VideoFrame& frame);
+
+  int32_t RegisterEncodeCompleteCallbackOnCodecThread(
+      webrtc::EncodedImageCallback* callback);
+  int32_t ReleaseOnCodecThread();
+  int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
+
+  // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
+  int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
+  jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
+  bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
+  jlong GetOutputBufferInfoPresentationTimestampUs(
+      JNIEnv* jni, jobject j_output_buffer_info);
+
+  // Deliver any outputs pending in the MediaCodec to our |callback_| and return
+  // true on success.
+  bool DeliverPendingOutputs(JNIEnv* jni);
+
+  // Search for H.264 start codes.
+  int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size);
+
+  // Displays encoder statistics.
+  void LogStatistics(bool force_log);
+
+  // Type of video codec.
+  VideoCodecType codecType_;
+
+  // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
+  // |codec_thread_| synchronously.
+  webrtc::EncodedImageCallback* callback_;
+
+  // State that is constant for the lifetime of this object once the ctor
+  // returns.
+  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
+  rtc::ThreadChecker codec_thread_checker_;
+  ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
+  ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
+  jmethodID j_init_encode_method_;
+  jmethodID j_get_input_buffers_method_;
+  jmethodID j_dequeue_input_buffer_method_;
+  jmethodID j_encode_buffer_method_;
+  jmethodID j_encode_texture_method_;
+  jmethodID j_release_method_;
+  jmethodID j_set_rates_method_;
+  jmethodID j_dequeue_output_buffer_method_;
+  jmethodID j_release_output_buffer_method_;
+  jfieldID j_color_format_field_;
+  jfieldID j_info_index_field_;
+  jfieldID j_info_buffer_field_;
+  jfieldID j_info_is_key_frame_field_;
+  jfieldID j_info_presentation_timestamp_us_field_;
+
+  // State that is valid only between InitEncode() and the next Release().
+  // Touched only on codec_thread_ so no explicit synchronization necessary.
+  int width_;   // Frame width in pixels.
+  int height_;  // Frame height in pixels.
+  bool inited_;
+  bool use_surface_;
+  uint16_t picture_id_;
+  enum libyuv::FourCC encoder_fourcc_;  // Encoder color space format.
+  int last_set_bitrate_kbps_;  // Last-requested bitrate in kbps.
+  int last_set_fps_;  // Last-requested frame rate.
+  int64_t current_timestamp_us_;  // Current frame timestamps in us.
+  int frames_received_;  // Number of frames received by encoder.
+  int frames_encoded_;  // Number of frames encoded by encoder.
+  int frames_dropped_media_encoder_;  // Number of frames dropped by encoder.
+  // Number of dropped frames caused by full queue.
+  int consecutive_full_queue_frame_drops_;
+  int frames_in_queue_;  // Number of frames in encoder queue.
+  int64_t stat_start_time_ms_;  // Start time for statistics.
+  int current_frames_;  // Number of frames in the current statistics interval.
+  int current_bytes_;  // Encoded bytes in the current statistics interval.
+  int current_acc_qp_;  // Accumulated QP in the current statistics interval.
+  int current_encoding_time_ms_;  // Overall encoding time in the current
+                                  // statistics interval.
+  int64_t last_input_timestamp_ms_;  // Timestamp of last received yuv frame.
+  int64_t last_output_timestamp_ms_;  // Timestamp of last encoded frame.
+  std::vector<int32_t> timestamps_;  // Video frames timestamp queue.
+  std::vector<int64_t> render_times_ms_;  // Video frames render time queue.
+  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
+                                             // encoder input.
+  int32_t output_timestamp_;  // Last output frame timestamp from timestamps_ Q.
+  int64_t output_render_time_ms_;  // Last output frame render time from
+                                   // render_times_ms_ queue.
+  // Frame size in bytes fed to MediaCodec.
+  int yuv_size_;
+  // True only between a callback_->Encoded() call returning a positive value
+  // and the next Encode() call being ignored.
+  bool drop_next_input_frame_;
+  // Global references; must be deleted in Release().
+  std::vector<jobject> input_buffers_;
+  webrtc::QualityScaler quality_scaler_;
+  // Dynamic resolution change, off by default.
+  bool scale_;
+
+  // H264 bitstream parser, used to extract QP from encoded bitstreams.
+  webrtc::H264BitstreamParser h264_bitstream_parser_;
+
+  // VP9 variables to populate codec specific structure.
+  webrtc::GofInfoVP9 gof_;  // Contains each frame's temporal information for
+                            // non-flexible VP9 mode.
+  uint8_t tl0_pic_idx_;
+  size_t gof_idx_;
+
+  // EGL context - owned by factory, should not be allocated/destroyed
+  // by MediaCodecVideoEncoder.
+  jobject egl_context_;
+
+  // Temporary fix for VP8.
+  // Sends a key frame if frames are widely spaced apart in time (possibly
+  // corresponding to a large image change).
+  int64_t last_frame_received_ms_;
+  int frames_received_since_last_key_;
+  webrtc::VideoCodecMode codec_mode_;
+};
+
+MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
+  // Call Release() to ensure no more callbacks to us after we are deleted.
+  Release();
+}
+
+MediaCodecVideoEncoder::MediaCodecVideoEncoder(
+    JNIEnv* jni, VideoCodecType codecType, jobject egl_context) :
+    codecType_(codecType),
+    callback_(NULL),
+    codec_thread_(new Thread()),
+    j_media_codec_video_encoder_class_(
+        jni,
+        FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
+    j_media_codec_video_encoder_(
+        jni,
+        jni->NewObject(*j_media_codec_video_encoder_class_,
+                       GetMethodID(jni,
+                                   *j_media_codec_video_encoder_class_,
+                                   "<init>",
+                                   "()V"))),
+    inited_(false),
+    use_surface_(false),
+    picture_id_(0),
+    egl_context_(egl_context) {
+  ScopedLocalRefFrame local_ref_frame(jni);
+  // It would be nice to avoid spinning up a new thread per MediaCodec, and
+  // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
+  // 2732 means that deadlocks abound.  This class synchronously trampolines
+  // to |codec_thread_|, so if anything else can be coming to _us_ from
+  // |codec_thread_|, or from any thread holding the |_sendCritSect| described
+  // in the bug, we have a problem.  For now work around that with a dedicated
+  // thread.
+  codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
+  RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";
+  codec_thread_checker_.DetachFromThread();
+  jclass j_output_buffer_info_class =
+      FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
+  j_init_encode_method_ = GetMethodID(
+      jni,
+      *j_media_codec_video_encoder_class_,
+      "initEncode",
+      "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;"
+      "IIIILorg/webrtc/EglBase14$Context;)Z");
+  j_get_input_buffers_method_ = GetMethodID(
+      jni,
+      *j_media_codec_video_encoder_class_,
+      "getInputBuffers",
+      "()[Ljava/nio/ByteBuffer;");
+  j_dequeue_input_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
+  j_encode_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z");
+  j_encode_texture_method_ = GetMethodID(
+        jni, *j_media_codec_video_encoder_class_, "encodeTexture",
+        "(ZI[FJ)Z");
+  j_release_method_ =
+      GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
+  j_set_rates_method_ = GetMethodID(
+      jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
+  j_dequeue_output_buffer_method_ = GetMethodID(
+      jni,
+      *j_media_codec_video_encoder_class_,
+      "dequeueOutputBuffer",
+      "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
+  j_release_output_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");
+
+  j_color_format_field_ =
+      GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
+  j_info_index_field_ =
+      GetFieldID(jni, j_output_buffer_info_class, "index", "I");
+  j_info_buffer_field_ = GetFieldID(
+      jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
+  j_info_is_key_frame_field_ =
+      GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
+  j_info_presentation_timestamp_us_field_ = GetFieldID(
+      jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
+  CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
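+  // Seed rand(); picture_id_ and tl0_pic_idx_ are initialized from rand() in
+  // InitEncodeOnCodecThread().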
+  srand(time(NULL));
+  AllowBlockingCalls();
+}
+
+int32_t MediaCodecVideoEncoder::InitEncode(
+    const webrtc::VideoCodec* codec_settings,
+    int32_t /* number_of_cores */,
+    size_t /* max_payload_size */) {
+  const int kMinWidth = 320;
+  const int kMinHeight = 180;
+  const int kLowQpThresholdDenominator = 3;
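+  // The quality scaler uses kMaxQp / kLowQpThresholdDenominator as its low-QP
+  // threshold and never scales below kMinWidth x kMinHeight.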
+  if (codec_settings == NULL) {
+    ALOGE << "NULL VideoCodec instance";
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  // Factory should guard against other codecs being used with us.
+  RTC_CHECK(codec_settings->codecType == codecType_)
+      << "Unsupported codec " << codec_settings->codecType << " for "
+      << codecType_;
+
+  ALOGD << "InitEncode request";
+  codec_mode_ = codec_settings->mode;
+  scale_ = (codecType_ != kVideoCodecVP9) && (webrtc::field_trial::FindFullName(
+        "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled");
+  ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
+  if (scale_) {
+    if (codecType_ == kVideoCodecVP8) {
+      // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the
+      // (internal) range: [0, 127]. And we cannot change QP_max in HW, so it is
+      // always = 127. Note that in SW, QP is that of the user-level range [0,
+      // 63].
+      const int kMaxQp = 127;
+      const int kBadQpThreshold = 95;
+      quality_scaler_.Init(
+          kMaxQp / kLowQpThresholdDenominator, kBadQpThreshold, false);
+    } else if (codecType_ == kVideoCodecH264) {
+      // H264 QP is in the range [0, 51].
+      const int kMaxQp = 51;
+      const int kBadQpThreshold = 40;
+      quality_scaler_.Init(
+          kMaxQp / kLowQpThresholdDenominator, kBadQpThreshold, false);
+    } else {
+      // When adding codec support to additional hardware codecs, also configure
+      // their QP thresholds for scaling.
+      RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
+    }
+    quality_scaler_.SetMinResolution(kMinWidth, kMinHeight);
+    quality_scaler_.ReportFramerate(codec_settings->maxFramerate);
+  }
+  return codec_thread_->Invoke<int32_t>(
+      Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
+           this,
+           codec_settings->width,
+           codec_settings->height,
+           codec_settings->startBitrate,
+           codec_settings->maxFramerate,
+           false /* use_surface */));
+}
+
+int32_t MediaCodecVideoEncoder::Encode(
+    const webrtc::VideoFrame& frame,
+    const webrtc::CodecSpecificInfo* /* codec_specific_info */,
+    const std::vector<webrtc::FrameType>* frame_types) {
+  return codec_thread_->Invoke<int32_t>(Bind(
+      &MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame, frame_types));
+}
+
+int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
+    webrtc::EncodedImageCallback* callback) {
+  return codec_thread_->Invoke<int32_t>(
+      Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
+           this,
+           callback));
+}
+
+int32_t MediaCodecVideoEncoder::Release() {
+  ALOGD << "EncoderRelease request";
+  return codec_thread_->Invoke<int32_t>(
+      Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
+}
+
+int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
+                                                     int64_t /* rtt */) {
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
+                                         uint32_t frame_rate) {
+  return codec_thread_->Invoke<int32_t>(
+      Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread,
+           this,
+           new_bit_rate,
+           frame_rate));
+}
+
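+// Periodic poll on |codec_thread_|: delivers any pending encoder outputs and
+// re-arms itself via PostDelayed().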
+void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+
+  // We only ever send one message to |this| directly (not through a Bind()'d
+  // functor), so expect no ID/data.
+  RTC_CHECK(!msg->message_id) << "Unexpected message!";
+  RTC_CHECK(!msg->pdata) << "Unexpected message!";
+  if (!inited_) {
+    return;
+  }
+
+  // It would be nice to recover from a failure here if one happened, but it's
+  // unclear how to signal such a failure to the app, so instead we stay silent
+  // about it and let the next app-called API method reveal the borkedness.
+  DeliverPendingOutputs(jni);
+  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+}
+
+bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  ALOGE << "ResetOnCodecThread";
+  if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK ||
+      InitEncodeOnCodecThread(width_, height_, 0, 0, false) !=
+          WEBRTC_VIDEO_CODEC_OK) {
+    // TODO(fischman): wouldn't it be nice if there was a way to gracefully
+    // degrade to a SW encoder at this point?  There isn't one AFAICT :(
+    // https://code.google.com/p/webrtc/issues/detail?id=2920
+    return false;
+  }
+  return true;
+}
+
+int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
+    int width, int height, int kbps, int fps, bool use_surface) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set.";
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+
+  ALOGD << "InitEncodeOnCodecThread Type: " <<  (int)codecType_ << ", " <<
+      width << " x " << height << ". Bitrate: " << kbps <<
+      " kbps. Fps: " << fps;
+  if (kbps == 0) {
+    kbps = last_set_bitrate_kbps_;
+  }
+  if (fps == 0) {
+    fps = MAX_VIDEO_FPS;
+  }
+
+  width_ = width;
+  height_ = height;
+  last_set_bitrate_kbps_ = kbps;
+  last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS;
+  yuv_size_ = width_ * height_ * 3 / 2;
+  frames_received_ = 0;
+  frames_encoded_ = 0;
+  frames_dropped_media_encoder_ = 0;
+  consecutive_full_queue_frame_drops_ = 0;
+  frames_in_queue_ = 0;
+  current_timestamp_us_ = 0;
+  stat_start_time_ms_ = GetCurrentTimeMs();
+  current_frames_ = 0;
+  current_bytes_ = 0;
+  current_acc_qp_ = 0;
+  current_encoding_time_ms_ = 0;
+  last_input_timestamp_ms_ = -1;
+  last_output_timestamp_ms_ = -1;
+  output_timestamp_ = 0;
+  output_render_time_ms_ = 0;
+  timestamps_.clear();
+  render_times_ms_.clear();
+  frame_rtc_times_ms_.clear();
+  drop_next_input_frame_ = false;
+  use_surface_ = use_surface;
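+  // The RTP picture ID for VP8/VP9 is at most 15 bits, hence the 0x7FFF mask.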
+  picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
+  gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1);
+  tl0_pic_idx_ = static_cast<uint8_t>(rand());
+  gof_idx_ = 0;
+  last_frame_received_ms_ = -1;
+  frames_received_since_last_key_ = kMinKeyFrameInterval;
+
+  // We enforce no extra stride/padding in the format creation step.
+  jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
+      jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
+  const bool encode_status = jni->CallBooleanMethod(
+      *j_media_codec_video_encoder_, j_init_encode_method_,
+      j_video_codec_enum, width, height, kbps, fps,
+      (use_surface ? egl_context_ : nullptr));
+  if (!encode_status) {
+    ALOGE << "Failed to configure encoder.";
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  CHECK_EXCEPTION(jni);
+
+  if (!use_surface) {
+    jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
+        jni->CallObjectMethod(*j_media_codec_video_encoder_,
+            j_get_input_buffers_method_));
+    CHECK_EXCEPTION(jni);
+    if (IsNull(jni, input_buffers)) {
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+
+    switch (GetIntField(jni, *j_media_codec_video_encoder_,
+        j_color_format_field_)) {
+      case COLOR_FormatYUV420Planar:
+        encoder_fourcc_ = libyuv::FOURCC_YU12;
+        break;
+      case COLOR_FormatYUV420SemiPlanar:
+      case COLOR_QCOM_FormatYUV420SemiPlanar:
+      case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
+        encoder_fourcc_ = libyuv::FOURCC_NV12;
+        break;
+      default:
+        LOG(LS_ERROR) << "Wrong color format.";
+        return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    size_t num_input_buffers = jni->GetArrayLength(input_buffers);
+    RTC_CHECK(input_buffers_.empty())
+        << "Unexpected double InitEncode without Release";
+    input_buffers_.resize(num_input_buffers);
+    for (size_t i = 0; i < num_input_buffers; ++i) {
+      input_buffers_[i] =
+          jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
+      int64_t yuv_buffer_capacity =
+          jni->GetDirectBufferCapacity(input_buffers_[i]);
+      CHECK_EXCEPTION(jni);
+      RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
+    }
+  }
+
+  inited_ = true;
+  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
+    const webrtc::VideoFrame& frame,
+    const std::vector<webrtc::FrameType>* frame_types) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+
+  bool send_key_frame = false;
+  if (codec_mode_ == webrtc::kRealtimeVideo) {
+    ++frames_received_since_last_key_;
+    int64_t now_ms = GetCurrentTimeMs();
+    if (last_frame_received_ms_ != -1 &&
+        (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) {
+      // Limit this so that a key frame is not triggered for every frame at
+      // very low framerates (e.g. if frame diff > kFrameDiffThresholdMs).
+      if (frames_received_since_last_key_ > kMinKeyFrameInterval) {
+        ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_);
+        send_key_frame = true;
+      }
+      frames_received_since_last_key_ = 0;
+    }
+    last_frame_received_ms_ = now_ms;
+  }
+
+  frames_received_++;
+  if (!DeliverPendingOutputs(jni)) {
+    if (!ResetCodecOnCodecThread())
+      return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  if (frames_encoded_ < kMaxEncodedLogFrames) {
+    ALOGD << "Encoder frame in # " << (frames_received_ - 1) <<
+        ". TS: " << (int)(current_timestamp_us_ / 1000) <<
+        ". Q: " << frames_in_queue_ <<
+        ". Fps: " << last_set_fps_ <<
+        ". Kbps: " << last_set_bitrate_kbps_;
+  }
+
+  if (drop_next_input_frame_) {
+    ALOGW << "Encoder drop frame - failed callback.";
+    drop_next_input_frame_ = false;
+    current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+    frames_dropped_media_encoder_++;
+    OnDroppedFrame();
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+
+  RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";
+
+  // Check if too many frames have accumulated in the encoder input buffers,
+  // or if the encoder latency exceeds 70 ms, and drop the frame if so.
+  if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
+    int encoder_latency_ms = last_input_timestamp_ms_ -
+        last_output_timestamp_ms_;
+    if (frames_in_queue_ > MAX_ENCODER_Q_SIZE ||
+        encoder_latency_ms > MAX_ENCODER_LATENCY_MS) {
+      ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms <<
+          " ms. Q size: " << frames_in_queue_ << ". TS: " <<
+          (int)(current_timestamp_us_ / 1000) <<  ". Fps: " << last_set_fps_ <<
+          ". Consecutive drops: " << consecutive_full_queue_frame_drops_ ;
+      current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+      consecutive_full_queue_frame_drops_++;
+      if (consecutive_full_queue_frame_drops_ >=
+          ENCODER_STALL_FRAMEDROP_THRESHOLD) {
+        ALOGE << "Encoder got stuck. Reset.";
+        ResetCodecOnCodecThread();
+        return WEBRTC_VIDEO_CODEC_ERROR;
+      }
+      frames_dropped_media_encoder_++;
+      OnDroppedFrame();
+      return WEBRTC_VIDEO_CODEC_OK;
+    }
+  }
+  consecutive_full_queue_frame_drops_ = 0;
+
+  VideoFrame input_frame = frame;
+  if (scale_) {
+    // Check framerate before spatial resolution change.
+    quality_scaler_.OnEncodeFrame(frame);
+    const webrtc::QualityScaler::Resolution scaled_resolution =
+        quality_scaler_.GetScaledResolution();
+    if (scaled_resolution.width != frame.width() ||
+        scaled_resolution.height != frame.height()) {
+      if (frame.native_handle() != nullptr) {
+        rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
+            static_cast<AndroidTextureBuffer*>(
+                frame.video_frame_buffer().get())->ScaleAndRotate(
+                    scaled_resolution.width,
+                    scaled_resolution.height,
+                    webrtc::kVideoRotation_0));
+        input_frame.set_video_frame_buffer(scaled_buffer);
+      } else {
+        input_frame = quality_scaler_.GetScaledFrame(frame);
+      }
+    }
+  }
+
+  if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
+    ALOGE << "Failed to reconfigure encoder.";
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  // Save time when input frame is sent to the encoder input.
+  frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
+
+  const bool key_frame =
+      frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
+  bool encode_status = true;
+  if (!input_frame.native_handle()) {
+    int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
+        j_dequeue_input_buffer_method_);
+    CHECK_EXCEPTION(jni);
+    if (j_input_buffer_index == -1) {
+      // Video codec falls behind - no input buffer available.
+      ALOGW << "Encoder drop frame - no input buffers available";
+      frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
+      current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+      frames_dropped_media_encoder_++;
+      OnDroppedFrame();
+      return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
+    }
+    if (j_input_buffer_index == -2) {
+      ResetCodecOnCodecThread();
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame,
+        j_input_buffer_index);
+  } else {
+    encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame);
+  }
+
+  if (!encode_status) {
+    ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp();
+    ResetCodecOnCodecThread();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  last_input_timestamp_ms_ =
+      current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec;
+  frames_in_queue_++;
+
+  // Save input image timestamps for later output
+  timestamps_.push_back(input_frame.timestamp());
+  render_times_ms_.push_back(input_frame.render_time_ms());
+  current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+
+  if (!DeliverPendingOutputs(jni)) {
+    ALOGE << "Failed deliver pending outputs.";
+    ResetCodecOnCodecThread();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
+    const webrtc::VideoFrame& frame) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+
+  const bool is_texture_frame = frame.native_handle() != nullptr;
+  const bool reconfigure_due_to_format = is_texture_frame != use_surface_;
+  const bool reconfigure_due_to_size =
+      frame.width() != width_ || frame.height() != height_;
+
+  if (reconfigure_due_to_format) {
+      ALOGD << "Reconfigure encoder due to format change. "
+            << (use_surface_ ?
+                "Reconfiguring to encode from byte buffer." :
+                "Reconfiguring to encode from texture.");
+      LogStatistics(true);
+  }
+  if (reconfigure_due_to_size) {
+    ALOGW << "Reconfigure encoder due to frame resolution change from "
+        << width_ << " x " << height_ << " to " << frame.width() << " x "
+        << frame.height();
+    LogStatistics(true);
+    width_ = frame.width();
+    height_ = frame.height();
+  }
+
+  if (!reconfigure_due_to_format && !reconfigure_due_to_size)
+    return true;
+
+  ReleaseOnCodecThread();
+
+  return InitEncodeOnCodecThread(width_, height_, 0, 0, is_texture_frame) ==
+      WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
+    bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  RTC_CHECK(!use_surface_);
+
+  jobject j_input_buffer = input_buffers_[input_buffer_index];
+  uint8_t* yuv_buffer =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
+  CHECK_EXCEPTION(jni);
+  RTC_CHECK(yuv_buffer) << "Indirect buffer??";
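+  // The destination stride equals width_, since no extra stride/padding is
+  // requested when the format is created in InitEncodeOnCodecThread().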
+  RTC_CHECK(!libyuv::ConvertFromI420(
+      frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
+      frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
+      frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
+      yuv_buffer, width_, width_, height_, encoder_fourcc_))
+      << "ConvertFromI420 failed";
+
+  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+                                              j_encode_buffer_method_,
+                                              key_frame,
+                                              input_buffer_index,
+                                              yuv_size_,
+                                              current_timestamp_us_);
+  CHECK_EXCEPTION(jni);
+  return encode_status;
+}
+
+bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni,
+    bool key_frame, const webrtc::VideoFrame& frame) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  RTC_CHECK(use_surface_);
+  NativeHandleImpl* handle =
+      static_cast<NativeHandleImpl*>(frame.native_handle());
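+  // Copy the 16-element texture sampling matrix into a Java float array; it
+  // is passed to the encoder together with the OES texture id below.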
+  jfloatArray sampling_matrix = jni->NewFloatArray(16);
+  jni->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
+
+  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+                                              j_encode_texture_method_,
+                                              key_frame,
+                                              handle->oes_texture_id,
+                                              sampling_matrix,
+                                              current_timestamp_us_);
+  CHECK_EXCEPTION(jni);
+  return encode_status;
+}
+
+int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
+    webrtc::EncodedImageCallback* callback) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  callback_ = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ALOGD << "EncoderReleaseOnCodecThread: Frames received: " <<
+      frames_received_ << ". Encoded: " << frames_encoded_ <<
+      ". Dropped: " << frames_dropped_media_encoder_;
+  ScopedLocalRefFrame local_ref_frame(jni);
+  for (size_t i = 0; i < input_buffers_.size(); ++i)
+    jni->DeleteGlobalRef(input_buffers_[i]);
+  input_buffers_.clear();
+  jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
+  CHECK_EXCEPTION(jni);
+  rtc::MessageQueueManager::Clear(this);
+  inited_ = false;
+  use_surface_ = false;
+  ALOGD << "EncoderReleaseOnCodecThread done.";
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
+                                                      uint32_t frame_rate) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ?
+      frame_rate : MAX_ALLOWED_VIDEO_FPS;
+  if (last_set_bitrate_kbps_ == new_bit_rate &&
+      last_set_fps_ == frame_rate) {
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+  if (scale_) {
+    quality_scaler_.ReportFramerate(frame_rate);
+  }
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  if (new_bit_rate > 0) {
+    last_set_bitrate_kbps_ = new_bit_rate;
+  }
+  if (frame_rate > 0) {
+    last_set_fps_ = frame_rate;
+  }
+  bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+                                       j_set_rates_method_,
+                                       last_set_bitrate_kbps_,
+                                       last_set_fps_);
+  CHECK_EXCEPTION(jni);
+  if (!ret) {
+    ResetCodecOnCodecThread();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
+    JNIEnv* jni,
+    jobject j_output_buffer_info) {
+  return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
+}
+
+jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
+    JNIEnv* jni,
+    jobject j_output_buffer_info) {
+  return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
+}
+
+bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
+    JNIEnv* jni,
+    jobject j_output_buffer_info) {
+  return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
+}
+
+jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
+    JNIEnv* jni,
+    jobject j_output_buffer_info) {
+  return GetLongField(
+      jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
+}
+
+bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  while (true) {
+    jobject j_output_buffer_info = jni->CallObjectMethod(
+        *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
+    CHECK_EXCEPTION(jni);
+    if (IsNull(jni, j_output_buffer_info)) {
+      break;
+    }
+
+    int output_buffer_index =
+        GetOutputBufferInfoIndex(jni, j_output_buffer_info);
+    if (output_buffer_index == -1) {
+      ResetCodecOnCodecThread();
+      return false;
+    }
+
+    // Get key and config frame flags.
+    jobject j_output_buffer =
+        GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
+    bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
+
+    // Get frame timestamps from a queue - for non config frames only.
+    int64_t frame_encoding_time_ms = 0;
+    last_output_timestamp_ms_ =
+        GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
+        1000;
+    if (frames_in_queue_ > 0) {
+      output_timestamp_ = timestamps_.front();
+      timestamps_.erase(timestamps_.begin());
+      output_render_time_ms_ = render_times_ms_.front();
+      render_times_ms_.erase(render_times_ms_.begin());
+      frame_encoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
+      frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
+      frames_in_queue_--;
+    }
+
+    // Extract payload.
+    size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
+    uint8_t* payload = reinterpret_cast<uint8_t*>(
+        jni->GetDirectBufferAddress(j_output_buffer));
+    CHECK_EXCEPTION(jni);
+
+    if (frames_encoded_ < kMaxEncodedLogFrames) {
+      int current_latency =
+          (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_);
+      ALOGD << "Encoder frame out # " << frames_encoded_ <<
+          ". Key: " << key_frame <<
+          ". Size: " << payload_size <<
+          ". TS: " << (int)last_output_timestamp_ms_ <<
+          ". Latency: " << current_latency <<
+          ". EncTime: " << frame_encoding_time_ms;
+    }
+
+    // Callback - return encoded frame.
+    int32_t callback_status = 0;
+    if (callback_) {
+      scoped_ptr<webrtc::EncodedImage> image(
+          new webrtc::EncodedImage(payload, payload_size, payload_size));
+      image->_encodedWidth = width_;
+      image->_encodedHeight = height_;
+      image->_timeStamp = output_timestamp_;
+      image->capture_time_ms_ = output_render_time_ms_;
+      image->_frameType =
+          (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
+      image->_completeFrame = true;
+      image->adapt_reason_.quality_resolution_downscales =
+          scale_ ? quality_scaler_.downscale_shift() : -1;
+
+      webrtc::CodecSpecificInfo info;
+      memset(&info, 0, sizeof(info));
+      info.codecType = codecType_;
+      if (codecType_ == kVideoCodecVP8) {
+        info.codecSpecific.VP8.pictureId = picture_id_;
+        info.codecSpecific.VP8.nonReference = false;
+        info.codecSpecific.VP8.simulcastIdx = 0;
+        info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
+        info.codecSpecific.VP8.layerSync = false;
+        info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
+        info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
+      } else if (codecType_ == kVideoCodecVP9) {
+        if (key_frame) {
+          gof_idx_ = 0;
+        }
+        info.codecSpecific.VP9.picture_id = picture_id_;
+        info.codecSpecific.VP9.inter_pic_predicted = !key_frame;
+        info.codecSpecific.VP9.flexible_mode = false;
+        info.codecSpecific.VP9.ss_data_available = key_frame;
+        info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++;
+        info.codecSpecific.VP9.temporal_idx = webrtc::kNoTemporalIdx;
+        info.codecSpecific.VP9.spatial_idx = webrtc::kNoSpatialIdx;
+        info.codecSpecific.VP9.temporal_up_switch = true;
+        info.codecSpecific.VP9.inter_layer_predicted = false;
+        info.codecSpecific.VP9.gof_idx =
+            static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
+        info.codecSpecific.VP9.num_spatial_layers = 1;
+        info.codecSpecific.VP9.spatial_layer_resolution_present = false;
+        if (info.codecSpecific.VP9.ss_data_available) {
+          info.codecSpecific.VP9.spatial_layer_resolution_present = true;
+          info.codecSpecific.VP9.width[0] = width_;
+          info.codecSpecific.VP9.height[0] = height_;
+          info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
+        }
+      }
+      picture_id_ = (picture_id_ + 1) & 0x7FFF;
+
+      // Generate a header describing a single fragment.
+      webrtc::RTPFragmentationHeader header;
+      memset(&header, 0, sizeof(header));
+      if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) {
+        header.VerifyAndAllocateFragmentationHeader(1);
+        header.fragmentationOffset[0] = 0;
+        header.fragmentationLength[0] = image->_length;
+        header.fragmentationPlType[0] = 0;
+        header.fragmentationTimeDiff[0] = 0;
+        if (codecType_ == kVideoCodecVP8 && scale_) {
+          int qp;
+          if (webrtc::vp8::GetQp(payload, payload_size, &qp)) {
+            current_acc_qp_ += qp;
+            quality_scaler_.ReportQP(qp);
+          }
+        }
+      } else if (codecType_ == kVideoCodecH264) {
+        if (scale_) {
+          h264_bitstream_parser_.ParseBitstream(payload, payload_size);
+          int qp;
+          if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
+            current_acc_qp_ += qp;
+            quality_scaler_.ReportQP(qp);
+          }
+        }
+        // For H.264 search for start codes.
+        int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
+        int32_t scPositionsLength = 0;
+        int32_t scPosition = 0;
+        while (scPositionsLength < MAX_NALUS_PERFRAME) {
+          int32_t naluPosition = NextNaluPosition(
+              payload + scPosition, payload_size - scPosition);
+          if (naluPosition < 0) {
+            break;
+          }
+          scPosition += naluPosition;
+          scPositions[scPositionsLength++] = scPosition;
+          scPosition += H264_SC_LENGTH;
+        }
+        if (scPositionsLength == 0) {
+          ALOGE << "Start code is not found!";
+          ALOGE << "Data:" <<  image->_buffer[0] << " " << image->_buffer[1]
+              << " " << image->_buffer[2] << " " << image->_buffer[3]
+              << " " << image->_buffer[4] << " " << image->_buffer[5];
+          ResetCodecOnCodecThread();
+          return false;
+        }
+        scPositions[scPositionsLength] = payload_size;
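+        // Each fragment is the NAL unit between consecutive start codes,
+        // excluding the start code bytes themselves.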
+        header.VerifyAndAllocateFragmentationHeader(scPositionsLength);
+        for (size_t i = 0; i < scPositionsLength; i++) {
+          header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH;
+          header.fragmentationLength[i] =
+              scPositions[i + 1] - header.fragmentationOffset[i];
+          header.fragmentationPlType[i] = 0;
+          header.fragmentationTimeDiff[i] = 0;
+        }
+      }
+
+      callback_status = callback_->Encoded(*image, &info, &header);
+    }
+
+    // Return output buffer back to the encoder.
+    bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+                                          j_release_output_buffer_method_,
+                                          output_buffer_index);
+    CHECK_EXCEPTION(jni);
+    if (!success) {
+      ResetCodecOnCodecThread();
+      return false;
+    }
+
+    // Calculate and print encoding statistics - every 3 seconds.
+    frames_encoded_++;
+    current_frames_++;
+    current_bytes_ += payload_size;
+    current_encoding_time_ms_ += frame_encoding_time_ms;
+    LogStatistics(false);
+
+    if (callback_status > 0) {
+      drop_next_input_frame_ = true;
+      // Theoretically could handle callback_status<0 here, but unclear what
+      // that would mean for us.
+    }
+  }
+  return true;
+}
+
+void MediaCodecVideoEncoder::LogStatistics(bool force_log) {
+  int statistic_time_ms = GetCurrentTimeMs() - stat_start_time_ms_;
+  if ((statistic_time_ms >= kMediaCodecStatisticsIntervalMs || force_log) &&
+      current_frames_ > 0 && statistic_time_ms > 0) {
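+    // Bytes * 8 / milliseconds yields kbps; fps is rounded to the nearest
+    // integer by adding half the interval before dividing.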
+    int current_bitrate = current_bytes_ * 8 / statistic_time_ms;
+    int current_fps =
+        (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
+    ALOGD << "Encoded frames: " << frames_encoded_ <<
+        ". Bitrate: " << current_bitrate <<
+        ", target: " << last_set_bitrate_kbps_ << " kbps" <<
+        ", fps: " << current_fps <<
+        ", encTime: " << (current_encoding_time_ms_ / current_frames_) <<
+        ". QP: " << (current_acc_qp_ / current_frames_) <<
+        " for last " << statistic_time_ms << " ms.";
+    stat_start_time_ms_ = GetCurrentTimeMs();
+    current_frames_ = 0;
+    current_bytes_ = 0;
+    current_acc_qp_ = 0;
+    current_encoding_time_ms_ = 0;
+  }
+}
+
+int32_t MediaCodecVideoEncoder::NextNaluPosition(
+    uint8_t *buffer, size_t buffer_size) {
+  if (buffer_size < H264_SC_LENGTH) {
+    return -1;
+  }
+  uint8_t *head = buffer;
+  // Set end buffer pointer to 4 bytes before actual buffer end so we can
+  // access head[1], head[2] and head[3] in a loop without buffer overrun.
+  uint8_t *end = buffer + buffer_size - H264_SC_LENGTH;
+
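+  // Only 4-byte start codes (00 00 00 01) are matched; 3-byte start codes are
+  // not reported by this scan.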
+  while (head < end) {
+    if (head[0]) {
+      head++;
+      continue;
+    }
+    if (head[1]) { // got 00xx
+      head += 2;
+      continue;
+    }
+    if (head[2]) { // got 0000xx
+      head += 3;
+      continue;
+    }
+    if (head[3] != 0x01) { // got 000000xx
+      head++; // xx != 1, continue searching.
+      continue;
+    }
+    return (int32_t)(head - buffer);
+  }
+  return -1;
+}
+
+void MediaCodecVideoEncoder::OnDroppedFrame() {
+  // Report dropped frame to quality_scaler_.
+  if (scale_)
+    quality_scaler_.ReportDroppedFrame();
+}
+
+int MediaCodecVideoEncoder::GetTargetFramerate() {
+  return scale_ ? quality_scaler_.GetTargetFramerate() : -1;
+}
+
+const char* MediaCodecVideoEncoder::ImplementationName() const {
+  return "MediaCodec";
+}
+
+MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
+  supported_codecs_.clear();
+
+  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
+      j_encoder_class,
+      GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z"));
+  CHECK_EXCEPTION(jni);
+  if (is_vp8_hw_supported) {
+    ALOGD << "VP8 HW Encoder supported.";
+    supported_codecs_.push_back(VideoCodec(kVideoCodecVP8, "VP8",
+        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+  }
+
+  bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
+      j_encoder_class,
+      GetStaticMethodID(jni, j_encoder_class, "isVp9HwSupported", "()Z"));
+  CHECK_EXCEPTION(jni);
+  if (is_vp9_hw_supported) {
+    ALOGD << "VP9 HW Encoder supported.";
+    supported_codecs_.push_back(VideoCodec(kVideoCodecVP9, "VP9",
+        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+  }
+
+  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
+      j_encoder_class,
+      GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
+  CHECK_EXCEPTION(jni);
+  if (is_h264_hw_supported) {
+    ALOGD << "H.264 HW Encoder supported.";
+    supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264",
+        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+  }
+}
+
+MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {
+  ALOGD << "MediaCodecVideoEncoderFactory dtor";
+}
+
+void MediaCodecVideoEncoderFactory::SetEGLContext(
+    JNIEnv* jni, jobject render_egl_context) {
+  ALOGD << "MediaCodecVideoEncoderFactory::SetEGLContext";
+  if (!egl_base_.CreateEglBase(jni, render_egl_context)) {
+    ALOGW << "Invalid EGL context - HW surface encoding is disabled.";
+  }
+}
+
+webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
+    VideoCodecType type) {
+  if (supported_codecs_.empty()) {
+    ALOGW << "No HW video encoder for type " << (int)type;
+    return nullptr;
+  }
+  for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin();
+         it != supported_codecs_.end(); ++it) {
+    if (it->type == type) {
+      ALOGD << "Create HW video encoder for type " << (int)type <<
+          " (" << it->name << ").";
+      return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type,
+          egl_base_.egl_base_context());
+    }
+  }
+  ALOGW << "Can not find HW video encoder for type " << (int)type;
+  return nullptr;
+}
+
+const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
+MediaCodecVideoEncoderFactory::codecs() const {
+  return supported_codecs_;
+}
+
+void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
+    webrtc::VideoEncoder* encoder) {
+  ALOGD << "Destroy video encoder.";
+  delete encoder;
+}
+
+}  // namespace webrtc_jni
+
diff --git a/webrtc/api/java/jni/androidmediaencoder_jni.h b/webrtc/api/java/jni/androidmediaencoder_jni.h
new file mode 100644
index 0000000..e96a489
--- /dev/null
+++ b/webrtc/api/java/jni/androidmediaencoder_jni.h
@@ -0,0 +1,63 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
+
+#include <vector>
+
+#include "webrtc/api/java/jni/eglbase_jni.h"
+#include "webrtc/media/webrtc/webrtcvideoencoderfactory.h"
+
+namespace webrtc_jni {
+
+// Implementation of Android MediaCodec based encoder factory.
+class MediaCodecVideoEncoderFactory
+    : public cricket::WebRtcVideoEncoderFactory {
+ public:
+  MediaCodecVideoEncoderFactory();
+  virtual ~MediaCodecVideoEncoderFactory();
+
+  void SetEGLContext(JNIEnv* jni, jobject render_egl_context);
+
+  // WebRtcVideoEncoderFactory implementation.
+  webrtc::VideoEncoder* CreateVideoEncoder(webrtc::VideoCodecType type)
+      override;
+  const std::vector<VideoCodec>& codecs() const override;
+  void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) override;
+
+ private:
+  EglBase egl_base_;
+
+  // Empty if platform support is lacking, const after ctor returns.
+  std::vector<VideoCodec> supported_codecs_;
+};
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
diff --git a/webrtc/api/java/jni/androidnetworkmonitor_jni.cc b/webrtc/api/java/jni/androidnetworkmonitor_jni.cc
new file mode 100644
index 0000000..a38fa11
--- /dev/null
+++ b/webrtc/api/java/jni/androidnetworkmonitor_jni.cc
@@ -0,0 +1,384 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/java/jni/androidnetworkmonitor_jni.h"
+
+#include <dlfcn.h>
+
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/ipaddress.h"
+
+namespace webrtc_jni {
+
+jobject AndroidNetworkMonitor::application_context_ = nullptr;
+
+static NetworkType GetNetworkTypeFromJava(JNIEnv* jni, jobject j_network_type) {
+  std::string enum_name =
+      GetJavaEnumName(jni, "org/webrtc/NetworkMonitorAutoDetect$ConnectionType",
+                      j_network_type);
+  if (enum_name == "CONNECTION_UNKNOWN") {
+    return NetworkType::NETWORK_UNKNOWN;
+  }
+  if (enum_name == "CONNECTION_ETHERNET") {
+    return NetworkType::NETWORK_ETHERNET;
+  }
+  if (enum_name == "CONNECTION_WIFI") {
+    return NetworkType::NETWORK_WIFI;
+  }
+  if (enum_name == "CONNECTION_4G") {
+    return NetworkType::NETWORK_4G;
+  }
+  if (enum_name == "CONNECTION_3G") {
+    return NetworkType::NETWORK_3G;
+  }
+  if (enum_name == "CONNECTION_2G") {
+    return NetworkType::NETWORK_2G;
+  }
+  if (enum_name == "CONNECTION_BLUETOOTH") {
+    return NetworkType::NETWORK_BLUETOOTH;
+  }
+  if (enum_name == "CONNECTION_NONE") {
+    return NetworkType::NETWORK_NONE;
+  }
+  ASSERT(false);
+  return NetworkType::NETWORK_UNKNOWN;
+}
+
+static rtc::AdapterType AdapterTypeFromNetworkType(NetworkType network_type) {
+  switch (network_type) {
+    case NETWORK_UNKNOWN:
+      RTC_DCHECK(false) << "Unknown network type";
+      return rtc::ADAPTER_TYPE_UNKNOWN;
+    case NETWORK_ETHERNET:
+      return rtc::ADAPTER_TYPE_ETHERNET;
+    case NETWORK_WIFI:
+      return rtc::ADAPTER_TYPE_WIFI;
+    case NETWORK_4G:
+    case NETWORK_3G:
+    case NETWORK_2G:
+      return rtc::ADAPTER_TYPE_CELLULAR;
+    case NETWORK_BLUETOOTH:
+      // There is no corresponding mapping for bluetooth networks.
+      // Map it to VPN for now.
+      return rtc::ADAPTER_TYPE_VPN;
+    default:
+      RTC_DCHECK(false) << "Invalid network type " << network_type;
+      return rtc::ADAPTER_TYPE_UNKNOWN;
+  }
+}
+
+static rtc::IPAddress GetIPAddressFromJava(JNIEnv* jni, jobject j_ip_address) {
+  jclass j_ip_address_class = GetObjectClass(jni, j_ip_address);
+  jfieldID j_address_id = GetFieldID(jni, j_ip_address_class, "address", "[B");
+  jbyteArray j_addresses =
+      static_cast<jbyteArray>(GetObjectField(jni, j_ip_address, j_address_id));
+  size_t address_length = jni->GetArrayLength(j_addresses);
+  jbyte* addr_array = jni->GetByteArrayElements(j_addresses, nullptr);
+  CHECK_EXCEPTION(jni) << "Error during GetIPAddressFromJava";
+  if (address_length == 4) {
+    // IP4
+    struct in_addr ip4_addr;
+    memcpy(&ip4_addr.s_addr, addr_array, 4);
+    jni->ReleaseByteArrayElements(j_addresses, addr_array, JNI_ABORT);
+    return rtc::IPAddress(ip4_addr);
+  }
+  // IP6
+  RTC_CHECK(address_length == 16);
+  struct in6_addr ip6_addr;
+  memcpy(ip6_addr.s6_addr, addr_array, address_length);
+  jni->ReleaseByteArrayElements(j_addresses, addr_array, JNI_ABORT);
+  return rtc::IPAddress(ip6_addr);
+}
+
+static void GetIPAddressesFromJava(JNIEnv* jni,
+                                   jobjectArray j_ip_addresses,
+                                   std::vector<rtc::IPAddress>* ip_addresses) {
+  ip_addresses->clear();
+  size_t num_addresses = jni->GetArrayLength(j_ip_addresses);
+  CHECK_EXCEPTION(jni) << "Error during GetArrayLength";
+  for (size_t i = 0; i < num_addresses; ++i) {
+    jobject j_ip_address = jni->GetObjectArrayElement(j_ip_addresses, i);
+    CHECK_EXCEPTION(jni) << "Error during GetObjectArrayElement";
+    rtc::IPAddress ip = GetIPAddressFromJava(jni, j_ip_address);
+    ip_addresses->push_back(ip);
+  }
+}
+
+static NetworkInformation GetNetworkInformationFromJava(
+    JNIEnv* jni,
+    jobject j_network_info) {
+  jclass j_network_info_class = GetObjectClass(jni, j_network_info);
+  jfieldID j_interface_name_id =
+      GetFieldID(jni, j_network_info_class, "name", "Ljava/lang/String;");
+  jfieldID j_handle_id = GetFieldID(jni, j_network_info_class, "handle", "I");
+  jfieldID j_type_id =
+      GetFieldID(jni, j_network_info_class, "type",
+                 "Lorg/webrtc/NetworkMonitorAutoDetect$ConnectionType;");
+  jfieldID j_ip_addresses_id =
+      GetFieldID(jni, j_network_info_class, "ipAddresses",
+                 "[Lorg/webrtc/NetworkMonitorAutoDetect$IPAddress;");
+
+  NetworkInformation network_info;
+  network_info.interface_name = JavaToStdString(
+      jni, GetStringField(jni, j_network_info, j_interface_name_id));
+  network_info.handle =
+      static_cast<NetworkHandle>(GetIntField(jni, j_network_info, j_handle_id));
+  network_info.type = GetNetworkTypeFromJava(
+      jni, GetObjectField(jni, j_network_info, j_type_id));
+  jobjectArray j_ip_addresses = static_cast<jobjectArray>(
+      GetObjectField(jni, j_network_info, j_ip_addresses_id));
+  GetIPAddressesFromJava(jni, j_ip_addresses, &network_info.ip_addresses);
+  return network_info;
+}
+
+std::string NetworkInformation::ToString() const {
+  std::stringstream ss;
+  ss << "NetInfo[name " << interface_name << "; handle " << handle << "; type "
+     << type << "; address";
+  for (const rtc::IPAddress& address : ip_addresses) {
+    ss << " " << address.ToString();
+  }
+  ss << "]";
+  return ss.str();
+}
+
+// static
+void AndroidNetworkMonitor::SetAndroidContext(JNIEnv* jni, jobject context) {
+  if (application_context_) {
+    jni->DeleteGlobalRef(application_context_);
+  }
+  application_context_ = NewGlobalRef(jni, context);
+}
+
+AndroidNetworkMonitor::AndroidNetworkMonitor()
+    : j_network_monitor_class_(jni(),
+                               FindClass(jni(), "org/webrtc/NetworkMonitor")),
+      j_network_monitor_(
+          jni(),
+          jni()->CallStaticObjectMethod(
+              *j_network_monitor_class_,
+              GetStaticMethodID(
+                  jni(),
+                  *j_network_monitor_class_,
+                  "init",
+                  "(Landroid/content/Context;)Lorg/webrtc/NetworkMonitor;"),
+              application_context_)) {
+  ASSERT(application_context_ != nullptr);
+  CHECK_EXCEPTION(jni()) << "Error during NetworkMonitor.init";
+}
+
+void AndroidNetworkMonitor::Start() {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  if (started_) {
+    return;
+  }
+  started_ = true;
+
+  // This is kind of magic behavior, but doing this allows the SocketServer to
+  // use this as a NetworkBinder to bind sockets on a particular network when
+  // it creates sockets.
+  worker_thread()->socketserver()->set_network_binder(this);
+
+  jmethodID m =
+      GetMethodID(jni(), *j_network_monitor_class_, "startMonitoring", "(J)V");
+  jni()->CallVoidMethod(*j_network_monitor_, m, jlongFromPointer(this));
+  CHECK_EXCEPTION(jni()) << "Error during CallVoidMethod";
+}
+
+void AndroidNetworkMonitor::Stop() {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  if (!started_) {
+    return;
+  }
+  started_ = false;
+
+  // Once the network monitor stops, it will clear all network information and
+  // it won't find the network handle to bind anyway.
+  if (worker_thread()->socketserver()->network_binder() == this) {
+    worker_thread()->socketserver()->set_network_binder(nullptr);
+  }
+
+  jmethodID m =
+      GetMethodID(jni(), *j_network_monitor_class_, "stopMonitoring", "(J)V");
+  jni()->CallVoidMethod(*j_network_monitor_, m, jlongFromPointer(this));
+  CHECK_EXCEPTION(jni()) << "Error during NetworkMonitor.stopMonitoring";
+
+  network_handle_by_address_.clear();
+  network_info_by_handle_.clear();
+}
+
+int AndroidNetworkMonitor::BindSocketToNetwork(int socket_fd,
+                                               const rtc::IPAddress& address) {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  // Android prior to Lollipop didn't have support for binding sockets to
+  // networks. However, in that case it should not have reached here because
+  // |network_handle_by_address_| should only be populated in Android Lollipop
+  // and above.
+  // TODO(honghaiz): Add a check for Android version here so that it won't try
+  // to look for handle if the Android version is before Lollipop.
+  auto iter = network_handle_by_address_.find(address);
+  if (iter == network_handle_by_address_.end()) {
+    return rtc::NETWORK_BIND_ADDRESS_NOT_FOUND;
+  }
+  NetworkHandle network_handle = iter->second;
+
+  // NOTE: This does rely on Android implementation details, but
+  // these details are unlikely to change.
+  typedef int (*SetNetworkForSocket)(unsigned netId, int socketFd);
+  static SetNetworkForSocket setNetworkForSocket;
+  // This is not threadsafe, but we are running this only on the worker thread.
+  if (setNetworkForSocket == nullptr) {
+    // Android's netd client library should always be loaded in our address
+    // space as it shims libc functions like connect().
+    const std::string net_library_path = "libnetd_client.so";
+    void* lib = dlopen(net_library_path.c_str(), RTLD_LAZY);
+    if (lib == nullptr) {
+      LOG(LS_ERROR) << "Library " << net_library_path << " not found!";
+      return rtc::NETWORK_BIND_NOT_IMPLEMENTED;
+    }
+    setNetworkForSocket = reinterpret_cast<SetNetworkForSocket>(
+        dlsym(lib, "setNetworkForSocket"));
+  }
+  if (setNetworkForSocket == nullptr) {
+    LOG(LS_ERROR) << "Symbol setNetworkForSocket not found ";
+    return rtc::NETWORK_BIND_NOT_IMPLEMENTED;
+  }
+  int rv = setNetworkForSocket(network_handle, socket_fd);
+  // If |network| has since disconnected, |rv| will be ENONET.  Surface this as
+  // ERR_NETWORK_CHANGED, rather than MapSystemError(ENONET) which gives back
+  // the less descriptive ERR_FAILED.
+  if (rv == 0) {
+    return rtc::NETWORK_BIND_SUCCESS;
+  }
+  if (rv == ENONET) {
+    return rtc::NETWORK_BIND_NETWORK_CHANGED;
+  }
+  return rtc::NETWORK_BIND_FAILURE;
+}
+
+void AndroidNetworkMonitor::OnNetworkConnected(
+    const NetworkInformation& network_info) {
+  LOG(LS_INFO) << "Network connected: " << network_info.ToString();
+  worker_thread()->Invoke<void>(rtc::Bind(
+      &AndroidNetworkMonitor::OnNetworkConnected_w, this, network_info));
+}
+
+void AndroidNetworkMonitor::OnNetworkConnected_w(
+    const NetworkInformation& network_info) {
+  adapter_type_by_name_[network_info.interface_name] =
+      AdapterTypeFromNetworkType(network_info.type);
+  network_info_by_handle_[network_info.handle] = network_info;
+  for (const rtc::IPAddress& address : network_info.ip_addresses) {
+    network_handle_by_address_[address] = network_info.handle;
+  }
+}
+
+void AndroidNetworkMonitor::OnNetworkDisconnected(NetworkHandle handle) {
+  LOG(LS_INFO) << "Network disconnected for handle " << handle;
+  worker_thread()->Invoke<void>(
+      rtc::Bind(&AndroidNetworkMonitor::OnNetworkDisconnected_w, this, handle));
+}
+
+void AndroidNetworkMonitor::OnNetworkDisconnected_w(NetworkHandle handle) {
+  auto iter = network_info_by_handle_.find(handle);
+  if (iter != network_info_by_handle_.end()) {
+    for (const rtc::IPAddress& address : iter->second.ip_addresses) {
+      network_handle_by_address_.erase(address);
+    }
+    network_info_by_handle_.erase(iter);
+  }
+}
+
+void AndroidNetworkMonitor::SetNetworkInfos(
+    const std::vector<NetworkInformation>& network_infos) {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  network_handle_by_address_.clear();
+  network_info_by_handle_.clear();
+  for (const NetworkInformation& network : network_infos) {
+    OnNetworkConnected_w(network);
+  }
+}
+
+rtc::AdapterType AndroidNetworkMonitor::GetAdapterType(
+    const std::string& if_name) {
+  auto iter = adapter_type_by_name_.find(if_name);
+  if (iter == adapter_type_by_name_.end()) {
+    return rtc::ADAPTER_TYPE_UNKNOWN;
+  }
+  return iter->second;
+}
+
+rtc::NetworkMonitorInterface*
+AndroidNetworkMonitorFactory::CreateNetworkMonitor() {
+  return new AndroidNetworkMonitor();
+}
+
+JOW(void, NetworkMonitor_nativeNotifyConnectionTypeChanged)(
+    JNIEnv* jni, jobject j_monitor, jlong j_native_monitor) {
+  rtc::NetworkMonitorInterface* network_monitor =
+      reinterpret_cast<rtc::NetworkMonitorInterface*>(j_native_monitor);
+  network_monitor->OnNetworksChanged();
+}
+
+JOW(void, NetworkMonitor_nativeNotifyOfActiveNetworkList)(
+    JNIEnv* jni, jobject j_monitor, jlong j_native_monitor,
+    jobjectArray j_network_infos) {
+  AndroidNetworkMonitor* network_monitor =
+      reinterpret_cast<AndroidNetworkMonitor*>(j_native_monitor);
+  std::vector<NetworkInformation> network_infos;
+  size_t num_networks = jni->GetArrayLength(j_network_infos);
+  for (size_t i = 0; i < num_networks; ++i) {
+    jobject j_network_info = jni->GetObjectArrayElement(j_network_infos, i);
+    CHECK_EXCEPTION(jni) << "Error during GetObjectArrayElement";
+    network_infos.push_back(GetNetworkInformationFromJava(jni, j_network_info));
+  }
+  network_monitor->SetNetworkInfos(network_infos);
+}
+
+JOW(void, NetworkMonitor_nativeNotifyOfNetworkConnect)(
+    JNIEnv* jni, jobject j_monitor, jlong j_native_monitor,
+    jobject j_network_info) {
+  AndroidNetworkMonitor* network_monitor =
+      reinterpret_cast<AndroidNetworkMonitor*>(j_native_monitor);
+  NetworkInformation network_info =
+      GetNetworkInformationFromJava(jni, j_network_info);
+  network_monitor->OnNetworkConnected(network_info);
+}
+
+JOW(void, NetworkMonitor_nativeNotifyOfNetworkDisconnect)(
+    JNIEnv* jni, jobject j_monitor, jlong j_native_monitor,
+    jint network_handle) {
+  AndroidNetworkMonitor* network_monitor =
+      reinterpret_cast<AndroidNetworkMonitor*>(j_native_monitor);
+  network_monitor->OnNetworkDisconnected(
+      static_cast<NetworkHandle>(network_handle));
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/androidnetworkmonitor_jni.h b/webrtc/api/java/jni/androidnetworkmonitor_jni.h
new file mode 100644
index 0000000..220a5bc
--- /dev/null
+++ b/webrtc/api/java/jni/androidnetworkmonitor_jni.h
@@ -0,0 +1,108 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDNETWORKMONITOR_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDNETWORKMONITOR_JNI_H_
+
+#include "webrtc/base/networkmonitor.h"
+
+#include <map>
+
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/thread_checker.h"
+
+namespace webrtc_jni {
+
+typedef uint32_t NetworkHandle;
+
+// C++ equivalent of the Java enum NetworkMonitorAutoDetect.ConnectionType.
+enum NetworkType {
+  NETWORK_UNKNOWN,
+  NETWORK_ETHERNET,
+  NETWORK_WIFI,
+  NETWORK_4G,
+  NETWORK_3G,
+  NETWORK_2G,
+  NETWORK_BLUETOOTH,
+  NETWORK_NONE
+};
+
+// The information is collected from the Android OS so that the native code can
+// get the network type and handle (Android network ID) for each interface.
+struct NetworkInformation {
+  std::string interface_name;
+  NetworkHandle handle;
+  NetworkType type;
+  std::vector<rtc::IPAddress> ip_addresses;
+
+  std::string ToString() const;
+};
+
+class AndroidNetworkMonitor : public rtc::NetworkMonitorBase,
+                              public rtc::NetworkBinderInterface {
+ public:
+  AndroidNetworkMonitor();
+
+  static void SetAndroidContext(JNIEnv* jni, jobject context);
+
+  void Start() override;
+  void Stop() override;
+
+  int BindSocketToNetwork(int socket_fd,
+                          const rtc::IPAddress& address) override;
+  rtc::AdapterType GetAdapterType(const std::string& if_name) override;
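+  // The methods below are called from the Java NetworkMonitor via the
+  // NetworkMonitor_nativeNotifyOf* JNI functions; the connect/disconnect
+  // notifications are forwarded to the worker thread.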
+  void OnNetworkConnected(const NetworkInformation& network_info);
+  void OnNetworkDisconnected(NetworkHandle network_handle);
+  void SetNetworkInfos(const std::vector<NetworkInformation>& network_infos);
+
+ private:
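+  // Returns a JNIEnv usable on the calling thread, attaching the thread to the
+  // JVM if necessary.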
+  JNIEnv* jni() { return AttachCurrentThreadIfNeeded(); }
+
+  void OnNetworkConnected_w(const NetworkInformation& network_info);
+  void OnNetworkDisconnected_w(NetworkHandle network_handle);
+
+  ScopedGlobalRef<jclass> j_network_monitor_class_;
+  ScopedGlobalRef<jobject> j_network_monitor_;
+  rtc::ThreadChecker thread_checker_;
+  static jobject application_context_;
+  bool started_ = false;
+  std::map<std::string, rtc::AdapterType> adapter_type_by_name_;
+  std::map<rtc::IPAddress, NetworkHandle> network_handle_by_address_;
+  std::map<NetworkHandle, NetworkInformation> network_info_by_handle_;
+};
+
+class AndroidNetworkMonitorFactory : public rtc::NetworkMonitorFactory {
+ public:
+  AndroidNetworkMonitorFactory() {}
+
+  rtc::NetworkMonitorInterface* CreateNetworkMonitor() override;
+};
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_ANDROIDNETWORKMONITOR_JNI_H_
diff --git a/webrtc/api/java/jni/androidvideocapturer_jni.cc b/webrtc/api/java/jni/androidvideocapturer_jni.cc
new file mode 100644
index 0000000..a636d62
--- /dev/null
+++ b/webrtc/api/java/jni/androidvideocapturer_jni.cc
@@ -0,0 +1,246 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "webrtc/api/java/jni/androidvideocapturer_jni.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/api/java/jni/surfacetexturehelper_jni.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "webrtc/base/bind.h"
+
+namespace webrtc_jni {
+
+jobject AndroidVideoCapturerJni::application_context_ = nullptr;
+
+// static
+int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni,
+                                               jobject application_context) {
+  if (application_context_) {
+    jni->DeleteGlobalRef(application_context_);
+  }
+  application_context_ = NewGlobalRef(jni, application_context);
+
+  return 0;
+}
+
+AndroidVideoCapturerJni::AndroidVideoCapturerJni(
+    JNIEnv* jni,
+    jobject j_video_capturer,
+    jobject j_surface_texture_helper)
+    : j_video_capturer_(jni, j_video_capturer),
+      j_video_capturer_class_(
+          jni, FindClass(jni, "org/webrtc/VideoCapturerAndroid")),
+      j_observer_class_(
+          jni,
+          FindClass(jni,
+                    "org/webrtc/VideoCapturerAndroid$NativeObserver")),
+      surface_texture_helper_(new rtc::RefCountedObject<SurfaceTextureHelper>(
+          jni, j_surface_texture_helper)),
+      capturer_(nullptr) {
+  LOG(LS_INFO) << "AndroidVideoCapturerJni ctor";
+  thread_checker_.DetachFromThread();
+}
+
+AndroidVideoCapturerJni::~AndroidVideoCapturerJni() {
+  LOG(LS_INFO) << "AndroidVideoCapturerJni dtor";
+  jni()->CallVoidMethod(
+      *j_video_capturer_,
+      GetMethodID(jni(), *j_video_capturer_class_, "release", "()V"));
+  CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.release()";
+}
+
+void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
+                                    webrtc::AndroidVideoCapturer* capturer) {
+  LOG(LS_INFO) << "AndroidVideoCapturerJni start";
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  {
+    rtc::CritScope cs(&capturer_lock_);
+    RTC_CHECK(capturer_ == nullptr);
+    RTC_CHECK(invoker_.get() == nullptr);
+    capturer_ = capturer;
+    invoker_.reset(new rtc::GuardedAsyncInvoker());
+  }
+  jobject j_frame_observer =
+      jni()->NewObject(*j_observer_class_,
+                       GetMethodID(jni(), *j_observer_class_, "<init>", "(J)V"),
+                       jlongFromPointer(this));
+  CHECK_EXCEPTION(jni()) << "error during NewObject";
+
+  jmethodID m = GetMethodID(
+      jni(), *j_video_capturer_class_, "startCapture",
+      "(IIILandroid/content/Context;"
+      "Lorg/webrtc/VideoCapturerAndroid$CapturerObserver;)V");
+  jni()->CallVoidMethod(*j_video_capturer_,
+                        m, width, height,
+                        framerate,
+                        application_context_,
+                        j_frame_observer);
+  CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.startCapture";
+}
+
+void AndroidVideoCapturerJni::Stop() {
+  LOG(LS_INFO) << "AndroidVideoCapturerJni stop";
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  {
+    rtc::CritScope cs(&capturer_lock_);
+    // Destroying |invoker_| will cancel all pending calls to |capturer_|.
+    invoker_ = nullptr;
+    capturer_ = nullptr;
+  }
+  jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
+                            "stopCapture", "()V");
+  jni()->CallVoidMethod(*j_video_capturer_, m);
+  CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.stopCapture";
+  LOG(LS_INFO) << "AndroidVideoCapturerJni stop done";
+}
+
+template <typename... Args>
+void AndroidVideoCapturerJni::AsyncCapturerInvoke(
+    const char* method_name,
+    void (webrtc::AndroidVideoCapturer::*method)(Args...),
+    typename Identity<Args>::type... args) {
+  rtc::CritScope cs(&capturer_lock_);
+  if (!invoker_) {
+    LOG(LS_WARNING) << method_name << "() called for closed capturer.";
+    return;
+  }
+  invoker_->AsyncInvoke<void>(rtc::Bind(method, capturer_, args...));
+}
+
+std::string AndroidVideoCapturerJni::GetSupportedFormats() {
+  jmethodID m =
+      GetMethodID(jni(), *j_video_capturer_class_,
+                  "getSupportedFormatsAsJson", "()Ljava/lang/String;");
+  jstring j_json_caps =
+      (jstring) jni()->CallObjectMethod(*j_video_capturer_, m);
+  CHECK_EXCEPTION(jni()) << "error during supportedFormatsAsJson";
+  return JavaToStdString(jni(), j_json_caps);
+}
+
+void AndroidVideoCapturerJni::OnCapturerStarted(bool success) {
+  LOG(LS_INFO) << "AndroidVideoCapturerJni capture started: " << success;
+  AsyncCapturerInvoke("OnCapturerStarted",
+                      &webrtc::AndroidVideoCapturer::OnCapturerStarted,
+                      success);
+}
+
+void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
+                                                  int length,
+                                                  int width,
+                                                  int height,
+                                                  int rotation,
+                                                  int64_t timestamp_ns) {
+  const uint8_t* y_plane = static_cast<uint8_t*>(video_frame);
+  const uint8_t* vu_plane = y_plane + width * height;
+
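+  // Convert the NV21 input (a full-resolution Y plane followed by an
+  // interleaved VU plane) to I420, using a buffer from the pool.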
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+      buffer_pool_.CreateBuffer(width, height);
+  libyuv::NV21ToI420(
+      y_plane, width,
+      vu_plane, width,
+      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
+      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
+      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
+      width, height);
+  AsyncCapturerInvoke("OnIncomingFrame",
+                      &webrtc::AndroidVideoCapturer::OnIncomingFrame,
+                      buffer, rotation, timestamp_ns);
+}
+
+void AndroidVideoCapturerJni::OnTextureFrame(int width,
+                                             int height,
+                                             int rotation,
+                                             int64_t timestamp_ns,
+                                             const NativeHandleImpl& handle) {
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
+      surface_texture_helper_->CreateTextureFrame(width, height, handle));
+
+  AsyncCapturerInvoke("OnIncomingFrame",
+                      &webrtc::AndroidVideoCapturer::OnIncomingFrame,
+                      buffer, rotation, timestamp_ns);
+}
+
+void AndroidVideoCapturerJni::OnOutputFormatRequest(int width,
+                                                    int height,
+                                                    int fps) {
+  AsyncCapturerInvoke("OnOutputFormatRequest",
+                      &webrtc::AndroidVideoCapturer::OnOutputFormatRequest,
+                      width, height, fps);
+}
+
+JNIEnv* AndroidVideoCapturerJni::jni() { return AttachCurrentThreadIfNeeded(); }
+
+JOW(void,
+    VideoCapturerAndroid_00024NativeObserver_nativeOnByteBufferFrameCaptured)
+    (JNIEnv* jni, jclass, jlong j_capturer, jbyteArray j_frame, jint length,
+        jint width, jint height, jint rotation, jlong timestamp) {
+  jboolean is_copy = true;
+  jbyte* bytes = jni->GetByteArrayElements(j_frame, &is_copy);
+  reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
+      ->OnMemoryBufferFrame(bytes, length, width, height, rotation, timestamp);
+  jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
+}
+
+JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnTextureFrameCaptured)
+    (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
+        jint j_oes_texture_id, jfloatArray j_transform_matrix,
+        jint j_rotation, jlong j_timestamp) {
+   reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
+         ->OnTextureFrame(j_width, j_height, j_rotation, j_timestamp,
+                          NativeHandleImpl(jni, j_oes_texture_id,
+                                           j_transform_matrix));
+}
+
+JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted)
+    (JNIEnv* jni, jclass, jlong j_capturer, jboolean j_success) {
+  LOG(LS_INFO) << "NativeObserver_nativeCapturerStarted";
+  reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnCapturerStarted(
+      j_success);
+}
+
+JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnOutputFormatRequest)
+    (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
+        jint j_fps) {
+  LOG(LS_INFO) << "NativeObserver_nativeOnOutputFormatRequest";
+  reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnOutputFormatRequest(
+      j_width, j_height, j_fps);
+}
+
+JOW(jlong, VideoCapturerAndroid_nativeCreateVideoCapturer)
+    (JNIEnv* jni, jclass,
+     jobject j_video_capturer, jobject j_surface_texture_helper) {
+  rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
+      new rtc::RefCountedObject<AndroidVideoCapturerJni>(
+          jni, j_video_capturer, j_surface_texture_helper);
+  rtc::scoped_ptr<cricket::VideoCapturer> capturer(
+      new webrtc::AndroidVideoCapturer(delegate));
+  // Caller takes ownership of the cricket::VideoCapturer* pointer.
+  return jlongFromPointer(capturer.release());
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/androidvideocapturer_jni.h b/webrtc/api/java/jni/androidvideocapturer_jni.h
new file mode 100644
index 0000000..bf611f5
--- /dev/null
+++ b/webrtc/api/java/jni/androidvideocapturer_jni.h
@@ -0,0 +1,116 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
+
+#include <string>
+
+#include "webrtc/api/androidvideocapturer.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/asyncinvoker.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
+
+namespace webrtc_jni {
+
+struct NativeHandleImpl;
+class SurfaceTextureHelper;
+
+// AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
+// The purpose of the delegate is to hide the JNI specifics from the C++-only
+// AndroidVideoCapturer.
+class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
+ public:
+  static int SetAndroidObjects(JNIEnv* jni, jobject application_context);
+
+  AndroidVideoCapturerJni(JNIEnv* jni,
+                          jobject j_video_capturer,
+                          jobject j_surface_texture_helper);
+
+  void Start(int width, int height, int framerate,
+             webrtc::AndroidVideoCapturer* capturer) override;
+  void Stop() override;
+
+  std::string GetSupportedFormats() override;
+
+  // Called from VideoCapturerAndroid::NativeObserver on a Java thread.
+  void OnCapturerStarted(bool success);
+  void OnMemoryBufferFrame(void* video_frame, int length, int width,
+                           int height, int rotation, int64_t timestamp_ns);
+  void OnTextureFrame(int width, int height, int rotation, int64_t timestamp_ns,
+                      const NativeHandleImpl& handle);
+  void OnOutputFormatRequest(int width, int height, int fps);
+
+ protected:
+  ~AndroidVideoCapturerJni();
+
+ private:
+  JNIEnv* jni();
+
+  // To avoid deducing Args from the 3rd parameter of AsyncCapturerInvoke.
+  template <typename T>
+  struct Identity {
+    typedef T type;
+  };
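+  // (Identity<T>::type is a non-deduced context, so Args... is deduced only
+  // from the member-function pointer and the trailing arguments are simply
+  // converted to those types at the call site.)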
+
+  // Helper function to make safe asynchronous calls to |capturer_|. The calls
+  // are not guaranteed to be delivered.
+  template <typename... Args>
+  void AsyncCapturerInvoke(
+      const char* method_name,
+      void (webrtc::AndroidVideoCapturer::*method)(Args...),
+      typename Identity<Args>::type... args);
+
+  const ScopedGlobalRef<jobject> j_video_capturer_;
+  const ScopedGlobalRef<jclass> j_video_capturer_class_;
+  const ScopedGlobalRef<jclass> j_observer_class_;
+
+  // Used on the Java thread running the camera.
+  webrtc::I420BufferPool buffer_pool_;
+  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
+  rtc::ThreadChecker thread_checker_;
+
+  // |capturer_| is guaranteed to be a valid pointer between a call to
+  // AndroidVideoCapturerDelegate::Start and the corresponding call to
+  // AndroidVideoCapturerDelegate::Stop.
+  rtc::CriticalSection capturer_lock_;
+  webrtc::AndroidVideoCapturer* capturer_ GUARDED_BY(capturer_lock_);
+  // |invoker_| is used to communicate with |capturer_| on the thread Start() is
+  // called on.
+  rtc::scoped_ptr<rtc::GuardedAsyncInvoker> invoker_ GUARDED_BY(capturer_lock_);
+
+  static jobject application_context_;
+
+  RTC_DISALLOW_COPY_AND_ASSIGN(AndroidVideoCapturerJni);
+};
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
diff --git a/webrtc/api/java/jni/classreferenceholder.cc b/webrtc/api/java/jni/classreferenceholder.cc
new file mode 100644
index 0000000..0625cc2
--- /dev/null
+++ b/webrtc/api/java/jni/classreferenceholder.cc
@@ -0,0 +1,152 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+#include "webrtc/api/java/jni/classreferenceholder.h"
+
+#include "webrtc/api/java/jni/jni_helpers.h"
+
+namespace webrtc_jni {
+
+// ClassReferenceHolder holds global references to Java classes in webrtc/api.
+class ClassReferenceHolder {
+ public:
+  explicit ClassReferenceHolder(JNIEnv* jni);
+  ~ClassReferenceHolder();
+
+  void FreeReferences(JNIEnv* jni);
+  jclass GetClass(const std::string& name);
+
+ private:
+  void LoadClass(JNIEnv* jni, const std::string& name);
+
+  std::map<std::string, jclass> classes_;
+};
+
+// Allocated in LoadGlobalClassReferenceHolder(),
+// freed in FreeGlobalClassReferenceHolder().
+static ClassReferenceHolder* g_class_reference_holder = nullptr;
+
+void LoadGlobalClassReferenceHolder() {
+  RTC_CHECK(g_class_reference_holder == nullptr);
+  g_class_reference_holder = new ClassReferenceHolder(GetEnv());
+}
+
+void FreeGlobalClassReferenceHolder() {
+  g_class_reference_holder->FreeReferences(AttachCurrentThreadIfNeeded());
+  delete g_class_reference_holder;
+  g_class_reference_holder = nullptr;
+}
+
+ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
+  LoadClass(jni, "android/graphics/SurfaceTexture");
+  LoadClass(jni, "java/nio/ByteBuffer");
+  LoadClass(jni, "java/util/ArrayList");
+  LoadClass(jni, "org/webrtc/AudioTrack");
+  LoadClass(jni, "org/webrtc/CameraEnumerator");
+  LoadClass(jni, "org/webrtc/Camera2Enumerator");
+  LoadClass(jni, "org/webrtc/CameraEnumerationAndroid");
+  LoadClass(jni, "org/webrtc/DataChannel");
+  LoadClass(jni, "org/webrtc/DataChannel$Buffer");
+  LoadClass(jni, "org/webrtc/DataChannel$Init");
+  LoadClass(jni, "org/webrtc/DataChannel$State");
+  LoadClass(jni, "org/webrtc/EglBase");
+  LoadClass(jni, "org/webrtc/EglBase$Context");
+  LoadClass(jni, "org/webrtc/EglBase14$Context");
+  LoadClass(jni, "org/webrtc/IceCandidate");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
+  LoadClass(jni, "org/webrtc/MediaSource$State");
+  LoadClass(jni, "org/webrtc/MediaStream");
+  LoadClass(jni, "org/webrtc/MediaStreamTrack$State");
+  LoadClass(jni, "org/webrtc/NetworkMonitor");
+  LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$ConnectionType");
+  LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$IPAddress");
+  LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$NetworkInformation");
+  LoadClass(jni, "org/webrtc/PeerConnectionFactory");
+  LoadClass(jni, "org/webrtc/PeerConnection$BundlePolicy");
+  LoadClass(jni, "org/webrtc/PeerConnection$ContinualGatheringPolicy");
+  LoadClass(jni, "org/webrtc/PeerConnection$RtcpMuxPolicy");
+  LoadClass(jni, "org/webrtc/PeerConnection$IceConnectionState");
+  LoadClass(jni, "org/webrtc/PeerConnection$IceGatheringState");
+  LoadClass(jni, "org/webrtc/PeerConnection$IceTransportsType");
+  LoadClass(jni, "org/webrtc/PeerConnection$TcpCandidatePolicy");
+  LoadClass(jni, "org/webrtc/PeerConnection$KeyType");
+  LoadClass(jni, "org/webrtc/PeerConnection$SignalingState");
+  LoadClass(jni, "org/webrtc/RtpReceiver");
+  LoadClass(jni, "org/webrtc/RtpSender");
+  LoadClass(jni, "org/webrtc/SessionDescription");
+  LoadClass(jni, "org/webrtc/SessionDescription$Type");
+  LoadClass(jni, "org/webrtc/StatsReport");
+  LoadClass(jni, "org/webrtc/StatsReport$Value");
+  LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
+  LoadClass(jni, "org/webrtc/VideoCapturerAndroid");
+  LoadClass(jni, "org/webrtc/VideoCapturerAndroid$NativeObserver");
+  LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame");
+  LoadClass(jni, "org/webrtc/VideoTrack");
+}
+
+ClassReferenceHolder::~ClassReferenceHolder() {
+  RTC_CHECK(classes_.empty()) << "Must call FreeReferences() before dtor!";
+}
+
+void ClassReferenceHolder::FreeReferences(JNIEnv* jni) {
+  for (std::map<std::string, jclass>::const_iterator it = classes_.begin();
+      it != classes_.end(); ++it) {
+    jni->DeleteGlobalRef(it->second);
+  }
+  classes_.clear();
+}
+
+jclass ClassReferenceHolder::GetClass(const std::string& name) {
+  std::map<std::string, jclass>::iterator it = classes_.find(name);
+  RTC_CHECK(it != classes_.end()) << "Unexpected GetClass() call for: " << name;
+  return it->second;
+}
+
+void ClassReferenceHolder::LoadClass(JNIEnv* jni, const std::string& name) {
+  jclass localRef = jni->FindClass(name.c_str());
+  CHECK_EXCEPTION(jni) << "error during FindClass: " << name;
+  RTC_CHECK(localRef) << name;
+  jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
+  CHECK_EXCEPTION(jni) << "error during NewGlobalRef: " << name;
+  RTC_CHECK(globalRef) << name;
+  bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
+  RTC_CHECK(inserted) << "Duplicate class name: " << name;
+}
+
+// Returns a global reference guaranteed to be valid for the lifetime of the
+// process.
+jclass FindClass(JNIEnv* jni, const char* name) {
+  return g_class_reference_holder->GetClass(name);
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/classreferenceholder.h b/webrtc/api/java/jni/classreferenceholder.h
new file mode 100644
index 0000000..5edf614
--- /dev/null
+++ b/webrtc/api/java/jni/classreferenceholder.h
@@ -0,0 +1,59 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// Android's FindClass() is trickier than usual because the app-specific
+// ClassLoader is not consulted when there is no app-specific frame on the
+// stack.  Consequently, we look up and cache all the classes we need once, in
+// LoadGlobalClassReferenceHolder().
+// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
+
+#ifndef WEBRTC_API_JAVA_JNI_CLASSREFERENCEHOLDER_H_
+#define WEBRTC_API_JAVA_JNI_CLASSREFERENCEHOLDER_H_
+
+#include <jni.h>
+#include <map>
+#include <string>
+
+namespace webrtc_jni {
+
+// LoadGlobalClassReferenceHolder must be called in JNI_OnLoad.
+void LoadGlobalClassReferenceHolder();
+// FreeGlobalClassReferenceHolder must be called in JNI_OnUnload.
+void FreeGlobalClassReferenceHolder();
+
+// Returns a global reference guaranteed to be valid for the lifetime of the
+// process.
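+// Example (names are illustrative):
+//   jclass j_stream_class = FindClass(jni, "org/webrtc/MediaStream");
+// The class name must be one of those preloaded by ClassReferenceHolder;
+// FindClass() CHECKs otherwise.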
+jclass FindClass(JNIEnv* jni, const char* name);
+
+// Convenience macro defining JNI-accessible methods in the org.webrtc package.
+// Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter.
+#define JOW(rettype, name) extern "C" rettype JNIEXPORT JNICALL \
+  Java_org_webrtc_##name
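+// For example (Foo/nativeBar are illustrative names only),
+//   JOW(void, Foo_nativeBar)(JNIEnv* jni, jclass) { ... }
+// defines extern "C" void JNIEXPORT JNICALL
+// Java_org_webrtc_Foo_nativeBar(JNIEnv* jni, jclass), i.e. the JNI entry point
+// for a native method nativeBar() declared on org.webrtc.Foo.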
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_CLASSREFERENCEHOLDER_H_
diff --git a/webrtc/api/java/jni/eglbase_jni.cc b/webrtc/api/java/jni/eglbase_jni.cc
new file mode 100644
index 0000000..26eeeb3
--- /dev/null
+++ b/webrtc/api/java/jni/eglbase_jni.cc
@@ -0,0 +1,90 @@
+/*
+ * libjingle
+ * Copyright 2016 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "webrtc/api/java/jni/eglbase_jni.h"
+
+#include "webrtc/api/java/jni/androidmediacodeccommon.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+
+namespace webrtc_jni {
+
+EglBase::EglBase() {
+}
+
+EglBase::~EglBase() {
+  if (egl_base_) {
+    JNIEnv* jni = AttachCurrentThreadIfNeeded();
+    jni->DeleteGlobalRef(egl_base_context_);
+    egl_base_context_ = nullptr;
+    jni->CallVoidMethod(egl_base_,
+                        GetMethodID(jni,
+                                    FindClass(jni, "org/webrtc/EglBase"),
+                                    "release", "()V"));
+    jni->DeleteGlobalRef(egl_base_);
+  }
+}
+
+bool EglBase::CreateEglBase(JNIEnv* jni, jobject egl_context) {
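+  // Release any previously created Java EglBase and its context before
+  // creating a new one.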
+  if (egl_base_) {
+    jni->DeleteGlobalRef(egl_base_context_);
+    egl_base_context_ = nullptr;
+    jni->CallVoidMethod(egl_base_,
+                        GetMethodID(jni,
+                                    FindClass(jni, "org/webrtc/EglBase"),
+                                    "release", "()V"));
+    jni->DeleteGlobalRef(egl_base_);
+    egl_base_ = nullptr;
+  }
+
+  if (IsNull(jni, egl_context))
+    return false;
+
+  jobject egl_base = jni->CallStaticObjectMethod(
+      FindClass(jni, "org/webrtc/EglBase"),
+      GetStaticMethodID(jni,
+                        FindClass(jni, "org/webrtc/EglBase"),
+                        "create",
+                        "(Lorg/webrtc/EglBase$Context;)Lorg/webrtc/EglBase;"),
+                        egl_context);
+  if (CheckException(jni))
+    return false;
+
+  egl_base_ = jni->NewGlobalRef(egl_base);
+  egl_base_context_ = jni->NewGlobalRef(
+      jni->CallObjectMethod(
+          egl_base_,
+          GetMethodID(jni,
+                      FindClass(jni, "org/webrtc/EglBase"),
+                      "getEglBaseContext",
+                      "()Lorg/webrtc/EglBase$Context;")));
+  RTC_CHECK(egl_base_context_);
+  return true;
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/eglbase_jni.h b/webrtc/api/java/jni/eglbase_jni.h
new file mode 100644
index 0000000..de7e39e
--- /dev/null
+++ b/webrtc/api/java/jni/eglbase_jni.h
@@ -0,0 +1,60 @@
+/*
+ * libjingle
+ * Copyright 2016 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_EGLBASE_JNI_H_
+#define WEBRTC_API_JAVA_JNI_EGLBASE_JNI_H_
+
+#include <jni.h>
+
+#include "webrtc/base/constructormagic.h"
+
+namespace webrtc_jni {
+
+// Helper class used for creating a Java instance of org/webrtc/EglBase.
+class EglBase {
+ public:
+  EglBase();
+  ~EglBase();
+
+  // Creates a new Java EglBase instance. |egl_base_context| must be a valid
+  // EglBase$Context.
+  // Returns false if |egl_base_context| is a null Java object or if an
+  // exception occurs in Java.
+  bool CreateEglBase(JNIEnv* jni, jobject egl_base_context);
+  jobject egl_base_context() const { return egl_base_context_; }
+
+ private:
+  jobject egl_base_ = nullptr;  // instance of org/webrtc/EglBase
+  jobject egl_base_context_ = nullptr;  // instance of EglBase$Context
+
+  RTC_DISALLOW_COPY_AND_ASSIGN(EglBase);
+};
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_EGLBASE_JNI_H_
diff --git a/webrtc/api/java/jni/jni_helpers.cc b/webrtc/api/java/jni/jni_helpers.cc
new file mode 100644
index 0000000..b07a9c5
--- /dev/null
+++ b/webrtc/api/java/jni/jni_helpers.cc
@@ -0,0 +1,296 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+#include "webrtc/api/java/jni/jni_helpers.h"
+
+#include "webrtc/api/java/jni/classreferenceholder.h"
+
+#include <asm/unistd.h>
+#include <sys/prctl.h>
+#include <sys/syscall.h>
+#include <unistd.h>
+
+namespace webrtc_jni {
+
+static JavaVM* g_jvm = nullptr;
+
+static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT;
+
+// Key for per-thread JNIEnv* data.  Non-NULL in threads attached to |g_jvm| by
+// AttachCurrentThreadIfNeeded(), NULL in unattached threads and threads that
+// were attached by the JVM because of a Java->native call.
+static pthread_key_t g_jni_ptr;
+
+JavaVM *GetJVM() {
+  RTC_CHECK(g_jvm) << "JNI_OnLoad failed to run?";
+  return g_jvm;
+}
+
+// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
+JNIEnv* GetEnv() {
+  void* env = NULL;
+  jint status = g_jvm->GetEnv(&env, JNI_VERSION_1_6);
+  RTC_CHECK(((env != NULL) && (status == JNI_OK)) ||
+            ((env == NULL) && (status == JNI_EDETACHED)))
+      << "Unexpected GetEnv return: " << status << ":" << env;
+  return reinterpret_cast<JNIEnv*>(env);
+}
+
+static void ThreadDestructor(void* prev_jni_ptr) {
+  // This function only runs on threads where |g_jni_ptr| is non-NULL, meaning
+  // we were responsible for originally attaching the thread, so are responsible
+  // for detaching it now.  However, because some JVM implementations (notably
+  // Oracle's http://goo.gl/eHApYT) also use the pthread_key_create mechanism,
+  // the JVMs accounting info for this thread may already be wiped out by the
+  // time this is called. Thus it may appear we are already detached even though
+  // it was our responsibility to detach!  Oh well.
+  if (!GetEnv())
+    return;
+
+  RTC_CHECK(GetEnv() == prev_jni_ptr)
+      << "Detaching from another thread: " << prev_jni_ptr << ":" << GetEnv();
+  jint status = g_jvm->DetachCurrentThread();
+  RTC_CHECK(status == JNI_OK) << "Failed to detach thread: " << status;
+  RTC_CHECK(!GetEnv()) << "Detaching was a successful no-op???";
+}
+
+static void CreateJNIPtrKey() {
+  RTC_CHECK(!pthread_key_create(&g_jni_ptr, &ThreadDestructor))
+      << "pthread_key_create";
+}
+
+jint InitGlobalJniVariables(JavaVM *jvm) {
+  RTC_CHECK(!g_jvm) << "InitGlobalJniVariables!";
+  g_jvm = jvm;
+  RTC_CHECK(g_jvm) << "InitGlobalJniVariables handed NULL?";
+
+  RTC_CHECK(!pthread_once(&g_jni_ptr_once, &CreateJNIPtrKey)) << "pthread_once";
+
+  JNIEnv* jni = nullptr;
+  if (jvm->GetEnv(reinterpret_cast<void**>(&jni), JNI_VERSION_1_6) != JNI_OK)
+    return -1;
+
+  return JNI_VERSION_1_6;
+}
+
+// Return thread ID as a string.
+static std::string GetThreadId() {
+  char buf[21];  // Big enough to hold a kuint64max plus terminating NULL.
+  RTC_CHECK_LT(snprintf(buf, sizeof(buf), "%ld",
+                        static_cast<long>(syscall(__NR_gettid))),
+               sizeof(buf))
+      << "Thread id is bigger than uint64??";
+  return std::string(buf);
+}
+
+// Return the current thread's name.
+static std::string GetThreadName() {
+  char name[17] = {0};
+  if (prctl(PR_GET_NAME, name) != 0)
+    return std::string("<noname>");
+  return std::string(name);
+}
+
+// Return a |JNIEnv*| usable on this thread.  Attaches to |g_jvm| if necessary.
+JNIEnv* AttachCurrentThreadIfNeeded() {
+  JNIEnv* jni = GetEnv();
+  if (jni)
+    return jni;
+  RTC_CHECK(!pthread_getspecific(g_jni_ptr))
+      << "TLS has a JNIEnv* but not attached?";
+
+  std::string name(GetThreadName() + " - " + GetThreadId());
+  JavaVMAttachArgs args;
+  args.version = JNI_VERSION_1_6;
+  args.name = &name[0];
+  args.group = NULL;
+  // Deal with difference in signatures between Oracle's jni.h and Android's.
+#ifdef _JAVASOFT_JNI_H_  // Oracle's jni.h violates the JNI spec!
+  void* env = NULL;
+#else
+  JNIEnv* env = NULL;
+#endif
+  RTC_CHECK(!g_jvm->AttachCurrentThread(&env, &args))
+      << "Failed to attach thread";
+  RTC_CHECK(env) << "AttachCurrentThread handed back NULL!";
+  jni = reinterpret_cast<JNIEnv*>(env);
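+  // Remember the JNIEnv* in thread-local storage so that ThreadDestructor()
+  // detaches this thread when it exits.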
+  RTC_CHECK(!pthread_setspecific(g_jni_ptr, jni)) << "pthread_setspecific";
+  return jni;
+}
+
+// Return a |jlong| that will correctly convert back to |ptr|.  This is needed
+// because the alternative (of silently passing a 32-bit pointer to a vararg
+// function expecting a 64-bit param) picks up garbage in the high 32 bits.
+jlong jlongFromPointer(void* ptr) {
+  static_assert(sizeof(intptr_t) <= sizeof(jlong),
+                "Time to rethink the use of jlongs");
+  // Going through intptr_t to be obvious about the definedness of the
+  // conversion from pointer to integral type.  intptr_t to jlong is a standard
+  // widening by the static_assert above.
+  jlong ret = reinterpret_cast<intptr_t>(ptr);
+  RTC_DCHECK(reinterpret_cast<void*>(ret) == ptr);
+  return ret;
+}
+
+// JNIEnv-helper methods that RTC_CHECK success: no Java exception thrown and
+// found object/class/method/field is non-null.
+jmethodID GetMethodID(
+    JNIEnv* jni, jclass c, const std::string& name, const char* signature) {
+  jmethodID m = jni->GetMethodID(c, name.c_str(), signature);
+  CHECK_EXCEPTION(jni) << "error during GetMethodID: " << name << ", "
+                       << signature;
+  RTC_CHECK(m) << name << ", " << signature;
+  return m;
+}
+
+jmethodID GetStaticMethodID(
+    JNIEnv* jni, jclass c, const char* name, const char* signature) {
+  jmethodID m = jni->GetStaticMethodID(c, name, signature);
+  CHECK_EXCEPTION(jni) << "error during GetStaticMethodID: " << name << ", "
+                       << signature;
+  RTC_CHECK(m) << name << ", " << signature;
+  return m;
+}
+
+jfieldID GetFieldID(
+    JNIEnv* jni, jclass c, const char* name, const char* signature) {
+  jfieldID f = jni->GetFieldID(c, name, signature);
+  CHECK_EXCEPTION(jni) << "error during GetFieldID";
+  RTC_CHECK(f) << name << ", " << signature;
+  return f;
+}
+
+jclass GetObjectClass(JNIEnv* jni, jobject object) {
+  jclass c = jni->GetObjectClass(object);
+  CHECK_EXCEPTION(jni) << "error during GetObjectClass";
+  RTC_CHECK(c) << "GetObjectClass returned NULL";
+  return c;
+}
+
+jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id) {
+  jobject o = jni->GetObjectField(object, id);
+  CHECK_EXCEPTION(jni) << "error during GetObjectField";
+  RTC_CHECK(o) << "GetObjectField returned NULL";
+  return o;
+}
+
+jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id) {
+  return static_cast<jstring>(GetObjectField(jni, object, id));
+}
+
+jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id) {
+  jlong l = jni->GetLongField(object, id);
+  CHECK_EXCEPTION(jni) << "error during GetLongField";
+  return l;
+}
+
+jint GetIntField(JNIEnv* jni, jobject object, jfieldID id) {
+  jint i = jni->GetIntField(object, id);
+  CHECK_EXCEPTION(jni) << "error during GetIntField";
+  return i;
+}
+
+bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id) {
+  jboolean b = jni->GetBooleanField(object, id);
+  CHECK_EXCEPTION(jni) << "error during GetBooleanField";
+  return b;
+}
+
+// Java references to "null" can only be distinguished as such in C++ by
+// creating a local reference, so this helper wraps that logic.
+bool IsNull(JNIEnv* jni, jobject obj) {
+  ScopedLocalRefFrame local_ref_frame(jni);
+  return jni->NewLocalRef(obj) == NULL;
+}
+
+// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
+jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native) {
+  jstring jstr = jni->NewStringUTF(native.c_str());
+  CHECK_EXCEPTION(jni) << "error during NewStringUTF";
+  return jstr;
+}
+
+// Given a (UTF-16) jstring return a new UTF-8 native string.
+std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
+  const char* chars = jni->GetStringUTFChars(j_string, NULL);
+  CHECK_EXCEPTION(jni) << "Error during GetStringUTFChars";
+  std::string str(chars, jni->GetStringUTFLength(j_string));
+  CHECK_EXCEPTION(jni) << "Error during GetStringUTFLength";
+  jni->ReleaseStringUTFChars(j_string, chars);
+  CHECK_EXCEPTION(jni) << "Error during ReleaseStringUTFChars";
+  return str;
+}
+
+// Return the (singleton) Java enum object corresponding to |index|.
+jobject JavaEnumFromIndex(JNIEnv* jni, jclass state_class,
+                          const std::string& state_class_name, int index) {
+  jmethodID state_values_id = GetStaticMethodID(
+      jni, state_class, "values", ("()[L" + state_class_name  + ";").c_str());
+  jobjectArray state_values = static_cast<jobjectArray>(
+      jni->CallStaticObjectMethod(state_class, state_values_id));
+  CHECK_EXCEPTION(jni) << "error during CallStaticObjectMethod";
+  jobject ret = jni->GetObjectArrayElement(state_values, index);
+  CHECK_EXCEPTION(jni) << "error during GetObjectArrayElement";
+  return ret;
+}
+
+std::string GetJavaEnumName(JNIEnv* jni,
+                            const std::string& className,
+                            jobject j_enum) {
+  jclass enumClass = FindClass(jni, className.c_str());
+  jmethodID nameMethod =
+      GetMethodID(jni, enumClass, "name", "()Ljava/lang/String;");
+  jstring name =
+      reinterpret_cast<jstring>(jni->CallObjectMethod(j_enum, nameMethod));
+  CHECK_EXCEPTION(jni) << "error during CallObjectMethod for " << className
+                       << ".name";
+  return JavaToStdString(jni, name);
+}
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o) {
+  jobject ret = jni->NewGlobalRef(o);
+  CHECK_EXCEPTION(jni) << "error during NewGlobalRef";
+  RTC_CHECK(ret);
+  return ret;
+}
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o) {
+  jni->DeleteGlobalRef(o);
+  CHECK_EXCEPTION(jni) << "error during DeleteGlobalRef";
+}
+
+// Scope Java local references to the lifetime of this object.  Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
+ScopedLocalRefFrame::ScopedLocalRefFrame(JNIEnv* jni) : jni_(jni) {
+  RTC_CHECK(!jni_->PushLocalFrame(0)) << "Failed to PushLocalFrame";
+}
+ScopedLocalRefFrame::~ScopedLocalRefFrame() {
+  jni_->PopLocalFrame(NULL);
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/jni_helpers.h b/webrtc/api/java/jni/jni_helpers.h
new file mode 100644
index 0000000..5498158
--- /dev/null
+++ b/webrtc/api/java/jni/jni_helpers.h
@@ -0,0 +1,146 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// This file contains convenience functions and classes for JNI.
+// Before using any of the methods, InitGlobalJniVariables must be called.
+
+#ifndef WEBRTC_API_JAVA_JNI_JNI_HELPERS_H_
+#define WEBRTC_API_JAVA_JNI_JNI_HELPERS_H_
+
+#include <jni.h>
+#include <string>
+
+#include "webrtc/base/checks.h"
+
+// Abort the process if |jni| has a Java exception pending.
+// This macro uses the comma operator to execute ExceptionDescribe
+// and ExceptionClear, ignoring their return values and sending ""
+// to the error stream.
+#define CHECK_EXCEPTION(jni)        \
+  RTC_CHECK(!jni->ExceptionCheck()) \
+      << (jni->ExceptionDescribe(), jni->ExceptionClear(), "")
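+// Typical usage (obj and m are illustrative names):
+//   jni->CallVoidMethod(obj, m);
+//   CHECK_EXCEPTION(jni) << "error during CallVoidMethod";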
+
+// Helper that calls ptr->Release() and aborts the process with a useful
+// message if that didn't actually delete *ptr because of extra refcounts.
+#define CHECK_RELEASE(ptr) \
+  RTC_CHECK_EQ(0, (ptr)->Release()) << "Unexpected refcount."
+
+namespace webrtc_jni {
+
+jint InitGlobalJniVariables(JavaVM *jvm);
+
+// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
+JNIEnv* GetEnv();
+
+JavaVM *GetJVM();
+
+// Return a |JNIEnv*| usable on this thread.  Attaches to |g_jvm| if necessary.
+JNIEnv* AttachCurrentThreadIfNeeded();
+
+// Return a |jlong| that will correctly convert back to |ptr|.  This is needed
+// because the alternative (of silently passing a 32-bit pointer to a vararg
+// function expecting a 64-bit param) picks up garbage in the high 32 bits.
+jlong jlongFromPointer(void* ptr);
+
+// JNIEnv-helper methods that RTC_CHECK success: no Java exception thrown and
+// found object/class/method/field is non-null.
+jmethodID GetMethodID(
+    JNIEnv* jni, jclass c, const std::string& name, const char* signature);
+
+jmethodID GetStaticMethodID(
+    JNIEnv* jni, jclass c, const char* name, const char* signature);
+
+jfieldID GetFieldID(JNIEnv* jni, jclass c, const char* name,
+                    const char* signature);
+
+jclass GetObjectClass(JNIEnv* jni, jobject object);
+
+jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id);
+
+jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id);
+
+jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id);
+
+jint GetIntField(JNIEnv* jni, jobject object, jfieldID id);
+
+bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id);
+
+// Java references to "null" can only be distinguished as such in C++ by
+// creating a local reference, so this helper wraps that logic.
+bool IsNull(JNIEnv* jni, jobject obj);
+
+// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
+jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native);
+
+// Given a (UTF-16) jstring return a new UTF-8 native string.
+std::string JavaToStdString(JNIEnv* jni, const jstring& j_string);
+
+// Return the (singleton) Java enum object corresponding to |index|.
+jobject JavaEnumFromIndex(JNIEnv* jni, jclass state_class,
+                          const std::string& state_class_name, int index);
+
+// Returns the name of a Java enum.
+std::string GetJavaEnumName(JNIEnv* jni,
+                            const std::string& className,
+                            jobject j_enum);
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o);
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o);
+
+// Scope Java local references to the lifetime of this object.  Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
+class ScopedLocalRefFrame {
+ public:
+  explicit ScopedLocalRefFrame(JNIEnv* jni);
+  ~ScopedLocalRefFrame();
+
+ private:
+  JNIEnv* jni_;
+};
+
+// Scoped holder for global Java refs.
+template<class T>  // T is jclass, jobject, jintArray, etc.
+class ScopedGlobalRef {
+ public:
+  ScopedGlobalRef(JNIEnv* jni, T obj)
+      : obj_(static_cast<T>(jni->NewGlobalRef(obj))) {}
+  ~ScopedGlobalRef() {
+    DeleteGlobalRef(AttachCurrentThreadIfNeeded(), obj_);
+  }
+  T operator*() const {
+    return obj_;
+  }
+ private:
+  T obj_;
+};
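+// Example (names are illustrative):
+//   ScopedGlobalRef<jclass> j_observer_class(jni, local_class_ref);
+//   ... use *j_observer_class ...
+// The global reference is released when the holder goes out of scope.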
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_JNI_HELPERS_H_
diff --git a/webrtc/api/java/jni/jni_onload.cc b/webrtc/api/java/jni/jni_onload.cc
new file mode 100644
index 0000000..af2804d
--- /dev/null
+++ b/webrtc/api/java/jni/jni_onload.cc
@@ -0,0 +1,55 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/ssladapter.h"
+
+namespace webrtc_jni {
+
+extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) {
+  jint ret = InitGlobalJniVariables(jvm);
+  RTC_DCHECK_GE(ret, 0);
+  if (ret < 0)
+    return -1;
+
+  RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+  LoadGlobalClassReferenceHolder();
+
+  return ret;
+}
+
+extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) {
+  FreeGlobalClassReferenceHolder();
+  RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/native_handle_impl.cc b/webrtc/api/java/jni/native_handle_impl.cc
new file mode 100644
index 0000000..8ec549c
--- /dev/null
+++ b/webrtc/api/java/jni/native_handle_impl.cc
@@ -0,0 +1,186 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/java/jni/native_handle_impl.h"
+
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/keep_ref_until_done.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+using webrtc::NativeHandleBuffer;
+
+namespace {
+
+void RotateMatrix(float a[16], webrtc::VideoRotation rotation) {
+  // Texture coordinates are in the range 0 to 1. The transformation of the last
+  // row in each rotation matrix is needed for proper translation, e.g., to
+  // mirror x, we don't replace x by -x, but by 1-x.
+  switch (rotation) {
+    case webrtc::kVideoRotation_0:
+      break;
+    case webrtc::kVideoRotation_90: {
+      const float ROTATE_90[16] =
+          { a[4], a[5], a[6], a[7],
+            -a[0], -a[1], -a[2], -a[3],
+            a[8], a[9], a[10], a[11],
+            a[0] + a[12], a[1] + a[13], a[2] + a[14], a[3] + a[15]};
+      memcpy(a, ROTATE_90, sizeof(ROTATE_90));
+    } break;
+    case webrtc::kVideoRotation_180: {
+      const float ROTATE_180[16] =
+          { -a[0], -a[1], -a[2], -a[3],
+            -a[4], -a[5], -a[6], -a[7],
+            a[8], a[9], a[10], a[11],
+            a[0] + a[4] + a[12], a[1] + a[5] + a[13], a[2] + a[6] + a[14],
+            a[3] + a[7] + a[15]};
+      memcpy(a, ROTATE_180, sizeof(ROTATE_180));
+    } break;
+    case webrtc::kVideoRotation_270: {
+      const float ROTATE_270[16] =
+          { -a[4], -a[5], -a[6], -a[7],
+            a[0], a[1], a[2], a[3],
+            a[8], a[9], a[10], a[11],
+            a[4] + a[12], a[5] + a[13], a[6] + a[14], a[7] + a[15]};
+      memcpy(a, ROTATE_270, sizeof(ROTATE_270));
+    } break;
+  }
+}
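+
+// In terms of the texture coordinates (s, t) that the column-major matrix |a|
+// is applied to, the column operations above amount to pre-composing with
+// (derived from the code; stated here only as a reading aid):
+//   kVideoRotation_90:  (s, t) -> (1 - t, s)
+//   kVideoRotation_180: (s, t) -> (1 - s, 1 - t)
+//   kVideoRotation_270: (s, t) -> (t, 1 - s)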
+
+}  // anonymous namespace
+
+namespace webrtc_jni {
+
+// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
+static const int kBufferAlignment = 64;
+
+NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
+                                   jint j_oes_texture_id,
+                                   jfloatArray j_transform_matrix)
+  : oes_texture_id(j_oes_texture_id) {
+  RTC_CHECK_EQ(16, jni->GetArrayLength(j_transform_matrix));
+  jfloat* transform_matrix_ptr =
+      jni->GetFloatArrayElements(j_transform_matrix, nullptr);
+  for (int i = 0; i < 16; ++i) {
+    sampling_matrix[i] = transform_matrix_ptr[i];
+  }
+  jni->ReleaseFloatArrayElements(j_transform_matrix, transform_matrix_ptr, 0);
+}
+
+AndroidTextureBuffer::AndroidTextureBuffer(
+    int width,
+    int height,
+    const NativeHandleImpl& native_handle,
+    jobject surface_texture_helper,
+    const rtc::Callback0<void>& no_longer_used)
+    : webrtc::NativeHandleBuffer(&native_handle_, width, height),
+      native_handle_(native_handle),
+      surface_texture_helper_(surface_texture_helper),
+      no_longer_used_cb_(no_longer_used) {}
+
+AndroidTextureBuffer::~AndroidTextureBuffer() {
+  no_longer_used_cb_();
+}
+
+rtc::scoped_refptr<webrtc::VideoFrameBuffer>
+AndroidTextureBuffer::NativeToI420Buffer() {
+  int uv_width = (width()+7) / 8;
+  int stride = 8 * uv_width;
+  int uv_height = (height()+1)/2;
+  size_t size = stride * (height() + uv_height);
+  // The data is owned by the frame, and the normal case is that the
+  // data is deleted by the frame's destructor callback.
+  //
+  // TODO(nisse): Use an I420BufferPool. We then need to extend that
+  // class, and I420Buffer, to support our memory layout.
+  rtc::scoped_ptr<uint8_t, webrtc::AlignedFreeDeleter> yuv_data(
+      static_cast<uint8_t*>(webrtc::AlignedMalloc(size, kBufferAlignment)));
+  // See SurfaceTextureHelper.java for the required layout.
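+  // As implied by the pointer arithmetic below, the buffer holds height()
+  // rows of |stride| bytes for the Y plane, followed by uv_height rows in
+  // which the first stride/2 bytes are a U row and the last stride/2 bytes
+  // are a V row.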
+  uint8_t* y_data = yuv_data.get();
+  uint8_t* u_data = y_data + height() * stride;
+  uint8_t* v_data = u_data + stride/2;
+
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> copy =
+    new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
+        width(), height(),
+        y_data, stride,
+        u_data, stride,
+        v_data, stride,
+        rtc::Bind(&webrtc::AlignedFree, yuv_data.release()));
+
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+
+  jmethodID transform_mid = GetMethodID(
+      jni,
+      GetObjectClass(jni, surface_texture_helper_),
+      "textureToYUV",
+      "(Ljava/nio/ByteBuffer;IIII[F)V");
+
+  jobject byte_buffer = jni->NewDirectByteBuffer(y_data, size);
+
+  // TODO(nisse): Keep java transform matrix around.
+  jfloatArray sampling_matrix = jni->NewFloatArray(16);
+  jni->SetFloatArrayRegion(sampling_matrix, 0, 16,
+                           native_handle_.sampling_matrix);
+
+  jni->CallVoidMethod(surface_texture_helper_,
+                      transform_mid,
+                      byte_buffer, width(), height(), stride,
+                      native_handle_.oes_texture_id, sampling_matrix);
+  CHECK_EXCEPTION(jni) << "textureToYUV throwed an exception";
+
+  return copy;
+}
+
+rtc::scoped_refptr<AndroidTextureBuffer>
+AndroidTextureBuffer::ScaleAndRotate(int dst_width,
+                                     int dst_height,
+                                     webrtc::VideoRotation rotation) {
+  if (width() == dst_width && height() == dst_height &&
+      rotation == webrtc::kVideoRotation_0) {
+    return this;
+  }
+  int rotated_width = (rotation % 180 == 0) ? dst_width : dst_height;
+  int rotated_height = (rotation % 180 == 0) ? dst_height : dst_width;
+
+  // Here we use Bind magic to add a reference count to |this| until the newly
+  // created AndroidTextureBuffer is destroyed.
+  rtc::scoped_refptr<AndroidTextureBuffer> buffer(
+      new rtc::RefCountedObject<AndroidTextureBuffer>(
+          rotated_width, rotated_height, native_handle_,
+          surface_texture_helper_, rtc::KeepRefUntilDone(this)));
+
+  RotateMatrix(buffer->native_handle_.sampling_matrix, rotation);
+  return buffer;
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/native_handle_impl.h b/webrtc/api/java/jni/native_handle_impl.h
new file mode 100644
index 0000000..4203bdf
--- /dev/null
+++ b/webrtc/api/java/jni/native_handle_impl.h
@@ -0,0 +1,77 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
+#define WEBRTC_API_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
+
+#include <jni.h>
+
+#include "webrtc/common_video/include/video_frame_buffer.h"
+#include "webrtc/common_video/rotation.h"
+
+namespace webrtc_jni {
+
+// Wrapper for texture object.
+struct NativeHandleImpl {
+  NativeHandleImpl(JNIEnv* jni,
+                   jint j_oes_texture_id,
+                   jfloatArray j_transform_matrix);
+
+  const int oes_texture_id;
+  float sampling_matrix[16];
+};
+
+class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
+ public:
+  AndroidTextureBuffer(int width,
+                       int height,
+                       const NativeHandleImpl& native_handle,
+                       jobject surface_texture_helper,
+                       const rtc::Callback0<void>& no_longer_used);
+  ~AndroidTextureBuffer();
+  rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
+
+  rtc::scoped_refptr<AndroidTextureBuffer> ScaleAndRotate(
+      int dst_width,
+      int dst_height,
+      webrtc::VideoRotation rotation);
+
+ private:
+  NativeHandleImpl native_handle_;
+  // Raw object pointer, relying on the caller, i.e.,
+  // AndroidVideoCapturerJni or the C++ SurfaceTextureHelper, to keep
+  // a global reference. TODO(nisse): Make this a reference to the C++
+  // SurfaceTextureHelper instead, but that requires some refactoring
+  // of AndroidVideoCapturerJni.
+  jobject surface_texture_helper_;
+  rtc::Callback0<void> no_longer_used_cb_;
+};
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
diff --git a/webrtc/api/java/jni/peerconnection_jni.cc b/webrtc/api/java/jni/peerconnection_jni.cc
new file mode 100644
index 0000000..1160b2b
--- /dev/null
+++ b/webrtc/api/java/jni/peerconnection_jni.cc
@@ -0,0 +1,2058 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// Hints for future visitors:
+// This entire file is an implementation detail of the org.webrtc Java package,
+// the most interesting bits of which are org.webrtc.PeerConnection{,Factory}.
+// The layout of this file is roughly:
+// - various helper C++ functions & classes that wrap Java counterparts and
+//   expose a C++ interface that can be passed to the C++ PeerConnection APIs
+// - implementations of methods declared "static" in the Java package (named
+//   things like Java_org_webrtc_OMG_Can_This_Name_Be_Any_Longer, prescribed by
+//   the JNI spec).
+//
+// Lifecycle notes: objects are owned where they will be called; in other
+// words, FooObservers are owned by C++-land, and user-callable objects (e.g.
+// PeerConnection and VideoTrack) are owned by Java-land.
+// When this file allocates C++ RefCountInterfaces it AddRef()s an artificial
+// ref simulating the jlong held in Java-land, and then Release()s the ref in
+// the respective free call.  Sometimes this AddRef is implicit in the
+// construction of a scoped_refptr<> which is then .release()d.
+// Any persistent (non-local) references from C++ to Java must be global or weak
+// (in which case they must be checked before use)!
+//
+// Exception notes: pretty much all JNI calls can throw Java exceptions, so each
+// call through a JNIEnv* pointer needs to be followed by an ExceptionCheck()
+// call.  In this file this is done in CHECK_EXCEPTION, making for much easier
+// debugging in case of failure (the alternative is to wait for control to
+// return to the Java frame that called code in this file, at which point it's
+// impossible to tell which JNI call broke).
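+//
+// A minimal sketch of both patterns together (names as used further down in
+// this file):
+//
+//   stream->AddRef();  // Artificial ref representing the jlong held by Java.
+//   jobject j_stream = jni()->NewObject(*j_media_stream_class_,
+//                                       j_media_stream_ctor_,
+//                                       reinterpret_cast<jlong>(stream));
+//   CHECK_EXCEPTION(jni()) << "error during NewObject";
+//   // ... and MediaStream_free() later does the matching CHECK_RELEASE().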
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include <limits>
+#include <utility>
+
+#include "webrtc/api/androidvideocapturer.h"
+#include "webrtc/api/dtlsidentitystore.h"
+#include "webrtc/api/java/jni/androidmediadecoder_jni.h"
+#include "webrtc/api/java/jni/androidmediaencoder_jni.h"
+#include "webrtc/api/java/jni/androidnetworkmonitor_jni.h"
+#include "webrtc/api/java/jni/androidvideocapturer_jni.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/api/mediaconstraintsinterface.h"
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/api/rtpreceiverinterface.h"
+#include "webrtc/api/rtpsenderinterface.h"
+#include "webrtc/api/videosourceinterface.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/event_tracer.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/logsinks.h"
+#include "webrtc/base/messagequeue.h"
+#include "webrtc/base/networkmonitor.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/media/base/videocapturer.h"
+#include "webrtc/media/base/videorenderer.h"
+#include "webrtc/media/devices/videorendererfactory.h"
+#include "webrtc/media/webrtc/webrtcvideodecoderfactory.h"
+#include "webrtc/media/webrtc/webrtcvideoencoderfactory.h"
+#include "webrtc/modules/video_render/video_render_internal.h"
+#include "webrtc/system_wrappers/include/field_trial_default.h"
+#include "webrtc/system_wrappers/include/logcat_trace_context.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/voice_engine/include/voe_base.h"
+
+using cricket::WebRtcVideoDecoderFactory;
+using cricket::WebRtcVideoEncoderFactory;
+using rtc::Bind;
+using rtc::Thread;
+using rtc::ThreadManager;
+using rtc::scoped_ptr;
+using webrtc::AudioSourceInterface;
+using webrtc::AudioTrackInterface;
+using webrtc::AudioTrackVector;
+using webrtc::CreateSessionDescriptionObserver;
+using webrtc::DataBuffer;
+using webrtc::DataChannelInit;
+using webrtc::DataChannelInterface;
+using webrtc::DataChannelObserver;
+using webrtc::IceCandidateInterface;
+using webrtc::LogcatTraceContext;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaSourceInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::MediaStreamTrackInterface;
+using webrtc::PeerConnectionFactoryInterface;
+using webrtc::PeerConnectionInterface;
+using webrtc::PeerConnectionObserver;
+using webrtc::RtpReceiverInterface;
+using webrtc::RtpSenderInterface;
+using webrtc::SessionDescriptionInterface;
+using webrtc::SetSessionDescriptionObserver;
+using webrtc::StatsObserver;
+using webrtc::StatsReport;
+using webrtc::StatsReports;
+using webrtc::VideoRendererInterface;
+using webrtc::VideoSourceInterface;
+using webrtc::VideoTrackInterface;
+using webrtc::VideoTrackVector;
+using webrtc::kVideoCodecVP8;
+
+namespace webrtc_jni {
+
+// Field trials initialization string
+static char *field_trials_init_string = NULL;
+
+// Set in PeerConnectionFactory_initializeAndroidGlobals().
+static bool factory_static_initialized = false;
+static bool video_hw_acceleration_enabled = true;
+
+// Return the (singleton) Java Enum object corresponding to |index|;
+// |state_class_fragment| is something like "MediaSource$State".
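+// For example (as used by the observer callbacks below):
+//   jobject new_state_enum =
+//       JavaEnumFromIndex(jni, "PeerConnection$SignalingState", new_state);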
+static jobject JavaEnumFromIndex(
+    JNIEnv* jni, const std::string& state_class_fragment, int index) {
+  const std::string state_class = "org/webrtc/" + state_class_fragment;
+  return JavaEnumFromIndex(jni, FindClass(jni, state_class.c_str()),
+                           state_class, index);
+}
+
+static DataChannelInit JavaDataChannelInitToNative(
+    JNIEnv* jni, jobject j_init) {
+  DataChannelInit init;
+
+  jclass j_init_class = FindClass(jni, "org/webrtc/DataChannel$Init");
+  jfieldID ordered_id = GetFieldID(jni, j_init_class, "ordered", "Z");
+  jfieldID max_retransmit_time_id =
+      GetFieldID(jni, j_init_class, "maxRetransmitTimeMs", "I");
+  jfieldID max_retransmits_id =
+      GetFieldID(jni, j_init_class, "maxRetransmits", "I");
+  jfieldID protocol_id =
+      GetFieldID(jni, j_init_class, "protocol", "Ljava/lang/String;");
+  jfieldID negotiated_id = GetFieldID(jni, j_init_class, "negotiated", "Z");
+  jfieldID id_id = GetFieldID(jni, j_init_class, "id", "I");
+
+  init.ordered = GetBooleanField(jni, j_init, ordered_id);
+  init.maxRetransmitTime = GetIntField(jni, j_init, max_retransmit_time_id);
+  init.maxRetransmits = GetIntField(jni, j_init, max_retransmits_id);
+  init.protocol = JavaToStdString(
+      jni, GetStringField(jni, j_init, protocol_id));
+  init.negotiated = GetBooleanField(jni, j_init, negotiated_id);
+  init.id = GetIntField(jni, j_init, id_id);
+
+  return init;
+}
+
+class ConstraintsWrapper;
+
+// Adapter between the C++ PeerConnectionObserver interface and the Java
+// PeerConnection.Observer interface.  Wraps an instance of the Java interface
+// and dispatches C++ callbacks to Java.
+class PCOJava : public PeerConnectionObserver {
+ public:
+  PCOJava(JNIEnv* jni, jobject j_observer)
+      : j_observer_global_(jni, j_observer),
+        j_observer_class_(jni, GetObjectClass(jni, *j_observer_global_)),
+        j_media_stream_class_(jni, FindClass(jni, "org/webrtc/MediaStream")),
+        j_media_stream_ctor_(GetMethodID(
+            jni, *j_media_stream_class_, "<init>", "(J)V")),
+        j_audio_track_class_(jni, FindClass(jni, "org/webrtc/AudioTrack")),
+        j_audio_track_ctor_(GetMethodID(
+            jni, *j_audio_track_class_, "<init>", "(J)V")),
+        j_video_track_class_(jni, FindClass(jni, "org/webrtc/VideoTrack")),
+        j_video_track_ctor_(GetMethodID(
+            jni, *j_video_track_class_, "<init>", "(J)V")),
+        j_data_channel_class_(jni, FindClass(jni, "org/webrtc/DataChannel")),
+        j_data_channel_ctor_(GetMethodID(
+            jni, *j_data_channel_class_, "<init>", "(J)V")) {
+  }
+
+  virtual ~PCOJava() {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    while (!remote_streams_.empty())
+      DisposeRemoteStream(remote_streams_.begin());
+  }
+
+  void OnIceCandidate(const IceCandidateInterface* candidate) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    std::string sdp;
+    RTC_CHECK(candidate->ToString(&sdp)) << "got so far: " << sdp;
+    jclass candidate_class = FindClass(jni(), "org/webrtc/IceCandidate");
+    jmethodID ctor = GetMethodID(jni(), candidate_class,
+        "<init>", "(Ljava/lang/String;ILjava/lang/String;)V");
+    jstring j_mid = JavaStringFromStdString(jni(), candidate->sdp_mid());
+    jstring j_sdp = JavaStringFromStdString(jni(), sdp);
+    jobject j_candidate = jni()->NewObject(
+        candidate_class, ctor, j_mid, candidate->sdp_mline_index(), j_sdp);
+    CHECK_EXCEPTION(jni()) << "error during NewObject";
+    jmethodID m = GetMethodID(jni(), *j_observer_class_,
+                              "onIceCandidate", "(Lorg/webrtc/IceCandidate;)V");
+    jni()->CallVoidMethod(*j_observer_global_, m, j_candidate);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnSignalingChange(
+      PeerConnectionInterface::SignalingState new_state) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m = GetMethodID(
+        jni(), *j_observer_class_, "onSignalingChange",
+        "(Lorg/webrtc/PeerConnection$SignalingState;)V");
+    jobject new_state_enum =
+        JavaEnumFromIndex(jni(), "PeerConnection$SignalingState", new_state);
+    jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnIceConnectionChange(
+      PeerConnectionInterface::IceConnectionState new_state) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m = GetMethodID(
+        jni(), *j_observer_class_, "onIceConnectionChange",
+        "(Lorg/webrtc/PeerConnection$IceConnectionState;)V");
+    jobject new_state_enum = JavaEnumFromIndex(
+        jni(), "PeerConnection$IceConnectionState", new_state);
+    jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnIceConnectionReceivingChange(bool receiving) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m = GetMethodID(
+        jni(), *j_observer_class_, "onIceConnectionReceivingChange", "(Z)V");
+    jni()->CallVoidMethod(*j_observer_global_, m, receiving);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnIceGatheringChange(
+      PeerConnectionInterface::IceGatheringState new_state) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m = GetMethodID(
+        jni(), *j_observer_class_, "onIceGatheringChange",
+        "(Lorg/webrtc/PeerConnection$IceGatheringState;)V");
+    jobject new_state_enum = JavaEnumFromIndex(
+        jni(), "PeerConnection$IceGatheringState", new_state);
+    jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnAddStream(MediaStreamInterface* stream) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    // Java MediaStream holds one reference. Corresponding Release() is in
+    // MediaStream_free, triggered by MediaStream.dispose().
+    stream->AddRef();
+    jobject j_stream =
+        jni()->NewObject(*j_media_stream_class_, j_media_stream_ctor_,
+                         reinterpret_cast<jlong>(stream));
+    CHECK_EXCEPTION(jni()) << "error during NewObject";
+
+    for (const auto& track : stream->GetAudioTracks()) {
+      jstring id = JavaStringFromStdString(jni(), track->id());
+      // Java AudioTrack holds one reference. Corresponding Release() is in
+      // MediaStreamTrack_free, triggered by AudioTrack.dispose().
+      track->AddRef();
+      jobject j_track =
+          jni()->NewObject(*j_audio_track_class_, j_audio_track_ctor_,
+                           reinterpret_cast<jlong>(track.get()), id);
+      CHECK_EXCEPTION(jni()) << "error during NewObject";
+      jfieldID audio_tracks_id = GetFieldID(jni(),
+                                            *j_media_stream_class_,
+                                            "audioTracks",
+                                            "Ljava/util/LinkedList;");
+      jobject audio_tracks = GetObjectField(jni(), j_stream, audio_tracks_id);
+      jmethodID add = GetMethodID(jni(),
+                                  GetObjectClass(jni(), audio_tracks),
+                                  "add",
+                                  "(Ljava/lang/Object;)Z");
+      jboolean added = jni()->CallBooleanMethod(audio_tracks, add, j_track);
+      CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
+      RTC_CHECK(added);
+    }
+
+    for (const auto& track : stream->GetVideoTracks()) {
+      jstring id = JavaStringFromStdString(jni(), track->id());
+      // Java VideoTrack holds one reference. Corresponding Release() is in
+      // MediaStreamTrack_free, triggered by VideoTrack.dispose().
+      track->AddRef();
+      jobject j_track =
+          jni()->NewObject(*j_video_track_class_, j_video_track_ctor_,
+                           reinterpret_cast<jlong>(track.get()), id);
+      CHECK_EXCEPTION(jni()) << "error during NewObject";
+      jfieldID video_tracks_id = GetFieldID(jni(),
+                                            *j_media_stream_class_,
+                                            "videoTracks",
+                                            "Ljava/util/LinkedList;");
+      jobject video_tracks = GetObjectField(jni(), j_stream, video_tracks_id);
+      jmethodID add = GetMethodID(jni(),
+                                  GetObjectClass(jni(), video_tracks),
+                                  "add",
+                                  "(Ljava/lang/Object;)Z");
+      jboolean added = jni()->CallBooleanMethod(video_tracks, add, j_track);
+      CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
+      RTC_CHECK(added);
+    }
+    remote_streams_[stream] = NewGlobalRef(jni(), j_stream);
+
+    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onAddStream",
+                              "(Lorg/webrtc/MediaStream;)V");
+    jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnRemoveStream(MediaStreamInterface* stream) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream);
+    RTC_CHECK(it != remote_streams_.end()) << "unexpected stream: " << std::hex
+                                           << stream;
+    jobject j_stream = it->second;
+    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onRemoveStream",
+                              "(Lorg/webrtc/MediaStream;)V");
+    jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+    DisposeRemoteStream(it);
+  }
+
+  void OnDataChannel(DataChannelInterface* channel) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jobject j_channel = jni()->NewObject(
+        *j_data_channel_class_, j_data_channel_ctor_, (jlong)channel);
+    CHECK_EXCEPTION(jni()) << "error during NewObject";
+
+    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onDataChannel",
+                              "(Lorg/webrtc/DataChannel;)V");
+    jni()->CallVoidMethod(*j_observer_global_, m, j_channel);
+
+    // Channel is now owned by Java object, and will be freed from
+    // DataChannel.dispose().  Important that this be done _after_ the
+    // CallVoidMethod above as Java code might call back into native code and be
+    // surprised to see a refcount of 2.
+    int bumped_count = channel->AddRef();
+    RTC_CHECK(bumped_count == 2) << "Unexpected refcount OnDataChannel";
+
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnRenegotiationNeeded() override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m =
+        GetMethodID(jni(), *j_observer_class_, "onRenegotiationNeeded", "()V");
+    jni()->CallVoidMethod(*j_observer_global_, m);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void SetConstraints(ConstraintsWrapper* constraints) {
+    RTC_CHECK(!constraints_.get()) << "constraints already set!";
+    constraints_.reset(constraints);
+  }
+
+  const ConstraintsWrapper* constraints() { return constraints_.get(); }
+
+ private:
+  typedef std::map<MediaStreamInterface*, jobject> NativeToJavaStreamsMap;
+
+  void DisposeRemoteStream(const NativeToJavaStreamsMap::iterator& it) {
+    jobject j_stream = it->second;
+    remote_streams_.erase(it);
+    jni()->CallVoidMethod(
+        j_stream, GetMethodID(jni(), *j_media_stream_class_, "dispose", "()V"));
+    CHECK_EXCEPTION(jni()) << "error during MediaStream.dispose()";
+    DeleteGlobalRef(jni(), j_stream);
+  }
+
+  JNIEnv* jni() {
+    return AttachCurrentThreadIfNeeded();
+  }
+
+  const ScopedGlobalRef<jobject> j_observer_global_;
+  const ScopedGlobalRef<jclass> j_observer_class_;
+  const ScopedGlobalRef<jclass> j_media_stream_class_;
+  const jmethodID j_media_stream_ctor_;
+  const ScopedGlobalRef<jclass> j_audio_track_class_;
+  const jmethodID j_audio_track_ctor_;
+  const ScopedGlobalRef<jclass> j_video_track_class_;
+  const jmethodID j_video_track_ctor_;
+  const ScopedGlobalRef<jclass> j_data_channel_class_;
+  const jmethodID j_data_channel_ctor_;
+  // C++ -> Java remote streams. The stored jobects are global refs and must be
+  // manually deleted upon removal. Use DisposeRemoteStream().
+  NativeToJavaStreamsMap remote_streams_;
+  scoped_ptr<ConstraintsWrapper> constraints_;
+};
+
+// Wrapper for a Java MediaConstraints object.  Copies all needed data so when
+// the constructor returns the Java object is no longer needed.
+class ConstraintsWrapper : public MediaConstraintsInterface {
+ public:
+  ConstraintsWrapper(JNIEnv* jni, jobject j_constraints) {
+    PopulateConstraintsFromJavaPairList(
+        jni, j_constraints, "mandatory", &mandatory_);
+    PopulateConstraintsFromJavaPairList(
+        jni, j_constraints, "optional", &optional_);
+  }
+
+  virtual ~ConstraintsWrapper() {}
+
+  // MediaConstraintsInterface.
+  const Constraints& GetMandatory() const override { return mandatory_; }
+
+  const Constraints& GetOptional() const override { return optional_; }
+
+ private:
+  // Helper for translating a List<Pair<String, String>> to a Constraints.
+  static void PopulateConstraintsFromJavaPairList(
+      JNIEnv* jni, jobject j_constraints,
+      const char* field_name, Constraints* field) {
+    jfieldID j_id = GetFieldID(jni,
+        GetObjectClass(jni, j_constraints), field_name, "Ljava/util/List;");
+    jobject j_list = GetObjectField(jni, j_constraints, j_id);
+    jmethodID j_iterator_id = GetMethodID(jni,
+        GetObjectClass(jni, j_list), "iterator", "()Ljava/util/Iterator;");
+    jobject j_iterator = jni->CallObjectMethod(j_list, j_iterator_id);
+    CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+    jmethodID j_has_next = GetMethodID(jni,
+        GetObjectClass(jni, j_iterator), "hasNext", "()Z");
+    jmethodID j_next = GetMethodID(jni,
+        GetObjectClass(jni, j_iterator), "next", "()Ljava/lang/Object;");
+    while (jni->CallBooleanMethod(j_iterator, j_has_next)) {
+      CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+      jobject entry = jni->CallObjectMethod(j_iterator, j_next);
+      CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+      jmethodID get_key = GetMethodID(jni,
+          GetObjectClass(jni, entry), "getKey", "()Ljava/lang/String;");
+      jstring j_key = reinterpret_cast<jstring>(
+          jni->CallObjectMethod(entry, get_key));
+      CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+      jmethodID get_value = GetMethodID(jni,
+          GetObjectClass(jni, entry), "getValue", "()Ljava/lang/String;");
+      jstring j_value = reinterpret_cast<jstring>(
+          jni->CallObjectMethod(entry, get_value));
+      CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+      field->push_back(Constraint(JavaToStdString(jni, j_key),
+                                  JavaToStdString(jni, j_value)));
+    }
+    CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+  }
+
+  Constraints mandatory_;
+  Constraints optional_;
+};
+
+static jobject JavaSdpFromNativeSdp(
+    JNIEnv* jni, const SessionDescriptionInterface* desc) {
+  std::string sdp;
+  RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp;
+  jstring j_description = JavaStringFromStdString(jni, sdp);
+
+  jclass j_type_class = FindClass(
+      jni, "org/webrtc/SessionDescription$Type");
+  jmethodID j_type_from_canonical = GetStaticMethodID(
+      jni, j_type_class, "fromCanonicalForm",
+      "(Ljava/lang/String;)Lorg/webrtc/SessionDescription$Type;");
+  jstring j_type_string = JavaStringFromStdString(jni, desc->type());
+  jobject j_type = jni->CallStaticObjectMethod(
+      j_type_class, j_type_from_canonical, j_type_string);
+  CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+
+  jclass j_sdp_class = FindClass(jni, "org/webrtc/SessionDescription");
+  jmethodID j_sdp_ctor = GetMethodID(
+      jni, j_sdp_class, "<init>",
+      "(Lorg/webrtc/SessionDescription$Type;Ljava/lang/String;)V");
+  jobject j_sdp = jni->NewObject(
+      j_sdp_class, j_sdp_ctor, j_type, j_description);
+  CHECK_EXCEPTION(jni) << "error during NewObject";
+  return j_sdp;
+}
+
+template <class T>  // T is one of {Create,Set}SessionDescriptionObserver.
+class SdpObserverWrapper : public T {
+ public:
+  SdpObserverWrapper(JNIEnv* jni, jobject j_observer,
+                     ConstraintsWrapper* constraints)
+      : constraints_(constraints),
+        j_observer_global_(jni, j_observer),
+        j_observer_class_(jni, GetObjectClass(jni, j_observer)) {
+  }
+
+  virtual ~SdpObserverWrapper() {}
+
+  // Can't mark override because of templating.
+  virtual void OnSuccess() {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onSetSuccess", "()V");
+    jni()->CallVoidMethod(*j_observer_global_, m);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  // Can't mark override because of templating.
+  virtual void OnSuccess(SessionDescriptionInterface* desc) {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m = GetMethodID(
+        jni(), *j_observer_class_, "onCreateSuccess",
+        "(Lorg/webrtc/SessionDescription;)V");
+    jobject j_sdp = JavaSdpFromNativeSdp(jni(), desc);
+    jni()->CallVoidMethod(*j_observer_global_, m, j_sdp);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+ protected:
+  // Common implementation for failure of Set & Create types, distinguished by
+  // |op| being "Set" or "Create".
+  void DoOnFailure(const std::string& op, const std::string& error) {
+    jmethodID m = GetMethodID(jni(), *j_observer_class_, "on" + op + "Failure",
+                              "(Ljava/lang/String;)V");
+    jstring j_error_string = JavaStringFromStdString(jni(), error);
+    jni()->CallVoidMethod(*j_observer_global_, m, j_error_string);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  JNIEnv* jni() {
+    return AttachCurrentThreadIfNeeded();
+  }
+
+ private:
+  scoped_ptr<ConstraintsWrapper> constraints_;
+  const ScopedGlobalRef<jobject> j_observer_global_;
+  const ScopedGlobalRef<jclass> j_observer_class_;
+};
+
+class CreateSdpObserverWrapper
+    : public SdpObserverWrapper<CreateSessionDescriptionObserver> {
+ public:
+  CreateSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
+                           ConstraintsWrapper* constraints)
+      : SdpObserverWrapper(jni, j_observer, constraints) {}
+
+  void OnFailure(const std::string& error) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    SdpObserverWrapper::DoOnFailure(std::string("Create"), error);
+  }
+};
+
+class SetSdpObserverWrapper
+    : public SdpObserverWrapper<SetSessionDescriptionObserver> {
+ public:
+  SetSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
+                        ConstraintsWrapper* constraints)
+      : SdpObserverWrapper(jni, j_observer, constraints) {}
+
+  void OnFailure(const std::string& error) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    SdpObserverWrapper::DoOnFailure(std::string("Set"), error);
+  }
+};
+
+// Adapter for a Java DataChannel$Observer presenting a C++ DataChannelObserver
+// and dispatching the callback from C++ back to Java.
+class DataChannelObserverWrapper : public DataChannelObserver {
+ public:
+  DataChannelObserverWrapper(JNIEnv* jni, jobject j_observer)
+      : j_observer_global_(jni, j_observer),
+        j_observer_class_(jni, GetObjectClass(jni, j_observer)),
+        j_buffer_class_(jni, FindClass(jni, "org/webrtc/DataChannel$Buffer")),
+        j_on_buffered_amount_change_mid_(GetMethodID(
+            jni, *j_observer_class_, "onBufferedAmountChange", "(J)V")),
+        j_on_state_change_mid_(
+            GetMethodID(jni, *j_observer_class_, "onStateChange", "()V")),
+        j_on_message_mid_(GetMethodID(jni, *j_observer_class_, "onMessage",
+                                      "(Lorg/webrtc/DataChannel$Buffer;)V")),
+        j_buffer_ctor_(GetMethodID(jni, *j_buffer_class_, "<init>",
+                                   "(Ljava/nio/ByteBuffer;Z)V")) {}
+
+  virtual ~DataChannelObserverWrapper() {}
+
+  void OnBufferedAmountChange(uint64_t previous_amount) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jni()->CallVoidMethod(*j_observer_global_, j_on_buffered_amount_change_mid_,
+                          previous_amount);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnStateChange() override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jni()->CallVoidMethod(*j_observer_global_, j_on_state_change_mid_);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnMessage(const DataBuffer& buffer) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jobject byte_buffer = jni()->NewDirectByteBuffer(
+        const_cast<char*>(buffer.data.data<char>()), buffer.data.size());
+    jobject j_buffer = jni()->NewObject(*j_buffer_class_, j_buffer_ctor_,
+                                        byte_buffer, buffer.binary);
+    jni()->CallVoidMethod(*j_observer_global_, j_on_message_mid_, j_buffer);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+ private:
+  JNIEnv* jni() {
+    return AttachCurrentThreadIfNeeded();
+  }
+
+  const ScopedGlobalRef<jobject> j_observer_global_;
+  const ScopedGlobalRef<jclass> j_observer_class_;
+  const ScopedGlobalRef<jclass> j_buffer_class_;
+  const jmethodID j_on_buffered_amount_change_mid_;
+  const jmethodID j_on_state_change_mid_;
+  const jmethodID j_on_message_mid_;
+  const jmethodID j_buffer_ctor_;
+};
+
+// Adapter for a Java StatsObserver presenting a C++ StatsObserver and
+// dispatching the callback from C++ back to Java.
+class StatsObserverWrapper : public StatsObserver {
+ public:
+  StatsObserverWrapper(JNIEnv* jni, jobject j_observer)
+      : j_observer_global_(jni, j_observer),
+        j_observer_class_(jni, GetObjectClass(jni, j_observer)),
+        j_stats_report_class_(jni, FindClass(jni, "org/webrtc/StatsReport")),
+        j_stats_report_ctor_(GetMethodID(
+            jni, *j_stats_report_class_, "<init>",
+            "(Ljava/lang/String;Ljava/lang/String;D"
+            "[Lorg/webrtc/StatsReport$Value;)V")),
+        j_value_class_(jni, FindClass(
+            jni, "org/webrtc/StatsReport$Value")),
+        j_value_ctor_(GetMethodID(
+            jni, *j_value_class_, "<init>",
+            "(Ljava/lang/String;Ljava/lang/String;)V")) {
+  }
+
+  virtual ~StatsObserverWrapper() {}
+
+  void OnComplete(const StatsReports& reports) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jobjectArray j_reports = ReportsToJava(jni(), reports);
+    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onComplete",
+                              "([Lorg/webrtc/StatsReport;)V");
+    jni()->CallVoidMethod(*j_observer_global_, m, j_reports);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+ private:
+  jobjectArray ReportsToJava(
+      JNIEnv* jni, const StatsReports& reports) {
+    jobjectArray reports_array = jni->NewObjectArray(
+        reports.size(), *j_stats_report_class_, NULL);
+    int i = 0;
+    for (const auto* report : reports) {
+      ScopedLocalRefFrame local_ref_frame(jni);
+      jstring j_id = JavaStringFromStdString(jni, report->id()->ToString());
+      jstring j_type = JavaStringFromStdString(jni, report->TypeToString());
+      jobjectArray j_values = ValuesToJava(jni, report->values());
+      jobject j_report = jni->NewObject(*j_stats_report_class_,
+                                        j_stats_report_ctor_,
+                                        j_id,
+                                        j_type,
+                                        report->timestamp(),
+                                        j_values);
+      jni->SetObjectArrayElement(reports_array, i++, j_report);
+    }
+    return reports_array;
+  }
+
+  jobjectArray ValuesToJava(JNIEnv* jni, const StatsReport::Values& values) {
+    jobjectArray j_values = jni->NewObjectArray(
+        values.size(), *j_value_class_, NULL);
+    int i = 0;
+    for (const auto& it : values) {
+      ScopedLocalRefFrame local_ref_frame(jni);
+      // Should we use the '.name' enum value here instead of converting the
+      // name to a string?
+      jstring j_name = JavaStringFromStdString(jni, it.second->display_name());
+      jstring j_value = JavaStringFromStdString(jni, it.second->ToString());
+      jobject j_element_value =
+          jni->NewObject(*j_value_class_, j_value_ctor_, j_name, j_value);
+      jni->SetObjectArrayElement(j_values, i++, j_element_value);
+    }
+    return j_values;
+  }
+
+  JNIEnv* jni() {
+    return AttachCurrentThreadIfNeeded();
+  }
+
+  const ScopedGlobalRef<jobject> j_observer_global_;
+  const ScopedGlobalRef<jclass> j_observer_class_;
+  const ScopedGlobalRef<jclass> j_stats_report_class_;
+  const jmethodID j_stats_report_ctor_;
+  const ScopedGlobalRef<jclass> j_value_class_;
+  const jmethodID j_value_ctor_;
+};
+
+// Adapter presenting a cricket::VideoRenderer as a
+// webrtc::VideoRendererInterface.
+class VideoRendererWrapper : public VideoRendererInterface {
+ public:
+  static VideoRendererWrapper* Create(cricket::VideoRenderer* renderer) {
+    if (renderer)
+      return new VideoRendererWrapper(renderer);
+    return NULL;
+  }
+
+  virtual ~VideoRendererWrapper() {}
+
+  void RenderFrame(const cricket::VideoFrame* video_frame) override {
+    ScopedLocalRefFrame local_ref_frame(AttachCurrentThreadIfNeeded());
+    renderer_->RenderFrame(video_frame->GetCopyWithRotationApplied());
+  }
+
+ private:
+  explicit VideoRendererWrapper(cricket::VideoRenderer* renderer)
+      : renderer_(renderer) {}
+  scoped_ptr<cricket::VideoRenderer> renderer_;
+};
+
+// Wrapper dispatching webrtc::VideoRendererInterface to a Java VideoRenderer
+// instance.
+class JavaVideoRendererWrapper : public VideoRendererInterface {
+ public:
+  JavaVideoRendererWrapper(JNIEnv* jni, jobject j_callbacks)
+      : j_callbacks_(jni, j_callbacks),
+        j_render_frame_id_(GetMethodID(
+            jni, GetObjectClass(jni, j_callbacks), "renderFrame",
+            "(Lorg/webrtc/VideoRenderer$I420Frame;)V")),
+        j_frame_class_(jni,
+                       FindClass(jni, "org/webrtc/VideoRenderer$I420Frame")),
+        j_i420_frame_ctor_id_(GetMethodID(
+            jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;J)V")),
+        j_texture_frame_ctor_id_(GetMethodID(
+            jni, *j_frame_class_, "<init>",
+            "(IIII[FJ)V")),
+        j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
+    CHECK_EXCEPTION(jni);
+  }
+
+  virtual ~JavaVideoRendererWrapper() {}
+
+  void RenderFrame(const cricket::VideoFrame* video_frame) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jobject j_frame = (video_frame->GetNativeHandle() != nullptr)
+                          ? CricketToJavaTextureFrame(video_frame)
+                          : CricketToJavaI420Frame(video_frame);
+    // |j_callbacks_| is responsible for releasing |j_frame| with
+    // VideoRenderer.renderFrameDone().
+    jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
+    CHECK_EXCEPTION(jni());
+  }
+
+ private:
+  // Make a shallow copy of |frame| to be used with Java. The callee has
+  // ownership of the frame, and the frame should be released with
+  // VideoRenderer.releaseNativeFrame().
+  static jlong javaShallowCopy(const cricket::VideoFrame* frame) {
+    return jlongFromPointer(frame->Copy());
+  }
+
+  // Return a VideoRenderer.I420Frame referring to the data in |frame|.
+  jobject CricketToJavaI420Frame(const cricket::VideoFrame* frame) {
+    jintArray strides = jni()->NewIntArray(3);
+    jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
+    strides_array[0] = frame->GetYPitch();
+    strides_array[1] = frame->GetUPitch();
+    strides_array[2] = frame->GetVPitch();
+    jni()->ReleaseIntArrayElements(strides, strides_array, 0);
+    jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL);
+    jobject y_buffer =
+        jni()->NewDirectByteBuffer(const_cast<uint8_t*>(frame->GetYPlane()),
+                                   frame->GetYPitch() * frame->GetHeight());
+    jobject u_buffer = jni()->NewDirectByteBuffer(
+        const_cast<uint8_t*>(frame->GetUPlane()), frame->GetChromaSize());
+    jobject v_buffer = jni()->NewDirectByteBuffer(
+        const_cast<uint8_t*>(frame->GetVPlane()), frame->GetChromaSize());
+    jni()->SetObjectArrayElement(planes, 0, y_buffer);
+    jni()->SetObjectArrayElement(planes, 1, u_buffer);
+    jni()->SetObjectArrayElement(planes, 2, v_buffer);
+    return jni()->NewObject(
+        *j_frame_class_, j_i420_frame_ctor_id_,
+        frame->GetWidth(), frame->GetHeight(),
+        static_cast<int>(frame->GetVideoRotation()),
+        strides, planes, javaShallowCopy(frame));
+  }
+
+  // Return a VideoRenderer.I420Frame referring to the texture object in
+  // |frame|.
+  jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
+    NativeHandleImpl* handle =
+        reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
+    jfloatArray sampling_matrix = jni()->NewFloatArray(16);
+    jni()->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
+    return jni()->NewObject(
+        *j_frame_class_, j_texture_frame_ctor_id_,
+        frame->GetWidth(), frame->GetHeight(),
+        static_cast<int>(frame->GetVideoRotation()),
+        handle->oes_texture_id, sampling_matrix, javaShallowCopy(frame));
+  }
+
+  JNIEnv* jni() {
+    return AttachCurrentThreadIfNeeded();
+  }
+
+  ScopedGlobalRef<jobject> j_callbacks_;
+  jmethodID j_render_frame_id_;
+  ScopedGlobalRef<jclass> j_frame_class_;
+  jmethodID j_i420_frame_ctor_id_;
+  jmethodID j_texture_frame_ctor_id_;
+  ScopedGlobalRef<jclass> j_byte_buffer_class_;
+};
+
+
+static DataChannelInterface* ExtractNativeDC(JNIEnv* jni, jobject j_dc) {
+  jfieldID native_dc_id = GetFieldID(jni,
+      GetObjectClass(jni, j_dc), "nativeDataChannel", "J");
+  jlong j_d = GetLongField(jni, j_dc, native_dc_id);
+  return reinterpret_cast<DataChannelInterface*>(j_d);
+}
+
+JOW(jlong, DataChannel_registerObserverNative)(
+    JNIEnv* jni, jobject j_dc, jobject j_observer) {
+  scoped_ptr<DataChannelObserverWrapper> observer(
+      new DataChannelObserverWrapper(jni, j_observer));
+  ExtractNativeDC(jni, j_dc)->RegisterObserver(observer.get());
+  return jlongFromPointer(observer.release());
+}
+
+JOW(void, DataChannel_unregisterObserverNative)(
+    JNIEnv* jni, jobject j_dc, jlong native_observer) {
+  ExtractNativeDC(jni, j_dc)->UnregisterObserver();
+  delete reinterpret_cast<DataChannelObserverWrapper*>(native_observer);
+}
+
+JOW(jstring, DataChannel_label)(JNIEnv* jni, jobject j_dc) {
+  return JavaStringFromStdString(jni, ExtractNativeDC(jni, j_dc)->label());
+}
+
+JOW(jobject, DataChannel_state)(JNIEnv* jni, jobject j_dc) {
+  return JavaEnumFromIndex(
+      jni, "DataChannel$State", ExtractNativeDC(jni, j_dc)->state());
+}
+
+JOW(jlong, DataChannel_bufferedAmount)(JNIEnv* jni, jobject j_dc) {
+  uint64_t buffered_amount = ExtractNativeDC(jni, j_dc)->buffered_amount();
+  RTC_CHECK_LE(buffered_amount, std::numeric_limits<int64_t>::max())
+      << "buffered_amount overflowed jlong!";
+  return static_cast<jlong>(buffered_amount);
+}
+
+JOW(void, DataChannel_close)(JNIEnv* jni, jobject j_dc) {
+  ExtractNativeDC(jni, j_dc)->Close();
+}
+
+JOW(jboolean, DataChannel_sendNative)(JNIEnv* jni, jobject j_dc,
+                                      jbyteArray data, jboolean binary) {
+  jbyte* bytes = jni->GetByteArrayElements(data, NULL);
+  bool ret = ExtractNativeDC(jni, j_dc)->Send(DataBuffer(
+      rtc::Buffer(bytes, jni->GetArrayLength(data)),
+      binary));
+  jni->ReleaseByteArrayElements(data, bytes, JNI_ABORT);
+  return ret;
+}
+
+JOW(void, DataChannel_dispose)(JNIEnv* jni, jobject j_dc) {
+  CHECK_RELEASE(ExtractNativeDC(jni, j_dc));
+}
+
+JOW(void, Logging_nativeEnableTracing)(
+    JNIEnv* jni, jclass, jstring j_path, jint nativeLevels,
+    jint nativeSeverity) {
+  std::string path = JavaToStdString(jni, j_path);
+  if (nativeLevels != webrtc::kTraceNone) {
+    webrtc::Trace::set_level_filter(nativeLevels);
+    if (path != "logcat:") {
+      RTC_CHECK_EQ(0, webrtc::Trace::SetTraceFile(path.c_str(), false))
+          << "SetTraceFile failed";
+    } else {
+      // Intentionally leak this to avoid needing to reason about its lifecycle.
+      // It keeps no state and functions only as a dispatch point.
+      static LogcatTraceContext* g_trace_callback = new LogcatTraceContext();
+    }
+  }
+  if (nativeSeverity >= rtc::LS_SENSITIVE && nativeSeverity <= rtc::LS_ERROR) {
+    rtc::LogMessage::LogToDebug(
+        static_cast<rtc::LoggingSeverity>(nativeSeverity));
+  }
+}
+
+JOW(void, Logging_nativeEnableLogThreads)(JNIEnv* jni, jclass) {
+  rtc::LogMessage::LogThreads(true);
+}
+
+JOW(void, Logging_nativeEnableLogTimeStamps)(JNIEnv* jni, jclass) {
+  rtc::LogMessage::LogTimestamps(true);
+}
+
+JOW(void, Logging_nativeLog)(
+    JNIEnv* jni, jclass, jint j_severity, jstring j_tag, jstring j_message) {
+  std::string message = JavaToStdString(jni, j_message);
+  std::string tag = JavaToStdString(jni, j_tag);
+  LOG_TAG(static_cast<rtc::LoggingSeverity>(j_severity), tag) << message;
+}
+
+JOW(void, PeerConnection_freePeerConnection)(JNIEnv*, jclass, jlong j_p) {
+  CHECK_RELEASE(reinterpret_cast<PeerConnectionInterface*>(j_p));
+}
+
+JOW(void, PeerConnection_freeObserver)(JNIEnv*, jclass, jlong j_p) {
+  PCOJava* p = reinterpret_cast<PCOJava*>(j_p);
+  delete p;
+}
+
+JOW(void, MediaSource_free)(JNIEnv*, jclass, jlong j_p) {
+  CHECK_RELEASE(reinterpret_cast<MediaSourceInterface*>(j_p));
+}
+
+JOW(void, VideoCapturer_free)(JNIEnv*, jclass, jlong j_p) {
+  delete reinterpret_cast<cricket::VideoCapturer*>(j_p);
+}
+
+JOW(void, VideoRenderer_freeWrappedVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
+  delete reinterpret_cast<JavaVideoRendererWrapper*>(j_p);
+}
+
+JOW(void, VideoRenderer_releaseNativeFrame)(
+    JNIEnv* jni, jclass, jlong j_frame_ptr) {
+  delete reinterpret_cast<const cricket::VideoFrame*>(j_frame_ptr);
+}
+
+JOW(void, MediaStreamTrack_free)(JNIEnv*, jclass, jlong j_p) {
+  reinterpret_cast<MediaStreamTrackInterface*>(j_p)->Release();
+}
+
+JOW(jboolean, MediaStream_nativeAddAudioTrack)(
+    JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
+  return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
+      reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
+}
+
+JOW(jboolean, MediaStream_nativeAddVideoTrack)(
+    JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
+  return reinterpret_cast<MediaStreamInterface*>(pointer)
+      ->AddTrack(reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
+}
+
+JOW(jboolean, MediaStream_nativeRemoveAudioTrack)(
+    JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
+  return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
+      reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
+}
+
+JOW(jboolean, MediaStream_nativeRemoveVideoTrack)(
+    JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
+  return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
+      reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
+}
+
+JOW(jstring, MediaStream_nativeLabel)(JNIEnv* jni, jclass, jlong j_p) {
+  return JavaStringFromStdString(
+      jni, reinterpret_cast<MediaStreamInterface*>(j_p)->label());
+}
+
+JOW(void, MediaStream_free)(JNIEnv*, jclass, jlong j_p) {
+  CHECK_RELEASE(reinterpret_cast<MediaStreamInterface*>(j_p));
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateObserver)(
+    JNIEnv * jni, jclass, jobject j_observer) {
+  return (jlong)new PCOJava(jni, j_observer);
+}
+
+JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
+    JNIEnv* jni, jclass, jobject context,
+    jboolean initialize_audio, jboolean initialize_video,
+    jboolean video_hw_acceleration) {
+  bool failure = false;
+  video_hw_acceleration_enabled = video_hw_acceleration;
+  AndroidNetworkMonitor::SetAndroidContext(jni, context);
+  if (!factory_static_initialized) {
+    if (initialize_video) {
+      failure |= AndroidVideoCapturerJni::SetAndroidObjects(jni, context);
+    }
+    if (initialize_audio)
+      failure |= webrtc::VoiceEngine::SetAndroidObjects(GetJVM(), context);
+    factory_static_initialized = true;
+  }
+  return !failure;
+}
+
+JOW(void, PeerConnectionFactory_initializeFieldTrials)(
+    JNIEnv* jni, jclass, jstring j_trials_init_string) {
+  field_trials_init_string = NULL;
+  if (j_trials_init_string != NULL) {
+    const char* init_string =
+        jni->GetStringUTFChars(j_trials_init_string, NULL);
+    int init_string_length = jni->GetStringUTFLength(j_trials_init_string);
+    field_trials_init_string = new char[init_string_length + 1];
+    rtc::strcpyn(field_trials_init_string, init_string_length + 1, init_string);
+    jni->ReleaseStringUTFChars(j_trials_init_string, init_string);
+    LOG(LS_INFO) << "initializeFieldTrials: " << field_trials_init_string;
+  }
+  webrtc::field_trial::InitFieldTrialsFromString(field_trials_init_string);
+}
+
+JOW(void, PeerConnectionFactory_initializeInternalTracer)(JNIEnv* jni, jclass) {
+  rtc::tracing::SetupInternalTracer();
+}
+
+JOW(jboolean, PeerConnectionFactory_startInternalTracingCapture)(
+    JNIEnv* jni, jclass, jstring j_event_tracing_filename) {
+  if (!j_event_tracing_filename)
+    return false;
+
+  const char* init_string =
+      jni->GetStringUTFChars(j_event_tracing_filename, NULL);
+  LOG(LS_INFO) << "Starting internal tracing to: " << init_string;
+  bool ret = rtc::tracing::StartInternalCapture(init_string);
+  jni->ReleaseStringUTFChars(j_event_tracing_filename, init_string);
+  return ret;
+}
+
+JOW(void, PeerConnectionFactory_stopInternalTracingCapture)(
+    JNIEnv* jni, jclass) {
+  rtc::tracing::StopInternalCapture();
+}
+
+JOW(void, PeerConnectionFactory_shutdownInternalTracer)(JNIEnv* jni, jclass) {
+  rtc::tracing::ShutdownInternalTracer();
+}
+
+// Helper class for working around the fact that CreatePeerConnectionFactory()
+// comes in two flavors: either entirely automagical (constructing its own
+// threads and deleting them on teardown, but no external codec factory support)
+// or entirely manual (requires caller to delete threads after factory
+// teardown).  This struct takes ownership of its ctor's arguments to present a
+// single thing for Java to hold and eventually free.
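+//
+// A rough sketch of the intended ownership flow (illustrative only; the real
+// wiring lives in nativeCreatePeerConnectionFactory / nativeFreeFactory
+// below):
+//   OwnedFactoryAndThreads* owned = new OwnedFactoryAndThreads(...);
+//   jlong handle = jlongFromPointer(owned);  // Returned to and held by Java.
+//   ...
+//   delete reinterpret_cast<OwnedFactoryAndThreads*>(handle);  // On free.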
+class OwnedFactoryAndThreads {
+ public:
+  OwnedFactoryAndThreads(Thread* worker_thread,
+                         Thread* signaling_thread,
+                         WebRtcVideoEncoderFactory* encoder_factory,
+                         WebRtcVideoDecoderFactory* decoder_factory,
+                         rtc::NetworkMonitorFactory* network_monitor_factory,
+                         PeerConnectionFactoryInterface* factory)
+      : worker_thread_(worker_thread),
+        signaling_thread_(signaling_thread),
+        encoder_factory_(encoder_factory),
+        decoder_factory_(decoder_factory),
+        network_monitor_factory_(network_monitor_factory),
+        factory_(factory) {}
+
+  ~OwnedFactoryAndThreads() {
+    CHECK_RELEASE(factory_);
+    if (network_monitor_factory_ != nullptr) {
+      rtc::NetworkMonitorFactory::ReleaseFactory(network_monitor_factory_);
+    }
+  }
+
+  PeerConnectionFactoryInterface* factory() { return factory_; }
+  WebRtcVideoEncoderFactory* encoder_factory() { return encoder_factory_; }
+  WebRtcVideoDecoderFactory* decoder_factory() { return decoder_factory_; }
+  rtc::NetworkMonitorFactory* network_monitor_factory() {
+    return network_monitor_factory_;
+  }
+  void clear_network_monitor_factory() { network_monitor_factory_ = nullptr; }
+  void InvokeJavaCallbacksOnFactoryThreads();
+
+ private:
+  void JavaCallbackOnFactoryThreads();
+
+  const scoped_ptr<Thread> worker_thread_;
+  const scoped_ptr<Thread> signaling_thread_;
+  WebRtcVideoEncoderFactory* encoder_factory_;
+  WebRtcVideoDecoderFactory* decoder_factory_;
+  rtc::NetworkMonitorFactory* network_monitor_factory_;
+  PeerConnectionFactoryInterface* factory_;  // Const after ctor except dtor.
+};
+
+void OwnedFactoryAndThreads::JavaCallbackOnFactoryThreads() {
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  jclass j_factory_class = FindClass(jni, "org/webrtc/PeerConnectionFactory");
+  jmethodID m = nullptr;
+  if (Thread::Current() == worker_thread_) {
+    LOG(LS_INFO) << "Worker thread JavaCallback";
+    m = GetStaticMethodID(jni, j_factory_class, "onWorkerThreadReady", "()V");
+  }
+  if (Thread::Current() == signaling_thread_) {
+    LOG(LS_INFO) << "Signaling thread JavaCallback";
+    m = GetStaticMethodID(
+        jni, j_factory_class, "onSignalingThreadReady", "()V");
+  }
+  if (m != nullptr) {
+    jni->CallStaticVoidMethod(j_factory_class, m);
+    CHECK_EXCEPTION(jni) << "error during JavaCallback::CallStaticVoidMethod";
+  }
+}
+
+void OwnedFactoryAndThreads::InvokeJavaCallbacksOnFactoryThreads() {
+  LOG(LS_INFO) << "InvokeJavaCallbacksOnFactoryThreads.";
+  worker_thread_->Invoke<void>(
+      Bind(&OwnedFactoryAndThreads::JavaCallbackOnFactoryThreads, this));
+  signaling_thread_->Invoke<void>(
+      Bind(&OwnedFactoryAndThreads::JavaCallbackOnFactoryThreads, this));
+}
+
+PeerConnectionFactoryInterface::Options ParseOptionsFromJava(JNIEnv* jni,
+                                                             jobject options) {
+  jclass options_class = jni->GetObjectClass(options);
+  jfieldID network_ignore_mask_field =
+      jni->GetFieldID(options_class, "networkIgnoreMask", "I");
+  int network_ignore_mask =
+      jni->GetIntField(options, network_ignore_mask_field);
+
+  jfieldID disable_encryption_field =
+      jni->GetFieldID(options_class, "disableEncryption", "Z");
+  bool disable_encryption =
+      jni->GetBooleanField(options, disable_encryption_field);
+
+  jfieldID disable_network_monitor_field =
+      jni->GetFieldID(options_class, "disableNetworkMonitor", "Z");
+  bool disable_network_monitor =
+      jni->GetBooleanField(options, disable_network_monitor_field);
+
+  PeerConnectionFactoryInterface::Options native_options;
+
+  // This doesn't necessarily match the C++ version of this struct; feel free
+  // to add more parameters as necessary.
+  native_options.network_ignore_mask = network_ignore_mask;
+  native_options.disable_encryption = disable_encryption;
+  native_options.disable_network_monitor = disable_network_monitor;
+  return native_options;
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)(
+    JNIEnv* jni, jclass, jobject joptions) {
+  // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
+  // ThreadManager only WrapCurrentThread()s the thread where it is first
+  // created.  Since the semantics around when auto-wrapping happens in
+  // webrtc/base/ are convoluted, we simply wrap here to avoid having to think
+  // about ramifications of auto-wrapping there.
+  rtc::ThreadManager::Instance()->WrapCurrentThread();
+  webrtc::Trace::CreateTrace();
+  Thread* worker_thread = new Thread();
+  worker_thread->SetName("worker_thread", NULL);
+  Thread* signaling_thread = new Thread();
+  signaling_thread->SetName("signaling_thread", NULL);
+  RTC_CHECK(worker_thread->Start() && signaling_thread->Start())
+      << "Failed to start threads";
+  WebRtcVideoEncoderFactory* encoder_factory = nullptr;
+  WebRtcVideoDecoderFactory* decoder_factory = nullptr;
+  rtc::NetworkMonitorFactory* network_monitor_factory = nullptr;
+
+  PeerConnectionFactoryInterface::Options options;
+  bool has_options = joptions != NULL;
+  if (has_options) {
+    options = ParseOptionsFromJava(jni, joptions);
+  }
+
+  if (video_hw_acceleration_enabled) {
+    encoder_factory = new MediaCodecVideoEncoderFactory();
+    decoder_factory = new MediaCodecVideoDecoderFactory();
+  }
+  // Create the network monitor factory unless the options are provided and
+  // disable_network_monitor in them is set to true.
+  if (!(has_options && options.disable_network_monitor)) {
+    network_monitor_factory = new AndroidNetworkMonitorFactory();
+    rtc::NetworkMonitorFactory::SetFactory(network_monitor_factory);
+  }
+
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      webrtc::CreatePeerConnectionFactory(worker_thread,
+                                          signaling_thread,
+                                          NULL,
+                                          encoder_factory,
+                                          decoder_factory));
+  RTC_CHECK(factory) << "Failed to create the peer connection factory; "
+                     << "WebRTC/libjingle init likely failed on this device";
+  // TODO(honghaiz): Maybe put the options as the argument of
+  // CreatePeerConnectionFactory.
+  if (has_options) {
+    factory->SetOptions(options);
+  }
+  OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads(
+      worker_thread, signaling_thread,
+      encoder_factory, decoder_factory,
+      network_monitor_factory, factory.release());
+  owned_factory->InvokeJavaCallbacksOnFactoryThreads();
+  return jlongFromPointer(owned_factory);
+}
+
+JOW(void, PeerConnectionFactory_nativeFreeFactory)(JNIEnv*, jclass, jlong j_p) {
+  delete reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
+  if (field_trials_init_string) {
+    webrtc::field_trial::InitFieldTrialsFromString(NULL);
+    delete[] field_trials_init_string;
+    field_trials_init_string = NULL;
+  }
+  webrtc::Trace::ReturnTrace();
+}
+
+static PeerConnectionFactoryInterface* factoryFromJava(jlong j_p) {
+  return reinterpret_cast<OwnedFactoryAndThreads*>(j_p)->factory();
+}
+
+JOW(void, PeerConnectionFactory_nativeThreadsCallbacks)(
+    JNIEnv*, jclass, jlong j_p) {
+  OwnedFactoryAndThreads *factory =
+      reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
+  factory->InvokeJavaCallbacksOnFactoryThreads();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateLocalMediaStream)(
+    JNIEnv* jni, jclass, jlong native_factory, jstring label) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  rtc::scoped_refptr<MediaStreamInterface> stream(
+      factory->CreateLocalMediaStream(JavaToStdString(jni, label)));
+  return (jlong)stream.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)(
+    JNIEnv* jni, jclass, jlong native_factory, jlong native_capturer,
+    jobject j_constraints) {
+  scoped_ptr<ConstraintsWrapper> constraints(
+      new ConstraintsWrapper(jni, j_constraints));
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  rtc::scoped_refptr<VideoSourceInterface> source(
+      factory->CreateVideoSource(
+          reinterpret_cast<cricket::VideoCapturer*>(native_capturer),
+          constraints.get()));
+  return (jlong)source.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateVideoTrack)(
+    JNIEnv* jni, jclass, jlong native_factory, jstring id,
+    jlong native_source) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  rtc::scoped_refptr<VideoTrackInterface> track(
+      factory->CreateVideoTrack(
+          JavaToStdString(jni, id),
+          reinterpret_cast<VideoSourceInterface*>(native_source)));
+  return (jlong)track.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateAudioSource)(
+    JNIEnv* jni, jclass, jlong native_factory, jobject j_constraints) {
+  scoped_ptr<ConstraintsWrapper> constraints(
+      new ConstraintsWrapper(jni, j_constraints));
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  rtc::scoped_refptr<AudioSourceInterface> source(
+      factory->CreateAudioSource(constraints.get()));
+  return (jlong)source.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateAudioTrack)(
+    JNIEnv* jni, jclass, jlong native_factory, jstring id,
+    jlong native_source) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  rtc::scoped_refptr<AudioTrackInterface> track(factory->CreateAudioTrack(
+      JavaToStdString(jni, id),
+      reinterpret_cast<AudioSourceInterface*>(native_source)));
+  return (jlong)track.release();
+}
+
+JOW(jboolean, PeerConnectionFactory_nativeStartAecDump)(
+    JNIEnv* jni, jclass, jlong native_factory, jint file,
+    jint filesize_limit_bytes) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  return factory->StartAecDump(file, filesize_limit_bytes);
+}
+
+JOW(void, PeerConnectionFactory_nativeStopAecDump)(
+    JNIEnv* jni, jclass, jlong native_factory) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  factory->StopAecDump();
+}
+
+JOW(jboolean, PeerConnectionFactory_nativeStartRtcEventLog)(
+    JNIEnv* jni, jclass, jlong native_factory, jint file) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  return factory->StartRtcEventLog(file);
+}
+
+JOW(void, PeerConnectionFactory_nativeStopRtcEventLog)(
+    JNIEnv* jni, jclass, jlong native_factory) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  factory->StopRtcEventLog();
+}
+
+JOW(void, PeerConnectionFactory_nativeSetOptions)(
+    JNIEnv* jni, jclass, jlong native_factory, jobject options) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  PeerConnectionFactoryInterface::Options options_to_set =
+      ParseOptionsFromJava(jni, options);
+  factory->SetOptions(options_to_set);
+
+  if (options_to_set.disable_network_monitor) {
+    OwnedFactoryAndThreads* owner =
+        reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+    if (owner->network_monitor_factory()) {
+      rtc::NetworkMonitorFactory::ReleaseFactory(
+          owner->network_monitor_factory());
+      owner->clear_network_monitor_factory();
+    }
+  }
+}
+
+JOW(void, PeerConnectionFactory_nativeSetVideoHwAccelerationOptions)(
+    JNIEnv* jni, jclass, jlong native_factory, jobject local_egl_context,
+    jobject remote_egl_context) {
+  OwnedFactoryAndThreads* owned_factory =
+      reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+
+  jclass j_eglbase14_context_class =
+      FindClass(jni, "org/webrtc/EglBase14$Context");
+
+  MediaCodecVideoEncoderFactory* encoder_factory =
+      static_cast<MediaCodecVideoEncoderFactory*>
+          (owned_factory->encoder_factory());
+  if (encoder_factory &&
+      jni->IsInstanceOf(local_egl_context, j_eglbase14_context_class)) {
+    LOG(LS_INFO) << "Set EGL context for HW encoding.";
+    encoder_factory->SetEGLContext(jni, local_egl_context);
+  }
+
+  MediaCodecVideoDecoderFactory* decoder_factory =
+      static_cast<MediaCodecVideoDecoderFactory*>
+          (owned_factory->decoder_factory());
+  if (decoder_factory &&
+      jni->IsInstanceOf(remote_egl_context, j_eglbase14_context_class)) {
+    LOG(LS_INFO) << "Set EGL context for HW decoding.";
+    decoder_factory->SetEGLContext(jni, remote_egl_context);
+  }
+}
+
+static PeerConnectionInterface::IceTransportsType
+JavaIceTransportsTypeToNativeType(JNIEnv* jni, jobject j_ice_transports_type) {
+  std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$IceTransportsType",
+      j_ice_transports_type);
+
+  if (enum_name == "ALL")
+    return PeerConnectionInterface::kAll;
+
+  if (enum_name == "RELAY")
+    return PeerConnectionInterface::kRelay;
+
+  if (enum_name == "NOHOST")
+    return PeerConnectionInterface::kNoHost;
+
+  if (enum_name == "NONE")
+    return PeerConnectionInterface::kNone;
+
+  RTC_CHECK(false) << "Unexpected IceTransportsType enum_name " << enum_name;
+  return PeerConnectionInterface::kAll;
+}
+
+static PeerConnectionInterface::BundlePolicy
+JavaBundlePolicyToNativeType(JNIEnv* jni, jobject j_bundle_policy) {
+  std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$BundlePolicy",
+      j_bundle_policy);
+
+  if (enum_name == "BALANCED")
+    return PeerConnectionInterface::kBundlePolicyBalanced;
+
+  if (enum_name == "MAXBUNDLE")
+    return PeerConnectionInterface::kBundlePolicyMaxBundle;
+
+  if (enum_name == "MAXCOMPAT")
+    return PeerConnectionInterface::kBundlePolicyMaxCompat;
+
+  RTC_CHECK(false) << "Unexpected BundlePolicy enum_name " << enum_name;
+  return PeerConnectionInterface::kBundlePolicyBalanced;
+}
+
+static PeerConnectionInterface::RtcpMuxPolicy
+JavaRtcpMuxPolicyToNativeType(JNIEnv* jni, jobject j_rtcp_mux_policy) {
+  std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$RtcpMuxPolicy",
+      j_rtcp_mux_policy);
+
+  if (enum_name == "NEGOTIATE")
+    return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+
+  if (enum_name == "REQUIRE")
+    return PeerConnectionInterface::kRtcpMuxPolicyRequire;
+
+  RTC_CHECK(false) << "Unexpected RtcpMuxPolicy enum_name " << enum_name;
+  return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+}
+
+static PeerConnectionInterface::TcpCandidatePolicy
+JavaTcpCandidatePolicyToNativeType(
+    JNIEnv* jni, jobject j_tcp_candidate_policy) {
+  std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$TcpCandidatePolicy",
+      j_tcp_candidate_policy);
+
+  if (enum_name == "ENABLED")
+    return PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+
+  if (enum_name == "DISABLED")
+    return PeerConnectionInterface::kTcpCandidatePolicyDisabled;
+
+  RTC_CHECK(false) << "Unexpected TcpCandidatePolicy enum_name " << enum_name;
+  return PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+}
+
+static rtc::KeyType JavaKeyTypeToNativeType(JNIEnv* jni, jobject j_key_type) {
+  std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$KeyType", j_key_type);
+
+  if (enum_name == "RSA")
+    return rtc::KT_RSA;
+  if (enum_name == "ECDSA")
+    return rtc::KT_ECDSA;
+
+  RTC_CHECK(false) << "Unexpected KeyType enum_name " << enum_name;
+  return rtc::KT_ECDSA;
+}
+
+static PeerConnectionInterface::ContinualGatheringPolicy
+    JavaContinualGatheringPolicyToNativeType(
+        JNIEnv* jni, jobject j_gathering_policy) {
+  std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$ContinualGatheringPolicy",
+      j_gathering_policy);
+  if (enum_name == "GATHER_ONCE")
+    return PeerConnectionInterface::GATHER_ONCE;
+
+  if (enum_name == "GATHER_CONTINUALLY")
+    return PeerConnectionInterface::GATHER_CONTINUALLY;
+
+  RTC_CHECK(false) << "Unexpected ContinualGatheringPolicy enum name "
+                   << enum_name;
+  return PeerConnectionInterface::GATHER_ONCE;
+}
+
+static void JavaIceServersToJsepIceServers(
+    JNIEnv* jni, jobject j_ice_servers,
+    PeerConnectionInterface::IceServers* ice_servers) {
+  jclass list_class = GetObjectClass(jni, j_ice_servers);
+  jmethodID iterator_id = GetMethodID(
+      jni, list_class, "iterator", "()Ljava/util/Iterator;");
+  jobject iterator = jni->CallObjectMethod(j_ice_servers, iterator_id);
+  CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+  jmethodID iterator_has_next = GetMethodID(
+      jni, GetObjectClass(jni, iterator), "hasNext", "()Z");
+  jmethodID iterator_next = GetMethodID(
+      jni, GetObjectClass(jni, iterator), "next", "()Ljava/lang/Object;");
+  while (jni->CallBooleanMethod(iterator, iterator_has_next)) {
+    CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+    jobject j_ice_server = jni->CallObjectMethod(iterator, iterator_next);
+    CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+    jclass j_ice_server_class = GetObjectClass(jni, j_ice_server);
+    jfieldID j_ice_server_uri_id =
+        GetFieldID(jni, j_ice_server_class, "uri", "Ljava/lang/String;");
+    jfieldID j_ice_server_username_id =
+        GetFieldID(jni, j_ice_server_class, "username", "Ljava/lang/String;");
+    jfieldID j_ice_server_password_id =
+        GetFieldID(jni, j_ice_server_class, "password", "Ljava/lang/String;");
+    jstring uri = reinterpret_cast<jstring>(
+        GetObjectField(jni, j_ice_server, j_ice_server_uri_id));
+    jstring username = reinterpret_cast<jstring>(
+        GetObjectField(jni, j_ice_server, j_ice_server_username_id));
+    jstring password = reinterpret_cast<jstring>(
+        GetObjectField(jni, j_ice_server, j_ice_server_password_id));
+    PeerConnectionInterface::IceServer server;
+    server.uri = JavaToStdString(jni, uri);
+    server.username = JavaToStdString(jni, username);
+    server.password = JavaToStdString(jni, password);
+    ice_servers->push_back(server);
+  }
+  CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+}
+
+static void JavaRTCConfigurationToJsepRTCConfiguration(
+    JNIEnv* jni,
+    jobject j_rtc_config,
+    PeerConnectionInterface::RTCConfiguration* rtc_config) {
+  jclass j_rtc_config_class = GetObjectClass(jni, j_rtc_config);
+
+  jfieldID j_ice_transports_type_id = GetFieldID(
+      jni, j_rtc_config_class, "iceTransportsType",
+      "Lorg/webrtc/PeerConnection$IceTransportsType;");
+  jobject j_ice_transports_type = GetObjectField(
+      jni, j_rtc_config, j_ice_transports_type_id);
+
+  jfieldID j_bundle_policy_id = GetFieldID(
+      jni, j_rtc_config_class, "bundlePolicy",
+      "Lorg/webrtc/PeerConnection$BundlePolicy;");
+  jobject j_bundle_policy = GetObjectField(
+      jni, j_rtc_config, j_bundle_policy_id);
+
+  jfieldID j_rtcp_mux_policy_id = GetFieldID(
+      jni, j_rtc_config_class, "rtcpMuxPolicy",
+      "Lorg/webrtc/PeerConnection$RtcpMuxPolicy;");
+  jobject j_rtcp_mux_policy = GetObjectField(
+      jni, j_rtc_config, j_rtcp_mux_policy_id);
+
+  jfieldID j_tcp_candidate_policy_id = GetFieldID(
+      jni, j_rtc_config_class, "tcpCandidatePolicy",
+      "Lorg/webrtc/PeerConnection$TcpCandidatePolicy;");
+  jobject j_tcp_candidate_policy = GetObjectField(
+      jni, j_rtc_config, j_tcp_candidate_policy_id);
+
+  jfieldID j_ice_servers_id = GetFieldID(
+      jni, j_rtc_config_class, "iceServers", "Ljava/util/List;");
+  jobject j_ice_servers = GetObjectField(jni, j_rtc_config, j_ice_servers_id);
+
+  jfieldID j_audio_jitter_buffer_max_packets_id =
+      GetFieldID(jni, j_rtc_config_class, "audioJitterBufferMaxPackets", "I");
+  jfieldID j_audio_jitter_buffer_fast_accelerate_id = GetFieldID(
+      jni, j_rtc_config_class, "audioJitterBufferFastAccelerate", "Z");
+
+  jfieldID j_ice_connection_receiving_timeout_id =
+      GetFieldID(jni, j_rtc_config_class, "iceConnectionReceivingTimeout", "I");
+
+  jfieldID j_ice_backup_candidate_pair_ping_interval_id = GetFieldID(
+      jni, j_rtc_config_class, "iceBackupCandidatePairPingInterval", "I");
+
+  jfieldID j_continual_gathering_policy_id =
+      GetFieldID(jni, j_rtc_config_class, "continualGatheringPolicy",
+                 "Lorg/webrtc/PeerConnection$ContinualGatheringPolicy;");
+  jobject j_continual_gathering_policy =
+      GetObjectField(jni, j_rtc_config, j_continual_gathering_policy_id);
+
+  rtc_config->type =
+      JavaIceTransportsTypeToNativeType(jni, j_ice_transports_type);
+  rtc_config->bundle_policy =
+      JavaBundlePolicyToNativeType(jni, j_bundle_policy);
+  rtc_config->rtcp_mux_policy =
+      JavaRtcpMuxPolicyToNativeType(jni, j_rtcp_mux_policy);
+  rtc_config->tcp_candidate_policy =
+      JavaTcpCandidatePolicyToNativeType(jni, j_tcp_candidate_policy);
+  JavaIceServersToJsepIceServers(jni, j_ice_servers, &rtc_config->servers);
+  rtc_config->audio_jitter_buffer_max_packets =
+      GetIntField(jni, j_rtc_config, j_audio_jitter_buffer_max_packets_id);
+  rtc_config->audio_jitter_buffer_fast_accelerate = GetBooleanField(
+      jni, j_rtc_config, j_audio_jitter_buffer_fast_accelerate_id);
+  rtc_config->ice_connection_receiving_timeout =
+      GetIntField(jni, j_rtc_config, j_ice_connection_receiving_timeout_id);
+  rtc_config->ice_backup_candidate_pair_ping_interval = GetIntField(
+      jni, j_rtc_config, j_ice_backup_candidate_pair_ping_interval_id);
+  rtc_config->continual_gathering_policy =
+      JavaContinualGatheringPolicyToNativeType(
+          jni, j_continual_gathering_policy);
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnection)(
+    JNIEnv *jni, jclass, jlong factory, jobject j_rtc_config,
+    jobject j_constraints, jlong observer_p) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> f(
+      reinterpret_cast<PeerConnectionFactoryInterface*>(
+          factoryFromJava(factory)));
+
+  PeerConnectionInterface::RTCConfiguration rtc_config;
+  JavaRTCConfigurationToJsepRTCConfiguration(jni, j_rtc_config, &rtc_config);
+
+  jclass j_rtc_config_class = GetObjectClass(jni, j_rtc_config);
+  jfieldID j_key_type_id = GetFieldID(jni, j_rtc_config_class, "keyType",
+                                      "Lorg/webrtc/PeerConnection$KeyType;");
+  jobject j_key_type = GetObjectField(jni, j_rtc_config, j_key_type_id);
+
+  // Create ECDSA certificate.
+  if (JavaKeyTypeToNativeType(jni, j_key_type) == rtc::KT_ECDSA) {
+    scoped_ptr<rtc::SSLIdentity> ssl_identity(
+        rtc::SSLIdentity::Generate(webrtc::kIdentityName, rtc::KT_ECDSA));
+    if (ssl_identity.get()) {
+      rtc_config.certificates.push_back(
+          rtc::RTCCertificate::Create(std::move(ssl_identity)));
+      LOG(LS_INFO) << "ECDSA certificate created.";
+    } else {
+      // Failing to create the certificate should not abort peer connection
+      // creation. Instead the default key type (currently RSA) will be used.
+      LOG(LS_WARNING) <<
+          "Failed to generate SSLIdentity. Default encryption will be used.";
+    }
+  }
+
+  PCOJava* observer = reinterpret_cast<PCOJava*>(observer_p);
+  observer->SetConstraints(new ConstraintsWrapper(jni, j_constraints));
+  rtc::scoped_refptr<PeerConnectionInterface> pc(f->CreatePeerConnection(
+      rtc_config, observer->constraints(), NULL, NULL, observer));
+  return (jlong)pc.release();
+}
+
+static rtc::scoped_refptr<PeerConnectionInterface> ExtractNativePC(
+    JNIEnv* jni, jobject j_pc) {
+  jfieldID native_pc_id = GetFieldID(jni,
+      GetObjectClass(jni, j_pc), "nativePeerConnection", "J");
+  jlong j_p = GetLongField(jni, j_pc, native_pc_id);
+  return rtc::scoped_refptr<PeerConnectionInterface>(
+      reinterpret_cast<PeerConnectionInterface*>(j_p));
+}
+
+JOW(jobject, PeerConnection_getLocalDescription)(JNIEnv* jni, jobject j_pc) {
+  const SessionDescriptionInterface* sdp =
+      ExtractNativePC(jni, j_pc)->local_description();
+  return sdp ? JavaSdpFromNativeSdp(jni, sdp) : NULL;
+}
+
+JOW(jobject, PeerConnection_getRemoteDescription)(JNIEnv* jni, jobject j_pc) {
+  const SessionDescriptionInterface* sdp =
+      ExtractNativePC(jni, j_pc)->remote_description();
+  return sdp ? JavaSdpFromNativeSdp(jni, sdp) : NULL;
+}
+
+JOW(jobject, PeerConnection_createDataChannel)(
+    JNIEnv* jni, jobject j_pc, jstring j_label, jobject j_init) {
+  DataChannelInit init = JavaDataChannelInitToNative(jni, j_init);
+  rtc::scoped_refptr<DataChannelInterface> channel(
+      ExtractNativePC(jni, j_pc)->CreateDataChannel(
+          JavaToStdString(jni, j_label), &init));
+  // Must not pass channel.get() directly through NewObject: the Java ctor
+  // takes a jlong, so a raw 32-bit pointer passed through the varargs would
+  // be read as 64 bits, picking up memory that doesn't belong to it.
+  jlong nativeChannelPtr = jlongFromPointer(channel.get());
+  RTC_CHECK(nativeChannelPtr) << "Failed to create DataChannel";
+  jclass j_data_channel_class = FindClass(jni, "org/webrtc/DataChannel");
+  jmethodID j_data_channel_ctor = GetMethodID(
+      jni, j_data_channel_class, "<init>", "(J)V");
+  jobject j_channel = jni->NewObject(
+      j_data_channel_class, j_data_channel_ctor, nativeChannelPtr);
+  CHECK_EXCEPTION(jni) << "error during NewObject";
+  // Channel is now owned by Java object, and will be freed from there.
+  int bumped_count = channel->AddRef();
+  RTC_CHECK(bumped_count == 2) << "Unexpected refcount";
+  return j_channel;
+}
+
+JOW(void, PeerConnection_createOffer)(
+    JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
+  ConstraintsWrapper* constraints =
+      new ConstraintsWrapper(jni, j_constraints);
+  rtc::scoped_refptr<CreateSdpObserverWrapper> observer(
+      new rtc::RefCountedObject<CreateSdpObserverWrapper>(
+          jni, j_observer, constraints));
+  ExtractNativePC(jni, j_pc)->CreateOffer(observer, constraints);
+}
+
+JOW(void, PeerConnection_createAnswer)(
+    JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
+  ConstraintsWrapper* constraints =
+      new ConstraintsWrapper(jni, j_constraints);
+  rtc::scoped_refptr<CreateSdpObserverWrapper> observer(
+      new rtc::RefCountedObject<CreateSdpObserverWrapper>(
+          jni, j_observer, constraints));
+  ExtractNativePC(jni, j_pc)->CreateAnswer(observer, constraints);
+}
+
+// Helper to create a SessionDescriptionInterface from a SessionDescription.
+static SessionDescriptionInterface* JavaSdpToNativeSdp(
+    JNIEnv* jni, jobject j_sdp) {
+  jfieldID j_type_id = GetFieldID(
+      jni, GetObjectClass(jni, j_sdp), "type",
+      "Lorg/webrtc/SessionDescription$Type;");
+  jobject j_type = GetObjectField(jni, j_sdp, j_type_id);
+  jmethodID j_canonical_form_id = GetMethodID(
+      jni, GetObjectClass(jni, j_type), "canonicalForm",
+      "()Ljava/lang/String;");
+  jstring j_type_string = (jstring)jni->CallObjectMethod(
+      j_type, j_canonical_form_id);
+  CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+  std::string std_type = JavaToStdString(jni, j_type_string);
+
+  jfieldID j_description_id = GetFieldID(
+      jni, GetObjectClass(jni, j_sdp), "description", "Ljava/lang/String;");
+  jstring j_description = (jstring)GetObjectField(jni, j_sdp, j_description_id);
+  std::string std_description = JavaToStdString(jni, j_description);
+
+  return webrtc::CreateSessionDescription(
+      std_type, std_description, NULL);
+}
+
+JOW(void, PeerConnection_setLocalDescription)(
+    JNIEnv* jni, jobject j_pc,
+    jobject j_observer, jobject j_sdp) {
+  rtc::scoped_refptr<SetSdpObserverWrapper> observer(
+      new rtc::RefCountedObject<SetSdpObserverWrapper>(
+          jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
+  ExtractNativePC(jni, j_pc)->SetLocalDescription(
+      observer, JavaSdpToNativeSdp(jni, j_sdp));
+}
+
+JOW(void, PeerConnection_setRemoteDescription)(
+    JNIEnv* jni, jobject j_pc,
+    jobject j_observer, jobject j_sdp) {
+  rtc::scoped_refptr<SetSdpObserverWrapper> observer(
+      new rtc::RefCountedObject<SetSdpObserverWrapper>(
+          jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
+  ExtractNativePC(jni, j_pc)->SetRemoteDescription(
+      observer, JavaSdpToNativeSdp(jni, j_sdp));
+}
+
+JOW(jboolean, PeerConnection_setConfiguration)(
+    JNIEnv* jni, jobject j_pc, jobject j_rtc_config) {
+  PeerConnectionInterface::RTCConfiguration rtc_config;
+  JavaRTCConfigurationToJsepRTCConfiguration(jni, j_rtc_config, &rtc_config);
+  return ExtractNativePC(jni, j_pc)->SetConfiguration(rtc_config);
+}
+
+JOW(jboolean, PeerConnection_nativeAddIceCandidate)(
+    JNIEnv* jni, jobject j_pc, jstring j_sdp_mid,
+    jint j_sdp_mline_index, jstring j_candidate_sdp) {
+  std::string sdp_mid = JavaToStdString(jni, j_sdp_mid);
+  std::string sdp = JavaToStdString(jni, j_candidate_sdp);
+  scoped_ptr<IceCandidateInterface> candidate(
+      webrtc::CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, NULL));
+  return ExtractNativePC(jni, j_pc)->AddIceCandidate(candidate.get());
+}
+
+JOW(jboolean, PeerConnection_nativeAddLocalStream)(
+    JNIEnv* jni, jobject j_pc, jlong native_stream) {
+  return ExtractNativePC(jni, j_pc)->AddStream(
+      reinterpret_cast<MediaStreamInterface*>(native_stream));
+}
+
+JOW(void, PeerConnection_nativeRemoveLocalStream)(
+    JNIEnv* jni, jobject j_pc, jlong native_stream) {
+  ExtractNativePC(jni, j_pc)->RemoveStream(
+      reinterpret_cast<MediaStreamInterface*>(native_stream));
+}
+
+JOW(jobject, PeerConnection_nativeCreateSender)(
+    JNIEnv* jni, jobject j_pc, jstring j_kind, jstring j_stream_id) {
+  jclass j_rtp_sender_class = FindClass(jni, "org/webrtc/RtpSender");
+  jmethodID j_rtp_sender_ctor =
+      GetMethodID(jni, j_rtp_sender_class, "<init>", "(J)V");
+
+  std::string kind = JavaToStdString(jni, j_kind);
+  std::string stream_id = JavaToStdString(jni, j_stream_id);
+  rtc::scoped_refptr<RtpSenderInterface> sender =
+      ExtractNativePC(jni, j_pc)->CreateSender(kind, stream_id);
+  if (!sender.get()) {
+    return nullptr;
+  }
+  jlong nativeSenderPtr = jlongFromPointer(sender.get());
+  jobject j_sender =
+      jni->NewObject(j_rtp_sender_class, j_rtp_sender_ctor, nativeSenderPtr);
+  CHECK_EXCEPTION(jni) << "error during NewObject";
+  // Sender is now owned by the Java object, and will be freed from
+  // RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
+  sender->AddRef();
+  return j_sender;
+}
+
+JOW(jobject, PeerConnection_nativeGetSenders)(JNIEnv* jni, jobject j_pc) {
+  jclass j_array_list_class = FindClass(jni, "java/util/ArrayList");
+  jmethodID j_array_list_ctor =
+      GetMethodID(jni, j_array_list_class, "<init>", "()V");
+  jmethodID j_array_list_add =
+      GetMethodID(jni, j_array_list_class, "add", "(Ljava/lang/Object;)Z");
+  jobject j_senders = jni->NewObject(j_array_list_class, j_array_list_ctor);
+  CHECK_EXCEPTION(jni) << "error during NewObject";
+
+  jclass j_rtp_sender_class = FindClass(jni, "org/webrtc/RtpSender");
+  jmethodID j_rtp_sender_ctor =
+      GetMethodID(jni, j_rtp_sender_class, "<init>", "(J)V");
+
+  auto senders = ExtractNativePC(jni, j_pc)->GetSenders();
+  for (const auto& sender : senders) {
+    jlong nativeSenderPtr = jlongFromPointer(sender.get());
+    jobject j_sender =
+        jni->NewObject(j_rtp_sender_class, j_rtp_sender_ctor, nativeSenderPtr);
+    CHECK_EXCEPTION(jni) << "error during NewObject";
+    // Sender is now owned by the Java object, and will be freed from
+    // RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
+    sender->AddRef();
+    jni->CallBooleanMethod(j_senders, j_array_list_add, j_sender);
+    CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+  }
+  return j_senders;
+}
+
+JOW(jobject, PeerConnection_nativeGetReceivers)(JNIEnv* jni, jobject j_pc) {
+  jclass j_array_list_class = FindClass(jni, "java/util/ArrayList");
+  jmethodID j_array_list_ctor =
+      GetMethodID(jni, j_array_list_class, "<init>", "()V");
+  jmethodID j_array_list_add =
+      GetMethodID(jni, j_array_list_class, "add", "(Ljava/lang/Object;)Z");
+  jobject j_receivers = jni->NewObject(j_array_list_class, j_array_list_ctor);
+  CHECK_EXCEPTION(jni) << "error during NewObject";
+
+  jclass j_rtp_receiver_class = FindClass(jni, "org/webrtc/RtpReceiver");
+  jmethodID j_rtp_receiver_ctor =
+      GetMethodID(jni, j_rtp_receiver_class, "<init>", "(J)V");
+
+  auto receivers = ExtractNativePC(jni, j_pc)->GetReceivers();
+  for (const auto& receiver : receivers) {
+    jlong nativeReceiverPtr = jlongFromPointer(receiver.get());
+    jobject j_receiver = jni->NewObject(j_rtp_receiver_class,
+                                        j_rtp_receiver_ctor, nativeReceiverPtr);
+    CHECK_EXCEPTION(jni) << "error during NewObject";
+    // Receiver is now owned by Java object, and will be freed from there.
+    receiver->AddRef();
+    jni->CallBooleanMethod(j_receivers, j_array_list_add, j_receiver);
+    CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+  }
+  return j_receivers;
+}
+
+JOW(jboolean, PeerConnection_nativeGetStats)(
+    JNIEnv* jni, jobject j_pc, jobject j_observer, jlong native_track) {
+  rtc::scoped_refptr<StatsObserverWrapper> observer(
+      new rtc::RefCountedObject<StatsObserverWrapper>(jni, j_observer));
+  return ExtractNativePC(jni, j_pc)->GetStats(
+      observer,
+      reinterpret_cast<MediaStreamTrackInterface*>(native_track),
+      PeerConnectionInterface::kStatsOutputLevelStandard);
+}
+
+JOW(jobject, PeerConnection_signalingState)(JNIEnv* jni, jobject j_pc) {
+  PeerConnectionInterface::SignalingState state =
+      ExtractNativePC(jni, j_pc)->signaling_state();
+  return JavaEnumFromIndex(jni, "PeerConnection$SignalingState", state);
+}
+
+JOW(jobject, PeerConnection_iceConnectionState)(JNIEnv* jni, jobject j_pc) {
+  PeerConnectionInterface::IceConnectionState state =
+      ExtractNativePC(jni, j_pc)->ice_connection_state();
+  return JavaEnumFromIndex(jni, "PeerConnection$IceConnectionState", state);
+}
+
+JOW(jobject, PeerConnection_iceGatheringState)(JNIEnv* jni, jobject j_pc) {
+  PeerConnectionInterface::IceGatheringState state =
+      ExtractNativePC(jni, j_pc)->ice_gathering_state();
+  return JavaEnumFromIndex(jni, "PeerConnection$IceGatheringState", state);
+}
+
+JOW(void, PeerConnection_close)(JNIEnv* jni, jobject j_pc) {
+  ExtractNativePC(jni, j_pc)->Close();
+}
+
+JOW(jobject, MediaSource_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
+  rtc::scoped_refptr<MediaSourceInterface> p(
+      reinterpret_cast<MediaSourceInterface*>(j_p));
+  return JavaEnumFromIndex(jni, "MediaSource$State", p->state());
+}
+
+JOW(jlong, VideoRenderer_nativeWrapVideoRenderer)(
+    JNIEnv* jni, jclass, jobject j_callbacks) {
+  scoped_ptr<JavaVideoRendererWrapper> renderer(
+      new JavaVideoRendererWrapper(jni, j_callbacks));
+  return (jlong)renderer.release();
+}
+
+JOW(void, VideoRenderer_nativeCopyPlane)(
+    JNIEnv *jni, jclass, jobject j_src_buffer, jint width, jint height,
+    jint src_stride, jobject j_dst_buffer, jint dst_stride) {
+  size_t src_size = jni->GetDirectBufferCapacity(j_src_buffer);
+  size_t dst_size = jni->GetDirectBufferCapacity(j_dst_buffer);
+  RTC_CHECK(src_stride >= width) << "Wrong source stride " << src_stride;
+  RTC_CHECK(dst_stride >= width) << "Wrong destination stride " << dst_stride;
+  RTC_CHECK(src_size >= src_stride * height)
+      << "Insufficient source buffer capacity " << src_size;
+  RTC_CHECK(dst_size >= dst_stride * height)
+      << "Isufficient destination buffer capacity " << dst_size;
+  uint8_t *src =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer));
+  uint8_t *dst =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_buffer));
+  if (src_stride == dst_stride) {
+    memcpy(dst, src, src_stride * height);
+  } else {
+    for (int i = 0; i < height; i++) {
+      memcpy(dst, src, width);
+      src += src_stride;
+      dst += dst_stride;
+    }
+  }
+}
+
+JOW(void, VideoSource_stop)(JNIEnv* jni, jclass, jlong j_p) {
+  reinterpret_cast<VideoSourceInterface*>(j_p)->Stop();
+}
+
+JOW(void, VideoSource_restart)(
+    JNIEnv* jni, jclass, jlong j_p_source, jlong j_p_format) {
+  reinterpret_cast<VideoSourceInterface*>(j_p_source)->Restart();
+}
+
+JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {
+  return JavaStringFromStdString(
+      jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());
+}
+
+JOW(jstring, MediaStreamTrack_nativeKind)(JNIEnv* jni, jclass, jlong j_p) {
+  return JavaStringFromStdString(
+      jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->kind());
+}
+
+JOW(jboolean, MediaStreamTrack_nativeEnabled)(JNIEnv* jni, jclass, jlong j_p) {
+  return reinterpret_cast<MediaStreamTrackInterface*>(j_p)->enabled();
+}
+
+JOW(jobject, MediaStreamTrack_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
+  return JavaEnumFromIndex(
+      jni,
+      "MediaStreamTrack$State",
+      reinterpret_cast<MediaStreamTrackInterface*>(j_p)->state());
+}
+
+JOW(jboolean, MediaStreamTrack_nativeSetState)(
+    JNIEnv* jni, jclass, jlong j_p, jint j_new_state) {
+  MediaStreamTrackInterface::TrackState new_state =
+      (MediaStreamTrackInterface::TrackState)j_new_state;
+  return reinterpret_cast<MediaStreamTrackInterface*>(j_p)
+      ->set_state(new_state);
+}
+
+JOW(jboolean, MediaStreamTrack_nativeSetEnabled)(
+    JNIEnv* jni, jclass, jlong j_p, jboolean enabled) {
+  return reinterpret_cast<MediaStreamTrackInterface*>(j_p)
+      ->set_enabled(enabled);
+}
+
+JOW(void, VideoTrack_nativeAddRenderer)(
+    JNIEnv* jni, jclass,
+    jlong j_video_track_pointer, jlong j_renderer_pointer) {
+  reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)->AddRenderer(
+      reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer));
+}
+
+JOW(void, VideoTrack_nativeRemoveRenderer)(
+    JNIEnv* jni, jclass,
+    jlong j_video_track_pointer, jlong j_renderer_pointer) {
+  reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)->RemoveRenderer(
+      reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer));
+}
+
+JOW(jlong, CallSessionFileRotatingLogSink_nativeAddSink)(
+    JNIEnv* jni, jclass,
+    jstring j_dirPath, jint j_maxFileSize, jint j_severity) {
+  std::string dir_path = JavaToStdString(jni, j_dirPath);
+  rtc::CallSessionFileRotatingLogSink* sink =
+      new rtc::CallSessionFileRotatingLogSink(dir_path, j_maxFileSize);
+  if (!sink->Init()) {
+    LOG_V(rtc::LoggingSeverity::LS_WARNING) <<
+        "Failed to init CallSessionFileRotatingLogSink for path " << dir_path;
+    delete sink;
+    return 0;
+  }
+  rtc::LogMessage::AddLogToStream(
+      sink, static_cast<rtc::LoggingSeverity>(j_severity));
+  return (jlong) sink;
+}
+
+JOW(void, CallSessionFileRotatingLogSink_nativeDeleteSink)(
+    JNIEnv* jni, jclass, jlong j_sink) {
+  rtc::CallSessionFileRotatingLogSink* sink =
+      reinterpret_cast<rtc::CallSessionFileRotatingLogSink*>(j_sink);
+  rtc::LogMessage::RemoveLogToStream(sink);
+  delete sink;
+}
+
+JOW(jbyteArray, CallSessionFileRotatingLogSink_nativeGetLogData)(
+    JNIEnv* jni, jclass, jstring j_dirPath) {
+  std::string dir_path = JavaToStdString(jni, j_dirPath);
+  rtc::scoped_ptr<rtc::CallSessionFileRotatingStream> stream(
+      new rtc::CallSessionFileRotatingStream(dir_path));
+  if (!stream->Open()) {
+    LOG_V(rtc::LoggingSeverity::LS_WARNING) <<
+        "Failed to open CallSessionFileRotatingStream for path " << dir_path;
+    return jni->NewByteArray(0);
+  }
+  size_t log_size = 0;
+  if (!stream->GetSize(&log_size) || log_size == 0) {
+    LOG_V(rtc::LoggingSeverity::LS_WARNING) <<
+        "CallSessionFileRotatingStream returns 0 size for path " << dir_path;
+    return jni->NewByteArray(0);
+  }
+
+  size_t read = 0;
+  rtc::scoped_ptr<jbyte[]> buffer(new jbyte[log_size]);
+  stream->ReadAll(buffer.get(), log_size, &read, nullptr);
+
+  jbyteArray result = jni->NewByteArray(read);
+  jni->SetByteArrayRegion(result, 0, read, buffer.get());
+
+  return result;
+}
+
+JOW(jboolean, RtpSender_nativeSetTrack)(JNIEnv* jni,
+                                    jclass,
+                                    jlong j_rtp_sender_pointer,
+                                    jlong j_track_pointer) {
+  return reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+      ->SetTrack(reinterpret_cast<MediaStreamTrackInterface*>(j_track_pointer));
+}
+
+JOW(jlong, RtpSender_nativeGetTrack)(JNIEnv* jni,
+                                  jclass,
+                                  jlong j_rtp_sender_pointer,
+                                  jlong j_track_pointer) {
+  return jlongFromPointer(
+      reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+          ->track()
+          .release());
+}
+
+JOW(jstring, RtpSender_nativeId)(
+    JNIEnv* jni, jclass, jlong j_rtp_sender_pointer) {
+  return JavaStringFromStdString(
+      jni, reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)->id());
+}
+
+JOW(void, RtpSender_free)(JNIEnv* jni, jclass, jlong j_rtp_sender_pointer) {
+  reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)->Release();
+}
+
+JOW(jlong, RtpReceiver_nativeGetTrack)(JNIEnv* jni,
+                                    jclass,
+                                    jlong j_rtp_receiver_pointer,
+                                    jlong j_track_pointer) {
+  return jlongFromPointer(
+      reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+          ->track()
+          .release());
+}
+
+JOW(jstring, RtpReceiver_nativeId)(
+    JNIEnv* jni, jclass, jlong j_rtp_receiver_pointer) {
+  return JavaStringFromStdString(
+      jni,
+      reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)->id());
+}
+
+JOW(void, RtpReceiver_free)(JNIEnv* jni, jclass, jlong j_rtp_receiver_pointer) {
+  reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)->Release();
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/surfacetexturehelper_jni.cc b/webrtc/api/java/jni/surfacetexturehelper_jni.cc
new file mode 100644
index 0000000..335081d
--- /dev/null
+++ b/webrtc/api/java/jni/surfacetexturehelper_jni.cc
@@ -0,0 +1,68 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+
+#include "webrtc/api/java/jni/surfacetexturehelper_jni.h"
+
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+
+namespace webrtc_jni {
+
+SurfaceTextureHelper::SurfaceTextureHelper(
+    JNIEnv* jni, jobject surface_texture_helper)
+  : j_surface_texture_helper_(jni, surface_texture_helper),
+    j_return_texture_method_(
+        GetMethodID(jni,
+                    FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+                    "returnTextureFrame",
+                    "()V")) {
+  CHECK_EXCEPTION(jni) << "error during initialization of SurfaceTextureHelper";
+}
+
+SurfaceTextureHelper::~SurfaceTextureHelper() {
+}
+
+void SurfaceTextureHelper::ReturnTextureFrame() const {
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  jni->CallVoidMethod(*j_surface_texture_helper_, j_return_texture_method_);
+
+  CHECK_EXCEPTION(
+      jni) << "error during SurfaceTextureHelper.returnTextureFrame";
+}
+
+rtc::scoped_refptr<webrtc::VideoFrameBuffer>
+SurfaceTextureHelper::CreateTextureFrame(int width, int height,
+    const NativeHandleImpl& native_handle) {
+  return new rtc::RefCountedObject<AndroidTextureBuffer>(
+      width, height, native_handle, *j_surface_texture_helper_,
+      rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this));
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/surfacetexturehelper_jni.h b/webrtc/api/java/jni/surfacetexturehelper_jni.h
new file mode 100644
index 0000000..8953b02
--- /dev/null
+++ b/webrtc/api/java/jni/surfacetexturehelper_jni.h
@@ -0,0 +1,79 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_SURFACETEXTUREHELPER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_SURFACETEXTUREHELPER_JNI_H_
+
+#include <jni.h>
+
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
+
+namespace webrtc_jni {
+
+// Helper class to create and synchronize access to an Android SurfaceTexture.
+// It is used for creating webrtc::VideoFrameBuffers from a SurfaceTexture when
+// the SurfaceTexture has been updated.
+// When the VideoFrameBuffer is released, this class returns the buffer to the
+// Java SurfaceTextureHelper so it can be updated safely. The VideoFrameBuffer
+// can be released on an arbitrary thread.
+// SurfaceTextureHelper is reference counted to make sure that it is not
+// destroyed while a VideoFrameBuffer is in use.
+// This class is the C++ counterpart of the Java class SurfaceTextureHelper.
+// Usage:
+// 1. Create a Java instance of SurfaceTextureHelper.
+// 2. Create an instance of this class.
+// 3. Register a listener to the Java SurfaceListener and start producing
+// new buffers.
+// 4. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
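+//
+// A minimal sketch of steps 2 and 4 (illustrative; assumes |jni| is a valid
+// JNIEnv* and |j_helper| references an org/webrtc/SurfaceTextureHelper):
+//   rtc::scoped_refptr<SurfaceTextureHelper> helper(
+//       new rtc::RefCountedObject<SurfaceTextureHelper>(jni, j_helper));
+//   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+//       helper->CreateTextureFrame(width, height, native_handle);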
+class SurfaceTextureHelper : public rtc::RefCountInterface {
+ public:
+  SurfaceTextureHelper(JNIEnv* jni, jobject surface_texture_helper);
+
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
+      int width,
+      int height,
+      const NativeHandleImpl& native_handle);
+
+ protected:
+  ~SurfaceTextureHelper();
+
+ private:
+  // May be called on an arbitrary thread.
+  void ReturnTextureFrame() const;
+
+  const ScopedGlobalRef<jobject> j_surface_texture_helper_;
+  const jmethodID j_return_texture_method_;
+};
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_SURFACETEXTUREHELPER_JNI_H_