Move talk/app/webrtc to webrtc/api
The warning suppressions previously inherited from
talk/build/common.gypi are now replaced by target-specific disabling
of only the failing warnings. Additional disabling was needed because of
the stricter compilation warnings that apply to code in webrtc/.
License headers will be updated in a follow-up CL.
Other modifications:
* Updated the header guards.
* Sorted the includes using chromium/src/tools/sort-headers.py
except for these files:
talk/app/webrtc/peerconnectionendtoend_unittest.cc
talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
webrtc/media/devices/win32devicemanager.cc
The HAVE_SCTP define was added for the peerconnection_unittests target
in api_tests.gyp.
I also checked that none of
SRTP_RELATIVE_PATH
HAVE_SRTP
HAVE_WEBRTC_VIDEO
HAVE_WEBRTC_VOICE
were used by the talk/app/webrtc code.
For Chromium, the following changes will need to be applied to the roll CL that updates the
DEPS for WebRTC and libjingle:
https://codereview.chromium.org/1615433002
BUG=webrtc:5418
NOPRESUBMIT=True
R=deadbeef@webrtc.org, pthatcher@webrtc.org, tommi@webrtc.org
Review URL: https://codereview.webrtc.org/1610243002 .
Cr-Commit-Position: refs/heads/master@{#11545}
diff --git a/webrtc/api/java/README b/webrtc/api/java/README
new file mode 100644
index 0000000..f367556
--- /dev/null
+++ b/webrtc/api/java/README
@@ -0,0 +1,10 @@
+This directory holds a Java implementation of the webrtc::PeerConnection API, as
+well as the JNI glue C++ code that lets the Java implementation reuse the C++
+implementation of the same API.
+
+To build the Java API and related tests, build with OS=android in $GYP_DEFINES.
+
+To use the Java API, start by looking at the public interface of
+org.webrtc.PeerConnection{,Factory} and the org.webrtc.PeerConnectionTest.
+
+To understand the implementation of the API, see the native code in jni/.
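A minimal bootstrap sketch (illustrative only, not part of this CL; it assumes the
PeerConnectionFactory entry points at this revision and a valid Android
|applicationContext|):

  PeerConnectionFactory.initializeAndroidGlobals(
      applicationContext, true /* initializeAudio */, true /* initializeVideo */,
      true /* videoHwAcceleration */);
  PeerConnectionFactory factory = new PeerConnectionFactory();
  // From here, create audio/video sources and tracks, and a PeerConnection with
  // an observer; org.webrtc.PeerConnectionTest shows complete working flows.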
diff --git a/webrtc/api/java/android/org/webrtc/Camera2Enumerator.java b/webrtc/api/java/android/org/webrtc/Camera2Enumerator.java
new file mode 100644
index 0000000..3444529
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/Camera2Enumerator.java
@@ -0,0 +1,122 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+
+import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.os.Build;
+import android.os.SystemClock;
+import android.util.Range;
+import android.util.Size;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import org.webrtc.Logging;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@TargetApi(21)
+public class Camera2Enumerator implements CameraEnumerationAndroid.Enumerator {
+ private final static String TAG = "Camera2Enumerator";
+ private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
+
+ private final CameraManager cameraManager;
+ // Each entry contains the supported formats for a given camera index. The formats are enumerated
+ // lazily in getSupportedFormats(), and cached for future reference.
+ private final Map<Integer, List<CaptureFormat>> cachedSupportedFormats =
+ new HashMap<Integer, List<CaptureFormat>>();
+
+ public static boolean isSupported() {
+ return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
+ }
+
+ public Camera2Enumerator(Context context) {
+ cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ }
+
+ @Override
+ public List<CaptureFormat> getSupportedFormats(int cameraId) {
+ synchronized (cachedSupportedFormats) {
+ if (cachedSupportedFormats.containsKey(cameraId)) {
+ return cachedSupportedFormats.get(cameraId);
+ }
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+ final long startTimeMs = SystemClock.elapsedRealtime();
+
+ final CameraCharacteristics cameraCharacteristics;
+ try {
+ cameraCharacteristics = cameraManager.getCameraCharacteristics(Integer.toString(cameraId));
+ } catch (Exception ex) {
+ Logging.e(TAG, "getCameraCharacteristics(): " + ex);
+ return new ArrayList<CaptureFormat>();
+ }
+
+ // Calculate default max fps from auto-exposure ranges in case getOutputMinFrameDuration() is
+ // not supported.
+ final Range<Integer>[] fpsRanges =
+ cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+ int defaultMaxFps = 0;
+ for (Range<Integer> fpsRange : fpsRanges) {
+ defaultMaxFps = Math.max(defaultMaxFps, fpsRange.getUpper());
+ }
+
+ final StreamConfigurationMap streamMap =
+ cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ final Size[] sizes = streamMap.getOutputSizes(ImageFormat.YUV_420_888);
+ if (sizes == null) {
+ throw new RuntimeException("ImageFormat.YUV_420_888 not supported.");
+ }
+
+ final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+ for (Size size : sizes) {
+ long minFrameDurationNs = 0;
+ try {
+ minFrameDurationNs = streamMap.getOutputMinFrameDuration(ImageFormat.YUV_420_888, size);
+ } catch (Exception e) {
+ // getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
+ }
+ final int maxFps = (minFrameDurationNs == 0)
+ ? defaultMaxFps
+ : (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs);
+ formatList.add(new CaptureFormat(size.getWidth(), size.getHeight(), 0, maxFps * 1000));
+ }
+ cachedSupportedFormats.put(cameraId, formatList);
+ final long endTimeMs = SystemClock.elapsedRealtime();
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+ return formatList;
+ }
+ }
+}
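Usage sketch (illustrative, not part of this CL; |context| is assumed to be a valid
android.content.Context):

  if (Camera2Enumerator.isSupported()) {
    CameraEnumerationAndroid.Enumerator enumerator = new Camera2Enumerator(context);
    // CaptureFormat framerates are stored as fps * 1000, e.g. "640x480@[0:30000]".
    for (CameraEnumerationAndroid.CaptureFormat format : enumerator.getSupportedFormats(0)) {
      Logging.d("Camera2Example", "Supported: " + format);
    }
  }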
diff --git a/webrtc/api/java/android/org/webrtc/CameraEnumerationAndroid.java b/webrtc/api/java/android/org/webrtc/CameraEnumerationAndroid.java
new file mode 100644
index 0000000..5f68c37
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/CameraEnumerationAndroid.java
@@ -0,0 +1,227 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import static java.lang.Math.abs;
+import static java.lang.Math.ceil;
+import android.graphics.ImageFormat;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import org.webrtc.Logging;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+public class CameraEnumerationAndroid {
+ private final static String TAG = "CameraEnumerationAndroid";
+  // Synchronized on |CameraEnumerationAndroid.class|.
+ private static Enumerator enumerator = new CameraEnumerator();
+
+ public interface Enumerator {
+ /**
+ * Returns a list of supported CaptureFormats for the camera with index |cameraId|.
+ */
+ List<CaptureFormat> getSupportedFormats(int cameraId);
+ }
+
+ public static synchronized void setEnumerator(Enumerator enumerator) {
+ CameraEnumerationAndroid.enumerator = enumerator;
+ }
+
+ public static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
+ return enumerator.getSupportedFormats(cameraId);
+ }
+
+ public static class CaptureFormat {
+ public final int width;
+ public final int height;
+ public final int maxFramerate;
+ public final int minFramerate;
+    // TODO(hbos): If VideoCapturerAndroid.startCapture is updated to support
+    // other image formats, then this needs to be updated and
+    // VideoCapturerAndroid.getSupportedFormats needs to return CaptureFormats
+    // for all image formats.
+ public final int imageFormat = ImageFormat.NV21;
+
+ public CaptureFormat(int width, int height, int minFramerate,
+ int maxFramerate) {
+ this.width = width;
+ this.height = height;
+ this.minFramerate = minFramerate;
+ this.maxFramerate = maxFramerate;
+ }
+
+ // Calculates the frame size of this capture format.
+ public int frameSize() {
+ return frameSize(width, height, imageFormat);
+ }
+
+ // Calculates the frame size of the specified image format. Currently only
+ // supporting ImageFormat.NV21.
+ // The size is width * height * number of bytes per pixel.
+ // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
+ public static int frameSize(int width, int height, int imageFormat) {
+ if (imageFormat != ImageFormat.NV21) {
+ throw new UnsupportedOperationException("Don't know how to calculate "
+ + "the frame size of non-NV21 image formats.");
+ }
+ return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
+ }
+
+ @Override
+ public String toString() {
+ return width + "x" + height + "@[" + minFramerate + ":" + maxFramerate + "]";
+ }
+
+ public boolean isSameFormat(final CaptureFormat that) {
+ if (that == null) {
+ return false;
+ }
+ return width == that.width && height == that.height && maxFramerate == that.maxFramerate
+ && minFramerate == that.minFramerate;
+ }
+ }
+
+ // Returns device names that can be used to create a new VideoCapturerAndroid.
+ public static String[] getDeviceNames() {
+ String[] names = new String[android.hardware.Camera.getNumberOfCameras()];
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+ names[i] = getDeviceName(i);
+ }
+ return names;
+ }
+
+  // Returns the number of cameras on the device.
+ public static int getDeviceCount() {
+ return android.hardware.Camera.getNumberOfCameras();
+ }
+
+  // Returns the name of the camera with the given camera index. Returns null
+  // if the camera cannot be used.
+ public static String getDeviceName(int index) {
+ android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+ try {
+ android.hardware.Camera.getCameraInfo(index, info);
+ } catch (Exception e) {
+ Logging.e(TAG, "getCameraInfo failed on index " + index,e);
+ return null;
+ }
+
+ String facing =
+ (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
+ return "Camera " + index + ", Facing " + facing
+ + ", Orientation " + info.orientation;
+ }
+
+  // Returns the name of the front facing camera. Returns null if the
+  // camera cannot be used or does not exist.
+ public static String getNameOfFrontFacingDevice() {
+ return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
+ }
+
+  // Returns the name of the back facing camera. Returns null if the
+  // camera cannot be used or does not exist.
+ public static String getNameOfBackFacingDevice() {
+ return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK);
+ }
+
+ public static String getSupportedFormatsAsJson(int id) throws JSONException {
+ List<CaptureFormat> formats = getSupportedFormats(id);
+ JSONArray json_formats = new JSONArray();
+ for (CaptureFormat format : formats) {
+ JSONObject json_format = new JSONObject();
+ json_format.put("width", format.width);
+ json_format.put("height", format.height);
+ json_format.put("framerate", (format.maxFramerate + 999) / 1000);
+ json_formats.put(json_format);
+ }
+ Logging.d(TAG, "Supported formats for camera " + id + ": "
+ + json_formats.toString(2));
+ return json_formats.toString();
+ }
+
+  // Helper class used by the two functions below to find the closest supported format.
+ private static abstract class ClosestComparator<T> implements Comparator<T> {
+ // Difference between supported and requested parameter.
+ abstract int diff(T supportedParameter);
+
+ @Override
+ public int compare(T t1, T t2) {
+ return diff(t1) - diff(t2);
+ }
+ }
+
+ public static int[] getFramerateRange(android.hardware.Camera.Parameters parameters,
+ final int framerate) {
+ List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
+ if (listFpsRange.isEmpty()) {
+ Logging.w(TAG, "No supported preview fps range");
+ return new int[]{0, 0};
+ }
+ return Collections.min(listFpsRange,
+ new ClosestComparator<int[]>() {
+ @Override int diff(int[] range) {
+ final int maxFpsWeight = 10;
+ return range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX]
+ + maxFpsWeight * abs(framerate
+ - range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+ }
+ });
+ }
+
+ public static android.hardware.Camera.Size getClosestSupportedSize(
+ List<android.hardware.Camera.Size> supportedSizes, final int requestedWidth,
+ final int requestedHeight) {
+ return Collections.min(supportedSizes,
+ new ClosestComparator<android.hardware.Camera.Size>() {
+ @Override int diff(android.hardware.Camera.Size size) {
+ return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
+ }
+ });
+ }
+
+ private static String getNameOfDevice(int facing) {
+ final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+ try {
+ android.hardware.Camera.getCameraInfo(i, info);
+ if (info.facing == facing) {
+ return getDeviceName(i);
+ }
+ } catch (Exception e) {
+ Logging.e(TAG, "getCameraInfo() failed on index " + i, e);
+ }
+ }
+ return null;
+ }
+}
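A sketch of how the two ClosestComparator-based helpers are meant to be used when
configuring a camera (illustrative, not part of this CL; error handling omitted):

  android.hardware.Camera camera = android.hardware.Camera.open(0);
  android.hardware.Camera.Parameters parameters = camera.getParameters();
  // Framerates are in units of fps * 1000, so request 30 fps as 30000.
  int[] fpsRange = CameraEnumerationAndroid.getFramerateRange(parameters, 30 * 1000);
  android.hardware.Camera.Size size = CameraEnumerationAndroid.getClosestSupportedSize(
      parameters.getSupportedPreviewSizes(), 1280, 720);
  parameters.setPreviewFpsRange(fpsRange[0], fpsRange[1]);
  parameters.setPreviewSize(size.width, size.height);
  camera.setParameters(parameters);
  // ... start preview/capture, then release the camera when done.
  camera.release();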
diff --git a/webrtc/api/java/android/org/webrtc/CameraEnumerator.java b/webrtc/api/java/android/org/webrtc/CameraEnumerator.java
new file mode 100644
index 0000000..54469cc
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/CameraEnumerator.java
@@ -0,0 +1,101 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.os.SystemClock;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import org.webrtc.Logging;
+
+import java.util.ArrayList;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+public class CameraEnumerator implements CameraEnumerationAndroid.Enumerator {
+ private final static String TAG = "CameraEnumerator";
+  // Each entry contains the supported formats for the corresponding camera index. The formats
+  // for all cameras are enumerated on the first call to getSupportedFormats(), and cached for
+  // future reference.
+ private List<List<CaptureFormat>> cachedSupportedFormats;
+
+ @Override
+ public List<CaptureFormat> getSupportedFormats(int cameraId) {
+ synchronized (this) {
+ if (cachedSupportedFormats == null) {
+ cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
+ for (int i = 0; i < CameraEnumerationAndroid.getDeviceCount(); ++i) {
+ cachedSupportedFormats.add(enumerateFormats(i));
+ }
+ }
+ }
+ return cachedSupportedFormats.get(cameraId);
+ }
+
+ private List<CaptureFormat> enumerateFormats(int cameraId) {
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+ final long startTimeMs = SystemClock.elapsedRealtime();
+ final android.hardware.Camera.Parameters parameters;
+ android.hardware.Camera camera = null;
+ try {
+ Logging.d(TAG, "Opening camera with index " + cameraId);
+ camera = android.hardware.Camera.open(cameraId);
+ parameters = camera.getParameters();
+ } catch (RuntimeException e) {
+ Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
+ return new ArrayList<CaptureFormat>();
+ } finally {
+ if (camera != null) {
+ camera.release();
+ }
+ }
+
+ final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+ try {
+ int minFps = 0;
+ int maxFps = 0;
+ final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
+ if (listFpsRange != null) {
+ // getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
+ // corresponding to the highest fps.
+ final int[] range = listFpsRange.get(listFpsRange.size() - 1);
+ minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+ maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+ }
+ for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
+ formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
+ }
+ } catch (Exception e) {
+ Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
+ }
+
+ final long endTimeMs = SystemClock.elapsedRealtime();
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+ return formatList;
+ }
+}
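The static enumerator in CameraEnumerationAndroid defaults to this camera1-based
implementation; a sketch of opting into the camera2 path where available
(illustrative, not part of this CL):

  if (Camera2Enumerator.isSupported()) {
    CameraEnumerationAndroid.setEnumerator(new Camera2Enumerator(context));
  }
  // Subsequent CameraEnumerationAndroid.getSupportedFormats(id) calls now use camera2.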
diff --git a/webrtc/api/java/android/org/webrtc/EglBase.java b/webrtc/api/java/android/org/webrtc/EglBase.java
new file mode 100644
index 0000000..b675d09
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/EglBase.java
@@ -0,0 +1,139 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.view.Surface;
+
+import javax.microedition.khronos.egl.EGL10;
+
+
+/**
+ * Holds EGL state and utility methods for handling an EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+public abstract class EglBase {
+ // EGL wrapper for an actual EGLContext.
+ public static class Context {
+ }
+
+ // These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
+ // https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
+  // This is similar to what GLSurfaceView does:
+ // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
+ private static final int EGL_OPENGL_ES2_BIT = 4;
+ // Android-specific extension.
+ private static final int EGL_RECORDABLE_ANDROID = 0x3142;
+
+ public static final int[] CONFIG_PLAIN = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_RGBA = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_ALPHA_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_PIXEL_BUFFER = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_PIXEL_RGBA_BUFFER = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_ALPHA_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_RECORDABLE = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL_RECORDABLE_ANDROID, 1,
+ EGL10.EGL_NONE
+ };
+
+ // Create a new context with the specified config attributes, sharing data with sharedContext.
+ // |sharedContext| can be null.
+ public static EglBase create(Context sharedContext, int[] configAttributes) {
+ return (EglBase14.isEGL14Supported()
+ && (sharedContext == null || sharedContext instanceof EglBase14.Context))
+ ? new EglBase14((EglBase14.Context) sharedContext, configAttributes)
+ : new EglBase10((EglBase10.Context) sharedContext, configAttributes);
+ }
+
+ public static EglBase create() {
+ return create(null, CONFIG_PLAIN);
+ }
+
+ public static EglBase create(Context sharedContext) {
+ return create(sharedContext, CONFIG_PLAIN);
+ }
+
+ public abstract void createSurface(Surface surface);
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ public abstract void createSurface(SurfaceTexture surfaceTexture);
+
+ // Create dummy 1x1 pixel buffer surface so the context can be made current.
+ public abstract void createDummyPbufferSurface();
+
+ public abstract void createPbufferSurface(int width, int height);
+
+ public abstract Context getEglBaseContext();
+
+ public abstract boolean hasSurface();
+
+ public abstract int surfaceWidth();
+
+ public abstract int surfaceHeight();
+
+ public abstract void releaseSurface();
+
+ public abstract void release();
+
+ public abstract void makeCurrent();
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ public abstract void detachCurrent();
+
+ public abstract void swapBuffers();
+}
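A minimal lifecycle sketch (illustrative, not part of this CL): create an EglBase
with no shared context, back it with a 1x1 pbuffer surface so the context can be
made current, and tear everything down:

  EglBase eglBase = EglBase.create();   // Uses EGL 1.4 when supported, else EGL 1.0.
  eglBase.createDummyPbufferSurface();  // A surface is required before makeCurrent().
  eglBase.makeCurrent();
  // ... issue GLES20 calls here ...
  eglBase.release();                    // Also detaches the context and releases the surface.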
diff --git a/webrtc/api/java/android/org/webrtc/EglBase10.java b/webrtc/api/java/android/org/webrtc/EglBase10.java
new file mode 100644
index 0000000..d11292d
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/EglBase10.java
@@ -0,0 +1,302 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.Canvas;
+import android.graphics.SurfaceTexture;
+import android.graphics.Rect;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+final class EglBase10 extends EglBase {
+ // This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
+ private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+
+ private final EGL10 egl;
+ private EGLContext eglContext;
+ private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
+
+ // EGL wrapper for an actual EGLContext.
+ public static class Context extends EglBase.Context {
+ private final EGLContext eglContext;
+
+ public Context(EGLContext eglContext) {
+ this.eglContext = eglContext;
+ }
+ }
+
+ // Create a new context with the specified config type, sharing data with sharedContext.
+ EglBase10(Context sharedContext, int[] configAttributes) {
+ this.egl = (EGL10) EGLContext.getEGL();
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(eglDisplay, configAttributes);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
+ }
+
+ @Override
+ public void createSurface(Surface surface) {
+ /**
+ * We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface
+ * couldn't actually take a Surface object until API 17. Older versions fortunately just call
+ * SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant.
+ */
+ class FakeSurfaceHolder implements SurfaceHolder {
+ private final Surface surface;
+
+ FakeSurfaceHolder(Surface surface) {
+ this.surface = surface;
+ }
+
+ @Override
+ public void addCallback(Callback callback) {}
+
+ @Override
+ public void removeCallback(Callback callback) {}
+
+ @Override
+ public boolean isCreating() {
+ return false;
+ }
+
+ @Deprecated
+ @Override
+ public void setType(int i) {}
+
+ @Override
+ public void setFixedSize(int i, int i2) {}
+
+ @Override
+ public void setSizeFromLayout() {}
+
+ @Override
+ public void setFormat(int i) {}
+
+ @Override
+ public void setKeepScreenOn(boolean b) {}
+
+ @Override
+ public Canvas lockCanvas() {
+ return null;
+ }
+
+ @Override
+ public Canvas lockCanvas(Rect rect) {
+ return null;
+ }
+
+ @Override
+ public void unlockCanvasAndPost(Canvas canvas) {}
+
+ @Override
+ public Rect getSurfaceFrame() {
+ return null;
+ }
+
+ @Override
+ public Surface getSurface() {
+ return surface;
+ }
+ }
+
+ createSurfaceInternal(new FakeSurfaceHolder(surface));
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
+ private void createSurfaceInternal(Object nativeWindow) {
+ if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_NONE};
+ eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create window surface");
+ }
+ }
+
+ // Create dummy 1x1 pixel buffer surface so the context can be made current.
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
+ eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create pixel buffer surface");
+ }
+ }
+
+ @Override
+ public org.webrtc.EglBase.Context getEglBaseContext() {
+ return new EglBase10.Context(eglContext);
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL10.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+    final int[] widthArray = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+    final int[] heightArray = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ egl.eglDestroySurface(eglDisplay, eglSurface);
+ eglSurface = EGL10.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
+ || eglConfig == null) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ detachCurrent();
+ egl.eglDestroyContext(eglDisplay, eglContext);
+ egl.eglTerminate(eglDisplay);
+ eglContext = EGL10.EGL_NO_CONTEXT;
+ eglDisplay = EGL10.EGL_NO_DISPLAY;
+ eglConfig = null;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ if (!egl.eglMakeCurrent(
+ eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ egl.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
+ throw new RuntimeException("Unable to get EGL10 display");
+ }
+ int[] version = new int[2];
+ if (!egl.eglInitialize(eglDisplay, version)) {
+ throw new RuntimeException("Unable to initialize EGL10");
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!egl.eglChooseConfig(
+ eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ return configs[0];
+ }
+
+  // Return an EGLContext, or die trying.
+ private EGLContext createEglContext(
+ Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
+ if (sharedContext != null && sharedContext.eglContext == EGL10.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Invalid sharedContext");
+ }
+ int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
+ EGLContext rootContext =
+ sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext.eglContext;
+ EGLContext eglContext =
+ egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
+ if (eglContext == EGL10.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Failed to create EGL context");
+ }
+ return eglContext;
+ }
+}
diff --git a/webrtc/api/java/android/org/webrtc/EglBase14.java b/webrtc/api/java/android/org/webrtc/EglBase14.java
new file mode 100644
index 0000000..8768ed4
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/EglBase14.java
@@ -0,0 +1,257 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.view.Surface;
+
+import org.webrtc.Logging;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+@TargetApi(18)
+public final class EglBase14 extends EglBase {
+ private static final String TAG = "EglBase14";
+ private static final int EGLExt_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR2;
+ private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT;
+ private EGLContext eglContext;
+ private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
+
+  // EGL 1.4 is supported from API 17, but EGLExt, which is used for setting the
+  // presentation time stamp on a surface, is supported from API 18, so we require 18.
+ public static boolean isEGL14Supported() {
+ Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION
+ + ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
+ return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
+ }
+
+ public static class Context extends EglBase.Context {
+ private final android.opengl.EGLContext egl14Context;
+
+ public Context(android.opengl.EGLContext eglContext) {
+ this.egl14Context = eglContext;
+ }
+ }
+
+ // Create a new context with the specified config type, sharing data with sharedContext.
+ // |sharedContext| may be null.
+ public EglBase14(EglBase14.Context sharedContext, int[] configAttributes) {
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(eglDisplay, configAttributes);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
+ }
+
+ // Create EGLSurface from the Android Surface.
+ @Override
+ public void createSurface(Surface surface) {
+ createSurfaceInternal(surface);
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either Surface or SurfaceTexture.
+ private void createSurfaceInternal(Object surface) {
+ if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create window surface");
+ }
+ }
+
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create pixel buffer surface");
+ }
+ }
+
+ @Override
+ public Context getEglBaseContext() {
+ return new EglBase14.Context(eglContext);
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL14.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+    final int[] widthArray = new int[1];
+ EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+    final int[] heightArray = new int[1];
+ EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ EGL14.eglDestroySurface(eglDisplay, eglSurface);
+ eglSurface = EGL14.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
+ || eglConfig == null) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ detachCurrent();
+ EGL14.eglDestroyContext(eglDisplay, eglContext);
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(eglDisplay);
+ eglContext = EGL14.EGL_NO_CONTEXT;
+ eglDisplay = EGL14.EGL_NO_DISPLAY;
+ eglConfig = null;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ if (!EGL14.eglMakeCurrent(
+ eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+
+ public void swapBuffers(long timeStampNs) {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ // See https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
+ EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
+ EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private static EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new RuntimeException("Unable to get EGL14 display");
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
+ throw new RuntimeException("Unable to initialize EGL14");
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(
+ eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ return configs[0];
+ }
+
+  // Return an EGLContext, or die trying.
+ private static EGLContext createEglContext(
+ EglBase14.Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
+ if (sharedContext != null && sharedContext.egl14Context == EGL14.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Invalid sharedContext");
+ }
+ int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
+ EGLContext rootContext =
+ sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext.egl14Context;
+ EGLContext eglContext =
+ EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
+ if (eglContext == EGL14.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Failed to create EGL context");
+ }
+ return eglContext;
+ }
+}
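A sketch of the encoder-oriented swapBuffers(timeStampNs) overload, which stamps the
frame via EGL_ANDROID_presentation_time before swapping (illustrative, not part of
this CL; |encoderInputSurface| and |captureTimeNs| are assumed to come from a
MediaCodec-based pipeline):

  EglBase14 encoderEgl = new EglBase14(null /* sharedContext */, EglBase.CONFIG_RECORDABLE);
  encoderEgl.createSurface(encoderInputSurface);
  encoderEgl.makeCurrent();
  // ... render the frame with GLES here ...
  encoderEgl.swapBuffers(captureTimeNs);  // Sets the presentation time, then swaps.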
diff --git a/webrtc/api/java/android/org/webrtc/GlRectDrawer.java b/webrtc/api/java/android/org/webrtc/GlRectDrawer.java
new file mode 100644
index 0000000..6d3d5d2
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/GlRectDrawer.java
@@ -0,0 +1,230 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+
+import org.webrtc.GlShader;
+import org.webrtc.GlUtil;
+
+import java.nio.ByteBuffer;
+import java.nio.FloatBuffer;
+import java.util.Arrays;
+import java.util.IdentityHashMap;
+import java.util.Map;
+
+/**
+ * Helper class to draw an opaque quad on the target viewport location. Rotation, mirroring, and
+ * cropping are specified using a 4x4 texture coordinate transform matrix. The frame input can
+ * either be an OES texture or YUV textures in I420 format. GL state is intentionally preserved
+ * between draw calls to maximize performance. The function release() must be called manually to
+ * free the resources held by this object.
+ */
+public class GlRectDrawer implements RendererCommon.GlDrawer {
+ // Simple vertex shader, used for both YUV and OES.
+ private static final String VERTEX_SHADER_STRING =
+ "varying vec2 interp_tc;\n"
+ + "attribute vec4 in_pos;\n"
+ + "attribute vec4 in_tc;\n"
+ + "\n"
+ + "uniform mat4 texMatrix;\n"
+ + "\n"
+ + "void main() {\n"
+ + " gl_Position = in_pos;\n"
+ + " interp_tc = (texMatrix * in_tc).xy;\n"
+ + "}\n";
+
+ private static final String YUV_FRAGMENT_SHADER_STRING =
+ "precision mediump float;\n"
+ + "varying vec2 interp_tc;\n"
+ + "\n"
+ + "uniform sampler2D y_tex;\n"
+ + "uniform sampler2D u_tex;\n"
+ + "uniform sampler2D v_tex;\n"
+ + "\n"
+ + "void main() {\n"
+ // CSC according to http://www.fourcc.org/fccyvrgb.php
+ + " float y = texture2D(y_tex, interp_tc).r;\n"
+ + " float u = texture2D(u_tex, interp_tc).r - 0.5;\n"
+ + " float v = texture2D(v_tex, interp_tc).r - 0.5;\n"
+ + " gl_FragColor = vec4(y + 1.403 * v, "
+ + " y - 0.344 * u - 0.714 * v, "
+ + " y + 1.77 * u, 1);\n"
+ + "}\n";
+
+ private static final String RGB_FRAGMENT_SHADER_STRING =
+ "precision mediump float;\n"
+ + "varying vec2 interp_tc;\n"
+ + "\n"
+ + "uniform sampler2D rgb_tex;\n"
+ + "\n"
+ + "void main() {\n"
+ + " gl_FragColor = texture2D(rgb_tex, interp_tc);\n"
+ + "}\n";
+
+ private static final String OES_FRAGMENT_SHADER_STRING =
+ "#extension GL_OES_EGL_image_external : require\n"
+ + "precision mediump float;\n"
+ + "varying vec2 interp_tc;\n"
+ + "\n"
+ + "uniform samplerExternalOES oes_tex;\n"
+ + "\n"
+ + "void main() {\n"
+ + " gl_FragColor = texture2D(oes_tex, interp_tc);\n"
+ + "}\n";
+
+ // Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) is
+ // top-right.
+ private static final FloatBuffer FULL_RECTANGLE_BUF =
+ GlUtil.createFloatBuffer(new float[] {
+ -1.0f, -1.0f, // Bottom left.
+ 1.0f, -1.0f, // Bottom right.
+ -1.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
+ GlUtil.createFloatBuffer(new float[] {
+ 0.0f, 0.0f, // Bottom left.
+ 1.0f, 0.0f, // Bottom right.
+ 0.0f, 1.0f, // Top left.
+ 1.0f, 1.0f // Top right.
+ });
+
+ private static class Shader {
+ public final GlShader glShader;
+ public final int texMatrixLocation;
+
+ public Shader(String fragmentShader) {
+ this.glShader = new GlShader(VERTEX_SHADER_STRING, fragmentShader);
+ this.texMatrixLocation = glShader.getUniformLocation("texMatrix");
+ }
+ }
+
+  // The keys are the fragment shader strings above.
+ private final Map<String, Shader> shaders = new IdentityHashMap<String, Shader>();
+
+ /**
+ * Draw an OES texture frame with specified texture transformation matrix. Required resources are
+ * allocated at the first call to this function.
+ */
+ @Override
+ public void drawOes(int oesTextureId, float[] texMatrix, int x, int y, int width, int height) {
+ prepareShader(OES_FRAGMENT_SHADER_STRING, texMatrix);
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ // updateTexImage() may be called from another thread in another EGL context, so we need to
+    // bind/unbind the texture in each draw call so that GLES understands it's a new texture.
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
+ drawRectangle(x, y, width, height);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ }
+
+ /**
+ * Draw a RGB(A) texture frame with specified texture transformation matrix. Required resources
+ * are allocated at the first call to this function.
+ */
+ @Override
+ public void drawRgb(int textureId, float[] texMatrix, int x, int y, int width, int height) {
+ prepareShader(RGB_FRAGMENT_SHADER_STRING, texMatrix);
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+ drawRectangle(x, y, width, height);
+ // Unbind the texture as a precaution.
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+
+ /**
+ * Draw a YUV frame with specified texture transformation matrix. Required resources are
+ * allocated at the first call to this function.
+ */
+ @Override
+ public void drawYuv(int[] yuvTextures, float[] texMatrix, int x, int y, int width, int height) {
+ prepareShader(YUV_FRAGMENT_SHADER_STRING, texMatrix);
+ // Bind the textures.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ }
+ drawRectangle(x, y, width, height);
+    // Unbind the textures as a precaution.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+ }
+
+ private void drawRectangle(int x, int y, int width, int height) {
+ // Draw quad.
+ GLES20.glViewport(x, y, width, height);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ }
+
+ private void prepareShader(String fragmentShader, float[] texMatrix) {
+ final Shader shader;
+ if (shaders.containsKey(fragmentShader)) {
+ shader = shaders.get(fragmentShader);
+ } else {
+ // Lazy allocation.
+ shader = new Shader(fragmentShader);
+ shaders.put(fragmentShader, shader);
+ shader.glShader.useProgram();
+ // Initialize fragment shader uniform values.
+ if (fragmentShader == YUV_FRAGMENT_SHADER_STRING) {
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("y_tex"), 0);
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("u_tex"), 1);
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("v_tex"), 2);
+ } else if (fragmentShader == RGB_FRAGMENT_SHADER_STRING) {
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("rgb_tex"), 0);
+ } else if (fragmentShader == OES_FRAGMENT_SHADER_STRING) {
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("oes_tex"), 0);
+ } else {
+ throw new IllegalStateException("Unknown fragment shader: " + fragmentShader);
+ }
+ GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
+ // Initialize vertex shader attributes.
+ shader.glShader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
+ shader.glShader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
+ }
+ shader.glShader.useProgram();
+ // Copy the texture transformation matrix over.
+ GLES20.glUniformMatrix4fv(shader.texMatrixLocation, 1, false, texMatrix, 0);
+ }
+
+ /**
+ * Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
+ */
+ @Override
+ public void release() {
+ for (Shader shader : shaders.values()) {
+ shader.glShader.release();
+ }
+ shaders.clear();
+ }
+}
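A per-frame usage sketch (illustrative, not part of this CL; |oesTextureId| is assumed
to be attached to a SurfaceTexture, an EGL context is assumed current, and
|viewportWidth|/|viewportHeight| describe the target surface):

  GlRectDrawer drawer = new GlRectDrawer();
  float[] texMatrix = new float[16];
  android.opengl.Matrix.setIdentityM(texMatrix, 0);  // No rotation/mirror/crop.
  // Shaders are compiled lazily on the first draw call for each fragment shader type.
  drawer.drawOes(oesTextureId, texMatrix, 0, 0, viewportWidth, viewportHeight);
  // When done rendering on this EGL context:
  drawer.release();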
diff --git a/webrtc/api/java/android/org/webrtc/GlShader.java b/webrtc/api/java/android/org/webrtc/GlShader.java
new file mode 100644
index 0000000..966f0f5
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/GlShader.java
@@ -0,0 +1,144 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+import org.webrtc.Logging;
+
+import java.nio.FloatBuffer;
+
+// Helper class for handling OpenGL shaders and shader programs.
+public class GlShader {
+ private static final String TAG = "GlShader";
+
+ private static int compileShader(int shaderType, String source) {
+ int[] result = new int[] {
+ GLES20.GL_FALSE
+ };
+ int shader = GLES20.glCreateShader(shaderType);
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0);
+ if (result[0] != GLES20.GL_TRUE) {
+ Logging.e(TAG, "Could not compile shader " + shaderType + ":" +
+ GLES20.glGetShaderInfoLog(shader));
+ throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
+ }
+ GlUtil.checkNoGLES2Error("compileShader");
+ return shader;
+ }
+
+ private int vertexShader;
+ private int fragmentShader;
+ private int program;
+
+ public GlShader(String vertexSource, String fragmentSource) {
+ vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ program = GLES20.glCreateProgram();
+ if (program == 0) {
+ throw new RuntimeException("Could not create program");
+ }
+ GLES20.glAttachShader(program, vertexShader);
+ GLES20.glAttachShader(program, fragmentShader);
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[] {
+ GLES20.GL_FALSE
+ };
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ Logging.e(TAG, "Could not link program: " +
+ GLES20.glGetProgramInfoLog(program));
+ throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
+ }
+ GlUtil.checkNoGLES2Error("Creating GlShader");
+ }
+
+ public int getAttribLocation(String label) {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ int location = GLES20.glGetAttribLocation(program, label);
+ if (location < 0) {
+ throw new RuntimeException("Could not locate '" + label + "' in program");
+ }
+ return location;
+ }
+
+ /**
+ * Enable and upload a vertex array for attribute |label|. The vertex data is specified in
+ * |buffer| with |dimension| number of components per vertex.
+ */
+ public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ int location = getAttribLocation(label);
+ GLES20.glEnableVertexAttribArray(location);
+ GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, 0, buffer);
+ GlUtil.checkNoGLES2Error("setVertexAttribArray");
+ }
+
+ public int getUniformLocation(String label) {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ int location = GLES20.glGetUniformLocation(program, label);
+ if (location < 0) {
+ throw new RuntimeException("Could not locate uniform '" + label + "' in program");
+ }
+ return location;
+ }
+
+ public void useProgram() {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ GLES20.glUseProgram(program);
+ GlUtil.checkNoGLES2Error("glUseProgram");
+ }
+
+ public void release() {
+ Logging.d(TAG, "Deleting shader.");
+ // Flag shaders for deletion (does not delete until no longer attached to a program).
+ if (vertexShader != -1) {
+ GLES20.glDeleteShader(vertexShader);
+ vertexShader = -1;
+ }
+ if (fragmentShader != -1) {
+ GLES20.glDeleteShader(fragmentShader);
+ fragmentShader = -1;
+ }
+ // Delete program, automatically detaching any shaders from it.
+ if (program != -1) {
+ GLES20.glDeleteProgram(program);
+ program = -1;
+ }
+ }
+}
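+
+// A minimal usage sketch (illustrative only, not part of this CL): draws a full-screen
+// quad with a trivial shader pair. The EXAMPLE_* shader sources are assumptions; the
+// attribute name "in_pos" must match the vertex shader, and an EGL context must be
+// current on the calling thread.
+class GlShaderExample {
+ private static final String EXAMPLE_VERTEX_SHADER =
+ "attribute vec4 in_pos;\n"
+ + "void main() {\n"
+ + "  gl_Position = in_pos;\n"
+ + "}\n";
+
+ private static final String EXAMPLE_FRAGMENT_SHADER =
+ "precision mediump float;\n"
+ + "void main() {\n"
+ + "  gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n"
+ + "}\n";
+
+ static void drawRedQuad() {
+ final GlShader shader = new GlShader(EXAMPLE_VERTEX_SHADER, EXAMPLE_FRAGMENT_SHADER);
+ shader.useProgram();
+ // Upload a full-rectangle position attribute and draw.
+ shader.setVertexAttribArray("in_pos", 2, GlUtil.createFloatBuffer(
+ new float[] {-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f}));
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ shader.release();
+ }
+}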
diff --git a/webrtc/api/java/android/org/webrtc/GlTextureFrameBuffer.java b/webrtc/api/java/android/org/webrtc/GlTextureFrameBuffer.java
new file mode 100644
index 0000000..fd52c37
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/GlTextureFrameBuffer.java
@@ -0,0 +1,142 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+/**
+ * Helper class for handling an OpenGL framebuffer with only a color attachment and no depth or
+ * stencil buffer. Intended for simple tasks such as texture copy, texture downscaling, and
+ * texture color conversion.
+ */
+// TODO(magjed): Add unittests for this class.
+public class GlTextureFrameBuffer {
+ private final int frameBufferId;
+ private final int textureId;
+ private final int pixelFormat;
+ private int width;
+ private int height;
+
+ /**
+ * Generate texture and framebuffer resources. An EGLContext must be bound on the current thread
+ * when calling this function. The framebuffer is not complete until setSize() is called.
+ */
+ public GlTextureFrameBuffer(int pixelFormat) {
+ switch (pixelFormat) {
+ case GLES20.GL_LUMINANCE:
+ case GLES20.GL_RGB:
+ case GLES20.GL_RGBA:
+ this.pixelFormat = pixelFormat;
+ break;
+ default:
+ throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
+ }
+
+ textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ this.width = 0;
+ this.height = 0;
+
+ // Create framebuffer object and bind it.
+ final int[] frameBuffers = new int[1];
+ GLES20.glGenFramebuffers(1, frameBuffers, 0);
+ frameBufferId = frameBuffers[0];
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
+ GlUtil.checkNoGLES2Error("Generate framebuffer");
+
+ // Attach the texture to the framebuffer as the color attachment.
+ GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
+ GLES20.GL_TEXTURE_2D, textureId, 0);
+ GlUtil.checkNoGLES2Error("Attach texture to framebuffer");
+
+ // Restore normal framebuffer.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ }
+
+ /**
+ * (Re)allocate texture. Will do nothing if the requested size equals the current size. An
+ * EGLContext must be bound on the current thread when calling this function. Must be called at
+ * least once before using the framebuffer. May be called multiple times to change size.
+ */
+ public void setSize(int width, int height) {
+ if (width <= 0 || height <= 0) {
+ throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
+ }
+ if (width == this.width && height == this.height) {
+ return;
+ }
+ this.width = width;
+ this.height = height;
+
+ // Bind our framebuffer.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
+ GlUtil.checkNoGLES2Error("glBindFramebuffer");
+
+ // Allocate texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
+ GLES20.GL_UNSIGNED_BYTE, null);
+
+ // Check that the framebuffer is in a good state.
+ final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
+ if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
+ throw new IllegalStateException("Framebuffer not complete, status: " + status);
+ }
+
+ // Restore normal framebuffer.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+
+ public int getWidth() {
+ return width;
+ }
+
+ public int getHeight() {
+ return height;
+ }
+
+ public int getFrameBufferId() {
+ return frameBufferId;
+ }
+
+ public int getTextureId() {
+ return textureId;
+ }
+
+ /**
+ * Release texture and framebuffer. An EGLContext must be bound on the current thread when calling
+ * this function. This object should not be used after this call.
+ */
+ public void release() {
+ GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
+ GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
+ width = 0;
+ height = 0;
+ }
+}
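+
+// A minimal sketch (illustrative only, not part of this CL) of the intended call
+// pattern: allocate via setSize(), render into the framebuffer, then sample the
+// attached texture. The actual draw calls are assumed to happen where indicated, and
+// an EGL context must be current on the calling thread.
+class GlTextureFrameBufferExample {
+ static void renderOffscreen(int width, int height) {
+ final GlTextureFrameBuffer frameBuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
+ frameBuffer.setSize(width, height);
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer.getFrameBufferId());
+ // ... issue draw calls here; they now render into the attached texture ...
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ // frameBuffer.getTextureId() can now be sampled by another shader.
+ frameBuffer.release();
+ }
+}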
diff --git a/webrtc/api/java/android/org/webrtc/GlUtil.java b/webrtc/api/java/android/org/webrtc/GlUtil.java
new file mode 100644
index 0000000..8b43579
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/GlUtil.java
@@ -0,0 +1,75 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * Some OpenGL static utility functions.
+ */
+public class GlUtil {
+ private GlUtil() {}
+
+ // Assert that no OpenGL ES 2.0 error has been raised.
+ public static void checkNoGLES2Error(String msg) {
+ int error = GLES20.glGetError();
+ if (error != GLES20.GL_NO_ERROR) {
+ throw new RuntimeException(msg + ": GLES20 error: " + error);
+ }
+ }
+
+ public static FloatBuffer createFloatBuffer(float[] coords) {
+ // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
+ ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
+ bb.order(ByteOrder.nativeOrder());
+ FloatBuffer fb = bb.asFloatBuffer();
+ fb.put(coords);
+ fb.position(0);
+ return fb;
+ }
+
+ /**
+ * Generate texture with standard parameters.
+ */
+ public static int generateTexture(int target) {
+ final int[] textureArray = new int[1];
+ GLES20.glGenTextures(1, textureArray, 0);
+ final int textureId = textureArray[0];
+ GLES20.glBindTexture(target, textureId);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+ checkNoGLES2Error("generateTexture");
+ return textureId;
+ }
+}
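+
+// Illustrative sketch (not part of this CL): typical use of the helpers above to build
+// a packed vertex buffer for a full-screen quad and a texture with the standard
+// linear/clamp parameters. An EGL context must be current on the calling thread.
+class GlUtilExample {
+ static int createQuadTexture() {
+ // Vertex buffer that e.g. GlShader.setVertexAttribArray() could consume.
+ final FloatBuffer quad = GlUtil.createFloatBuffer(
+ new float[] {-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f});
+ final int textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ GlUtil.checkNoGLES2Error("createQuadTexture");
+ return textureId;
+ }
+}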
diff --git a/webrtc/api/java/android/org/webrtc/NetworkMonitor.java b/webrtc/api/java/android/org/webrtc/NetworkMonitor.java
new file mode 100644
index 0000000..a075b36
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/NetworkMonitor.java
@@ -0,0 +1,270 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import static org.webrtc.NetworkMonitorAutoDetect.ConnectionType;
+import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID;
+import static org.webrtc.NetworkMonitorAutoDetect.NetworkInformation;
+
+import org.webrtc.Logging;
+
+import android.content.Context;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Borrowed from Chromium's src/net/android/java/src/org/chromium/net/NetworkChangeNotifier.java
+ *
+ * Triggers updates to the underlying network state from OS networking events.
+ *
+ * WARNING: This class is not thread-safe.
+ */
+public class NetworkMonitor {
+ /**
+ * Alerted when the connection type of the network changes.
+ * The alert is fired on the UI thread.
+ */
+ public interface NetworkObserver {
+ public void onConnectionTypeChanged(ConnectionType connectionType);
+ }
+
+ private static final String TAG = "NetworkMonitor";
+ private static NetworkMonitor instance;
+
+ private final Context applicationContext;
+
+ // Native observers of the connection type changes.
+ private final ArrayList<Long> nativeNetworkObservers;
+ // Java observers of the connection type changes.
+ private final ArrayList<NetworkObserver> networkObservers;
+
+ // Object that detects the connection type changes.
+ private NetworkMonitorAutoDetect autoDetector;
+
+ private ConnectionType currentConnectionType = ConnectionType.CONNECTION_UNKNOWN;
+
+ private NetworkMonitor(Context context) {
+ assertIsTrue(context != null);
+ applicationContext =
+ context.getApplicationContext() == null ? context : context.getApplicationContext();
+
+ nativeNetworkObservers = new ArrayList<Long>();
+ networkObservers = new ArrayList<NetworkObserver>();
+ }
+
+ /**
+ * Initializes the singleton once.
+ * Called from the native code.
+ */
+ public static NetworkMonitor init(Context context) {
+ if (!isInitialized()) {
+ instance = new NetworkMonitor(context);
+ }
+ return instance;
+ }
+
+ public static boolean isInitialized() {
+ return instance != null;
+ }
+
+ /**
+ * Returns the singleton instance.
+ */
+ public static NetworkMonitor getInstance() {
+ return instance;
+ }
+
+ /**
+ * Enables auto detection of the current network state based on notifications from the system.
+ * Note that passing true here requires that the embedding app have the platform
+ * ACCESS_NETWORK_STATE permission.
+ *
+ * @param shouldAutoDetect true if the NetworkMonitor should listen for system changes in
+ * network connectivity.
+ */
+ public static void setAutoDetectConnectivityState(boolean shouldAutoDetect) {
+ getInstance().setAutoDetectConnectivityStateInternal(shouldAutoDetect);
+ }
+
+ private static void assertIsTrue(boolean condition) {
+ if (!condition) {
+ throw new AssertionError("Expected to be true");
+ }
+ }
+
+ // Called by the native code.
+ private void startMonitoring(long nativeObserver) {
+ Logging.d(TAG, "Start monitoring from native observer " + nativeObserver);
+ nativeNetworkObservers.add(nativeObserver);
+ setAutoDetectConnectivityStateInternal(true);
+ }
+
+ // Called by the native code.
+ private void stopMonitoring(long nativeObserver) {
+ Logging.d(TAG, "Stop monitoring from native observer " + nativeObserver);
+ setAutoDetectConnectivityStateInternal(false);
+ nativeNetworkObservers.remove(nativeObserver);
+ }
+
+ private ConnectionType getCurrentConnectionType() {
+ return currentConnectionType;
+ }
+
+ private int getCurrentDefaultNetId() {
+ return autoDetector == null ? INVALID_NET_ID : autoDetector.getDefaultNetId();
+ }
+
+ private void destroyAutoDetector() {
+ if (autoDetector != null) {
+ autoDetector.destroy();
+ autoDetector = null;
+ }
+ }
+
+ private void setAutoDetectConnectivityStateInternal(boolean shouldAutoDetect) {
+ if (!shouldAutoDetect) {
+ destroyAutoDetector();
+ return;
+ }
+ if (autoDetector == null) {
+ autoDetector = new NetworkMonitorAutoDetect(
+ new NetworkMonitorAutoDetect.Observer() {
+
+ @Override
+ public void onConnectionTypeChanged(ConnectionType newConnectionType) {
+ updateCurrentConnectionType(newConnectionType);
+ }
+
+ @Override
+ public void onNetworkConnect(NetworkInformation networkInfo) {
+ notifyObserversOfNetworkConnect(networkInfo);
+ }
+
+ @Override
+ public void onNetworkDisconnect(int networkHandle) {
+ notifyObserversOfNetworkDisconnect(networkHandle);
+ }
+ },
+ applicationContext);
+ final NetworkMonitorAutoDetect.NetworkState networkState =
+ autoDetector.getCurrentNetworkState();
+ updateCurrentConnectionType(NetworkMonitorAutoDetect.getConnectionType(networkState));
+ updateActiveNetworkList();
+ }
+ }
+
+ private void updateCurrentConnectionType(ConnectionType newConnectionType) {
+ currentConnectionType = newConnectionType;
+ notifyObserversOfConnectionTypeChange(newConnectionType);
+ }
+
+ /**
+ * Alerts all observers of a connection change.
+ */
+ private void notifyObserversOfConnectionTypeChange(ConnectionType newConnectionType) {
+ for (long nativeObserver : nativeNetworkObservers) {
+ nativeNotifyConnectionTypeChanged(nativeObserver);
+ }
+ for (NetworkObserver observer : networkObservers) {
+ observer.onConnectionTypeChanged(newConnectionType);
+ }
+ }
+
+ private void notifyObserversOfNetworkConnect(NetworkInformation networkInfo) {
+ for (long nativeObserver : nativeNetworkObservers) {
+ nativeNotifyOfNetworkConnect(nativeObserver, networkInfo);
+ }
+ }
+
+ private void notifyObserversOfNetworkDisconnect(int networkHandle) {
+ for (long nativeObserver : nativeNetworkObservers) {
+ nativeNotifyOfNetworkDisconnect(nativeObserver, networkHandle);
+ }
+ }
+
+ private void updateActiveNetworkList() {
+ List<NetworkInformation> networkInfoList = autoDetector.getActiveNetworkList();
+ if (networkInfoList == null || networkInfoList.size() == 0) {
+ return;
+ }
+
+ NetworkInformation[] networkInfos = new NetworkInformation[networkInfoList.size()];
+ networkInfos = networkInfoList.toArray(networkInfos);
+ for (long nativeObserver : nativeNetworkObservers) {
+ nativeNotifyOfActiveNetworkList(nativeObserver, networkInfos);
+ }
+ }
+
+ /**
+ * Adds an observer for any connection type changes.
+ */
+ public static void addNetworkObserver(NetworkObserver observer) {
+ getInstance().addNetworkObserverInternal(observer);
+ }
+
+ private void addNetworkObserverInternal(NetworkObserver observer) {
+ networkObservers.add(observer);
+ }
+
+ /**
+ * Removes an observer for any connection type changes.
+ */
+ public static void removeNetworkObserver(NetworkObserver observer) {
+ getInstance().removeNetworkObserverInternal(observer);
+ }
+
+ private void removeNetworkObserverInternal(NetworkObserver observer) {
+ networkObservers.remove(observer);
+ }
+
+ /**
+ * Checks whether there is currently connectivity.
+ */
+ public static boolean isOnline() {
+ ConnectionType connectionType = getInstance().getCurrentConnectionType();
+ return connectionType != ConnectionType.CONNECTION_UNKNOWN
+ && connectionType != ConnectionType.CONNECTION_NONE;
+ }
+
+ private native void nativeNotifyConnectionTypeChanged(long nativePtr);
+ private native void nativeNotifyOfNetworkConnect(long nativePtr, NetworkInformation networkInfo);
+ private native void nativeNotifyOfNetworkDisconnect(long nativePtr, int networkHandle);
+ private native void nativeNotifyOfActiveNetworkList(long nativePtr,
+ NetworkInformation[] networkInfos);
+
+ // For testing only.
+ static void resetInstanceForTests(Context context) {
+ instance = new NetworkMonitor(context);
+ }
+
+ // For testing only.
+ public static NetworkMonitorAutoDetect getAutoDetectorForTest() {
+ return getInstance().autoDetector;
+ }
+}
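+
+// A minimal usage sketch (illustrative, not part of this CL): initialize the singleton
+// with an application Context supplied by the embedding app, enable auto detection,
+// and register a Java-side observer.
+class NetworkMonitorExample {
+ static void startObserving(Context context) {
+ NetworkMonitor.init(context);
+ NetworkMonitor.setAutoDetectConnectivityState(true);
+ NetworkMonitor.addNetworkObserver(new NetworkMonitor.NetworkObserver() {
+ @Override
+ public void onConnectionTypeChanged(ConnectionType connectionType) {
+ Logging.d("NetworkMonitorExample", "New connection type: " + connectionType);
+ }
+ });
+ }
+}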
diff --git a/webrtc/api/java/android/org/webrtc/NetworkMonitorAutoDetect.java b/webrtc/api/java/android/org/webrtc/NetworkMonitorAutoDetect.java
new file mode 100644
index 0000000..d71efc6
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/NetworkMonitorAutoDetect.java
@@ -0,0 +1,605 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import static android.net.NetworkCapabilities.NET_CAPABILITY_INTERNET;
+import static android.net.NetworkCapabilities.TRANSPORT_CELLULAR;
+
+import org.webrtc.Logging;
+
+import android.Manifest.permission;
+import android.annotation.SuppressLint;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.content.pm.PackageManager;
+import android.net.ConnectivityManager;
+import android.net.ConnectivityManager.NetworkCallback;
+import android.net.LinkAddress;
+import android.net.LinkProperties;
+import android.net.Network;
+import android.net.NetworkCapabilities;
+import android.net.NetworkInfo;
+import android.net.NetworkRequest;
+import android.net.wifi.WifiInfo;
+import android.net.wifi.WifiManager;
+import android.os.Build;
+import android.telephony.TelephonyManager;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Borrowed from Chromium's
+ * src/net/android/java/src/org/chromium/net/NetworkChangeNotifierAutoDetect.java
+ *
+ * Used by the NetworkMonitor to listen to platform changes in connectivity.
+ * Note that use of this class requires that the app have the platform
+ * ACCESS_NETWORK_STATE permission.
+ */
+public class NetworkMonitorAutoDetect extends BroadcastReceiver {
+ public static enum ConnectionType {
+ CONNECTION_UNKNOWN,
+ CONNECTION_ETHERNET,
+ CONNECTION_WIFI,
+ CONNECTION_4G,
+ CONNECTION_3G,
+ CONNECTION_2G,
+ CONNECTION_BLUETOOTH,
+ CONNECTION_NONE
+ }
+
+ public static class IPAddress {
+ public final byte[] address;
+ public IPAddress(byte[] address) {
+ this.address = address;
+ }
+ }
+
+ /** Java version of NetworkMonitor.NetworkInformation */
+ public static class NetworkInformation {
+ public final String name;
+ public final ConnectionType type;
+ public final int handle;
+ public final IPAddress[] ipAddresses;
+ public NetworkInformation(String name, ConnectionType type, int handle,
+ IPAddress[] addresses) {
+ this.name = name;
+ this.type = type;
+ this.handle = handle;
+ this.ipAddresses = addresses;
+ }
+ }
+
+ static class NetworkState {
+ private final boolean connected;
+ // Defined from ConnectivityManager.TYPE_XXX for non-mobile; for mobile, it is
+ // further divided into 2G, 3G, or 4G from the subtype.
+ private final int type;
+ // Defined from NetworkInfo.subtype, which is one of the TelephonyManager.NETWORK_TYPE_XXXs.
+ // Will be useful to find the maximum bandwidth.
+ private final int subtype;
+
+ public NetworkState(boolean connected, int type, int subtype) {
+ this.connected = connected;
+ this.type = type;
+ this.subtype = subtype;
+ }
+
+ public boolean isConnected() {
+ return connected;
+ }
+
+ public int getNetworkType() {
+ return type;
+ }
+
+ public int getNetworkSubType() {
+ return subtype;
+ }
+ }
+
+ /**
+ * The methods in this class get called when the network changes if the callback
+ * is registered with a proper network request. It is only available in Android Lollipop
+ * and above.
+ */
+ @SuppressLint("NewApi")
+ private class SimpleNetworkCallback extends NetworkCallback {
+
+ @Override
+ public void onAvailable(Network network) {
+ Logging.d(TAG, "Network becomes available: " + network.toString());
+ onNetworkChanged(network);
+ }
+
+ @Override
+ public void onCapabilitiesChanged(
+ Network network, NetworkCapabilities networkCapabilities) {
+ // A capabilities change may indicate the ConnectionType has changed,
+ // so forward the new NetworkInformation along to the observer.
+ Logging.d(TAG, "capabilities changed: " + networkCapabilities.toString());
+ onNetworkChanged(network);
+ }
+
+ @Override
+ public void onLinkPropertiesChanged(Network network, LinkProperties linkProperties) {
+ // A link property change may indicate that the IP address has changed,
+ // so forward the new NetworkInformation to the observer.
+ Logging.d(TAG, "link properties changed: " + linkProperties.toString());
+ onNetworkChanged(network);
+ }
+
+ @Override
+ public void onLosing(Network network, int maxMsToLive) {
+ // Called when the network is about to be lost, within at most maxMsToLive milliseconds.
+ // We may use this signal later.
+ Logging.d(TAG, "Network with handle " + networkToNetId(network) +
+ " is about to be lost in " + maxMsToLive + "ms");
+ }
+
+ @Override
+ public void onLost(Network network) {
+ int handle = networkToNetId(network);
+ Logging.d(TAG, "Network with handle " + handle + " is disconnected");
+ observer.onNetworkDisconnect(handle);
+ }
+
+ private void onNetworkChanged(Network network) {
+ NetworkInformation networkInformation = connectivityManagerDelegate.networkToInfo(network);
+ if (networkInformation.type != ConnectionType.CONNECTION_UNKNOWN
+ && networkInformation.type != ConnectionType.CONNECTION_NONE) {
+ observer.onNetworkConnect(networkInformation);
+ }
+ }
+ }
+
+ /** Queries the ConnectivityManager for information about the current connection. */
+ static class ConnectivityManagerDelegate {
+ /**
+ * Note: on some rare Android systems, connectivityManager is null. The methods
+ * below handle that gracefully.
+ */
+ private final ConnectivityManager connectivityManager;
+
+ ConnectivityManagerDelegate(Context context) {
+ connectivityManager =
+ (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
+ }
+
+ // For testing.
+ ConnectivityManagerDelegate() {
+ // All the methods below should be overridden.
+ connectivityManager = null;
+ }
+
+ /**
+ * Returns connection type and status information about the current
+ * default network.
+ */
+ NetworkState getNetworkState() {
+ if (connectivityManager == null) {
+ return new NetworkState(false, -1, -1);
+ }
+ return getNetworkState(connectivityManager.getActiveNetworkInfo());
+ }
+
+ /**
+ * Returns connection type and status information about |network|.
+ * Only callable on Lollipop and newer releases.
+ */
+ @SuppressLint("NewApi")
+ NetworkState getNetworkState(Network network) {
+ if (connectivityManager == null) {
+ return new NetworkState(false, -1, -1);
+ }
+ return getNetworkState(connectivityManager.getNetworkInfo(network));
+ }
+
+ /**
+ * Returns connection type and status information gleaned from networkInfo.
+ */
+ NetworkState getNetworkState(NetworkInfo networkInfo) {
+ if (networkInfo == null || !networkInfo.isConnected()) {
+ return new NetworkState(false, -1, -1);
+ }
+ return new NetworkState(true, networkInfo.getType(), networkInfo.getSubtype());
+ }
+
+ /**
+ * Returns all connected networks.
+ * Only callable on Lollipop and newer releases.
+ */
+ @SuppressLint("NewApi")
+ Network[] getAllNetworks() {
+ if (connectivityManager == null) {
+ return new Network[0];
+ }
+ return connectivityManager.getAllNetworks();
+ }
+
+ List<NetworkInformation> getActiveNetworkList() {
+ if (!supportNetworkCallback()) {
+ return null;
+ }
+ ArrayList<NetworkInformation> netInfoList = new ArrayList<NetworkInformation>();
+ for (Network network : getAllNetworks()) {
+ NetworkInformation info = networkToInfo(network);
+ if (info.name != null && info.type != ConnectionType.CONNECTION_NONE
+ && info.type != ConnectionType.CONNECTION_UNKNOWN) {
+ netInfoList.add(info);
+ }
+ }
+ return netInfoList;
+ }
+
+ /**
+ * Returns the NetID of the current default network, or
+ * INVALID_NET_ID if no default network is currently connected.
+ * Only callable on Lollipop and newer releases.
+ */
+ @SuppressLint("NewApi")
+ int getDefaultNetId() {
+ if (!supportNetworkCallback()) {
+ return INVALID_NET_ID;
+ }
+ // Android Lollipop had no API to get the default network; only an
+ // API to return the NetworkInfo of the default network. To determine
+ // the default network, we therefore look for a connected network whose
+ // type matches that of the default NetworkInfo.
+ final NetworkInfo defaultNetworkInfo = connectivityManager.getActiveNetworkInfo();
+ if (defaultNetworkInfo == null) {
+ return INVALID_NET_ID;
+ }
+ final Network[] networks = getAllNetworks();
+ int defaultNetId = INVALID_NET_ID;
+ for (Network network : networks) {
+ if (!hasInternetCapability(network)) {
+ continue;
+ }
+ final NetworkInfo networkInfo = connectivityManager.getNetworkInfo(network);
+ if (networkInfo != null && networkInfo.getType() == defaultNetworkInfo.getType()) {
+ // There should not be multiple connected networks of the
+ // same type. At least as of Android Marshmallow this is
+ // not supported. If this becomes supported this assertion
+ // may trigger. At that point we could consider using
+ // ConnectivityManager.getDefaultNetwork() though this
+ // may give confusing results with VPNs and is only
+ // available with Android Marshmallow.
+ assert defaultNetId == INVALID_NET_ID;
+ defaultNetId = networkToNetId(network);
+ }
+ }
+ return defaultNetId;
+ }
+
+ @SuppressLint("NewApi")
+ private NetworkInformation networkToInfo(Network network) {
+ LinkProperties linkProperties = connectivityManager.getLinkProperties(network);
+ NetworkInformation networkInformation = new NetworkInformation(
+ linkProperties.getInterfaceName(),
+ getConnectionType(getNetworkState(network)),
+ networkToNetId(network),
+ getIPAddresses(linkProperties));
+ return networkInformation;
+ }
+
+ /**
+ * Returns true if {@code network} can provide Internet access. Can be used to
+ * ignore specialized networks (e.g. IMS, FOTA).
+ */
+ @SuppressLint("NewApi")
+ boolean hasInternetCapability(Network network) {
+ if (connectivityManager == null) {
+ return false;
+ }
+ final NetworkCapabilities capabilities =
+ connectivityManager.getNetworkCapabilities(network);
+ return capabilities != null && capabilities.hasCapability(NET_CAPABILITY_INTERNET);
+ }
+
+ /** Only callable on Lollipop and newer releases. */
+ @SuppressLint("NewApi")
+ public void registerNetworkCallback(NetworkCallback networkCallback) {
+ connectivityManager.registerNetworkCallback(
+ new NetworkRequest.Builder().addCapability(NET_CAPABILITY_INTERNET).build(),
+ networkCallback);
+ }
+
+ /** Only callable on Lollipop and newer releases. */
+ @SuppressLint("NewApi")
+ public void requestMobileNetwork(NetworkCallback networkCallback) {
+ NetworkRequest.Builder builder = new NetworkRequest.Builder();
+ builder.addCapability(NET_CAPABILITY_INTERNET).addTransportType(TRANSPORT_CELLULAR);
+ connectivityManager.requestNetwork(builder.build(), networkCallback);
+ }
+
+ @SuppressLint("NewApi")
+ IPAddress[] getIPAddresses(LinkProperties linkProperties) {
+ IPAddress[] ipAddresses = new IPAddress[linkProperties.getLinkAddresses().size()];
+ int i = 0;
+ for (LinkAddress linkAddress : linkProperties.getLinkAddresses()) {
+ ipAddresses[i] = new IPAddress(linkAddress.getAddress().getAddress());
+ ++i;
+ }
+ return ipAddresses;
+ }
+
+ @SuppressLint("NewApi")
+ public void releaseCallback(NetworkCallback networkCallback) {
+ if (supportNetworkCallback()) {
+ Logging.d(TAG, "Unregister network callback");
+ connectivityManager.unregisterNetworkCallback(networkCallback);
+ }
+ }
+
+ public boolean supportNetworkCallback() {
+ return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && connectivityManager != null;
+ }
+ }
+
+ /** Queries the WifiManager for the SSID of the current Wifi connection. */
+ static class WifiManagerDelegate {
+ private final Context context;
+ WifiManagerDelegate(Context context) {
+ this.context = context;
+ }
+
+ // For testing.
+ WifiManagerDelegate() {
+ // All the methods below should be overridden.
+ context = null;
+ }
+
+ String getWifiSSID() {
+ final Intent intent = context.registerReceiver(null,
+ new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
+ if (intent != null) {
+ final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO);
+ if (wifiInfo != null) {
+ final String ssid = wifiInfo.getSSID();
+ if (ssid != null) {
+ return ssid;
+ }
+ }
+ }
+ return "";
+ }
+ }
+
+ static final int INVALID_NET_ID = -1;
+ private static final String TAG = "NetworkMonitorAutoDetect";
+
+ // Observer for the connection type change.
+ private final Observer observer;
+ private final IntentFilter intentFilter;
+ private final Context context;
+ // Used to request a mobile network. It does nothing except keep the callback
+ // so that the request can be released later.
+ private final NetworkCallback mobileNetworkCallback;
+ // Used to receive updates on all networks.
+ private final NetworkCallback allNetworkCallback;
+ // connectivityManagerDelegate and wifiManagerDelegate are only non-final for testing.
+ private ConnectivityManagerDelegate connectivityManagerDelegate;
+ private WifiManagerDelegate wifiManagerDelegate;
+
+ private boolean isRegistered;
+ private ConnectionType connectionType;
+ private String wifiSSID;
+
+ /**
+ * Observer interface by which the observer is notified of network changes.
+ */
+ public static interface Observer {
+ /**
+ * Called when the default network changes.
+ */
+ public void onConnectionTypeChanged(ConnectionType newConnectionType);
+ public void onNetworkConnect(NetworkInformation networkInfo);
+ public void onNetworkDisconnect(int networkHandle);
+ }
+
+ /**
+ * Constructs a NetworkMonitorAutoDetect. Should only be called on the UI thread.
+ */
+ @SuppressLint("NewApi")
+ public NetworkMonitorAutoDetect(Observer observer, Context context) {
+ this.observer = observer;
+ this.context = context;
+ connectivityManagerDelegate = new ConnectivityManagerDelegate(context);
+ wifiManagerDelegate = new WifiManagerDelegate(context);
+
+ final NetworkState networkState = connectivityManagerDelegate.getNetworkState();
+ connectionType = getConnectionType(networkState);
+ wifiSSID = getWifiSSID(networkState);
+ intentFilter = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION);
+
+ registerReceiver();
+ if (connectivityManagerDelegate.supportNetworkCallback()) {
+ mobileNetworkCallback = new NetworkCallback();
+ connectivityManagerDelegate.requestMobileNetwork(mobileNetworkCallback);
+ allNetworkCallback = new SimpleNetworkCallback();
+ connectivityManagerDelegate.registerNetworkCallback(allNetworkCallback);
+ } else {
+ mobileNetworkCallback = null;
+ allNetworkCallback = null;
+ }
+ }
+
+ /**
+ * Allows overriding the ConnectivityManagerDelegate for tests.
+ */
+ void setConnectivityManagerDelegateForTests(ConnectivityManagerDelegate delegate) {
+ connectivityManagerDelegate = delegate;
+ }
+
+ /**
+ * Allows overriding the WifiManagerDelegate for tests.
+ */
+ void setWifiManagerDelegateForTests(WifiManagerDelegate delegate) {
+ wifiManagerDelegate = delegate;
+ }
+
+ /**
+ * Returns whether the object has registered to receive network connectivity intents.
+ * Visible for testing.
+ */
+ boolean isReceiverRegisteredForTesting() {
+ return isRegistered;
+ }
+
+ List<NetworkInformation> getActiveNetworkList() {
+ return connectivityManagerDelegate.getActiveNetworkList();
+ }
+
+ public void destroy() {
+ if (allNetworkCallback != null) {
+ connectivityManagerDelegate.releaseCallback(allNetworkCallback);
+ }
+ if (mobileNetworkCallback != null) {
+ connectivityManagerDelegate.releaseCallback(mobileNetworkCallback);
+ }
+ unregisterReceiver();
+ }
+
+ /**
+ * Registers a BroadcastReceiver in the given context.
+ */
+ private void registerReceiver() {
+ if (isRegistered) return;
+
+ isRegistered = true;
+ context.registerReceiver(this, intentFilter);
+ }
+
+ /**
+ * Unregisters the BroadcastReceiver in the given context.
+ */
+ private void unregisterReceiver() {
+ if (!isRegistered) return;
+
+ isRegistered = false;
+ context.unregisterReceiver(this);
+ }
+
+ public NetworkState getCurrentNetworkState() {
+ return connectivityManagerDelegate.getNetworkState();
+ }
+
+ /**
+ * Returns the NetID of the device's current default connected network used for
+ * communication.
+ * Only implemented on Lollipop and newer releases; returns INVALID_NET_ID
+ * otherwise.
+ */
+ public int getDefaultNetId() {
+ return connectivityManagerDelegate.getDefaultNetId();
+ }
+
+ public static ConnectionType getConnectionType(NetworkState networkState) {
+ if (!networkState.isConnected()) {
+ return ConnectionType.CONNECTION_NONE;
+ }
+
+ switch (networkState.getNetworkType()) {
+ case ConnectivityManager.TYPE_ETHERNET:
+ return ConnectionType.CONNECTION_ETHERNET;
+ case ConnectivityManager.TYPE_WIFI:
+ return ConnectionType.CONNECTION_WIFI;
+ case ConnectivityManager.TYPE_WIMAX:
+ return ConnectionType.CONNECTION_4G;
+ case ConnectivityManager.TYPE_BLUETOOTH:
+ return ConnectionType.CONNECTION_BLUETOOTH;
+ case ConnectivityManager.TYPE_MOBILE:
+ // Use information from TelephonyManager to classify the connection.
+ switch (networkState.getNetworkSubType()) {
+ case TelephonyManager.NETWORK_TYPE_GPRS:
+ case TelephonyManager.NETWORK_TYPE_EDGE:
+ case TelephonyManager.NETWORK_TYPE_CDMA:
+ case TelephonyManager.NETWORK_TYPE_1xRTT:
+ case TelephonyManager.NETWORK_TYPE_IDEN:
+ return ConnectionType.CONNECTION_2G;
+ case TelephonyManager.NETWORK_TYPE_UMTS:
+ case TelephonyManager.NETWORK_TYPE_EVDO_0:
+ case TelephonyManager.NETWORK_TYPE_EVDO_A:
+ case TelephonyManager.NETWORK_TYPE_HSDPA:
+ case TelephonyManager.NETWORK_TYPE_HSUPA:
+ case TelephonyManager.NETWORK_TYPE_HSPA:
+ case TelephonyManager.NETWORK_TYPE_EVDO_B:
+ case TelephonyManager.NETWORK_TYPE_EHRPD:
+ case TelephonyManager.NETWORK_TYPE_HSPAP:
+ return ConnectionType.CONNECTION_3G;
+ case TelephonyManager.NETWORK_TYPE_LTE:
+ return ConnectionType.CONNECTION_4G;
+ default:
+ return ConnectionType.CONNECTION_UNKNOWN;
+ }
+ default:
+ return ConnectionType.CONNECTION_UNKNOWN;
+ }
+ }
+
+ private String getWifiSSID(NetworkState networkState) {
+ if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI) return "";
+ return wifiManagerDelegate.getWifiSSID();
+ }
+
+ // BroadcastReceiver
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ final NetworkState networkState = getCurrentNetworkState();
+ if (ConnectivityManager.CONNECTIVITY_ACTION.equals(intent.getAction())) {
+ connectionTypeChanged(networkState);
+ }
+ }
+
+ private void connectionTypeChanged(NetworkState networkState) {
+ ConnectionType newConnectionType = getConnectionType(networkState);
+ String newWifiSSID = getWifiSSID(networkState);
+ if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID)) return;
+
+ connectionType = newConnectionType;
+ wifiSSID = newWifiSSID;
+ Logging.d(TAG, "Network connectivity changed, type is: " + connectionType);
+ observer.onConnectionTypeChanged(newConnectionType);
+ }
+
+ /**
+ * Extracts NetID of network. Only available on Lollipop and newer releases.
+ */
+ @SuppressLint("NewApi")
+ private static int networkToNetId(Network network) {
+ // NOTE(pauljensen): This depends on Android framework implementation details.
+ // Fortunately this functionality is unlikely to ever change.
+ // TODO(honghaiz): When we update to Android M SDK, use Network.getNetworkHandle().
+ return Integer.parseInt(network.toString());
+ }
+}
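+
+// Illustrative sketch (not part of this CL): driving the auto detector directly with a
+// custom Observer, similar to what NetworkMonitor does internally. Must be called on
+// the UI thread, and the app needs the ACCESS_NETWORK_STATE permission.
+class NetworkMonitorAutoDetectExample {
+ static NetworkMonitorAutoDetect start(Context context) {
+ return new NetworkMonitorAutoDetect(new NetworkMonitorAutoDetect.Observer() {
+ @Override
+ public void onConnectionTypeChanged(
+ NetworkMonitorAutoDetect.ConnectionType newConnectionType) {
+ Logging.d("AutoDetectExample", "Connection type changed: " + newConnectionType);
+ }
+ @Override
+ public void onNetworkConnect(NetworkMonitorAutoDetect.NetworkInformation networkInfo) {}
+ @Override
+ public void onNetworkDisconnect(int networkHandle) {}
+ }, context);
+ }
+}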
diff --git a/webrtc/api/java/android/org/webrtc/OWNERS b/webrtc/api/java/android/org/webrtc/OWNERS
new file mode 100644
index 0000000..4d31ffb
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/OWNERS
@@ -0,0 +1 @@
+magjed@webrtc.org
diff --git a/webrtc/api/java/android/org/webrtc/RendererCommon.java b/webrtc/api/java/android/org/webrtc/RendererCommon.java
new file mode 100644
index 0000000..5ada4cc
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/RendererCommon.java
@@ -0,0 +1,260 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.Point;
+import android.opengl.GLES20;
+import android.opengl.Matrix;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Static helper functions for renderer implementations.
+ */
+public class RendererCommon {
+ /** Interface for reporting rendering events. */
+ public static interface RendererEvents {
+ /**
+ * Callback fired once first frame is rendered.
+ */
+ public void onFirstFrameRendered();
+
+ /**
+ * Callback fired when rendered frame resolution or rotation has changed.
+ */
+ public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
+ }
+
+ /** Interface for rendering frames on an EGLSurface. */
+ public static interface GlDrawer {
+ /**
+ * Functions for drawing frames with different sources. The rendering surface target is
+ * implied by the current EGL context of the calling thread and requires no explicit argument.
+ * The coordinates specify the viewport location on the surface target.
+ */
+ void drawOes(int oesTextureId, float[] texMatrix, int x, int y, int width, int height);
+ void drawRgb(int textureId, float[] texMatrix, int x, int y, int width, int height);
+ void drawYuv(int[] yuvTextures, float[] texMatrix, int x, int y, int width, int height);
+
+ /**
+ * Release all GL resources. This needs to be done manually, otherwise resources may leak.
+ */
+ void release();
+ }
+
+ /**
+ * Helper class for uploading YUV ByteBuffer frames to textures, with support for strides larger
+ * than the width. This class keeps an internal ByteBuffer to avoid unnecessary allocations for
+ * intermediate copies.
+ */
+ public static class YuvUploader {
+ // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
+ // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
+ // that handles stride and compare performance with intermediate copy.
+ private ByteBuffer copyBuffer;
+
+ /**
+ * Upload |planes| into |outputYuvTextures|, taking stride into consideration.
+ * |outputYuvTextures| must have been generated in advance.
+ */
+ public void uploadYuvData(
+ int[] outputYuvTextures, int width, int height, int[] strides, ByteBuffer[] planes) {
+ final int[] planeWidths = new int[] {width, width / 2, width / 2};
+ final int[] planeHeights = new int[] {height, height / 2, height / 2};
+ // Make a first pass to see if we need a temporary copy buffer.
+ int copyCapacityNeeded = 0;
+ for (int i = 0; i < 3; ++i) {
+ if (strides[i] > planeWidths[i]) {
+ copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
+ }
+ }
+ // Allocate copy buffer if necessary.
+ if (copyCapacityNeeded > 0
+ && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
+ copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
+ }
+ // Upload each plane.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, outputYuvTextures[i]);
+ // GLES only accepts packed data, i.e. stride == planeWidth.
+ final ByteBuffer packedByteBuffer;
+ if (strides[i] == planeWidths[i]) {
+ // Input is packed already.
+ packedByteBuffer = planes[i];
+ } else {
+ VideoRenderer.nativeCopyPlane(
+ planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]);
+ packedByteBuffer = copyBuffer;
+ }
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
+ planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
+ }
+ }
+ }
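+
+ // Illustrative call (assumed inputs, not part of this CL): for an I420 frame with
+ // three pre-generated GL_LUMINANCE textures, per-plane strides from the source, and
+ // one ByteBuffer per plane:
+ //   new YuvUploader().uploadYuvData(yuvTextures, width, height, strides, planes);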
+
+ // Types of video scaling:
+ // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
+ // maintaining the aspect ratio (black borders may be displayed).
+ // SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
+ // maintaining the aspect ratio. Some portion of the video frame may be
+ // clipped.
+ // SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
+ // possible of the view while maintaining aspect ratio, under the constraint that at least
+ // |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
+ public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
+ // The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
+ // This limits excessive cropping when adjusting display size.
+ private static final float BALANCED_VISIBLE_FRACTION = 0.5625f;
+
+ public static final float[] identityMatrix() {
+ return new float[] {
+ 1, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 0, 0, 0, 1};
+ }
+
+ // Matrix with transform y' = 1 - y.
+ public static final float[] verticalFlipMatrix() {
+ return new float[] {
+ 1, 0, 0, 0,
+ 0, -1, 0, 0,
+ 0, 0, 1, 0,
+ 0, 1, 0, 1};
+ }
+
+ // Matrix with transform x' = 1 - x.
+ public static final float[] horizontalFlipMatrix() {
+ return new float[] {
+ -1, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 1, 0, 0, 1};
+ }
+
+ /**
+ * Returns texture matrix that will have the effect of rotating the frame |rotationDegree|
+ * clockwise when rendered.
+ */
+ public static float[] rotateTextureMatrix(float[] textureMatrix, float rotationDegree) {
+ final float[] rotationMatrix = new float[16];
+ Matrix.setRotateM(rotationMatrix, 0, rotationDegree, 0, 0, 1);
+ adjustOrigin(rotationMatrix);
+ return multiplyMatrices(textureMatrix, rotationMatrix);
+ }
+
+ /**
+ * Returns new matrix with the result of a * b.
+ */
+ public static float[] multiplyMatrices(float[] a, float[] b) {
+ final float[] resultMatrix = new float[16];
+ Matrix.multiplyMM(resultMatrix, 0, a, 0, b, 0);
+ return resultMatrix;
+ }
+
+ /**
+ * Returns layout transformation matrix that applies an optional mirror effect and compensates
+ * for video vs display aspect ratio.
+ */
+ public static float[] getLayoutMatrix(
+ boolean mirror, float videoAspectRatio, float displayAspectRatio) {
+ float scaleX = 1;
+ float scaleY = 1;
+ // Scale X or Y dimension so that video and display size have same aspect ratio.
+ if (displayAspectRatio > videoAspectRatio) {
+ scaleY = videoAspectRatio / displayAspectRatio;
+ } else {
+ scaleX = displayAspectRatio / videoAspectRatio;
+ }
+ // Apply optional horizontal flip.
+ if (mirror) {
+ scaleX *= -1;
+ }
+ final float[] matrix = new float[16];
+ Matrix.setIdentityM(matrix, 0);
+ Matrix.scaleM(matrix, 0, scaleX, scaleY, 1);
+ adjustOrigin(matrix);
+ return matrix;
+ }
+
+ /**
+ * Calculate display size based on scaling type, video aspect ratio, and maximum display size.
+ */
+ public static Point getDisplaySize(ScalingType scalingType, float videoAspectRatio,
+ int maxDisplayWidth, int maxDisplayHeight) {
+ return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
+ maxDisplayWidth, maxDisplayHeight);
+ }
+
+ /**
+ * Move |matrix| transformation origin to (0.5, 0.5). This is the origin for texture coordinates
+ * that are in the range 0 to 1.
+ */
+ private static void adjustOrigin(float[] matrix) {
+ // Note that OpenGL is using column-major order.
+ // Pre translate with -0.5 to move coordinates to range [-0.5, 0.5].
+ matrix[12] -= 0.5f * (matrix[0] + matrix[4]);
+ matrix[13] -= 0.5f * (matrix[1] + matrix[5]);
+ // Post translate with 0.5 to move coordinates to range [0, 1].
+ matrix[12] += 0.5f;
+ matrix[13] += 0.5f;
+ }
+
+ /**
+ * Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video
+ * that must remain visible.
+ */
+ private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
+ switch (scalingType) {
+ case SCALE_ASPECT_FIT:
+ return 1.0f;
+ case SCALE_ASPECT_FILL:
+ return 0.0f;
+ case SCALE_ASPECT_BALANCED:
+ return BALANCED_VISIBLE_FRACTION;
+ default:
+ throw new IllegalArgumentException();
+ }
+ }
+
+ /**
+ * Calculate display size based on minimum fraction of the video that must remain visible,
+ * video aspect ratio, and maximum display size.
+ */
+ private static Point getDisplaySize(float minVisibleFraction, float videoAspectRatio,
+ int maxDisplayWidth, int maxDisplayHeight) {
+ // If there is no constraint on the amount of cropping, fill the allowed display area.
+ if (minVisibleFraction == 0 || videoAspectRatio == 0) {
+ return new Point(maxDisplayWidth, maxDisplayHeight);
+ }
+ // Each dimension is constrained on max display size and how much we are allowed to crop.
+ final int width = Math.min(maxDisplayWidth,
+ Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
+ final int height = Math.min(maxDisplayHeight,
+ Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
+ return new Point(width, height);
+ }
+}
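+
+// A worked sketch (illustrative, not part of this CL): computing draw parameters for a
+// mirrored 1280x720 frame in a 500x500 view with balanced scaling; the view size is an
+// assumption. With videoAspectRatio = 16/9 and BALANCED_VISIBLE_FRACTION = 0.5625 = 9/16,
+// width = min(500, round(500 / 0.5625 * 16/9)) = 500 and
+// height = min(500, round(500 / 0.5625 / (16/9))) = 500, i.e. the frame fills this view.
+class RendererCommonExample {
+ static float[] layoutExample() {
+ final float videoAspectRatio = 1280 / (float) 720;
+ final Point displaySize = RendererCommon.getDisplaySize(
+ RendererCommon.ScalingType.SCALE_ASPECT_BALANCED, videoAspectRatio, 500, 500);
+ // Mirror horizontally and compensate for video vs display aspect ratio.
+ return RendererCommon.getLayoutMatrix(
+ true /* mirror */, videoAspectRatio, displaySize.x / (float) displaySize.y);
+ }
+}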
diff --git a/webrtc/api/java/android/org/webrtc/SurfaceTextureHelper.java b/webrtc/api/java/android/org/webrtc/SurfaceTextureHelper.java
new file mode 100644
index 0000000..b001d2a
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/SurfaceTextureHelper.java
@@ -0,0 +1,488 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.SystemClock;
+
+import java.nio.ByteBuffer;
+import java.nio.FloatBuffer;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
+ * of new frames in onTextureFrameAvailable(), and should call returnTextureFrame() when done with
+ * the frame. Only one texture frame can be in flight at once, so returnTextureFrame() must be
+ * called in order to receive a new frame. Call disconnect() to stop receiving new frames and
+ * release all resources.
+ * Note that there is a C++ counterpart of this class that can optionally be used. It is used for
+ * wrapping texture frames into webrtc::VideoFrames and also handles calling returnTextureFrame()
+ * when the webrtc::VideoFrame is no longer used.
+ */
+class SurfaceTextureHelper {
+ private static final String TAG = "SurfaceTextureHelper";
+ /**
+ * Callback interface for being notified that a new texture frame is available. The calls will be
+ * made on a dedicated thread with a bound EGLContext. The thread will be the same throughout the
+ * lifetime of the SurfaceTextureHelper instance, but different from the thread calling the
+ * SurfaceTextureHelper constructor. The callee is not allowed to make another EGLContext current
+ * on the calling thread.
+ */
+ public interface OnTextureFrameAvailableListener {
+ void onTextureFrameAvailable(
+ int oesTextureId, float[] transformMatrix, long timestampNs);
+ }
+
+ public static SurfaceTextureHelper create(EglBase.Context sharedContext) {
+ return create(sharedContext, null);
+ }
+
+ /**
+ * Construct a new SurfaceTextureHelper sharing OpenGL resources with |sharedContext|. If
+ * |handler| is non-null, the callback will be executed on that handler's thread. If |handler| is
+ * null, a dedicated private thread is created for the callbacks.
+ */
+ public static SurfaceTextureHelper create(final EglBase.Context sharedContext,
+ final Handler handler) {
+ final Handler finalHandler;
+ if (handler != null) {
+ finalHandler = handler;
+ } else {
+ final HandlerThread thread = new HandlerThread(TAG);
+ thread.start();
+ finalHandler = new Handler(thread.getLooper());
+ }
+ // The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See:
+ // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195.
+ // Therefore, in order to control the callback thread on API levels below 21, the
+ // SurfaceTextureHelper is constructed on the |handler| thread.
+ return ThreadUtils.invokeUninterruptibly(finalHandler, new Callable<SurfaceTextureHelper>() {
+ @Override public SurfaceTextureHelper call() {
+ return new SurfaceTextureHelper(sharedContext, finalHandler, (handler == null));
+ }
+ });
+ }
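+
+ // Illustrative call pattern (assumed caller code, not part of this CL):
+ //   SurfaceTextureHelper helper = SurfaceTextureHelper.create(eglContext, null);
+ //   // Register an OnTextureFrameAvailableListener; every onTextureFrameAvailable()
+ //   // delivery must be matched by a returnTextureFrame() before the next frame can
+ //   // arrive, and disconnect() releases all resources when done.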
+
+ // State for YUV conversion, instantiated on demand.
+ private static class YuvConverter {
+ private final EglBase eglBase;
+ private final GlShader shader;
+ private boolean released = false;
+
+ // Vertex coordinates in Normalized Device Coordinates, i.e.
+ // (-1, -1) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer DEVICE_RECTANGLE =
+ GlUtil.createFloatBuffer(new float[] {
+ -1.0f, -1.0f, // Bottom left.
+ 1.0f, -1.0f, // Bottom right.
+ -1.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer TEXTURE_RECTANGLE =
+ GlUtil.createFloatBuffer(new float[] {
+ 0.0f, 0.0f, // Bottom left.
+ 1.0f, 0.0f, // Bottom right.
+ 0.0f, 1.0f, // Top left.
+ 1.0f, 1.0f // Top right.
+ });
+
+ private static final String VERTEX_SHADER =
+ "varying vec2 interp_tc;\n"
+ + "attribute vec4 in_pos;\n"
+ + "attribute vec4 in_tc;\n"
+ + "\n"
+ + "uniform mat4 texMatrix;\n"
+ + "\n"
+ + "void main() {\n"
+ + " gl_Position = in_pos;\n"
+ + " interp_tc = (texMatrix * in_tc).xy;\n"
+ + "}\n";
+
+ private static final String FRAGMENT_SHADER =
+ "#extension GL_OES_EGL_image_external : require\n"
+ + "precision mediump float;\n"
+ + "varying vec2 interp_tc;\n"
+ + "\n"
+ + "uniform samplerExternalOES oesTex;\n"
+ // Difference in texture coordinate corresponding to one
+ // sub-pixel in the x direction.
+ + "uniform vec2 xUnit;\n"
+ // Color conversion coefficients, including constant term
+ + "uniform vec4 coeffs;\n"
+ + "\n"
+ + "void main() {\n"
+ // Since the alpha read from the texture is always 1, this could
+ // be written as a mat4 x vec4 multiply. However, that seems to
+ // give a worse framerate, possibly because the additional
+ // multiplies by 1.0 consume resources. TODO(nisse): Could also
+ // try to do it as a vec3 x mat3x4, followed by an add in of a
+ // constant vector.
+ + " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc - 1.5 * xUnit).rgb);\n"
+ + " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc - 0.5 * xUnit).rgb);\n"
+ + " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc + 0.5 * xUnit).rgb);\n"
+ + " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
+ + "}\n";
+
+ private int texMatrixLoc;
+ private int xUnitLoc;
+ private int coeffsLoc;
+
+ YuvConverter(EglBase.Context sharedContext) {
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER);
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+
+ shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
+ shader.useProgram();
+ texMatrixLoc = shader.getUniformLocation("texMatrix");
+ xUnitLoc = shader.getUniformLocation("xUnit");
+ coeffsLoc = shader.getUniformLocation("coeffs");
+ GLES20.glUniform1i(shader.getUniformLocation("oesTex"), 0);
+ GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
+ // Initialize vertex shader attributes.
+ shader.setVertexAttribArray("in_pos", 2, DEVICE_RECTANGLE);
+ // If the width is not a multiple of 4 pixels, the texture
+ // will be scaled up slightly and clipped at the right border.
+ shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
+ eglBase.detachCurrent();
+ }
+
+ synchronized void convert(ByteBuffer buf,
+ int width, int height, int stride, int textureId, float[] transformMatrix) {
+ if (released) {
+ throw new IllegalStateException(
+ "YuvConverter.convert called on released object");
+ }
+
+ // We draw into a buffer laid out like
+ //
+ // +---------+
+ // | |
+ // | Y |
+ // | |
+ // | |
+ // +----+----+
+ // | U | V |
+ // | | |
+ // +----+----+
+ //
+ // In memory, we use the same stride for all of Y, U and V. The
+ // U data starts at offset |height| * |stride| from the Y data,
+ // and the V data starts at offset |stride/2| from the U
+ // data, with rows of U and V data alternating.
+ //
+ // Now, it would have made sense to allocate a pixel buffer with
+ // a single byte per pixel (EGL10.EGL_COLOR_BUFFER_TYPE,
+ // EGL10.EGL_LUMINANCE_BUFFER), but that seems to be
+ // unsupported by devices. So do the following hack: allocate an
+ // RGBA buffer, of width |stride|/4. To render each of these
+ // large pixels, sample the texture at 4 different x coordinates
+ // and store the results in the four components.
+ //
+ // Since the V data needs to start on a boundary of such a
+ // larger pixel, it is not sufficient that |stride| is even, it
+ // has to be a multiple of 8 pixels.
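+ //
+ // Worked example (illustrative): width = 640, height = 480, stride = 640. Then the
+ // Y plane occupies height * stride = 307200 bytes, the U data starts at that offset,
+ // and the V data a further stride / 2 = 320 bytes in, i.e. halfway into each
+ // alternating chroma row.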
+
+ if (stride % 8 != 0) {
+ throw new IllegalArgumentException(
+ "Invalid stride, must be a multiple of 8");
+ }
+ if (stride < width) {
+ throw new IllegalArgumentException(
+ "Invalid stride, must be >= width");
+ }
+
+ int y_width = (width + 3) / 4;
+ int uv_width = (width + 7) / 8;
+ int uv_height = (height + 1) / 2;
+ int total_height = height + uv_height;
+ int size = stride * total_height;
+
+ if (buf.capacity() < size) {
+ throw new IllegalArgumentException("YuvConverter.convert called with too small buffer");
+ }
+ // Produce a frame buffer starting at top-left corner, not
+ // bottom-left.
+ transformMatrix =
+ RendererCommon.multiplyMatrices(transformMatrix,
+ RendererCommon.verticalFlipMatrix());
+
+ // Create a new pbuffer surface with the correct size if needed.
+ if (eglBase.hasSurface()) {
+ if (eglBase.surfaceWidth() != stride/4 ||
+ eglBase.surfaceHeight() != total_height) {
+ eglBase.releaseSurface();
+ eglBase.createPbufferSurface(stride/4, total_height);
+ }
+ } else {
+ eglBase.createPbufferSurface(stride/4, total_height);
+ }
+
+ eglBase.makeCurrent();
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
+ GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0);
+
+ // Draw Y
+ GLES20.glViewport(0, 0, y_width, height);
+ // Matrix * (1;0;0;0) / width. Note that OpenGL uses column-major order.
+ GLES20.glUniform2f(xUnitLoc,
+ transformMatrix[0] / width,
+ transformMatrix[1] / width);
+ // Y'UV444 to RGB888, see
+ // https://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion.
+ // We use the ITU-R coefficients for U and V.
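+ // Sanity check on the coefficients: a white input (R, G, B) = (1, 1, 1) gives
+ // Y = 0.299 + 0.587 + 0.114 = 1.0, i.e. full luma, as expected.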
+ GLES20.glUniform4f(coeffsLoc, 0.299f, 0.587f, 0.114f, 0.0f);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ // Draw U
+ GLES20.glViewport(0, height, uv_width, uv_height);
+ // Matrix * (1;0;0;0) / (2*width). Note that OpenGL uses column-major order.
+ GLES20.glUniform2f(xUnitLoc,
+ transformMatrix[0] / (2.0f * width),
+ transformMatrix[1] / (2.0f * width));
+ GLES20.glUniform4f(coeffsLoc, -0.169f, -0.331f, 0.499f, 0.5f);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ // Draw V
+ GLES20.glViewport(stride/8, height, uv_width, uv_height);
+ GLES20.glUniform4f(coeffsLoc, 0.499f, -0.418f, -0.0813f, 0.5f);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ GLES20.glReadPixels(0, 0, stride/4, total_height, GLES20.GL_RGBA,
+ GLES20.GL_UNSIGNED_BYTE, buf);
+
+ GlUtil.checkNoGLES2Error("YuvConverter.convert");
+
+ // Unbind texture. Reportedly needed on some devices to get
+ // the texture updated from the camera.
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ eglBase.detachCurrent();
+ }
+
+ synchronized void release() {
+ released = true;
+ eglBase.makeCurrent();
+ shader.release();
+ eglBase.release();
+ }
+ }
+
+ private final Handler handler;
+ private boolean isOwningThread;
+ private final EglBase eglBase;
+ private final SurfaceTexture surfaceTexture;
+ private final int oesTextureId;
+ private YuvConverter yuvConverter;
+
+ private OnTextureFrameAvailableListener listener;
+ // The possible states of this class.
+ private boolean hasPendingTexture = false;
+ private volatile boolean isTextureInUse = false;
+ private boolean isQuitting = false;
+
+ private SurfaceTextureHelper(EglBase.Context sharedContext,
+ Handler handler, boolean isOwningThread) {
+ if (handler.getLooper().getThread() != Thread.currentThread()) {
+ throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
+ }
+ this.handler = handler;
+ this.isOwningThread = isOwningThread;
+
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+
+ oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+ surfaceTexture = new SurfaceTexture(oesTextureId);
+ }
+
+ private YuvConverter getYuvConverter() {
+ // yuvConverter is assigned once
+ if (yuvConverter != null) {
+ return yuvConverter;
+ }
+
+ synchronized (this) {
+ if (yuvConverter == null) {
+ yuvConverter = new YuvConverter(eglBase.getEglBaseContext());
+ }
+ return yuvConverter;
+ }
+ }
+
+ /**
+ * Start streaming textures to the given |listener|.
+ * The listener can only be set once.
+ */
+ public void setListener(OnTextureFrameAvailableListener listener) {
+ if (this.listener != null) {
+ throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
+ }
+ this.listener = listener;
+ surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
+ @Override
+ public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+ hasPendingTexture = true;
+ tryDeliverTextureFrame();
+ }
+ });
+ }
+
+ /**
+ * Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
+ * producer such as a camera or decoder.
+ */
+ public SurfaceTexture getSurfaceTexture() {
+ return surfaceTexture;
+ }
+
+ /**
+ * Call this function to signal that you are done with the frame received in
+ * onTextureFrameAvailable(). Only one texture frame can be in flight at once, so you must call
+ * this function in order to receive a new frame.
+ */
+ public void returnTextureFrame() {
+ handler.post(new Runnable() {
+ @Override public void run() {
+ isTextureInUse = false;
+ if (isQuitting) {
+ release();
+ } else {
+ tryDeliverTextureFrame();
+ }
+ }
+ });
+ }
+
+ public boolean isTextureInUse() {
+ return isTextureInUse;
+ }
+
+ /**
+ * Call disconnect() to stop receiving frames. Resources are released when the texture frame has
+ * been returned by a call to returnTextureFrame(). You are guaranteed not to receive any more
+ * onTextureFrameAvailable() callbacks after this function returns.
+ */
+ public void disconnect() {
+ if (!isOwningThread) {
+ throw new IllegalStateException("Must call disconnect(handler).");
+ }
+ if (handler.getLooper().getThread() == Thread.currentThread()) {
+ isQuitting = true;
+ if (!isTextureInUse) {
+ release();
+ }
+ return;
+ }
+ final CountDownLatch barrier = new CountDownLatch(1);
+ handler.postAtFrontOfQueue(new Runnable() {
+ @Override public void run() {
+ isQuitting = true;
+ barrier.countDown();
+ if (!isTextureInUse) {
+ release();
+ }
+ }
+ });
+ ThreadUtils.awaitUninterruptibly(barrier);
+ }
+
+ /**
+ * Call disconnect() to stop receiving frames and quit the looper used by |handler|.
+ * Resources are released when the texture frame has been returned by a call to
+ * returnTextureFrame(). You are guaranteed not to receive any more
+ * onTextureFrameAvailable() callbacks after this function returns.
+ */
+ public void disconnect(Handler handler) {
+ if (this.handler != handler) {
+ throw new IllegalStateException("Wrong handler.");
+ }
+ isOwningThread = true;
+ disconnect();
+ }
+
+ public void textureToYUV(ByteBuffer buf,
+ int width, int height, int stride, int textureId, float[] transformMatrix) {
+ if (textureId != oesTextureId) {
+ throw new IllegalStateException("textureToYUV called with unexpected textureId");
+ }
+
+ getYuvConverter().convert(buf, width, height, stride, textureId, transformMatrix);
+ }
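+
+ // Illustrative call (a sketch): sizes follow YuvConverter.convert(), which packs
+ // Y, U and V with a common |stride| (a multiple of 8, >= width) into one buffer;
+ // glReadPixels() requires a direct buffer.
+ //
+ //   int totalHeight = height + (height + 1) / 2;
+ //   ByteBuffer buf = ByteBuffer.allocateDirect(stride * totalHeight);
+ //   helper.textureToYUV(buf, width, height, stride, oesTextureId, transformMatrix);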
+
+ private void tryDeliverTextureFrame() {
+ if (handler.getLooper().getThread() != Thread.currentThread()) {
+ throw new IllegalStateException("Wrong thread.");
+ }
+ if (isQuitting || !hasPendingTexture || isTextureInUse) {
+ return;
+ }
+ isTextureInUse = true;
+ hasPendingTexture = false;
+
+ eglBase.makeCurrent();
+ surfaceTexture.updateTexImage();
+
+ final float[] transformMatrix = new float[16];
+ surfaceTexture.getTransformMatrix(transformMatrix);
+ final long timestampNs = (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH)
+ ? surfaceTexture.getTimestamp()
+ : TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+ listener.onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
+ }
+
+ private void release() {
+ if (handler.getLooper().getThread() != Thread.currentThread()) {
+ throw new IllegalStateException("Wrong thread.");
+ }
+ if (isTextureInUse || !isQuitting) {
+ throw new IllegalStateException("Unexpected release.");
+ }
+ synchronized (this) {
+ if (yuvConverter != null)
+ yuvConverter.release();
+ }
+ eglBase.makeCurrent();
+ GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
+ surfaceTexture.release();
+ eglBase.release();
+ handler.getLooper().quit();
+ }
+}
diff --git a/webrtc/api/java/android/org/webrtc/SurfaceViewRenderer.java b/webrtc/api/java/android/org/webrtc/SurfaceViewRenderer.java
new file mode 100644
index 0000000..fa199b3
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/SurfaceViewRenderer.java
@@ -0,0 +1,580 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.content.res.Resources.NotFoundException;
+import android.graphics.Point;
+import android.opengl.GLES20;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.AttributeSet;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+import org.webrtc.Logging;
+
+import java.util.concurrent.CountDownLatch;
+
+import javax.microedition.khronos.egl.EGLContext;
+
+/**
+ * Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on a SurfaceView.
+ * renderFrame() is asynchronous to avoid blocking the calling thread.
+ * This class is thread safe and handles access from potentially four different threads:
+ * Interaction from the main app in init, release, setMirror, and setScalingType.
+ * Interaction from C++ webrtc::VideoRendererInterface in renderFrame and canApplyRotation.
+ * Interaction from the Activity lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
+ * Interaction with the layout framework in onMeasure and onSizeChanged.
+ */
+public class SurfaceViewRenderer extends SurfaceView
+ implements SurfaceHolder.Callback, VideoRenderer.Callbacks {
+ private static final String TAG = "SurfaceViewRenderer";
+
+ // Dedicated render thread.
+ private HandlerThread renderThread;
+ // |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized
+ // on |handlerLock|.
+ private final Object handlerLock = new Object();
+ private Handler renderThreadHandler;
+
+ // EGL and GL resources for drawing YUV/OES textures. After initialization, these are only accessed
+ // from the render thread.
+ private EglBase eglBase;
+ private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
+ private RendererCommon.GlDrawer drawer;
+ // Texture ids for YUV frames. Allocated on first arrival of a YUV frame.
+ private int[] yuvTextures = null;
+
+ // Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|.
+ private final Object frameLock = new Object();
+ private VideoRenderer.I420Frame pendingFrame;
+
+ // These variables are synchronized on |layoutLock|.
+ private final Object layoutLock = new Object();
+ // These dimension values are used to keep track of the state in these functions: onMeasure(),
+ // onLayout(), and surfaceChanged(). A new layout is triggered with requestLayout(). This happens
+ // internally when the incoming frame size changes. requestLayout() can also be triggered
+ // externally. The layout change is a two pass process: first onMeasure() is called in a top-down
+ // traversal of the View tree, followed by an onLayout() pass that is also top-down. During the
+ // onLayout() pass, each parent is responsible for positioning its children using the sizes
+ // computed in the measure pass.
+ // |desiredLayoutSize| is the layout size we have requested in onMeasure() and are waiting
+ // to take effect.
+ private Point desiredLayoutSize = new Point();
+ // |layoutSize|/|surfaceSize| is the actual current layout/surface size. They are updated in
+ // onLayout() and surfaceChanged() respectively.
+ private final Point layoutSize = new Point();
+ // TODO(magjed): Enable hardware scaler with SurfaceHolder.setFixedSize(). This will decouple
+ // layout and surface size.
+ private final Point surfaceSize = new Point();
+ // |isSurfaceCreated| keeps track of the current status in surfaceCreated()/surfaceDestroyed().
+ private boolean isSurfaceCreated;
+ // Last rendered frame dimensions, or 0 if no frame has been rendered yet.
+ private int frameWidth;
+ private int frameHeight;
+ private int frameRotation;
+ // |scalingType| determines how the video will fill the allowed layout area in onMeasure().
+ private RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_BALANCED;
+ // If true, mirrors the video stream horizontally.
+ private boolean mirror;
+ // Callback for reporting renderer events.
+ private RendererCommon.RendererEvents rendererEvents;
+
+ // These variables are synchronized on |statisticsLock|.
+ private final Object statisticsLock = new Object();
+ // Total number of video frames received in renderFrame() call.
+ private int framesReceived;
+ // Number of video frames dropped by renderFrame() because previous frame has not been rendered
+ // yet.
+ private int framesDropped;
+ // Number of rendered video frames.
+ private int framesRendered;
+ // Time in ns when the first video frame was rendered.
+ private long firstFrameTimeNs;
+ // Time in ns spent in renderFrameOnRenderThread() function.
+ private long renderTimeNs;
+
+ // Runnable for posting frames to render thread.
+ private final Runnable renderFrameRunnable = new Runnable() {
+ @Override public void run() {
+ renderFrameOnRenderThread();
+ }
+ };
+ // Runnable for clearing Surface to black.
+ private final Runnable makeBlackRunnable = new Runnable() {
+ @Override public void run() {
+ makeBlack();
+ }
+ };
+
+ /**
+ * Standard View constructor. In order to render something, you must first call init().
+ */
+ public SurfaceViewRenderer(Context context) {
+ super(context);
+ getHolder().addCallback(this);
+ }
+
+ /**
+ * Standard View constructor. In order to render something, you must first call init().
+ */
+ public SurfaceViewRenderer(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ getHolder().addCallback(this);
+ }
+
+ /**
+ * Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
+ * reinitialize the renderer after a previous init()/release() cycle.
+ */
+ public void init(
+ EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
+ init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
+ }
+
+ /**
+ * Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
+ * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+ * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
+ * init()/release() cycle.
+ */
+ public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents,
+ int[] configAttributes, RendererCommon.GlDrawer drawer) {
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ throw new IllegalStateException(getResourceName() + "Already initialized");
+ }
+ Logging.d(TAG, getResourceName() + "Initializing.");
+ this.rendererEvents = rendererEvents;
+ this.drawer = drawer;
+ renderThread = new HandlerThread(TAG);
+ renderThread.start();
+ eglBase = EglBase.create(sharedContext, configAttributes);
+ renderThreadHandler = new Handler(renderThread.getLooper());
+ }
+ tryCreateEglSurface();
+ }
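+
+ // Typical usage (a sketch; |eglContext| and |videoTrack| are hypothetical names, and
+ // wrapping the view in a VideoRenderer is assumed to match the Java API):
+ //
+ //   surfaceViewRenderer.init(eglContext, null);
+ //   videoTrack.addRenderer(new VideoRenderer(surfaceViewRenderer));
+ //   ...
+ //   surfaceViewRenderer.release();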
+
+ /**
+ * Create and make an EGLSurface current if both init() and surfaceCreated() have been called.
+ */
+ public void tryCreateEglSurface() {
+ // |renderThreadHandler| is only created after |eglBase| is created in init(), so the
+ // following code will only execute if eglBase != null.
+ runOnRenderThread(new Runnable() {
+ @Override public void run() {
+ synchronized (layoutLock) {
+ if (isSurfaceCreated && !eglBase.hasSurface()) {
+ eglBase.createSurface(getHolder().getSurface());
+ eglBase.makeCurrent();
+ // Necessary for YUV frames with odd width.
+ GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
+ }
+ }
+ }
+ });
+ }
+
+ /**
+ * Block until any pending frame is returned and all GL resources released, even if an interrupt
+ * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
+ * should be called before the Activity is destroyed, while the EGLContext is still valid. If you
+ * don't call this function, the GL resources might leak.
+ */
+ public void release() {
+ final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
+ synchronized (handlerLock) {
+ if (renderThreadHandler == null) {
+ Logging.d(TAG, getResourceName() + "Already released");
+ return;
+ }
+ // Release EGL and GL resources on render thread.
+ // TODO(magjed): This might not be necessary - all OpenGL resources are automatically deleted
+ // when the EGL context is lost. It might be dangerous to delete them manually in
+ // Activity.onDestroy().
+ renderThreadHandler.postAtFrontOfQueue(new Runnable() {
+ @Override public void run() {
+ drawer.release();
+ drawer = null;
+ if (yuvTextures != null) {
+ GLES20.glDeleteTextures(3, yuvTextures, 0);
+ yuvTextures = null;
+ }
+ // Clear last rendered image to black.
+ makeBlack();
+ eglBase.release();
+ eglBase = null;
+ eglCleanupBarrier.countDown();
+ }
+ });
+ // Don't accept any more frames or messages to the render thread.
+ renderThreadHandler = null;
+ }
+ // Make sure the EGL/GL cleanup posted above is executed.
+ ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
+ renderThread.quit();
+ synchronized (frameLock) {
+ if (pendingFrame != null) {
+ VideoRenderer.renderFrameDone(pendingFrame);
+ pendingFrame = null;
+ }
+ }
+ // The |renderThread| cleanup is not safe to cancel and we need to wait until it's done.
+ ThreadUtils.joinUninterruptibly(renderThread);
+ renderThread = null;
+ // Reset statistics and event reporting.
+ synchronized (layoutLock) {
+ frameWidth = 0;
+ frameHeight = 0;
+ frameRotation = 0;
+ rendererEvents = null;
+ }
+ resetStatistics();
+ }
+
+ /**
+ * Reset statistics. This will reset the logged statistics in logStatistics(), and
+ * RendererEvents.onFirstFrameRendered() will be called for the next frame.
+ */
+ public void resetStatistics() {
+ synchronized (statisticsLock) {
+ framesReceived = 0;
+ framesDropped = 0;
+ framesRendered = 0;
+ firstFrameTimeNs = 0;
+ renderTimeNs = 0;
+ }
+ }
+
+ /**
+ * Set if the video stream should be mirrored or not.
+ */
+ public void setMirror(final boolean mirror) {
+ synchronized (layoutLock) {
+ this.mirror = mirror;
+ }
+ }
+
+ /**
+ * Set how the video will fill the allowed layout area.
+ */
+ public void setScalingType(RendererCommon.ScalingType scalingType) {
+ synchronized (layoutLock) {
+ this.scalingType = scalingType;
+ }
+ }
+
+ // VideoRenderer.Callbacks interface.
+ @Override
+ public void renderFrame(VideoRenderer.I420Frame frame) {
+ synchronized (statisticsLock) {
+ ++framesReceived;
+ }
+ synchronized (handlerLock) {
+ if (renderThreadHandler == null) {
+ Logging.d(TAG, getResourceName()
+ + "Dropping frame - Not initialized or already released.");
+ VideoRenderer.renderFrameDone(frame);
+ return;
+ }
+ synchronized (frameLock) {
+ if (pendingFrame != null) {
+ // Drop old frame.
+ synchronized (statisticsLock) {
+ ++framesDropped;
+ }
+ VideoRenderer.renderFrameDone(pendingFrame);
+ }
+ pendingFrame = frame;
+ updateFrameDimensionsAndReportEvents(frame);
+ renderThreadHandler.post(renderFrameRunnable);
+ }
+ }
+ }
+
+ // Returns desired layout size given current measure specification and video aspect ratio.
+ private Point getDesiredLayoutSize(int widthSpec, int heightSpec) {
+ synchronized (layoutLock) {
+ final int maxWidth = getDefaultSize(Integer.MAX_VALUE, widthSpec);
+ final int maxHeight = getDefaultSize(Integer.MAX_VALUE, heightSpec);
+ final Point size =
+ RendererCommon.getDisplaySize(scalingType, frameAspectRatio(), maxWidth, maxHeight);
+ if (MeasureSpec.getMode(widthSpec) == MeasureSpec.EXACTLY) {
+ size.x = maxWidth;
+ }
+ if (MeasureSpec.getMode(heightSpec) == MeasureSpec.EXACTLY) {
+ size.y = maxHeight;
+ }
+ return size;
+ }
+ }
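+
+ // Worked example (illustrative): for a 16:9 frame with widthSpec (1080, EXACTLY) and
+ // heightSpec (1920, AT_MOST) under SCALE_ASPECT_FIT, the aspect-preserving size is
+ // roughly 1080x608; the EXACTLY branch above then pins the width to 1080 regardless.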
+
+ // View layout interface.
+ @Override
+ protected void onMeasure(int widthSpec, int heightSpec) {
+ synchronized (layoutLock) {
+ if (frameWidth == 0 || frameHeight == 0) {
+ super.onMeasure(widthSpec, heightSpec);
+ return;
+ }
+ desiredLayoutSize = getDesiredLayoutSize(widthSpec, heightSpec);
+ if (desiredLayoutSize.x != getMeasuredWidth() || desiredLayoutSize.y != getMeasuredHeight()) {
+ // Clear the surface ASAP before the layout change to avoid stretched video and other
+ // render artifacts. Don't wait for it to finish because the IO thread should never be
+ // blocked, so it's a best-effort attempt.
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ renderThreadHandler.postAtFrontOfQueue(makeBlackRunnable);
+ }
+ }
+ }
+ setMeasuredDimension(desiredLayoutSize.x, desiredLayoutSize.y);
+ }
+ }
+
+ @Override
+ protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
+ synchronized (layoutLock) {
+ layoutSize.x = right - left;
+ layoutSize.y = bottom - top;
+ }
+ // Might have a pending frame waiting for a layout of correct size.
+ runOnRenderThread(renderFrameRunnable);
+ }
+
+ // SurfaceHolder.Callback interface.
+ @Override
+ public void surfaceCreated(final SurfaceHolder holder) {
+ Logging.d(TAG, getResourceName() + "Surface created.");
+ synchronized (layoutLock) {
+ isSurfaceCreated = true;
+ }
+ tryCreateEglSurface();
+ }
+
+ @Override
+ public void surfaceDestroyed(SurfaceHolder holder) {
+ Logging.d(TAG, getResourceName() + "Surface destroyed.");
+ synchronized (layoutLock) {
+ isSurfaceCreated = false;
+ surfaceSize.x = 0;
+ surfaceSize.y = 0;
+ }
+ runOnRenderThread(new Runnable() {
+ @Override public void run() {
+ eglBase.releaseSurface();
+ }
+ });
+ }
+
+ @Override
+ public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+ Logging.d(TAG, getResourceName() + "Surface changed: " + width + "x" + height);
+ synchronized (layoutLock) {
+ surfaceSize.x = width;
+ surfaceSize.y = height;
+ }
+ // Might have a pending frame waiting for a surface of correct size.
+ runOnRenderThread(renderFrameRunnable);
+ }
+
+ /**
+ * Private helper function to post tasks safely.
+ */
+ private void runOnRenderThread(Runnable runnable) {
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ renderThreadHandler.post(runnable);
+ }
+ }
+ }
+
+ private String getResourceName() {
+ try {
+ return getResources().getResourceEntryName(getId()) + ": ";
+ } catch (NotFoundException e) {
+ return "";
+ }
+ }
+
+ private void makeBlack() {
+ if (Thread.currentThread() != renderThread) {
+ throw new IllegalStateException(getResourceName() + "Wrong thread.");
+ }
+ if (eglBase != null && eglBase.hasSurface()) {
+ GLES20.glClearColor(0, 0, 0, 0);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ eglBase.swapBuffers();
+ }
+ }
+
+ /**
+ * Requests new layout if necessary. Returns true if layout and surface size are consistent.
+ */
+ private boolean checkConsistentLayout() {
+ if (Thread.currentThread() != renderThread) {
+ throw new IllegalStateException(getResourceName() + "Wrong thread.");
+ }
+ synchronized (layoutLock) {
+ // Return false while we are in the middle of a layout change.
+ return layoutSize.equals(desiredLayoutSize) && surfaceSize.equals(layoutSize);
+ }
+ }
+
+ /**
+ * Renders and releases |pendingFrame|.
+ */
+ private void renderFrameOnRenderThread() {
+ if (Thread.currentThread() != renderThread) {
+ throw new IllegalStateException(getResourceName() + "Wrong thread.");
+ }
+ // Fetch and render |pendingFrame|.
+ final VideoRenderer.I420Frame frame;
+ synchronized (frameLock) {
+ if (pendingFrame == null) {
+ return;
+ }
+ frame = pendingFrame;
+ pendingFrame = null;
+ }
+ if (eglBase == null || !eglBase.hasSurface()) {
+ Logging.d(TAG, getResourceName() + "No surface to draw on");
+ VideoRenderer.renderFrameDone(frame);
+ return;
+ }
+ if (!checkConsistentLayout()) {
+ // Output intermediate black frames while the layout is updated.
+ makeBlack();
+ VideoRenderer.renderFrameDone(frame);
+ return;
+ }
+ // After a surface size change, the EGLSurface might still have a buffer of the old size in the
+ // pipeline. Querying the EGLSurface will show if the underlying buffer dimensions haven't yet
+ // changed. Such a buffer will be rendered incorrectly, so flush it with a black frame.
+ synchronized (layoutLock) {
+ if (eglBase.surfaceWidth() != surfaceSize.x || eglBase.surfaceHeight() != surfaceSize.y) {
+ makeBlack();
+ }
+ }
+
+ final long startTimeNs = System.nanoTime();
+ final float[] texMatrix;
+ synchronized (layoutLock) {
+ final float[] rotatedSamplingMatrix =
+ RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
+ final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
+ mirror, frameAspectRatio(), (float) layoutSize.x / layoutSize.y);
+ texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
+ }
+
+ // TODO(magjed): glClear() shouldn't be necessary since every pixel is covered anyway, but it's
+ // a workaround for bug 5147. Performance will be slightly worse.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ if (frame.yuvFrame) {
+ // Make sure YUV textures are allocated.
+ if (yuvTextures == null) {
+ yuvTextures = new int[3];
+ for (int i = 0; i < 3; i++) {
+ yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ }
+ }
+ yuvUploader.uploadYuvData(
+ yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
+ drawer.drawYuv(yuvTextures, texMatrix, 0, 0, surfaceSize.x, surfaceSize.y);
+ } else {
+ drawer.drawOes(frame.textureId, texMatrix, 0, 0, surfaceSize.x, surfaceSize.y);
+ }
+
+ eglBase.swapBuffers();
+ VideoRenderer.renderFrameDone(frame);
+ synchronized (statisticsLock) {
+ if (framesRendered == 0) {
+ firstFrameTimeNs = startTimeNs;
+ synchronized (layoutLock) {
+ Logging.d(TAG, getResourceName() + "Reporting first rendered frame.");
+ if (rendererEvents != null) {
+ rendererEvents.onFirstFrameRendered();
+ }
+ }
+ }
+ ++framesRendered;
+ renderTimeNs += (System.nanoTime() - startTimeNs);
+ if (framesRendered % 300 == 0) {
+ logStatistics();
+ }
+ }
+ }
+
+ // Return current frame aspect ratio, taking rotation into account.
+ private float frameAspectRatio() {
+ synchronized (layoutLock) {
+ if (frameWidth == 0 || frameHeight == 0) {
+ return 0.0f;
+ }
+ return (frameRotation % 180 == 0) ? (float) frameWidth / frameHeight
+ : (float) frameHeight / frameWidth;
+ }
+ }
+
+ // Update frame dimensions and report any changes to |rendererEvents|.
+ private void updateFrameDimensionsAndReportEvents(VideoRenderer.I420Frame frame) {
+ synchronized (layoutLock) {
+ if (frameWidth != frame.width || frameHeight != frame.height
+ || frameRotation != frame.rotationDegree) {
+ Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to "
+ + frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
+ if (rendererEvents != null) {
+ rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
+ }
+ frameWidth = frame.width;
+ frameHeight = frame.height;
+ frameRotation = frame.rotationDegree;
+ post(new Runnable() {
+ @Override public void run() {
+ requestLayout();
+ }
+ });
+ }
+ }
+ }
+
+ private void logStatistics() {
+ synchronized (statisticsLock) {
+ Logging.d(TAG, getResourceName() + "Frames received: "
+ + framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
+ if (framesReceived > 0 && framesRendered > 0) {
+ final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs;
+ Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
+ " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
+ Logging.d(TAG, getResourceName() + "Average render time: "
+ + (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
+ }
+ }
+ }
+}
diff --git a/webrtc/api/java/android/org/webrtc/ThreadUtils.java b/webrtc/api/java/android/org/webrtc/ThreadUtils.java
new file mode 100644
index 0000000..e60ead9
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/ThreadUtils.java
@@ -0,0 +1,192 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.os.Handler;
+import android.os.SystemClock;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+public class ThreadUtils {
+ /**
+ * Utility class to be used for checking that a method is called on the correct thread.
+ */
+ public static class ThreadChecker {
+ private Thread thread = Thread.currentThread();
+
+ public void checkIsOnValidThread() {
+ if (thread == null) {
+ thread = Thread.currentThread();
+ }
+ if (Thread.currentThread() != thread) {
+ throw new IllegalStateException("Wrong thread");
+ }
+ }
+
+ public void detachThread() {
+ thread = null;
+ }
+ }
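+
+ // Illustrative use of ThreadChecker (|Decoder| is a hypothetical class, shown only
+ // as a sketch): pin all calls of a component to a single thread.
+ //
+ //   class Decoder {
+ //     private final ThreadUtils.ThreadChecker checker = new ThreadUtils.ThreadChecker();
+ //     void decode() {
+ //       // Throws if called from a different thread than the one the checker is bound to.
+ //       checker.checkIsOnValidThread();
+ //     }
+ //   }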
+
+ /**
+ * Utility interface to be used with executeUninterruptibly() to wait for blocking operations
+ * to complete without getting interrupted.
+ */
+ public interface BlockingOperation {
+ void run() throws InterruptedException;
+ }
+
+ /**
+ * Utility method to make sure a blocking operation is executed to completion without getting
+ * interrupted. This should be used in cases where the operation is waiting for some critical
+ * work, e.g. cleanup, that must complete before returning. If the thread is interrupted during
+ * the blocking operation, this function will re-run the operation until completion, and only then
+ * re-interrupt the thread.
+ */
+ public static void executeUninterruptibly(BlockingOperation operation) {
+ boolean wasInterrupted = false;
+ while (true) {
+ try {
+ operation.run();
+ break;
+ } catch (InterruptedException e) {
+ // Someone is asking us to return early at our convenience. We can't cancel this operation,
+ // but we should preserve the information and pass it along.
+ wasInterrupted = true;
+ }
+ }
+ // Pass interruption information along.
+ if (wasInterrupted) {
+ Thread.currentThread().interrupt();
+ }
+ }
+
+ public static boolean joinUninterruptibly(final Thread thread, long timeoutMs) {
+ final long startTimeMs = SystemClock.elapsedRealtime();
+ long timeRemainingMs = timeoutMs;
+ boolean wasInterrupted = false;
+ while (timeRemainingMs > 0) {
+ try {
+ thread.join(timeRemainingMs);
+ break;
+ } catch (InterruptedException e) {
+ // Someone is asking us to return early at our convenience. We can't cancel this operation,
+ // but we should preserve the information and pass it along.
+ wasInterrupted = true;
+ final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+ timeRemainingMs = timeoutMs - elapsedTimeMs;
+ }
+ }
+ // Pass interruption information along.
+ if (wasInterrupted) {
+ Thread.currentThread().interrupt();
+ }
+ return !thread.isAlive();
+ }
+
+ public static void joinUninterruptibly(final Thread thread) {
+ executeUninterruptibly(new BlockingOperation() {
+ @Override
+ public void run() throws InterruptedException {
+ thread.join();
+ }
+ });
+ }
+
+ public static void awaitUninterruptibly(final CountDownLatch latch) {
+ executeUninterruptibly(new BlockingOperation() {
+ @Override
+ public void run() throws InterruptedException {
+ latch.await();
+ }
+ });
+ }
+
+ public static boolean awaitUninterruptibly(CountDownLatch barrier, long timeoutMs) {
+ final long startTimeMs = SystemClock.elapsedRealtime();
+ long timeRemainingMs = timeoutMs;
+ boolean wasInterrupted = false;
+ boolean result = false;
+ do {
+ try {
+ result = barrier.await(timeRemainingMs, TimeUnit.MILLISECONDS);
+ break;
+ } catch (InterruptedException e) {
+ // Someone is asking us to return early at our convenience. We can't cancel this operation,
+ // but we should preserve the information and pass it along.
+ wasInterrupted = true;
+ final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+ timeRemainingMs = timeoutMs - elapsedTimeMs;
+ }
+ } while (timeRemainingMs > 0);
+ // Pass interruption information along.
+ if (wasInterrupted) {
+ Thread.currentThread().interrupt();
+ }
+ return result;
+ }
+
+ /**
+ * Post |callable| to |handler| and wait for the result.
+ */
+ public static <V> V invokeUninterruptibly(final Handler handler, final Callable<V> callable) {
+ class Result {
+ public V value;
+ }
+ final Result result = new Result();
+ final CountDownLatch barrier = new CountDownLatch(1);
+ handler.post(new Runnable() {
+ @Override public void run() {
+ try {
+ result.value = callable.call();
+ } catch (Exception e) {
+ throw new RuntimeException("Callable threw exception: " + e);
+ }
+ barrier.countDown();
+ }
+ });
+ awaitUninterruptibly(barrier);
+ return result.value;
+ }
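+
+ // Example (a sketch): synchronously fetch a value owned by |handler|'s thread;
+ // |handler| and |sharedList| are hypothetical names.
+ //
+ //   final int size = ThreadUtils.invokeUninterruptibly(handler, new Callable<Integer>() {
+ //     @Override public Integer call() { return sharedList.size(); }
+ //   });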
+
+ /**
+ * Post |runner| to |handler| and wait for the result.
+ */
+ public static void invokeUninterruptibly(final Handler handler, final Runnable runner) {
+ final CountDownLatch barrier = new CountDownLatch(1);
+ handler.post(new Runnable() {
+ @Override public void run() {
+ runner.run();
+ barrier.countDown();
+ }
+ });
+ awaitUninterruptibly(barrier);
+ }
+}
diff --git a/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid.java b/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid.java
new file mode 100644
index 0000000..36f60ed
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid.java
@@ -0,0 +1,793 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.SystemClock;
+import android.view.Surface;
+import android.view.WindowManager;
+
+import org.json.JSONException;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import org.webrtc.Logging;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.IdentityHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+// Android specific implementation of VideoCapturer.
+// An instance of this class can be created by an application using
+// VideoCapturerAndroid.create();
+// This class extends VideoCapturer with a method to easily switch between the
+// front and back camera. It also provides methods for enumerating valid device
+// names.
+//
+// Threading notes: this class is called from C++ code, Android Camera callbacks, and possibly
+// arbitrary Java threads. All public entry points are thread safe, and delegate the work to the
+ // camera thread. The internal *OnCameraThread() methods must check |camera| for null to see if
+ // the camera has been stopped.
+@SuppressWarnings("deprecation")
+public class VideoCapturerAndroid extends VideoCapturer implements
+ android.hardware.Camera.PreviewCallback,
+ SurfaceTextureHelper.OnTextureFrameAvailableListener {
+ private static final String TAG = "VideoCapturerAndroid";
+ private static final int CAMERA_OBSERVER_PERIOD_MS = 2000;
+ private static final int CAMERA_FREEZE_REPORT_TIMEOUT_MS = 6000;
+
+ private android.hardware.Camera camera; // Only non-null while capturing.
+ private HandlerThread cameraThread;
+ private final Handler cameraThreadHandler;
+ private Context applicationContext;
+ // Synchronization lock for |id|.
+ private final Object cameraIdLock = new Object();
+ private int id;
+ private android.hardware.Camera.CameraInfo info;
+ private final CameraStatistics cameraStatistics;
+ // Remember the requested format in case we want to switch cameras.
+ private int requestedWidth;
+ private int requestedHeight;
+ private int requestedFramerate;
+ // The capture format will be the closest supported format to the requested format.
+ private CaptureFormat captureFormat;
+ private final Object pendingCameraSwitchLock = new Object();
+ private volatile boolean pendingCameraSwitch;
+ private CapturerObserver frameObserver = null;
+ private final CameraEventsHandler eventsHandler;
+ private boolean firstFrameReported;
+ // Arbitrary queue depth. Higher number means more memory allocated & held,
+ // lower number means more sensitivity to processing time in the client (and
+ // potentially stalling the capturer if it runs out of buffers to write to).
+ private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
+ private final Set<byte[]> queuedBuffers = new HashSet<byte[]>();
+ private final boolean isCapturingToTexture;
+ final SurfaceTextureHelper surfaceHelper; // Package visible for testing purposes.
+ // The camera API can output one old frame after the camera has been switched or the resolution
+ // has been changed. This flag is used for dropping the first frame after camera restart.
+ private boolean dropNextFrame = false;
+ // |openCameraOnCodecThreadRunner| is used to retry opening the camera if it is in use by
+ // another application when startCaptureOnCameraThread is called.
+ private Runnable openCameraOnCodecThreadRunner;
+ private static final int MAX_OPEN_CAMERA_ATTEMPTS = 3;
+ private static final int OPEN_CAMERA_DELAY_MS = 500;
+ private int openCameraAttempts;
+
+ // Camera error callback.
+ private final android.hardware.Camera.ErrorCallback cameraErrorCallback =
+ new android.hardware.Camera.ErrorCallback() {
+ @Override
+ public void onError(int error, android.hardware.Camera camera) {
+ String errorMessage;
+ if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
+ errorMessage = "Camera server died!";
+ } else {
+ errorMessage = "Camera error: " + error;
+ }
+ Logging.e(TAG, errorMessage);
+ if (eventsHandler != null) {
+ eventsHandler.onCameraError(errorMessage);
+ }
+ }
+ };
+
+ // Camera observer - monitors camera framerate. Observer is executed on camera thread.
+ private final Runnable cameraObserver = new Runnable() {
+ private int freezePeriodCount;
+ @Override
+ public void run() {
+ int cameraFramesCount = cameraStatistics.getAndResetFrameCount();
+ int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2)
+ / CAMERA_OBSERVER_PERIOD_MS;
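+ // The added CAMERA_OBSERVER_PERIOD_MS / 2 rounds to nearest instead of truncating:
+ // e.g. 29 frames in 2000 ms gives (29 * 1000 + 1000) / 2000 = 15 fps rather than 14.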
+
+ Logging.d(TAG, "Camera fps: " + cameraFps +".");
+ if (cameraFramesCount == 0) {
+ ++freezePeriodCount;
+ if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount > CAMERA_FREEZE_REPORT_TIMEOUT_MS
+ && eventsHandler != null) {
+ Logging.e(TAG, "Camera freezed.");
+ if (surfaceHelper.isTextureInUse()) {
+ // This can only happen if we are capturing to textures.
+ eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
+ } else {
+ eventsHandler.onCameraFreezed("Camera failure.");
+ }
+ return;
+ }
+ } else {
+ freezePeriodCount = 0;
+ }
+ cameraThreadHandler.postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
+ }
+ };
+
+ private static class CameraStatistics {
+ private int frameCount = 0;
+ private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+
+ CameraStatistics() {
+ threadChecker.detachThread();
+ }
+
+ public void addFrame() {
+ threadChecker.checkIsOnValidThread();
+ ++frameCount;
+ }
+
+ public int getAndResetFrameCount() {
+ threadChecker.checkIsOnValidThread();
+ int count = frameCount;
+ frameCount = 0;
+ return count;
+ }
+ }
+
+ public interface CameraEventsHandler {
+ // Camera error handler - invoked when the camera cannot be opened
+ // or any camera exception happens on the camera thread.
+ void onCameraError(String errorDescription);
+
+ // Invoked when camera stops receiving frames
+ void onCameraFreezed(String errorDescription);
+
+ // Callback invoked when camera is opening.
+ void onCameraOpening(int cameraId);
+
+ // Callback invoked when first camera frame is available after camera is opened.
+ void onFirstFrameAvailable();
+
+ // Callback invoked when camera closed.
+ void onCameraClosed();
+ }
+
+ // Camera switch handler - one of these functions is invoked with the result of switchCamera().
+ // The callback may be called on an arbitrary thread.
+ public interface CameraSwitchHandler {
+ // Invoked on success. |isFrontCamera| is true if the new camera is front facing.
+ void onCameraSwitchDone(boolean isFrontCamera);
+ // Invoked on failure, e.g. camera is stopped or only one camera available.
+ void onCameraSwitchError(String errorDescription);
+ }
+
+ public static VideoCapturerAndroid create(String name,
+ CameraEventsHandler eventsHandler) {
+ return VideoCapturerAndroid.create(name, eventsHandler, null);
+ }
+
+ public static VideoCapturerAndroid create(String name,
+ CameraEventsHandler eventsHandler, EglBase.Context sharedEglContext) {
+ final int cameraId = lookupDeviceName(name);
+ if (cameraId == -1) {
+ return null;
+ }
+
+ final VideoCapturerAndroid capturer = new VideoCapturerAndroid(cameraId, eventsHandler,
+ sharedEglContext);
+ capturer.setNativeCapturer(
+ nativeCreateVideoCapturer(capturer, capturer.surfaceHelper));
+ return capturer;
+ }
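+
+ // Illustrative usage (a sketch): create a texture-backed capturer and switch cameras.
+ // |eglContext| is a hypothetical name, and getNameOfFrontFacingDevice() is assumed to
+ // be available in CameraEnumerationAndroid.
+ //
+ //   VideoCapturerAndroid capturer = VideoCapturerAndroid.create(
+ //       CameraEnumerationAndroid.getNameOfFrontFacingDevice(), null, eglContext);
+ //   ...
+ //   capturer.switchCamera(null);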
+
+ public void printStackTrace() {
+ if (cameraThread != null) {
+ StackTraceElement[] cameraStackTraces = cameraThread.getStackTrace();
+ if (cameraStackTraces.length > 0) {
+ Logging.d(TAG, "VideoCapturerAndroid stacks trace:");
+ for (StackTraceElement stackTrace : cameraStackTraces) {
+ Logging.d(TAG, stackTrace.toString());
+ }
+ }
+ }
+ }
+
+ // Switch camera to the next valid camera id. This can only be called while
+ // the camera is running.
+ public void switchCamera(final CameraSwitchHandler handler) {
+ if (android.hardware.Camera.getNumberOfCameras() < 2) {
+ if (handler != null) {
+ handler.onCameraSwitchError("No camera to switch to.");
+ }
+ return;
+ }
+ synchronized (pendingCameraSwitchLock) {
+ if (pendingCameraSwitch) {
+ // Do not handle multiple camera switch requests, to avoid blocking the
+ // camera thread with a queue of switch requests.
+ Logging.w(TAG, "Ignoring camera switch request.");
+ if (handler != null) {
+ handler.onCameraSwitchError("Pending camera switch already in progress.");
+ }
+ return;
+ }
+ pendingCameraSwitch = true;
+ }
+ cameraThreadHandler.post(new Runnable() {
+ @Override public void run() {
+ if (camera == null) {
+ if (handler != null) {
+ handler.onCameraSwitchError("Camera is stopped.");
+ }
+ return;
+ }
+ switchCameraOnCameraThread();
+ synchronized (pendingCameraSwitchLock) {
+ pendingCameraSwitch = false;
+ }
+ if (handler != null) {
+ handler.onCameraSwitchDone(
+ info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
+ }
+ }
+ });
+ }
+
+ // Requests a new output format from the video capturer. Frames captured by the camera
+ // will be scaled and/or dropped by the video capturer.
+ // It does not matter if width and height are flipped, i.e. |width| = 640, |height| = 480
+ // produces the same result as |width| = 480, |height| = 640.
+ // TODO(magjed/perkj): Document what this function does. Change name?
+ public void onOutputFormatRequest(final int width, final int height, final int framerate) {
+ cameraThreadHandler.post(new Runnable() {
+ @Override public void run() {
+ onOutputFormatRequestOnCameraThread(width, height, framerate);
+ }
+ });
+ }
+
+ // Reconfigure the camera to capture in a new format. This should only be called while the camera
+ // is running.
+ public void changeCaptureFormat(final int width, final int height, final int framerate) {
+ cameraThreadHandler.post(new Runnable() {
+ @Override public void run() {
+ startPreviewOnCameraThread(width, height, framerate);
+ }
+ });
+ }
+
+ // Helper function to retrieve the current camera id synchronously. Note that the camera id might
+ // change at any point by switchCamera() calls.
+ int getCurrentCameraId() {
+ synchronized (cameraIdLock) {
+ return id;
+ }
+ }
+
+ public List<CaptureFormat> getSupportedFormats() {
+ return CameraEnumerationAndroid.getSupportedFormats(getCurrentCameraId());
+ }
+
+ // Returns true if this VideoCapturer is set up to capture video frames to a SurfaceTexture.
+ public boolean isCapturingToTexture() {
+ return isCapturingToTexture;
+ }
+
+ // Called from native code.
+ private String getSupportedFormatsAsJson() throws JSONException {
+ return CameraEnumerationAndroid.getSupportedFormatsAsJson(getCurrentCameraId());
+ }
+
+ // Called from native VideoCapturer_nativeCreateVideoCapturer.
+ private VideoCapturerAndroid(int cameraId) {
+ this(cameraId, null, null);
+ }
+
+ private VideoCapturerAndroid(int cameraId, CameraEventsHandler eventsHandler,
+ EglBase.Context sharedContext) {
+ this.id = cameraId;
+ this.eventsHandler = eventsHandler;
+ cameraThread = new HandlerThread(TAG);
+ cameraThread.start();
+ cameraThreadHandler = new Handler(cameraThread.getLooper());
+ isCapturingToTexture = (sharedContext != null);
+ cameraStatistics = new CameraStatistics();
+ surfaceHelper = SurfaceTextureHelper.create(sharedContext, cameraThreadHandler);
+ if (isCapturingToTexture) {
+ surfaceHelper.setListener(this);
+ }
+ Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture);
+ }
+
+ private void checkIsOnCameraThread() {
+ if (Thread.currentThread() != cameraThread) {
+ throw new IllegalStateException("Wrong thread");
+ }
+ }
+
+ // Returns the camera index for camera with name |deviceName|, or -1 if no such camera can be
+ // found. If |deviceName| is empty, the first available device is used.
+ private static int lookupDeviceName(String deviceName) {
+ Logging.d(TAG, "lookupDeviceName: " + deviceName);
+ if (deviceName == null || android.hardware.Camera.getNumberOfCameras() == 0) {
+ return -1;
+ }
+ if (deviceName.isEmpty()) {
+ return 0;
+ }
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+ if (deviceName.equals(CameraEnumerationAndroid.getDeviceName(i))) {
+ return i;
+ }
+ }
+ return -1;
+ }
+
+ // Called by native code to quit the camera thread. This needs to be done manually, otherwise the
+ // thread and handler will not be garbage collected.
+ private void release() {
+ Logging.d(TAG, "release");
+ if (isReleased()) {
+ throw new IllegalStateException("Already released");
+ }
+ ThreadUtils.invokeUninterruptibly(cameraThreadHandler, new Runnable() {
+ @Override
+ public void run() {
+ if (camera != null) {
+ throw new IllegalStateException("Release called while camera is running");
+ }
+ }
+ });
+ surfaceHelper.disconnect(cameraThreadHandler);
+ cameraThread = null;
+ }
+
+ // Used for testing purposes to check if release() has been called.
+ public boolean isReleased() {
+ return (cameraThread == null);
+ }
+
+ // Called by native code.
+ //
+ // Note that this actually opens the camera, and Camera callbacks run on the
+ // thread that calls open(), so this is done on the CameraThread.
+ void startCapture(
+ final int width, final int height, final int framerate,
+ final Context applicationContext, final CapturerObserver frameObserver) {
+ Logging.d(TAG, "startCapture requested: " + width + "x" + height
+ + "@" + framerate);
+ if (applicationContext == null) {
+ throw new RuntimeException("applicationContext not set.");
+ }
+ if (frameObserver == null) {
+ throw new RuntimeException("frameObserver not set.");
+ }
+
+ cameraThreadHandler.post(new Runnable() {
+ @Override public void run() {
+ startCaptureOnCameraThread(width, height, framerate, frameObserver,
+ applicationContext);
+ }
+ });
+ }
+
+ private void startCaptureOnCameraThread(
+ final int width, final int height, final int framerate, final CapturerObserver frameObserver,
+ final Context applicationContext) {
+ Throwable error = null;
+ checkIsOnCameraThread();
+ if (camera != null) {
+ throw new RuntimeException("Camera has already been started.");
+ }
+ this.applicationContext = applicationContext;
+ this.frameObserver = frameObserver;
+ this.firstFrameReported = false;
+
+ try {
+ try {
+ synchronized (cameraIdLock) {
+ Logging.d(TAG, "Opening camera " + id);
+ if (eventsHandler != null) {
+ eventsHandler.onCameraOpening(id);
+ }
+ camera = android.hardware.Camera.open(id);
+ info = new android.hardware.Camera.CameraInfo();
+ android.hardware.Camera.getCameraInfo(id, info);
+ }
+ } catch (RuntimeException e) {
+ openCameraAttempts++;
+ if (openCameraAttempts < MAX_OPEN_CAMERA_ATTEMPTS) {
+ Logging.e(TAG, "Camera.open failed, retrying", e);
+ openCameraOnCodecThreadRunner = new Runnable() {
+ @Override public void run() {
+ startCaptureOnCameraThread(width, height, framerate, frameObserver,
+ applicationContext);
+ }
+ };
+ cameraThreadHandler.postDelayed(openCameraOnCodecThreadRunner, OPEN_CAMERA_DELAY_MS);
+ return;
+ }
+ openCameraAttempts = 0;
+ throw e;
+ }
+
+ try {
+ camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
+ } catch (IOException e) {
+ Logging.e(TAG, "setPreviewTexture failed", error);
+ throw new RuntimeException(e);
+ }
+
+ Logging.d(TAG, "Camera orientation: " + info.orientation +
+ " .Device orientation: " + getDeviceOrientation());
+ camera.setErrorCallback(cameraErrorCallback);
+ startPreviewOnCameraThread(width, height, framerate);
+ frameObserver.onCapturerStarted(true);
+
+ // Start camera observer.
+ cameraThreadHandler.postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
+ return;
+ } catch (RuntimeException e) {
+ error = e;
+ }
+ Logging.e(TAG, "startCapture failed", error);
+ stopCaptureOnCameraThread();
+ frameObserver.onCapturerStarted(false);
+ if (eventsHandler != null) {
+      eventsHandler.onCameraError("Camera cannot be started.");
+ }
+ return;
+ }
+
+ // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
+ private void startPreviewOnCameraThread(int width, int height, int framerate) {
+ checkIsOnCameraThread();
+ Logging.d(
+ TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate);
+ if (camera == null) {
+ Logging.e(TAG, "Calling startPreviewOnCameraThread on stopped camera.");
+ return;
+ }
+
+ requestedWidth = width;
+ requestedHeight = height;
+ requestedFramerate = framerate;
+
+ // Find closest supported format for |width| x |height| @ |framerate|.
+ final android.hardware.Camera.Parameters parameters = camera.getParameters();
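+    // The Camera API reports preview fps ranges scaled by 1000, hence |framerate| * 1000.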
+ final int[] range = CameraEnumerationAndroid.getFramerateRange(parameters, framerate * 1000);
+ final android.hardware.Camera.Size previewSize =
+ CameraEnumerationAndroid.getClosestSupportedSize(
+ parameters.getSupportedPreviewSizes(), width, height);
+ final CaptureFormat captureFormat = new CaptureFormat(
+ previewSize.width, previewSize.height,
+ range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+ range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+
+    // If we are already using this capture format, we don't need to do anything.
+ if (captureFormat.isSameFormat(this.captureFormat)) {
+ return;
+ }
+
+ // Update camera parameters.
+ Logging.d(TAG, "isVideoStabilizationSupported: " +
+ parameters.isVideoStabilizationSupported());
+ if (parameters.isVideoStabilizationSupported()) {
+ parameters.setVideoStabilization(true);
+ }
+    // Note: setRecordingHint(true) actually decreases the frame rate on N5.
+ // parameters.setRecordingHint(true);
+ if (captureFormat.maxFramerate > 0) {
+ parameters.setPreviewFpsRange(captureFormat.minFramerate, captureFormat.maxFramerate);
+ }
+ parameters.setPreviewSize(captureFormat.width, captureFormat.height);
+
+ if (!isCapturingToTexture) {
+ parameters.setPreviewFormat(captureFormat.imageFormat);
+ }
+ // Picture size is for taking pictures and not for preview/video, but we need to set it anyway
+ // as a workaround for an aspect ratio problem on Nexus 7.
+ final android.hardware.Camera.Size pictureSize =
+ CameraEnumerationAndroid.getClosestSupportedSize(
+ parameters.getSupportedPictureSizes(), width, height);
+ parameters.setPictureSize(pictureSize.width, pictureSize.height);
+
+ // Temporarily stop preview if it's already running.
+ if (this.captureFormat != null) {
+ camera.stopPreview();
+ dropNextFrame = true;
+ // Calling |setPreviewCallbackWithBuffer| with null should clear the internal camera buffer
+ // queue, but sometimes we receive a frame with the old resolution after this call anyway.
+ camera.setPreviewCallbackWithBuffer(null);
+ }
+
+ // (Re)start preview.
+ Logging.d(TAG, "Start capturing: " + captureFormat);
+ this.captureFormat = captureFormat;
+
+ List<String> focusModes = parameters.getSupportedFocusModes();
+ if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
+ parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+ }
+
+ camera.setParameters(parameters);
+ if (!isCapturingToTexture) {
+ queuedBuffers.clear();
+ final int frameSize = captureFormat.frameSize();
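+      // Allocate direct buffers for the camera to fill and remember their backing
+      // arrays, so onPreviewFrame() can discard stale buffers after a format change.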
+ for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
+ queuedBuffers.add(buffer.array());
+ camera.addCallbackBuffer(buffer.array());
+ }
+ camera.setPreviewCallbackWithBuffer(this);
+ }
+ camera.startPreview();
+ }
+
+  // Called by native code. Blocks until the camera is known to be stopped.
+ void stopCapture() throws InterruptedException {
+ Logging.d(TAG, "stopCapture");
+ final CountDownLatch barrier = new CountDownLatch(1);
+ cameraThreadHandler.post(new Runnable() {
+ @Override public void run() {
+ stopCaptureOnCameraThread();
+ barrier.countDown();
+ }
+ });
+ barrier.await();
+ Logging.d(TAG, "stopCapture done");
+ }
+
+ private void stopCaptureOnCameraThread() {
+ checkIsOnCameraThread();
+ Logging.d(TAG, "stopCaptureOnCameraThread");
+ if (openCameraOnCodecThreadRunner != null) {
+ cameraThreadHandler.removeCallbacks(openCameraOnCodecThreadRunner);
+ }
+ openCameraAttempts = 0;
+ if (camera == null) {
+ Logging.e(TAG, "Calling stopCapture() for already stopped camera.");
+ return;
+ }
+
+ cameraThreadHandler.removeCallbacks(cameraObserver);
+ cameraStatistics.getAndResetFrameCount();
+ Logging.d(TAG, "Stop preview.");
+ camera.stopPreview();
+ camera.setPreviewCallbackWithBuffer(null);
+ queuedBuffers.clear();
+ captureFormat = null;
+
+ Logging.d(TAG, "Release camera.");
+ camera.release();
+ camera = null;
+ if (eventsHandler != null) {
+ eventsHandler.onCameraClosed();
+ }
+ }
+
+ private void switchCameraOnCameraThread() {
+ checkIsOnCameraThread();
+ Logging.d(TAG, "switchCameraOnCameraThread");
+ stopCaptureOnCameraThread();
+ synchronized (cameraIdLock) {
+ id = (id + 1) % android.hardware.Camera.getNumberOfCameras();
+ }
+ dropNextFrame = true;
+ startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
+ applicationContext);
+ Logging.d(TAG, "switchCameraOnCameraThread done");
+ }
+
+ private void onOutputFormatRequestOnCameraThread(int width, int height, int framerate) {
+ checkIsOnCameraThread();
+ if (camera == null) {
+ Logging.e(TAG, "Calling onOutputFormatRequest() on stopped camera.");
+ return;
+ }
+ Logging.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height +
+ "@" + framerate);
+ frameObserver.onOutputFormatRequest(width, height, framerate);
+ }
+
+ // Exposed for testing purposes only.
+ Handler getCameraThreadHandler() {
+ return cameraThreadHandler;
+ }
+
+ private int getDeviceOrientation() {
+ int orientation = 0;
+
+ WindowManager wm = (WindowManager) applicationContext.getSystemService(
+ Context.WINDOW_SERVICE);
+ switch(wm.getDefaultDisplay().getRotation()) {
+ case Surface.ROTATION_90:
+ orientation = 90;
+ break;
+ case Surface.ROTATION_180:
+ orientation = 180;
+ break;
+ case Surface.ROTATION_270:
+ orientation = 270;
+ break;
+ case Surface.ROTATION_0:
+ default:
+ orientation = 0;
+ break;
+ }
+ return orientation;
+ }
+
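+  // Computes the clockwise rotation (in degrees) of the captured frame relative
+  // to the device's current orientation. For example, a back-facing sensor
+  // mounted at 90 degrees on a device rotated to ROTATION_90 gives
+  // rotation = 360 - 90 = 270, so the result is (90 + 270) % 360 = 0.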
+ private int getFrameOrientation() {
+ int rotation = getDeviceOrientation();
+ if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
+ rotation = 360 - rotation;
+ }
+ return (info.orientation + rotation) % 360;
+ }
+
+  // Called on cameraThread, so this method must not be marked "synchronized".
+ @Override
+ public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
+ checkIsOnCameraThread();
+ if (camera == null || !queuedBuffers.contains(data)) {
+ // The camera has been stopped or |data| is an old invalid buffer.
+ return;
+ }
+ if (camera != callbackCamera) {
+ throw new RuntimeException("Unexpected camera in callback!");
+ }
+
+ final long captureTimeNs =
+ TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+
+ if (eventsHandler != null && !firstFrameReported) {
+ eventsHandler.onFirstFrameAvailable();
+ firstFrameReported = true;
+ }
+
+ cameraStatistics.addFrame();
+ frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
+ getFrameOrientation(), captureTimeNs);
+ camera.addCallbackBuffer(data);
+ }
+
+ @Override
+ public void onTextureFrameAvailable(
+ int oesTextureId, float[] transformMatrix, long timestampNs) {
+ checkIsOnCameraThread();
+ if (camera == null) {
+ // Camera is stopped, we need to return the buffer immediately.
+ surfaceHelper.returnTextureFrame();
+ return;
+ }
+ if (dropNextFrame) {
+ surfaceHelper.returnTextureFrame();
+ dropNextFrame = false;
+ return;
+ }
+
+ int rotation = getFrameOrientation();
+ if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
+ // Undo the mirror that the OS "helps" us with.
+ // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+ transformMatrix =
+ RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
+ }
+ cameraStatistics.addFrame();
+ frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
+ transformMatrix, rotation, timestampNs);
+ }
+
+ // Interface used for providing callbacks to an observer.
+ interface CapturerObserver {
+    // Notifies whether the camera has been started successfully or not.
+ // Called on a Java thread owned by VideoCapturerAndroid.
+ abstract void onCapturerStarted(boolean success);
+
+ // Delivers a captured frame. Called on a Java thread owned by
+ // VideoCapturerAndroid.
+ abstract void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
+ long timeStamp);
+
+ // Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
+ // owned by VideoCapturerAndroid.
+ abstract void onTextureFrameCaptured(
+ int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+ long timestamp);
+
+    // Requests an output format from the video capturer. Frames captured by
+    // the camera will be scaled and/or dropped by the video capturer.
+    // Called on a Java thread owned by VideoCapturerAndroid.
+ abstract void onOutputFormatRequest(int width, int height, int framerate);
+ }
+
+ // An implementation of CapturerObserver that forwards all calls from
+ // Java to the C layer.
+ static class NativeObserver implements CapturerObserver {
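+    // Opaque handle (a native pointer) to the C++ capturer, passed unchanged
+    // into the native methods below.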
+ private final long nativeCapturer;
+
+ public NativeObserver(long nativeCapturer) {
+ this.nativeCapturer = nativeCapturer;
+ }
+
+ @Override
+ public void onCapturerStarted(boolean success) {
+ nativeCapturerStarted(nativeCapturer, success);
+ }
+
+ @Override
+ public void onByteBufferFrameCaptured(byte[] data, int width, int height,
+ int rotation, long timeStamp) {
+ nativeOnByteBufferFrameCaptured(nativeCapturer, data, data.length, width, height, rotation,
+ timeStamp);
+ }
+
+ @Override
+ public void onTextureFrameCaptured(
+ int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+ long timestamp) {
+ nativeOnTextureFrameCaptured(nativeCapturer, width, height, oesTextureId, transformMatrix,
+ rotation, timestamp);
+ }
+
+ @Override
+ public void onOutputFormatRequest(int width, int height, int framerate) {
+ nativeOnOutputFormatRequest(nativeCapturer, width, height, framerate);
+ }
+
+ private native void nativeCapturerStarted(long nativeCapturer,
+ boolean success);
+ private native void nativeOnByteBufferFrameCaptured(long nativeCapturer,
+ byte[] data, int length, int width, int height, int rotation, long timeStamp);
+ private native void nativeOnTextureFrameCaptured(long nativeCapturer, int width, int height,
+ int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
+ private native void nativeOnOutputFormatRequest(long nativeCapturer,
+ int width, int height, int framerate);
+ }
+
+ private static native long nativeCreateVideoCapturer(
+ VideoCapturerAndroid videoCapturer,
+ SurfaceTextureHelper surfaceHelper);
+}
diff --git a/webrtc/api/java/android/org/webrtc/VideoRendererGui.java b/webrtc/api/java/android/org/webrtc/VideoRendererGui.java
new file mode 100644
index 0000000..bb6f01c
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/VideoRendererGui.java
@@ -0,0 +1,666 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.util.ArrayList;
+import java.util.concurrent.CountDownLatch;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.opengles.GL10;
+
+import android.annotation.SuppressLint;
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.opengl.EGL14;
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+
+import org.webrtc.Logging;
+import org.webrtc.VideoRenderer.I420Frame;
+
+/**
+ * Efficiently renders YUV frames using the GPU for color space conversion (CSC).
+ * Clients should first call setView() to pass in the GLSurfaceView, and then,
+ * for each video stream, create either an instance of VideoRenderer using the
+ * createGui() call, or a VideoRenderer.Callbacks interface using the create()
+ * call. Only one instance of the class can be created.
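+ *
+ * A minimal usage sketch (assuming a GLSurfaceView |view| and a VideoTrack
+ * |track| that already exist elsewhere):
+ * <pre>
+ *   VideoRendererGui.setView(view, null);
+ *   VideoRenderer renderer = VideoRendererGui.createGui(
+ *       0, 0, 100, 100, RendererCommon.ScalingType.SCALE_ASPECT_FIT, false);
+ *   track.addRenderer(renderer);
+ * </pre>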
+ */
+public class VideoRendererGui implements GLSurfaceView.Renderer {
+ // |instance|, |instance.surface|, |eglContext|, and |eglContextReady| are synchronized on
+ // |VideoRendererGui.class|.
+ private static VideoRendererGui instance = null;
+ private static Runnable eglContextReady = null;
+ private static final String TAG = "VideoRendererGui";
+ private GLSurfaceView surface;
+ private static EglBase.Context eglContext = null;
+  // Indicates if GLSurfaceView.Renderer.onSurfaceCreated was called.
+ // If true then for every newly created yuv image renderer createTexture()
+ // should be called. The variable is accessed on multiple threads and
+ // all accesses are synchronized on yuvImageRenderers' object lock.
+ private boolean onSurfaceCreatedCalled;
+ private int screenWidth;
+ private int screenHeight;
+ // List of yuv renderers.
+ private final ArrayList<YuvImageRenderer> yuvImageRenderers;
+ // Render and draw threads.
+ private static Thread renderFrameThread;
+ private static Thread drawThread;
+
+ private VideoRendererGui(GLSurfaceView surface) {
+ this.surface = surface;
+ // Create an OpenGL ES 2.0 context.
+ surface.setPreserveEGLContextOnPause(true);
+ surface.setEGLContextClientVersion(2);
+ surface.setRenderer(this);
+ surface.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
+
+ yuvImageRenderers = new ArrayList<YuvImageRenderer>();
+ }
+
+ /**
+   * Class used to display a stream of YUV420 frames at a particular location
+   * on the screen. New video frames are sent to the display using the
+   * renderFrame() call.
+ */
+ private static class YuvImageRenderer implements VideoRenderer.Callbacks {
+ // |surface| is synchronized on |this|.
+ private GLSurfaceView surface;
+ private int id;
+ // TODO(magjed): Delete GL resources in release(). Must be synchronized with draw(). We are
+ // currently leaking resources to avoid a rare crash in release() where the EGLContext has
+ // become invalid beforehand.
+ private int[] yuvTextures = { 0, 0, 0 };
+ private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
+ private final RendererCommon.GlDrawer drawer;
+ // Resources for making a deep copy of incoming OES texture frame.
+ private GlTextureFrameBuffer textureCopy;
+
+ // Pending frame to render. Serves as a queue with size 1. |pendingFrame| is accessed by two
+ // threads - frames are received in renderFrame() and consumed in draw(). Frames are dropped in
+ // renderFrame() if the previous frame has not been rendered yet.
+ private I420Frame pendingFrame;
+ private final Object pendingFrameLock = new Object();
+ // Type of video frame used for recent frame rendering.
+    private enum RendererType { RENDERER_YUV, RENDERER_TEXTURE }
+ private RendererType rendererType;
+ private RendererCommon.ScalingType scalingType;
+ private boolean mirror;
+ private RendererCommon.RendererEvents rendererEvents;
+ // Flag if renderFrame() was ever called.
+ boolean seenFrame;
+ // Total number of video frames received in renderFrame() call.
+ private int framesReceived;
+ // Number of video frames dropped by renderFrame() because previous
+ // frame has not been rendered yet.
+ private int framesDropped;
+ // Number of rendered video frames.
+ private int framesRendered;
+ // Time in ns when the first video frame was rendered.
+ private long startTimeNs = -1;
+ // Time in ns spent in draw() function.
+ private long drawTimeNs;
+ // Time in ns spent in draw() copying resources from |pendingFrame| - including uploading frame
+ // data to rendering planes.
+ private long copyTimeNs;
+ // The allowed view area in percentage of screen size.
+ private final Rect layoutInPercentage;
+ // The actual view area in pixels. It is a centered subrectangle of the rectangle defined by
+ // |layoutInPercentage|.
+ private final Rect displayLayout = new Rect();
+ // Cached layout transformation matrix, calculated from current layout parameters.
+ private float[] layoutMatrix;
+ // Flag if layout transformation matrix update is needed.
+ private boolean updateLayoutProperties;
+ // Layout properties update lock. Guards |updateLayoutProperties|, |screenWidth|,
+ // |screenHeight|, |videoWidth|, |videoHeight|, |rotationDegree|, |scalingType|, and |mirror|.
+ private final Object updateLayoutLock = new Object();
+ // Texture sampling matrix.
+ private float[] rotatedSamplingMatrix;
+ // Viewport dimensions.
+ private int screenWidth;
+ private int screenHeight;
+ // Video dimension.
+ private int videoWidth;
+ private int videoHeight;
+
+    // The number of degrees that the frame should be rotated clockwise to be
+    // rendered upright.
+ private int rotationDegree;
+
+ private YuvImageRenderer(
+ GLSurfaceView surface, int id,
+ int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
+ Logging.d(TAG, "YuvImageRenderer.Create id: " + id);
+ this.surface = surface;
+ this.id = id;
+ this.scalingType = scalingType;
+ this.mirror = mirror;
+ this.drawer = drawer;
+ layoutInPercentage = new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
+ updateLayoutProperties = false;
+ rotationDegree = 0;
+ }
+
+ public synchronized void reset() {
+ seenFrame = false;
+ }
+
+ private synchronized void release() {
+ surface = null;
+ drawer.release();
+ synchronized (pendingFrameLock) {
+ if (pendingFrame != null) {
+ VideoRenderer.renderFrameDone(pendingFrame);
+ pendingFrame = null;
+ }
+ }
+ }
+
+ private void createTextures() {
+ Logging.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:" +
+ Thread.currentThread().getId());
+
+ // Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
+ for (int i = 0; i < 3; i++) {
+ yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ }
+ // Generate texture and framebuffer for offscreen texture copy.
+ textureCopy = new GlTextureFrameBuffer(GLES20.GL_RGB);
+ }
+
+ private void updateLayoutMatrix() {
+      synchronized (updateLayoutLock) {
+ if (!updateLayoutProperties) {
+ return;
+ }
+        // Initialize to the maximum allowed area, rounding to integer coordinates
+        // inward of the layout bounding box (ceil left/top, floor right/bottom) so
+        // the constraints are not broken.
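+        // Adding 99 before the integer division by 100 gives ceiling division for
+        // left/top; the plain divisions for right/bottom round down (floor).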
+ displayLayout.set(
+ (screenWidth * layoutInPercentage.left + 99) / 100,
+ (screenHeight * layoutInPercentage.top + 99) / 100,
+ (screenWidth * layoutInPercentage.right) / 100,
+ (screenHeight * layoutInPercentage.bottom) / 100);
+ Logging.d(TAG, "ID: " + id + ". AdjustTextureCoords. Allowed display size: "
+ + displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth
+ + " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
+ final float videoAspectRatio = (rotationDegree % 180 == 0)
+ ? (float) videoWidth / videoHeight
+ : (float) videoHeight / videoWidth;
+ // Adjust display size based on |scalingType|.
+ final Point displaySize = RendererCommon.getDisplaySize(scalingType,
+ videoAspectRatio, displayLayout.width(), displayLayout.height());
+ displayLayout.inset((displayLayout.width() - displaySize.x) / 2,
+ (displayLayout.height() - displaySize.y) / 2);
+ Logging.d(TAG, " Adjusted display size: " + displayLayout.width() + " x "
+ + displayLayout.height());
+ layoutMatrix = RendererCommon.getLayoutMatrix(
+ mirror, videoAspectRatio, (float) displayLayout.width() / displayLayout.height());
+ updateLayoutProperties = false;
+ Logging.d(TAG, " AdjustTextureCoords done");
+ }
+ }
+
+ private void draw() {
+ if (!seenFrame) {
+ // No frame received yet - nothing to render.
+ return;
+ }
+ long now = System.nanoTime();
+
+ final boolean isNewFrame;
+ synchronized (pendingFrameLock) {
+ isNewFrame = (pendingFrame != null);
+ if (isNewFrame && startTimeNs == -1) {
+ startTimeNs = now;
+ }
+
+ if (isNewFrame) {
+ rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
+ pendingFrame.samplingMatrix, pendingFrame.rotationDegree);
+ if (pendingFrame.yuvFrame) {
+ rendererType = RendererType.RENDERER_YUV;
+ yuvUploader.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
+ pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
+ } else {
+ rendererType = RendererType.RENDERER_TEXTURE;
+ // External texture rendering. Make a deep copy of the external texture.
+ // Reallocate offscreen texture if necessary.
+ textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());
+
+ // Bind our offscreen framebuffer.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureCopy.getFrameBufferId());
+ GlUtil.checkNoGLES2Error("glBindFramebuffer");
+
+ // Copy the OES texture content. This will also normalize the sampling matrix.
+ drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix,
+ 0, 0, textureCopy.getWidth(), textureCopy.getHeight());
+ rotatedSamplingMatrix = RendererCommon.identityMatrix();
+
+ // Restore normal framebuffer.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ GLES20.glFinish();
+ }
+ copyTimeNs += (System.nanoTime() - now);
+ VideoRenderer.renderFrameDone(pendingFrame);
+ pendingFrame = null;
+ }
+ }
+
+ updateLayoutMatrix();
+ final float[] texMatrix =
+ RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
+ // OpenGL defaults to lower left origin - flip viewport position vertically.
+ final int viewportY = screenHeight - displayLayout.bottom;
+ if (rendererType == RendererType.RENDERER_YUV) {
+ drawer.drawYuv(yuvTextures, texMatrix,
+ displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
+ } else {
+ drawer.drawRgb(textureCopy.getTextureId(), texMatrix,
+ displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
+ }
+
+ if (isNewFrame) {
+ framesRendered++;
+ drawTimeNs += (System.nanoTime() - now);
+ if ((framesRendered % 300) == 0) {
+ logStatistics();
+ }
+ }
+ }
+
+ private void logStatistics() {
+ long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
+ Logging.d(TAG, "ID: " + id + ". Type: " + rendererType +
+ ". Frames received: " + framesReceived +
+ ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
+ if (framesReceived > 0 && framesRendered > 0) {
+ Logging.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
+ " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
+ Logging.d(TAG, "Draw time: " +
+ (int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
+ (int) (copyTimeNs / (1000 * framesReceived)) + " us");
+ }
+ }
+
+ public void setScreenSize(final int screenWidth, final int screenHeight) {
+      synchronized (updateLayoutLock) {
+ if (screenWidth == this.screenWidth && screenHeight == this.screenHeight) {
+ return;
+ }
+ Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " +
+ screenWidth + " x " + screenHeight);
+ this.screenWidth = screenWidth;
+ this.screenHeight = screenHeight;
+ updateLayoutProperties = true;
+ }
+ }
+
+ public void setPosition(int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror) {
+ final Rect layoutInPercentage =
+ new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
+      synchronized (updateLayoutLock) {
+ if (layoutInPercentage.equals(this.layoutInPercentage) && scalingType == this.scalingType
+ && mirror == this.mirror) {
+ return;
+ }
+ Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y +
+ ") " + width + " x " + height + ". Scaling: " + scalingType +
+ ". Mirror: " + mirror);
+ this.layoutInPercentage.set(layoutInPercentage);
+ this.scalingType = scalingType;
+ this.mirror = mirror;
+ updateLayoutProperties = true;
+ }
+ }
+
+ private void setSize(final int videoWidth, final int videoHeight, final int rotation) {
+ if (videoWidth == this.videoWidth && videoHeight == this.videoHeight
+ && rotation == rotationDegree) {
+ return;
+ }
+ if (rendererEvents != null) {
+ Logging.d(TAG, "ID: " + id +
+ ". Reporting frame resolution changed to " + videoWidth + " x " + videoHeight);
+ rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
+ }
+
+ synchronized (updateLayoutLock) {
+ Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
+ videoWidth + " x " + videoHeight + " rotation " + rotation);
+
+ this.videoWidth = videoWidth;
+ this.videoHeight = videoHeight;
+ rotationDegree = rotation;
+ updateLayoutProperties = true;
+ Logging.d(TAG, " YuvImageRenderer.setSize done.");
+ }
+ }
+
+ @Override
+ public synchronized void renderFrame(I420Frame frame) {
+ if (surface == null) {
+ // This object has been released.
+ VideoRenderer.renderFrameDone(frame);
+ return;
+ }
+ if (renderFrameThread == null) {
+ renderFrameThread = Thread.currentThread();
+ }
+ if (!seenFrame && rendererEvents != null) {
+ Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
+ rendererEvents.onFirstFrameRendered();
+ }
+ framesReceived++;
+ synchronized (pendingFrameLock) {
+ // Check input frame parameters.
+ if (frame.yuvFrame) {
+ if (frame.yuvStrides[0] < frame.width ||
+ frame.yuvStrides[1] < frame.width / 2 ||
+ frame.yuvStrides[2] < frame.width / 2) {
+ Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
+ frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
+ VideoRenderer.renderFrameDone(frame);
+ return;
+ }
+ }
+
+ if (pendingFrame != null) {
+ // Skip rendering of this frame if previous frame was not rendered yet.
+ framesDropped++;
+ VideoRenderer.renderFrameDone(frame);
+ seenFrame = true;
+ return;
+ }
+ pendingFrame = frame;
+ }
+ setSize(frame.width, frame.height, frame.rotationDegree);
+ seenFrame = true;
+
+ // Request rendering.
+ surface.requestRender();
+ }
+ }
+
+ /** Passes GLSurfaceView to video renderer. */
+ public static synchronized void setView(GLSurfaceView surface,
+ Runnable eglContextReadyCallback) {
+ Logging.d(TAG, "VideoRendererGui.setView");
+ instance = new VideoRendererGui(surface);
+ eglContextReady = eglContextReadyCallback;
+ }
+
+ public static synchronized EglBase.Context getEglBaseContext() {
+ return eglContext;
+ }
+
+ /** Releases GLSurfaceView video renderer. */
+ public static synchronized void dispose() {
+    if (instance == null) {
+ return;
+ }
+ Logging.d(TAG, "VideoRendererGui.dispose");
+ synchronized (instance.yuvImageRenderers) {
+ for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
+ yuvImageRenderer.release();
+ }
+ instance.yuvImageRenderers.clear();
+ }
+ renderFrameThread = null;
+ drawThread = null;
+ instance.surface = null;
+ eglContext = null;
+ eglContextReady = null;
+ instance = null;
+ }
+
+ /**
+ * Creates VideoRenderer with top left corner at (x, y) and resolution
+ * (width, height). All parameters are in percentage of screen resolution.
+ */
+ public static VideoRenderer createGui(int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror) throws Exception {
+ YuvImageRenderer javaGuiRenderer = create(
+ x, y, width, height, scalingType, mirror);
+ return new VideoRenderer(javaGuiRenderer);
+ }
+
+ public static VideoRenderer.Callbacks createGuiRenderer(
+ int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror) {
+ return create(x, y, width, height, scalingType, mirror);
+ }
+
+ /**
+ * Creates VideoRenderer.Callbacks with top left corner at (x, y) and
+ * resolution (width, height). All parameters are in percentage of
+ * screen resolution.
+ */
+ public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror) {
+ return create(x, y, width, height, scalingType, mirror, new GlRectDrawer());
+ }
+
+ /**
+ * Creates VideoRenderer.Callbacks with top left corner at (x, y) and resolution (width, height).
+ * All parameters are in percentage of screen resolution. The custom |drawer| will be used for
+ * drawing frames on the EGLSurface. This class is responsible for calling release() on |drawer|.
+ */
+ public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
+ // Check display region parameters.
+ if (x < 0 || x > 100 || y < 0 || y > 100 ||
+ width < 0 || width > 100 || height < 0 || height > 100 ||
+ x + width > 100 || y + height > 100) {
+ throw new RuntimeException("Incorrect window parameters.");
+ }
+
+ if (instance == null) {
+ throw new RuntimeException(
+ "Attempt to create yuv renderer before setting GLSurfaceView");
+ }
+ final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
+ instance.surface, instance.yuvImageRenderers.size(),
+ x, y, width, height, scalingType, mirror, drawer);
+ synchronized (instance.yuvImageRenderers) {
+ if (instance.onSurfaceCreatedCalled) {
+        // onSurfaceCreated has already been called for VideoRendererGui -
+        // we need to create textures for the new image and add it to the
+        // rendering list.
+ final CountDownLatch countDownLatch = new CountDownLatch(1);
+ instance.surface.queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ yuvImageRenderer.createTextures();
+ yuvImageRenderer.setScreenSize(
+ instance.screenWidth, instance.screenHeight);
+ countDownLatch.countDown();
+ }
+ });
+ // Wait for task completion.
+ try {
+ countDownLatch.await();
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ // Add yuv renderer to rendering list.
+ instance.yuvImageRenderers.add(yuvImageRenderer);
+ }
+ return yuvImageRenderer;
+ }
+
+ public static synchronized void update(
+ VideoRenderer.Callbacks renderer, int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror) {
+ Logging.d(TAG, "VideoRendererGui.update");
+ if (instance == null) {
+ throw new RuntimeException(
+ "Attempt to update yuv renderer before setting GLSurfaceView");
+ }
+ synchronized (instance.yuvImageRenderers) {
+ for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
+ if (yuvImageRenderer == renderer) {
+ yuvImageRenderer.setPosition(x, y, width, height, scalingType, mirror);
+ }
+ }
+ }
+ }
+
+ public static synchronized void setRendererEvents(
+ VideoRenderer.Callbacks renderer, RendererCommon.RendererEvents rendererEvents) {
+ Logging.d(TAG, "VideoRendererGui.setRendererEvents");
+ if (instance == null) {
+ throw new RuntimeException(
+ "Attempt to set renderer events before setting GLSurfaceView");
+ }
+ synchronized (instance.yuvImageRenderers) {
+ for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
+ if (yuvImageRenderer == renderer) {
+ yuvImageRenderer.rendererEvents = rendererEvents;
+ }
+ }
+ }
+ }
+
+ public static synchronized void remove(VideoRenderer.Callbacks renderer) {
+ Logging.d(TAG, "VideoRendererGui.remove");
+ if (instance == null) {
+ throw new RuntimeException(
+ "Attempt to remove renderer before setting GLSurfaceView");
+ }
+ synchronized (instance.yuvImageRenderers) {
+ final int index = instance.yuvImageRenderers.indexOf(renderer);
+ if (index == -1) {
+ Logging.w(TAG, "Couldn't remove renderer (not present in current list)");
+ } else {
+ instance.yuvImageRenderers.remove(index).release();
+ }
+ }
+ }
+
+ public static synchronized void reset(VideoRenderer.Callbacks renderer) {
+ Logging.d(TAG, "VideoRendererGui.reset");
+ if (instance == null) {
+ throw new RuntimeException(
+ "Attempt to reset renderer before setting GLSurfaceView");
+ }
+ synchronized (instance.yuvImageRenderers) {
+ for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
+ if (yuvImageRenderer == renderer) {
+ yuvImageRenderer.reset();
+ }
+ }
+ }
+ }
+
+ private static void printStackTrace(Thread thread, String threadName) {
+ if (thread != null) {
+ StackTraceElement[] stackTraces = thread.getStackTrace();
+ if (stackTraces.length > 0) {
+ Logging.d(TAG, threadName + " stacks trace:");
+ for (StackTraceElement stackTrace : stackTraces) {
+ Logging.d(TAG, stackTrace.toString());
+ }
+ }
+ }
+ }
+
+ public static synchronized void printStackTraces() {
+ if (instance == null) {
+ return;
+ }
+ printStackTrace(renderFrameThread, "Render frame thread");
+ printStackTrace(drawThread, "Draw thread");
+ }
+
+ @SuppressLint("NewApi")
+ @Override
+ public void onSurfaceCreated(GL10 unused, EGLConfig config) {
+ Logging.d(TAG, "VideoRendererGui.onSurfaceCreated");
+ // Store render EGL context.
+ synchronized (VideoRendererGui.class) {
+ if (EglBase14.isEGL14Supported()) {
+ eglContext = new EglBase14.Context(EGL14.eglGetCurrentContext());
+ } else {
+ eglContext = new EglBase10.Context(((EGL10) EGLContext.getEGL()).eglGetCurrentContext());
+ }
+
+ Logging.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
+ }
+
+ synchronized (yuvImageRenderers) {
+ // Create textures for all images.
+ for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
+ yuvImageRenderer.createTextures();
+ }
+ onSurfaceCreatedCalled = true;
+ }
+ GlUtil.checkNoGLES2Error("onSurfaceCreated done");
+ GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
+ GLES20.glClearColor(0.15f, 0.15f, 0.15f, 1.0f);
+
+ // Fire EGL context ready event.
+ synchronized (VideoRendererGui.class) {
+ if (eglContextReady != null) {
+ eglContextReady.run();
+ }
+ }
+ }
+
+ @Override
+ public void onSurfaceChanged(GL10 unused, int width, int height) {
+ Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
+ width + " x " + height + " ");
+ screenWidth = width;
+ screenHeight = height;
+ synchronized (yuvImageRenderers) {
+ for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
+ yuvImageRenderer.setScreenSize(screenWidth, screenHeight);
+ }
+ }
+ }
+
+ @Override
+ public void onDrawFrame(GL10 unused) {
+ if (drawThread == null) {
+ drawThread = Thread.currentThread();
+ }
+ GLES20.glViewport(0, 0, screenWidth, screenHeight);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ synchronized (yuvImageRenderers) {
+ for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
+ yuvImageRenderer.draw();
+ }
+ }
+ }
+
+}
diff --git a/webrtc/api/java/jni/OWNERS b/webrtc/api/java/jni/OWNERS
new file mode 100644
index 0000000..4d31ffb
--- /dev/null
+++ b/webrtc/api/java/jni/OWNERS
@@ -0,0 +1 @@
+magjed@webrtc.org
diff --git a/webrtc/api/java/jni/androidmediacodeccommon.h b/webrtc/api/java/jni/androidmediacodeccommon.h
new file mode 100644
index 0000000..7044fb4
--- /dev/null
+++ b/webrtc/api/java/jni/androidmediacodeccommon.h
@@ -0,0 +1,113 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
+
+#include <android/log.h>
+#include <string>
+
+#include "webrtc/base/thread.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+namespace webrtc_jni {
+
+// Uncomment this define to enable verbose logging for every encoded/decoded
+// video frame.
+//#define TRACK_BUFFER_TIMING
+
+#define TAG_COMMON "MediaCodecVideo"
+
+// Color formats supported by encoder - should mirror supportedColorList
+// from MediaCodecVideoEncoder.java
+enum COLOR_FORMATTYPE {
+ COLOR_FormatYUV420Planar = 0x13,
+ COLOR_FormatYUV420SemiPlanar = 0x15,
+ COLOR_QCOM_FormatYUV420SemiPlanar = 0x7FA30C00,
+ // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+ // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+ // This format is presumably similar to COLOR_FormatYUV420SemiPlanar,
+ // but requires some (16, 32?) byte alignment.
+ COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04
+};
+
+// Arbitrary interval to poll the codec for new outputs.
+enum { kMediaCodecPollMs = 10 };
+// Media codec maximum output buffer ready timeout.
+enum { kMediaCodecTimeoutMs = 1000 };
+// Interval to print codec statistics (bitrate, fps, encoding/decoding time).
+enum { kMediaCodecStatisticsIntervalMs = 3000 };
+// Maximum number of pending frames for the VP8 decoder.
+enum { kMaxPendingFramesVp8 = 1 };
+// Maximum number of pending frames for the VP9 decoder.
+enum { kMaxPendingFramesVp9 = 1 };
+// Maximum number of pending frames for the H.264 decoder.
+enum { kMaxPendingFramesH264 = 8 };
+// Maximum number of decoded frames for which per-frame logging is enabled.
+enum { kMaxDecodedLogFrames = 10 };
+// Maximum number of encoded frames for which per-frame logging is enabled.
+enum { kMaxEncodedLogFrames = 10 };
+
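+// Current time in milliseconds, converted from TickTime's nanosecond ticks.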
+static inline int64_t GetCurrentTimeMs() {
+ return webrtc::TickTime::Now().Ticks() / 1000000LL;
+}
+
+static inline void AllowBlockingCalls() {
+ rtc::Thread* current_thread = rtc::Thread::Current();
+ if (current_thread != NULL)
+ current_thread->SetAllowBlockingCalls(true);
+}
+
+// Return the (singleton) Java Enum object corresponding to |index|;
+// |state_class_fragment| is something like "MediaSource$State".
+static inline jobject JavaEnumFromIndexAndClassName(
+ JNIEnv* jni, const std::string& state_class_fragment, int index) {
+ const std::string state_class = "org/webrtc/" + state_class_fragment;
+ return JavaEnumFromIndex(jni, FindClass(jni, state_class.c_str()),
+ state_class, index);
+}
+
+// Checks for any pending Java exception; if one is found, prints its stack
+// backtrace and clears it.
+static inline bool CheckException(JNIEnv* jni) {
+ if (jni->ExceptionCheck()) {
+ LOG_TAG(rtc::LS_ERROR, TAG_COMMON) << "Java JNI exception.";
+ jni->ExceptionDescribe();
+ jni->ExceptionClear();
+ return true;
+ }
+ return false;
+}
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
diff --git a/webrtc/api/java/jni/androidmediadecoder_jni.cc b/webrtc/api/java/jni/androidmediadecoder_jni.cc
new file mode 100644
index 0000000..b9973be
--- /dev/null
+++ b/webrtc/api/java/jni/androidmediadecoder_jni.cc
@@ -0,0 +1,945 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <algorithm>
+#include <vector>
+
+// NOTICE: androidmediadecoder_jni.h must be included before
+// androidmediacodeccommon.h to avoid build errors.
+#include "webrtc/api/java/jni/androidmediadecoder_jni.h"
+
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+#include "third_party/libyuv/include/libyuv/video_common.h"
+#include "webrtc/api/java/jni/androidmediacodeccommon.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/api/java/jni/surfacetexturehelper_jni.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/system_wrappers/include/logcat_trace_context.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+using rtc::Bind;
+using rtc::Thread;
+using rtc::ThreadManager;
+using rtc::scoped_ptr;
+
+using webrtc::CodecSpecificInfo;
+using webrtc::DecodedImageCallback;
+using webrtc::EncodedImage;
+using webrtc::VideoFrame;
+using webrtc::RTPFragmentationHeader;
+using webrtc::TickTime;
+using webrtc::VideoCodec;
+using webrtc::VideoCodecType;
+using webrtc::kVideoCodecH264;
+using webrtc::kVideoCodecVP8;
+using webrtc::kVideoCodecVP9;
+
+namespace webrtc_jni {
+
+// Logging macros.
+#define TAG_DECODER "MediaCodecVideoDecoder"
+#ifdef TRACK_BUFFER_TIMING
+#define ALOGV(...) \
+ __android_log_print(ANDROID_LOG_VERBOSE, TAG_DECODER, __VA_ARGS__)
+#else
+#define ALOGV(...)
+#endif
+#define ALOGD LOG_TAG(rtc::LS_INFO, TAG_DECODER)
+#define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_DECODER)
+#define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_DECODER)
+
+enum { kMaxWarningLogFrames = 2 };
+
+class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
+ public rtc::MessageHandler {
+ public:
+ explicit MediaCodecVideoDecoder(
+ JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context);
+ virtual ~MediaCodecVideoDecoder();
+
+ int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores)
+ override;
+
+ int32_t Decode(
+ const EncodedImage& inputImage, bool missingFrames,
+ const RTPFragmentationHeader* fragmentation,
+ const CodecSpecificInfo* codecSpecificInfo = NULL,
+ int64_t renderTimeMs = -1) override;
+
+ int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback)
+ override;
+
+ int32_t Release() override;
+
+ bool PrefersLateDecoding() const override { return true; }
+
+ // rtc::MessageHandler implementation.
+ void OnMessage(rtc::Message* msg) override;
+
+ const char* ImplementationName() const override;
+
+ private:
+ // CHECK-fail if not running on |codec_thread_|.
+ void CheckOnCodecThread();
+
+ int32_t InitDecodeOnCodecThread();
+ int32_t ReleaseOnCodecThread();
+ int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
+ // Deliver any outputs pending in the MediaCodec to our |callback_| and return
+ // true on success.
+ bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);
+ int32_t ProcessHWErrorOnCodecThread();
+ void EnableFrameLogOnWarning();
+
+ // Type of video codec.
+ VideoCodecType codecType_;
+
+ // Render EGL context - owned by factory, should not be allocated/destroyed
+ // by VideoDecoder.
+ jobject render_egl_context_;
+
+ bool key_frame_required_;
+ bool inited_;
+ bool sw_fallback_required_;
+ bool use_surface_;
+ VideoCodec codec_;
+ webrtc::I420BufferPool decoded_frame_pool_;
+ rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
+ DecodedImageCallback* callback_;
+ int frames_received_; // Number of frames received by decoder.
+ int frames_decoded_; // Number of frames decoded by decoder.
+ // Number of decoded frames for which log information is displayed.
+ int frames_decoded_logged_;
+ int64_t start_time_ms_; // Start time for statistics.
+ int current_frames_; // Number of frames in the current statistics interval.
+ int current_bytes_; // Encoded bytes in the current statistics interval.
+  int current_decoding_time_ms_;  // Overall decoding time in the current second.
+ int current_delay_time_ms_; // Overall delay time in the current second.
+ uint32_t max_pending_frames_; // Maximum number of pending input frames.
+
+ // State that is constant for the lifetime of this object once the ctor
+ // returns.
+ scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec.
+ ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
+ ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
+ jmethodID j_init_decode_method_;
+ jmethodID j_release_method_;
+ jmethodID j_dequeue_input_buffer_method_;
+ jmethodID j_queue_input_buffer_method_;
+ jmethodID j_dequeue_byte_buffer_method_;
+ jmethodID j_dequeue_texture_buffer_method_;
+ jmethodID j_return_decoded_byte_buffer_method_;
+ // MediaCodecVideoDecoder fields.
+ jfieldID j_input_buffers_field_;
+ jfieldID j_output_buffers_field_;
+ jfieldID j_color_format_field_;
+ jfieldID j_width_field_;
+ jfieldID j_height_field_;
+ jfieldID j_stride_field_;
+ jfieldID j_slice_height_field_;
+ // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
+ jfieldID j_texture_id_field_;
+ jfieldID j_transform_matrix_field_;
+ jfieldID j_texture_presentation_timestamp_ms_field_;
+ jfieldID j_texture_timestamp_ms_field_;
+ jfieldID j_texture_ntp_timestamp_ms_field_;
+ jfieldID j_texture_decode_time_ms_field_;
+ jfieldID j_texture_frame_delay_ms_field_;
+ // MediaCodecVideoDecoder.DecodedOutputBuffer fields.
+ jfieldID j_info_index_field_;
+ jfieldID j_info_offset_field_;
+ jfieldID j_info_size_field_;
+ jfieldID j_presentation_timestamp_ms_field_;
+ jfieldID j_timestamp_ms_field_;
+ jfieldID j_ntp_timestamp_ms_field_;
+ jfieldID j_byte_buffer_decode_time_ms_field_;
+
+ // Global references; must be deleted in Release().
+ std::vector<jobject> input_buffers_;
+};
+
+MediaCodecVideoDecoder::MediaCodecVideoDecoder(
+ JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) :
+ codecType_(codecType),
+ render_egl_context_(render_egl_context),
+ key_frame_required_(true),
+ inited_(false),
+ sw_fallback_required_(false),
+ codec_thread_(new Thread()),
+ j_media_codec_video_decoder_class_(
+ jni,
+ FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
+ j_media_codec_video_decoder_(
+ jni,
+ jni->NewObject(*j_media_codec_video_decoder_class_,
+ GetMethodID(jni,
+ *j_media_codec_video_decoder_class_,
+ "<init>",
+ "()V"))) {
+ ScopedLocalRefFrame local_ref_frame(jni);
+ codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
+ RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";
+
+ j_init_decode_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "initDecode",
+ "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
+ "IILorg/webrtc/SurfaceTextureHelper;)Z");
+ j_release_method_ =
+ GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
+ j_dequeue_input_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
+ j_queue_input_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJJJ)Z");
+ j_dequeue_byte_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
+ "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;");
+ j_dequeue_texture_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer",
+ "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;");
+ j_return_decoded_byte_buffer_method_ =
+ GetMethodID(jni, *j_media_codec_video_decoder_class_,
+ "returnDecodedOutputBuffer", "(I)V");
+
+ j_input_buffers_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_,
+ "inputBuffers", "[Ljava/nio/ByteBuffer;");
+ j_output_buffers_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_,
+ "outputBuffers", "[Ljava/nio/ByteBuffer;");
+ j_color_format_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
+ j_width_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_, "width", "I");
+ j_height_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_, "height", "I");
+ j_stride_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_, "stride", "I");
+ j_slice_height_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
+
+ jclass j_decoded_texture_buffer_class = FindClass(jni,
+ "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
+ j_texture_id_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "textureID", "I");
+ j_transform_matrix_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "transformMatrix", "[F");
+ j_texture_presentation_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "presentationTimeStampMs", "J");
+ j_texture_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "timeStampMs", "J");
+ j_texture_ntp_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "ntpTimeStampMs", "J");
+ j_texture_decode_time_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J");
+ j_texture_frame_delay_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "frameDelayMs", "J");
+
+ jclass j_decoded_output_buffer_class = FindClass(jni,
+ "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
+ j_info_index_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "index", "I");
+ j_info_offset_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "offset", "I");
+ j_info_size_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "size", "I");
+ j_presentation_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "presentationTimeStampMs", "J");
+ j_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "timeStampMs", "J");
+ j_ntp_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "ntpTimeStampMs", "J");
+ j_byte_buffer_decode_time_ms_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "decodeTimeMs", "J");
+
+ CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
+ use_surface_ = (render_egl_context_ != NULL);
+ ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_;
+ memset(&codec_, 0, sizeof(codec_));
+ AllowBlockingCalls();
+}
+
+MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
+ // Call Release() to ensure no more callbacks to us after we are deleted.
+ Release();
+}
+
+int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
+ int32_t numberOfCores) {
+ ALOGD << "InitDecode.";
+ if (inst == NULL) {
+ ALOGE << "NULL VideoCodec instance";
+ return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+ }
+ // Factory should guard against other codecs being used with us.
+ RTC_CHECK(inst->codecType == codecType_)
+ << "Unsupported codec " << inst->codecType << " for " << codecType_;
+
+ if (sw_fallback_required_) {
+ ALOGE << "InitDecode() - fallback to SW decoder";
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+ // Save VideoCodec instance for later.
+ if (&codec_ != inst) {
+ codec_ = *inst;
+ }
+ // If maxFramerate is not set then assume 30 fps.
+ codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 30;
+
+ // Call Java init.
+ return codec_thread_->Invoke<int32_t>(
+ Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
+}
+
+int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
+ CheckOnCodecThread();
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ ALOGD << "InitDecodeOnCodecThread Type: " << (int)codecType_ << ". "
+ << codec_.width << " x " << codec_.height << ". Fps: " <<
+ (int)codec_.maxFramerate;
+
+ // Release previous codec first if it was allocated before.
+ int ret_val = ReleaseOnCodecThread();
+ if (ret_val < 0) {
+ ALOGE << "Release failure: " << ret_val << " - fallback to SW codec";
+ sw_fallback_required_ = true;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ // Always start with a complete key frame.
+ key_frame_required_ = true;
+ frames_received_ = 0;
+ frames_decoded_ = 0;
+ frames_decoded_logged_ = kMaxDecodedLogFrames;
+
+ jobject java_surface_texture_helper_ = nullptr;
+ if (use_surface_) {
+ java_surface_texture_helper_ = jni->CallStaticObjectMethod(
+ FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+ GetStaticMethodID(jni,
+ FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+ "create",
+ "(Lorg/webrtc/EglBase$Context;)"
+ "Lorg/webrtc/SurfaceTextureHelper;"),
+ render_egl_context_);
+ RTC_CHECK(java_surface_texture_helper_ != nullptr);
+ surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
+ jni, java_surface_texture_helper_);
+ }
+
+ jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
+ jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
+ bool success = jni->CallBooleanMethod(
+ *j_media_codec_video_decoder_,
+ j_init_decode_method_,
+ j_video_codec_enum,
+ codec_.width,
+ codec_.height,
+ java_surface_texture_helper_);
+ if (CheckException(jni) || !success) {
+ ALOGE << "Codec initialization error - fallback to SW codec.";
+ sw_fallback_required_ = true;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ inited_ = true;
+
+ switch (codecType_) {
+ case kVideoCodecVP8:
+ max_pending_frames_ = kMaxPendingFramesVp8;
+ break;
+ case kVideoCodecVP9:
+ max_pending_frames_ = kMaxPendingFramesVp9;
+ break;
+ case kVideoCodecH264:
+ max_pending_frames_ = kMaxPendingFramesH264;
+ break;
+ default:
+ max_pending_frames_ = 0;
+ }
+ start_time_ms_ = GetCurrentTimeMs();
+ current_frames_ = 0;
+ current_bytes_ = 0;
+ current_decoding_time_ms_ = 0;
+ current_delay_time_ms_ = 0;
+
+ jobjectArray input_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
+ jni, *j_media_codec_video_decoder_, j_input_buffers_field_));
+ size_t num_input_buffers = jni->GetArrayLength(input_buffers);
+ ALOGD << "Maximum amount of pending frames: " << max_pending_frames_;
+ input_buffers_.resize(num_input_buffers);
+ for (size_t i = 0; i < num_input_buffers; ++i) {
+ input_buffers_[i] =
+ jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
+ if (CheckException(jni)) {
+ ALOGE << "NewGlobalRef error - fallback to SW codec.";
+ sw_fallback_required_ = true;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ }
+
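+ // Start polling for decoder output; OnMessage() re-posts this message every
+ // kMediaCodecPollMs for as long as the decoder stays initialized.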
+ codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoDecoder::Release() {
+ ALOGD << "DecoderRelease request";
+ return codec_thread_->Invoke<int32_t>(
+ Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
+}
+
+int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
+ if (!inited_) {
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+ CheckOnCodecThread();
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ALOGD << "DecoderReleaseOnCodecThread: Frames received: " <<
+ frames_received_ << ". Frames decoded: " << frames_decoded_;
+ ScopedLocalRefFrame local_ref_frame(jni);
+ for (size_t i = 0; i < input_buffers_.size(); i++) {
+ jni->DeleteGlobalRef(input_buffers_[i]);
+ }
+ input_buffers_.clear();
+ jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
+ surface_texture_helper_ = nullptr;
+ inited_ = false;
+ rtc::MessageQueueManager::Clear(this);
+ if (CheckException(jni)) {
+ ALOGE << "Decoder release exception";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ ALOGD << "DecoderReleaseOnCodecThread done";
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+void MediaCodecVideoDecoder::CheckOnCodecThread() {
+ RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
+ << "Running on wrong thread!";
+}
+
+void MediaCodecVideoDecoder::EnableFrameLogOnWarning() {
+ // Log next 2 output frames.
+ frames_decoded_logged_ = std::max(
+ frames_decoded_logged_, frames_decoded_ + kMaxWarningLogFrames);
+}
+
+int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() {
+ CheckOnCodecThread();
+ int ret_val = ReleaseOnCodecThread();
+ if (ret_val < 0) {
+ ALOGE << "ProcessHWError: Release failure";
+ }
+ if (codecType_ == kVideoCodecH264) {
+ // There is currently no SW H.264 decoder to fall back to, so try to
+ // restart the HW codec instead.
+ ret_val = InitDecodeOnCodecThread();
+ ALOGE << "Reset H.264 codec done. Status: " << ret_val;
+ if (ret_val == WEBRTC_VIDEO_CODEC_OK) {
+ // H.264 codec was successfully reset - return regular error code.
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ } else {
+ // Failed to restart the H.264 codec - return an error code that should
+ // stop the call.
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+ } else {
+ sw_fallback_required_ = true;
+ ALOGE << "Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE";
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+}
+
+int32_t MediaCodecVideoDecoder::Decode(
+ const EncodedImage& inputImage,
+ bool missingFrames,
+ const RTPFragmentationHeader* fragmentation,
+ const CodecSpecificInfo* codecSpecificInfo,
+ int64_t renderTimeMs) {
+ if (sw_fallback_required_) {
+ ALOGE << "Decode() - fallback to SW codec";
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+ if (callback_ == NULL) {
+ ALOGE << "Decode() - callback_ is NULL";
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+ if (inputImage._buffer == NULL && inputImage._length > 0) {
+ ALOGE << "Decode() - inputImage is incorrect";
+ return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+ }
+ if (!inited_) {
+ ALOGE << "Decode() - decoder is not initialized";
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+
+ // Check if encoded frame dimension has changed.
+ if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
+ (inputImage._encodedWidth != codec_.width ||
+ inputImage._encodedHeight != codec_.height)) {
+ codec_.width = inputImage._encodedWidth;
+ codec_.height = inputImage._encodedHeight;
+ int32_t ret = InitDecode(&codec_, 1);
+ if (ret < 0) {
+ ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec";
+ sw_fallback_required_ = true;
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+ }
+
+ // Always start with a complete key frame.
+ if (key_frame_required_) {
+ if (inputImage._frameType != webrtc::kVideoFrameKey) {
+ ALOGE << "Decode() - key frame is required";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ if (!inputImage._completeFrame) {
+ ALOGE << "Decode() - complete frame is required";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ key_frame_required_ = false;
+ }
+ if (inputImage._length == 0) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ return codec_thread_->Invoke<int32_t>(Bind(
+ &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
+}
+
+int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
+ const EncodedImage& inputImage) {
+ CheckOnCodecThread();
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ // Try to drain the decoder and wait until the output is not too far
+ // behind the input.
+ if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+ ALOGW << "Decoder is too far behind. Try to drain. Received: " <<
+ frames_received_ << ". Decoded: " << frames_decoded_;
+ EnableFrameLogOnWarning();
+ }
+ const int64_t drain_start = GetCurrentTimeMs();
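+ // Bound the total drain time by kMediaCodecTimeoutMs; each loop iteration
+ // waits at most kMediaCodecPollMs for an output buffer.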
+ while ((frames_received_ > frames_decoded_ + max_pending_frames_) &&
+ (GetCurrentTimeMs() - drain_start) < kMediaCodecTimeoutMs) {
+ if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) {
+ ALOGE << "DeliverPendingOutputs error. Frames received: " <<
+ frames_received_ << ". Frames decoded: " << frames_decoded_;
+ return ProcessHWErrorOnCodecThread();
+ }
+ }
+ if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+ ALOGE << "Output buffer dequeue timeout. Frames received: " <<
+ frames_received_ << ". Frames decoded: " << frames_decoded_;
+ return ProcessHWErrorOnCodecThread();
+ }
+
+ // Get input buffer.
+ int j_input_buffer_index = jni->CallIntMethod(
+ *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_);
+ if (CheckException(jni) || j_input_buffer_index < 0) {
+ ALOGE << "dequeueInputBuffer error: " << j_input_buffer_index <<
+ ". Retry DeliverPendingOutputs.";
+ EnableFrameLogOnWarning();
+ // Try to drain the decoder.
+ if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) {
+ ALOGE << "DeliverPendingOutputs error. Frames received: " <<
+ frames_received_ << ". Frames decoded: " << frames_decoded_;
+ return ProcessHWErrorOnCodecThread();
+ }
+ // Try dequeue input buffer one last time.
+ j_input_buffer_index = jni->CallIntMethod(
+ *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_);
+ if (CheckException(jni) || j_input_buffer_index < 0) {
+ ALOGE << "dequeueInputBuffer critical error: " << j_input_buffer_index;
+ return ProcessHWErrorOnCodecThread();
+ }
+ }
+
+ // Copy encoded data to Java ByteBuffer.
+ jobject j_input_buffer = input_buffers_[j_input_buffer_index];
+ uint8_t* buffer =
+ reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
+ RTC_CHECK(buffer) << "Indirect buffer??";
+ int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
+ if (CheckException(jni) || buffer_capacity < inputImage._length) {
+ ALOGE << "Input frame size "<< inputImage._length <<
+ " is bigger than buffer size " << buffer_capacity;
+ return ProcessHWErrorOnCodecThread();
+ }
+ jlong presentation_timestamp_us = static_cast<jlong>(
+ static_cast<int64_t>(frames_received_) * 1000000 / codec_.maxFramerate);
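+ // Note that the presentation timestamp is synthesized from the frame count
+ // and the configured framerate, e.g. at 30 fps frame #30 is stamped
+ // 1,000,000 us (1 second).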
+ memcpy(buffer, inputImage._buffer, inputImage._length);
+
+ if (frames_decoded_ < frames_decoded_logged_) {
+ ALOGD << "Decoder frame in # " << frames_received_ <<
+ ". Type: " << inputImage._frameType <<
+ ". Buffer # " << j_input_buffer_index <<
+ ". TS: " << presentation_timestamp_us / 1000 <<
+ ". Size: " << inputImage._length;
+ }
+
+ // Save input image timestamps for later output.
+ frames_received_++;
+ current_bytes_ += inputImage._length;
+
+ // Feed input to decoder.
+ bool success = jni->CallBooleanMethod(
+ *j_media_codec_video_decoder_,
+ j_queue_input_buffer_method_,
+ j_input_buffer_index,
+ inputImage._length,
+ presentation_timestamp_us,
+ static_cast<int64_t> (inputImage._timeStamp),
+ inputImage.ntp_time_ms_);
+ if (CheckException(jni) || !success) {
+ ALOGE << "queueInputBuffer error";
+ return ProcessHWErrorOnCodecThread();
+ }
+
+ // Try to drain the decoder.
+ if (!DeliverPendingOutputs(jni, 0)) {
+ ALOGE << "DeliverPendingOutputs error";
+ return ProcessHWErrorOnCodecThread();
+ }
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoDecoder::DeliverPendingOutputs(
+ JNIEnv* jni, int dequeue_timeout_ms) {
+ if (frames_received_ <= frames_decoded_) {
+ // No need to query for output buffers - decoder is drained.
+ return true;
+ }
+ // Get decoder output.
+ jobject j_decoder_output_buffer =
+ jni->CallObjectMethod(*j_media_codec_video_decoder_,
+ use_surface_ ? j_dequeue_texture_buffer_method_
+ : j_dequeue_byte_buffer_method_,
+ dequeue_timeout_ms);
+
+ if (CheckException(jni)) {
+ ALOGE << "dequeueOutputBuffer() error";
+ return false;
+ }
+ if (IsNull(jni, j_decoder_output_buffer)) {
+ // No decoded frame ready.
+ return true;
+ }
+
+ // Get decoded video frame properties.
+ int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
+ j_color_format_field_);
+ int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
+ int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
+ int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
+ int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
+ j_slice_height_field_);
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
+ int64_t presentation_timestamps_ms = 0;
+ int64_t output_timestamps_ms = 0;
+ int64_t output_ntp_timestamps_ms = 0;
+ int decode_time_ms = 0;
+ int64_t frame_delayed_ms = 0;
+ if (use_surface_) {
+ // Extract data from Java DecodedTextureBuffer.
+ presentation_timestamps_ms = GetLongField(
+ jni, j_decoder_output_buffer,
+ j_texture_presentation_timestamp_ms_field_);
+ output_timestamps_ms = GetLongField(
+ jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_);
+ output_ntp_timestamps_ms = GetLongField(
+ jni, j_decoder_output_buffer, j_texture_ntp_timestamp_ms_field_);
+ decode_time_ms = GetLongField(
+ jni, j_decoder_output_buffer, j_texture_decode_time_ms_field_);
+
+ const int texture_id =
+ GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_);
+ if (texture_id != 0) { // |texture_id| == 0 represents a dropped frame.
+ const jfloatArray j_transform_matrix =
+ reinterpret_cast<jfloatArray>(GetObjectField(
+ jni, j_decoder_output_buffer, j_transform_matrix_field_));
+ frame_delayed_ms = GetLongField(
+ jni, j_decoder_output_buffer, j_texture_frame_delay_ms_field_);
+
+ // Create webrtc::VideoFrameBuffer with native texture handle.
+ frame_buffer = surface_texture_helper_->CreateTextureFrame(
+ width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
+ } else {
+ EnableFrameLogOnWarning();
+ }
+ } else {
+ // Extract data from the Java ByteBuffer and create an output YUV420 frame -
+ // for non-surface decoding only.
+ const int output_buffer_index = GetIntField(
+ jni, j_decoder_output_buffer, j_info_index_field_);
+ const int output_buffer_offset = GetIntField(
+ jni, j_decoder_output_buffer, j_info_offset_field_);
+ const int output_buffer_size = GetIntField(
+ jni, j_decoder_output_buffer, j_info_size_field_);
+ presentation_timestamps_ms = GetLongField(
+ jni, j_decoder_output_buffer, j_presentation_timestamp_ms_field_);
+ output_timestamps_ms = GetLongField(
+ jni, j_decoder_output_buffer, j_timestamp_ms_field_);
+ output_ntp_timestamps_ms = GetLongField(
+ jni, j_decoder_output_buffer, j_ntp_timestamp_ms_field_);
+
+ decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_byte_buffer_decode_time_ms_field_);
+
+ if (output_buffer_size < width * height * 3 / 2) {
+ ALOGE << "Insufficient output buffer size: " << output_buffer_size;
+ return false;
+ }
+ jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
+ jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
+ jobject output_buffer =
+ jni->GetObjectArrayElement(output_buffers, output_buffer_index);
+ uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
+ output_buffer));
+ if (CheckException(jni)) {
+ return false;
+ }
+ payload += output_buffer_offset;
+
+ // Create yuv420 frame.
+ frame_buffer = decoded_frame_pool_.CreateBuffer(width, height);
+ if (color_format == COLOR_FormatYUV420Planar) {
+ RTC_CHECK_EQ(0, stride % 2);
+ RTC_CHECK_EQ(0, slice_height % 2);
+ const int uv_stride = stride / 2;
+ const int u_slice_height = slice_height / 2;
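+ // I420 layout: the U plane starts after the full-resolution Y plane and
+ // the V plane after the quarter-resolution U plane.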
+ const uint8_t* y_ptr = payload;
+ const uint8_t* u_ptr = y_ptr + stride * slice_height;
+ const uint8_t* v_ptr = u_ptr + uv_stride * u_slice_height;
+ libyuv::I420Copy(y_ptr, stride,
+ u_ptr, uv_stride,
+ v_ptr, uv_stride,
+ frame_buffer->MutableData(webrtc::kYPlane),
+ frame_buffer->stride(webrtc::kYPlane),
+ frame_buffer->MutableData(webrtc::kUPlane),
+ frame_buffer->stride(webrtc::kUPlane),
+ frame_buffer->MutableData(webrtc::kVPlane),
+ frame_buffer->stride(webrtc::kVPlane),
+ width, height);
+ } else {
+ // All other supported formats are NV12.
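+ // NV12 stores interleaved UV samples after the Y plane, so a single chroma
+ // pointer with the Y stride is sufficient for the conversion.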
+ const uint8_t* y_ptr = payload;
+ const uint8_t* uv_ptr = y_ptr + stride * slice_height;
+ libyuv::NV12ToI420(
+ y_ptr, stride,
+ uv_ptr, stride,
+ frame_buffer->MutableData(webrtc::kYPlane),
+ frame_buffer->stride(webrtc::kYPlane),
+ frame_buffer->MutableData(webrtc::kUPlane),
+ frame_buffer->stride(webrtc::kUPlane),
+ frame_buffer->MutableData(webrtc::kVPlane),
+ frame_buffer->stride(webrtc::kVPlane),
+ width, height);
+ }
+ // Return output byte buffer back to codec.
+ jni->CallVoidMethod(
+ *j_media_codec_video_decoder_,
+ j_return_decoded_byte_buffer_method_,
+ output_buffer_index);
+ if (CheckException(jni)) {
+ ALOGE << "returnDecodedOutputBuffer error";
+ return false;
+ }
+ }
+ VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0);
+ decoded_frame.set_timestamp(output_timestamps_ms);
+ decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms);
+
+ if (frames_decoded_ < frames_decoded_logged_) {
+ ALOGD << "Decoder frame out # " << frames_decoded_ <<
+ ". " << width << " x " << height <<
+ ". " << stride << " x " << slice_height <<
+ ". Color: " << color_format <<
+ ". TS: " << presentation_timestamps_ms <<
+ ". DecTime: " << (int)decode_time_ms <<
+ ". DelayTime: " << (int)frame_delayed_ms;
+ }
+
+ // Calculate and print decoding statistics - every 3 seconds.
+ frames_decoded_++;
+ current_frames_++;
+ current_decoding_time_ms_ += decode_time_ms;
+ current_delay_time_ms_ += frame_delayed_ms;
+ int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
+ if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
+ current_frames_ > 0) {
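+ // current_bytes_ * 8 gives bits; dividing by the elapsed interval in ms
+ // yields kbps. The fps computation adds half the interval before dividing
+ // to round to the nearest integer.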
+ int current_bitrate = current_bytes_ * 8 / statistic_time_ms;
+ int current_fps =
+ (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
+ ALOGD << "Frames decoded: " << frames_decoded_ <<
+ ". Received: " << frames_received_ <<
+ ". Bitrate: " << current_bitrate << " kbps" <<
+ ". Fps: " << current_fps <<
+ ". DecTime: " << (current_decoding_time_ms_ / current_frames_) <<
+ ". DelayTime: " << (current_delay_time_ms_ / current_frames_) <<
+ " for last " << statistic_time_ms << " ms.";
+ start_time_ms_ = GetCurrentTimeMs();
+ current_frames_ = 0;
+ current_bytes_ = 0;
+ current_decoding_time_ms_ = 0;
+ current_delay_time_ms_ = 0;
+ }
+
+ // |IsZeroSize()| returns true when a frame has been dropped.
+ if (!decoded_frame.IsZeroSize()) {
+ // Callback - output decoded frame.
+ const int32_t callback_status =
+ callback_->Decoded(decoded_frame, decode_time_ms);
+ if (callback_status > 0) {
+ ALOGE << "callback error";
+ }
+ }
+ return true;
+}
+
+int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) {
+ callback_ = callback;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ if (!inited_) {
+ return;
+ }
+ // We only ever send one message to |this| directly (not through a Bind()'d
+ // functor), so expect no ID/data.
+ RTC_CHECK(!msg->message_id) << "Unexpected message!";
+ RTC_CHECK(!msg->pdata) << "Unexpected message!";
+ CheckOnCodecThread();
+
+ if (!DeliverPendingOutputs(jni, 0)) {
+ ALOGE << "OnMessage: DeliverPendingOutputs error";
+ ProcessHWErrorOnCodecThread();
+ return;
+ }
+ codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+}
+
+MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() {
+ ALOGD << "MediaCodecVideoDecoderFactory ctor";
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
+ supported_codec_types_.clear();
+
+ bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
+ j_decoder_class,
+ GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z"));
+ if (CheckException(jni)) {
+ is_vp8_hw_supported = false;
+ }
+ if (is_vp8_hw_supported) {
+ ALOGD << "VP8 HW Decoder supported.";
+ supported_codec_types_.push_back(kVideoCodecVP8);
+ }
+
+ bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
+ j_decoder_class,
+ GetStaticMethodID(jni, j_decoder_class, "isVp9HwSupported", "()Z"));
+ if (CheckException(jni)) {
+ is_vp9_hw_supported = false;
+ }
+ if (is_vp9_hw_supported) {
+ ALOGD << "VP9 HW Decoder supported.";
+ supported_codec_types_.push_back(kVideoCodecVP9);
+ }
+
+ bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
+ j_decoder_class,
+ GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z"));
+ if (CheckException(jni)) {
+ is_h264_hw_supported = false;
+ }
+ if (is_h264_hw_supported) {
+ ALOGD << "H264 HW Decoder supported.";
+ supported_codec_types_.push_back(kVideoCodecH264);
+ }
+}
+
+MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {
+ ALOGD << "MediaCodecVideoDecoderFactory dtor";
+}
+
+void MediaCodecVideoDecoderFactory::SetEGLContext(
+ JNIEnv* jni, jobject render_egl_context) {
+ ALOGD << "MediaCodecVideoDecoderFactory::SetEGLContext";
+ if (!egl_.CreateEglBase(jni, render_egl_context)) {
+ ALOGW << "Invalid EGL context - HW surface decoding is disabled.";
+ }
+}
+
+webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
+ VideoCodecType type) {
+ if (supported_codec_types_.empty()) {
+ ALOGW << "No HW video decoder for type " << (int)type;
+ return nullptr;
+ }
+ for (VideoCodecType codec_type : supported_codec_types_) {
+ if (codec_type == type) {
+ ALOGD << "Create HW video decoder for type " << (int)type;
+ return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded(), type,
+ egl_.egl_base_context());
+ }
+ }
+ ALOGW << "Can not find HW video decoder for type " << (int)type;
+ return nullptr;
+}
+
+void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
+ webrtc::VideoDecoder* decoder) {
+ ALOGD << "Destroy video decoder.";
+ delete decoder;
+}
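+
+// Illustrative lifetime of a decoder obtained through this factory
+// (hypothetical caller code, shown for orientation only):
+//   MediaCodecVideoDecoderFactory factory;
+//   factory.SetEGLContext(jni, context);  // Optional: enables surface decoding.
+//   webrtc::VideoDecoder* decoder = factory.CreateVideoDecoder(kVideoCodecVP8);
+//   ...
+//   factory.DestroyVideoDecoder(decoder);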
+
+const char* MediaCodecVideoDecoder::ImplementationName() const {
+ return "MediaCodec";
+}
+
+} // namespace webrtc_jni
+
diff --git a/webrtc/api/java/jni/androidmediadecoder_jni.h b/webrtc/api/java/jni/androidmediadecoder_jni.h
new file mode 100644
index 0000000..c79490e
--- /dev/null
+++ b/webrtc/api/java/jni/androidmediadecoder_jni.h
@@ -0,0 +1,59 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
+
+#include "webrtc/api/java/jni/eglbase_jni.h"
+#include "webrtc/media/webrtc/webrtcvideodecoderfactory.h"
+
+namespace webrtc_jni {
+
+// Implementation of Android MediaCodec based decoder factory.
+class MediaCodecVideoDecoderFactory
+ : public cricket::WebRtcVideoDecoderFactory {
+ public:
+ MediaCodecVideoDecoderFactory();
+ virtual ~MediaCodecVideoDecoderFactory();
+
+ void SetEGLContext(JNIEnv* jni, jobject render_egl_context);
+
+ // WebRtcVideoDecoderFactory implementation.
+ webrtc::VideoDecoder* CreateVideoDecoder(webrtc::VideoCodecType type)
+ override;
+
+ void DestroyVideoDecoder(webrtc::VideoDecoder* decoder) override;
+
+ private:
+ EglBase egl_;
+ std::vector<webrtc::VideoCodecType> supported_codec_types_;
+};
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
diff --git a/webrtc/api/java/jni/androidmediaencoder_jni.cc b/webrtc/api/java/jni/androidmediaencoder_jni.cc
new file mode 100644
index 0000000..a06b026
--- /dev/null
+++ b/webrtc/api/java/jni/androidmediaencoder_jni.cc
@@ -0,0 +1,1265 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// NOTICE: androidmediaencoder_jni.h must be included before
+// androidmediacodeccommon.h to avoid build errors.
+#include "webrtc/api/java/jni/androidmediaencoder_jni.h"
+
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+#include "third_party/libyuv/include/libyuv/video_common.h"
+#include "webrtc/api/java/jni/androidmediacodeccommon.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/utility/quality_scaler.h"
+#include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
+#include "webrtc/system_wrappers/include/field_trial.h"
+#include "webrtc/system_wrappers/include/logcat_trace_context.h"
+
+using rtc::Bind;
+using rtc::Thread;
+using rtc::ThreadManager;
+using rtc::scoped_ptr;
+
+using webrtc::CodecSpecificInfo;
+using webrtc::EncodedImage;
+using webrtc::VideoFrame;
+using webrtc::RTPFragmentationHeader;
+using webrtc::VideoCodec;
+using webrtc::VideoCodecType;
+using webrtc::kVideoCodecH264;
+using webrtc::kVideoCodecVP8;
+using webrtc::kVideoCodecVP9;
+
+namespace webrtc_jni {
+
+// H.264 start code length.
+#define H264_SC_LENGTH 4
+// Maximum allowed NALUs in one output frame.
+#define MAX_NALUS_PERFRAME 32
+// Maximum supported HW video encoder resolution.
+#define MAX_VIDEO_WIDTH 1280
+#define MAX_VIDEO_HEIGHT 1280
+// Maximum supported HW video encoder fps.
+#define MAX_VIDEO_FPS 30
+// Maximum allowed fps value in SetRates() call.
+#define MAX_ALLOWED_VIDEO_FPS 60
+// Maximum allowed frames in encoder input queue.
+#define MAX_ENCODER_Q_SIZE 2
+// Maximum allowed latency in ms.
+#define MAX_ENCODER_LATENCY_MS 70
+// Maximum number of dropped frames caused by a full encoder queue - exceeding
+// this threshold means the encoder probably got stuck and needs to be reset.
+#define ENCODER_STALL_FRAMEDROP_THRESHOLD 60
+
+// Logging macros.
+#define TAG_ENCODER "MediaCodecVideoEncoder"
+#ifdef TRACK_BUFFER_TIMING
+#define ALOGV(...) \
+ __android_log_print(ANDROID_LOG_VERBOSE, TAG_ENCODER, __VA_ARGS__)
+#else
+#define ALOGV(...)
+#endif
+#define ALOGD LOG_TAG(rtc::LS_INFO, TAG_ENCODER)
+#define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_ENCODER)
+#define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_ENCODER)
+
+namespace {
+// Maximum allowed gap between incoming frames before requesting a key frame.
+const size_t kFrameDiffThresholdMs = 1100;
+const int kMinKeyFrameInterval = 2;
+} // namespace
+
+// MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
+// Android's MediaCodec SDK API behind the scenes to implement (hopefully)
+// HW-backed video encode. This C++ class is implemented as a very thin shim,
+// delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
+// MediaCodecVideoEncoder is created, operated, and destroyed on a single
+// thread, currently the libjingle Worker thread.
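+//
+// Illustrative call sequence from the owning thread (a sketch, not actual
+// caller code):
+//   encoder->InitEncode(&codec_settings, number_of_cores, max_payload_size);
+//   encoder->RegisterEncodeCompleteCallback(&callback);
+//   encoder->Encode(frame, nullptr, &frame_types);  // Repeated per frame.
+//   encoder->Release();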
+class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
+ public rtc::MessageHandler {
+ public:
+ virtual ~MediaCodecVideoEncoder();
+ MediaCodecVideoEncoder(JNIEnv* jni,
+ VideoCodecType codecType,
+ jobject egl_context);
+
+ // webrtc::VideoEncoder implementation. Everything trampolines to
+ // |codec_thread_| for execution.
+ int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
+ int32_t /* number_of_cores */,
+ size_t /* max_payload_size */) override;
+ int32_t Encode(const webrtc::VideoFrame& input_image,
+ const webrtc::CodecSpecificInfo* /* codec_specific_info */,
+ const std::vector<webrtc::FrameType>* frame_types) override;
+ int32_t RegisterEncodeCompleteCallback(
+ webrtc::EncodedImageCallback* callback) override;
+ int32_t Release() override;
+ int32_t SetChannelParameters(uint32_t /* packet_loss */,
+ int64_t /* rtt */) override;
+ int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override;
+
+ // rtc::MessageHandler implementation.
+ void OnMessage(rtc::Message* msg) override;
+
+ void OnDroppedFrame() override;
+
+ int GetTargetFramerate() override;
+
+ bool SupportsNativeHandle() const override { return egl_context_ != nullptr; }
+ const char* ImplementationName() const override;
+
+ private:
+ // CHECK-fail if not running on |codec_thread_|.
+ void CheckOnCodecThread();
+
+ // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and
+ // InitEncodeOnCodecThread() in an attempt to restore the codec to an
+ // operable state. Necessary after all manner of OMX-layer errors.
+ bool ResetCodecOnCodecThread();
+
+ // Implementation of webrtc::VideoEncoder methods above, all running on the
+ // codec thread exclusively.
+ //
+ // If width==0 then this is assumed to be a re-initialization and the
+ // previously-current values are reused instead of the passed parameters
+ // (makes it easier to reason about thread-safety).
+ int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps,
+ bool use_surface);
+ // Reconfigure to match |frame| in width, height. Also reconfigures the
+ // encoder if |frame| is a texture/byte buffer and the encoder is initialized
+ // for byte buffer/texture. Returns false if reconfiguring fails.
+ bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame);
+ int32_t EncodeOnCodecThread(
+ const webrtc::VideoFrame& input_image,
+ const std::vector<webrtc::FrameType>* frame_types);
+ bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
+ bool EncodeTextureOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame);
+
+ int32_t RegisterEncodeCompleteCallbackOnCodecThread(
+ webrtc::EncodedImageCallback* callback);
+ int32_t ReleaseOnCodecThread();
+ int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
+
+ // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
+ int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
+ jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
+ bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
+ jlong GetOutputBufferInfoPresentationTimestampUs(
+ JNIEnv* jni, jobject j_output_buffer_info);
+
+ // Deliver any outputs pending in the MediaCodec to our |callback_| and return
+ // true on success.
+ bool DeliverPendingOutputs(JNIEnv* jni);
+
+ // Search for H.264 start codes.
+ int32_t NextNaluPosition(uint8_t* buffer, size_t buffer_size);
+
+ // Displays encoder statistics.
+ void LogStatistics(bool force_log);
+
+ // Type of video codec.
+ VideoCodecType codecType_;
+
+ // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
+ // |codec_thread_| synchronously.
+ webrtc::EncodedImageCallback* callback_;
+
+ // State that is constant for the lifetime of this object once the ctor
+ // returns.
+ scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec.
+ rtc::ThreadChecker codec_thread_checker_;
+ ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
+ ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
+ jmethodID j_init_encode_method_;
+ jmethodID j_get_input_buffers_method_;
+ jmethodID j_dequeue_input_buffer_method_;
+ jmethodID j_encode_buffer_method_;
+ jmethodID j_encode_texture_method_;
+ jmethodID j_release_method_;
+ jmethodID j_set_rates_method_;
+ jmethodID j_dequeue_output_buffer_method_;
+ jmethodID j_release_output_buffer_method_;
+ jfieldID j_color_format_field_;
+ jfieldID j_info_index_field_;
+ jfieldID j_info_buffer_field_;
+ jfieldID j_info_is_key_frame_field_;
+ jfieldID j_info_presentation_timestamp_us_field_;
+
+ // State that is valid only between InitEncode() and the next Release().
+ // Touched only on codec_thread_ so no explicit synchronization necessary.
+ int width_; // Frame width in pixels.
+ int height_; // Frame height in pixels.
+ bool inited_;
+ bool use_surface_;
+ uint16_t picture_id_;
+ enum libyuv::FourCC encoder_fourcc_; // Encoder color space format.
+ int last_set_bitrate_kbps_; // Last-requested bitrate in kbps.
+ int last_set_fps_; // Last-requested frame rate.
+ int64_t current_timestamp_us_; // Current frame timestamps in us.
+ int frames_received_; // Number of frames received by encoder.
+ int frames_encoded_; // Number of frames encoded by encoder.
+ int frames_dropped_media_encoder_; // Number of frames dropped by encoder.
+ // Number of dropped frames caused by full queue.
+ int consecutive_full_queue_frame_drops_;
+ int frames_in_queue_; // Number of frames in encoder queue.
+ int64_t stat_start_time_ms_; // Start time for statistics.
+ int current_frames_; // Number of frames in the current statistics interval.
+ int current_bytes_; // Encoded bytes in the current statistics interval.
+ int current_acc_qp_; // Accumulated QP in the current statistics interval.
+ int current_encoding_time_ms_; // Encoding time in the current interval.
+ int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame.
+ int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame.
+ std::vector<int32_t> timestamps_; // Video frames timestamp queue.
+ std::vector<int64_t> render_times_ms_; // Video frames render time queue.
+ std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to
+ // encoder input.
+ int32_t output_timestamp_; // Last output frame timestamp from timestamps_ Q.
+ int64_t output_render_time_ms_; // Last output frame render time from
+ // render_times_ms_ queue.
+ // Frame size in bytes fed to MediaCodec.
+ int yuv_size_;
+ // True only between a callback_->Encoded() call returning a positive value
+ // and the next Encode() call, which is then dropped.
+ bool drop_next_input_frame_;
+ // Global references; must be deleted in Release().
+ std::vector<jobject> input_buffers_;
+ webrtc::QualityScaler quality_scaler_;
+ // Dynamic resolution change, off by default.
+ bool scale_;
+
+ // H264 bitstream parser, used to extract QP from encoded bitstreams.
+ webrtc::H264BitstreamParser h264_bitstream_parser_;
+
+ // VP9 variables to populate codec specific structure.
+ webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for
+ // non-flexible VP9 mode.
+ uint8_t tl0_pic_idx_;
+ size_t gof_idx_;
+
+ // EGL context - owned by factory, should not be allocated/destroyed
+ // by MediaCodecVideoEncoder.
+ jobject egl_context_;
+
+ // Temporary fix for VP8.
+ // Sends a key frame if frames are widely spaced apart (possibly
+ // corresponding to a large image change).
+ int64_t last_frame_received_ms_;
+ int frames_received_since_last_key_;
+ webrtc::VideoCodecMode codec_mode_;
+};
+
+MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
+ // Call Release() to ensure no more callbacks to us after we are deleted.
+ Release();
+}
+
+MediaCodecVideoEncoder::MediaCodecVideoEncoder(
+ JNIEnv* jni, VideoCodecType codecType, jobject egl_context) :
+ codecType_(codecType),
+ callback_(NULL),
+ codec_thread_(new Thread()),
+ j_media_codec_video_encoder_class_(
+ jni,
+ FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
+ j_media_codec_video_encoder_(
+ jni,
+ jni->NewObject(*j_media_codec_video_encoder_class_,
+ GetMethodID(jni,
+ *j_media_codec_video_encoder_class_,
+ "<init>",
+ "()V"))),
+ inited_(false),
+ use_surface_(false),
+ picture_id_(0),
+ egl_context_(egl_context) {
+ ScopedLocalRefFrame local_ref_frame(jni);
+ // It would be nice to avoid spinning up a new thread per MediaCodec, and
+ // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
+ // 2732 means that deadlocks abound. This class synchronously trampolines
+ // to |codec_thread_|, so if anything else can be coming to _us_ from
+ // |codec_thread_|, or from any thread holding the |_sendCritSect| described
+ // in the bug, we have a problem. For now work around that with a dedicated
+ // thread.
+ codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
+ RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";
+ codec_thread_checker_.DetachFromThread();
+ jclass j_output_buffer_info_class =
+ FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
+ j_init_encode_method_ = GetMethodID(
+ jni,
+ *j_media_codec_video_encoder_class_,
+ "initEncode",
+ "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;"
+ "IIIILorg/webrtc/EglBase14$Context;)Z");
+ j_get_input_buffers_method_ = GetMethodID(
+ jni,
+ *j_media_codec_video_encoder_class_,
+ "getInputBuffers",
+ "()[Ljava/nio/ByteBuffer;");
+ j_dequeue_input_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
+ j_encode_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z");
+ j_encode_texture_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "encodeTexture",
+ "(ZI[FJ)Z");
+ j_release_method_ =
+ GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
+ j_set_rates_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
+ j_dequeue_output_buffer_method_ = GetMethodID(
+ jni,
+ *j_media_codec_video_encoder_class_,
+ "dequeueOutputBuffer",
+ "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
+ j_release_output_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");
+
+ j_color_format_field_ =
+ GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
+ j_info_index_field_ =
+ GetFieldID(jni, j_output_buffer_info_class, "index", "I");
+ j_info_buffer_field_ = GetFieldID(
+ jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
+ j_info_is_key_frame_field_ =
+ GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
+ j_info_presentation_timestamp_us_field_ = GetFieldID(
+ jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
+ CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
+ srand(time(NULL));
+ AllowBlockingCalls();
+}
+
+int32_t MediaCodecVideoEncoder::InitEncode(
+ const webrtc::VideoCodec* codec_settings,
+ int32_t /* number_of_cores */,
+ size_t /* max_payload_size */) {
+ const int kMinWidth = 320;
+ const int kMinHeight = 180;
+ const int kLowQpThresholdDenominator = 3;
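+ // The "low QP" (good quality) threshold becomes kMaxQp / 3 below, e.g. 42
+ // for VP8's internal range [0, 127] and 17 for H.264's range [0, 51].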
+ if (codec_settings == NULL) {
+ ALOGE << "NULL VideoCodec instance";
+ return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+ }
+ // Factory should guard against other codecs being used with us.
+ RTC_CHECK(codec_settings->codecType == codecType_)
+ << "Unsupported codec " << codec_settings->codecType << " for "
+ << codecType_;
+
+ ALOGD << "InitEncode request";
+ codec_mode_ = codec_settings->mode;
+ scale_ = (codecType_ != kVideoCodecVP9) && (webrtc::field_trial::FindFullName(
+ "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled");
+ ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
+ if (scale_) {
+ if (codecType_ == kVideoCodecVP8) {
+ // QP is obtained from the VP8 bitstream for HW, so it corresponds to the
+ // internal range [0, 127], and QP_max cannot be changed in HW, so it is
+ // always 127. Note that the SW encoder uses the user-level QP range
+ // [0, 63].
+ const int kMaxQp = 127;
+ const int kBadQpThreshold = 95;
+ quality_scaler_.Init(
+ kMaxQp / kLowQpThresholdDenominator, kBadQpThreshold, false);
+ } else if (codecType_ == kVideoCodecH264) {
+ // H264 QP is in the range [0, 51].
+ const int kMaxQp = 51;
+ const int kBadQpThreshold = 40;
+ quality_scaler_.Init(
+ kMaxQp / kLowQpThresholdDenominator, kBadQpThreshold, false);
+ } else {
+ // When adding codec support to additional hardware codecs, also configure
+ // their QP thresholds for scaling.
+ RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
+ }
+ quality_scaler_.SetMinResolution(kMinWidth, kMinHeight);
+ quality_scaler_.ReportFramerate(codec_settings->maxFramerate);
+ }
+ return codec_thread_->Invoke<int32_t>(
+ Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
+ this,
+ codec_settings->width,
+ codec_settings->height,
+ codec_settings->startBitrate,
+ codec_settings->maxFramerate,
+ false /* use_surface */));
+}
+
+int32_t MediaCodecVideoEncoder::Encode(
+ const webrtc::VideoFrame& frame,
+ const webrtc::CodecSpecificInfo* /* codec_specific_info */,
+ const std::vector<webrtc::FrameType>* frame_types) {
+ return codec_thread_->Invoke<int32_t>(Bind(
+ &MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame, frame_types));
+}
+
+int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
+ webrtc::EncodedImageCallback* callback) {
+ return codec_thread_->Invoke<int32_t>(
+ Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
+ this,
+ callback));
+}
+
+int32_t MediaCodecVideoEncoder::Release() {
+ ALOGD << "EncoderRelease request";
+ return codec_thread_->Invoke<int32_t>(
+ Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
+}
+
+int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
+ int64_t /* rtt */) {
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
+ uint32_t frame_rate) {
+ return codec_thread_->Invoke<int32_t>(
+ Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread,
+ this,
+ new_bit_rate,
+ frame_rate));
+}
+
+void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ // We only ever send one message to |this| directly (not through a Bind()'d
+ // functor), so expect no ID/data.
+ RTC_CHECK(!msg->message_id) << "Unexpected message!";
+ RTC_CHECK(!msg->pdata) << "Unexpected message!";
+ if (!inited_) {
+ return;
+ }
+
+ // It would be nice to recover from a failure here if one happened, but it's
+ // unclear how to signal such a failure to the app, so instead we stay silent
+ // about it and let the next app-called API method reveal the borkedness.
+ DeliverPendingOutputs(jni);
+ codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+}
+
+bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ ALOGE << "ResetOnCodecThread";
+ if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK ||
+ InitEncodeOnCodecThread(width_, height_, 0, 0, false) !=
+ WEBRTC_VIDEO_CODEC_OK) {
+ // TODO(fischman): wouldn't it be nice if there was a way to gracefully
+ // degrade to a SW encoder at this point? There isn't one AFAICT :(
+ // https://code.google.com/p/webrtc/issues/detail?id=2920
+ return false;
+ }
+ return true;
+}
+
+int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
+ int width, int height, int kbps, int fps, bool use_surface) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set.";
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " <<
+ width << " x " << height << ". Bitrate: " << kbps <<
+ " kbps. Fps: " << fps;
+ if (kbps == 0) {
+ kbps = last_set_bitrate_kbps_;
+ }
+ if (fps == 0) {
+ fps = MAX_VIDEO_FPS;
+ }
+
+ width_ = width;
+ height_ = height;
+ last_set_bitrate_kbps_ = kbps;
+ last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS;
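+ // I420 uses 12 bits (3/2 bytes) per pixel, hence the buffer size below.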
+ yuv_size_ = width_ * height_ * 3 / 2;
+ frames_received_ = 0;
+ frames_encoded_ = 0;
+ frames_dropped_media_encoder_ = 0;
+ consecutive_full_queue_frame_drops_ = 0;
+ frames_in_queue_ = 0;
+ current_timestamp_us_ = 0;
+ stat_start_time_ms_ = GetCurrentTimeMs();
+ current_frames_ = 0;
+ current_bytes_ = 0;
+ current_acc_qp_ = 0;
+ current_encoding_time_ms_ = 0;
+ last_input_timestamp_ms_ = -1;
+ last_output_timestamp_ms_ = -1;
+ output_timestamp_ = 0;
+ output_render_time_ms_ = 0;
+ timestamps_.clear();
+ render_times_ms_.clear();
+ frame_rtc_times_ms_.clear();
+ drop_next_input_frame_ = false;
+ use_surface_ = use_surface;
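+ // The picture ID is masked to 15 bits to fit the VP8/VP9 RTP payload
+ // descriptor field; a random start avoids collisions across restarts.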
+ picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
+ gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1);
+ tl0_pic_idx_ = static_cast<uint8_t>(rand());
+ gof_idx_ = 0;
+ last_frame_received_ms_ = -1;
+ frames_received_since_last_key_ = kMinKeyFrameInterval;
+
+ // We enforce no extra stride/padding in the format creation step.
+ jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
+ jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
+ const bool encode_status = jni->CallBooleanMethod(
+ *j_media_codec_video_encoder_, j_init_encode_method_,
+ j_video_codec_enum, width, height, kbps, fps,
+ (use_surface ? egl_context_ : nullptr));
+ if (!encode_status) {
+ ALOGE << "Failed to configure encoder.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ CHECK_EXCEPTION(jni);
+
+ if (!use_surface) {
+ jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
+ jni->CallObjectMethod(*j_media_codec_video_encoder_,
+ j_get_input_buffers_method_));
+ CHECK_EXCEPTION(jni);
+ if (IsNull(jni, input_buffers)) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ switch (GetIntField(jni, *j_media_codec_video_encoder_,
+ j_color_format_field_)) {
+ case COLOR_FormatYUV420Planar:
+ encoder_fourcc_ = libyuv::FOURCC_YU12;
+ break;
+ case COLOR_FormatYUV420SemiPlanar:
+ case COLOR_QCOM_FormatYUV420SemiPlanar:
+ case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
+ encoder_fourcc_ = libyuv::FOURCC_NV12;
+ break;
+ default:
+ LOG(LS_ERROR) << "Wrong color format.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ size_t num_input_buffers = jni->GetArrayLength(input_buffers);
+ RTC_CHECK(input_buffers_.empty())
+ << "Unexpected double InitEncode without Release";
+ input_buffers_.resize(num_input_buffers);
+ for (size_t i = 0; i < num_input_buffers; ++i) {
+ input_buffers_[i] =
+ jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
+ int64_t yuv_buffer_capacity =
+ jni->GetDirectBufferCapacity(input_buffers_[i]);
+ CHECK_EXCEPTION(jni);
+ RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
+ }
+ }
+
+ inited_ = true;
+ codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
+ const webrtc::VideoFrame& frame,
+ const std::vector<webrtc::FrameType>* frame_types) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ if (!inited_) {
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+
+ bool send_key_frame = false;
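+ // Sketch of the gap heuristic below: with kFrameDiffThresholdMs = 1100, a
+ // 1.2 s gap between frames triggers a key frame request, but only if more
+ // than kMinKeyFrameInterval frames arrived since the last detected gap.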
+ if (codec_mode_ == webrtc::kRealtimeVideo) {
+ ++frames_received_since_last_key_;
+ int64_t now_ms = GetCurrentTimeMs();
+ if (last_frame_received_ms_ != -1 &&
+ (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) {
+ // Add a limit to prevent triggering a key frame for every frame at very
+ // low framerates (e.g. if the frame diff is always > kFrameDiffThresholdMs).
+ if (frames_received_since_last_key_ > kMinKeyFrameInterval) {
+ ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_);
+ send_key_frame = true;
+ }
+ frames_received_since_last_key_ = 0;
+ }
+ last_frame_received_ms_ = now_ms;
+ }
+
+ frames_received_++;
+ if (!DeliverPendingOutputs(jni)) {
+ if (!ResetCodecOnCodecThread())
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ if (frames_encoded_ < kMaxEncodedLogFrames) {
+ ALOGD << "Encoder frame in # " << (frames_received_ - 1) <<
+ ". TS: " << (int)(current_timestamp_us_ / 1000) <<
+ ". Q: " << frames_in_queue_ <<
+ ". Fps: " << last_set_fps_ <<
+ ". Kbps: " << last_set_bitrate_kbps_;
+ }
+
+ if (drop_next_input_frame_) {
+ ALOGW << "Encoder drop frame - failed callback.";
+ drop_next_input_frame_ = false;
+ current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+ frames_dropped_media_encoder_++;
+ OnDroppedFrame();
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";
+
+ // Check if we accumulated too many frames in encoder input buffers or if
+ // the encoder latency exceeds MAX_ENCODER_LATENCY_MS, and drop the frame
+ // if so.
+ if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
+ int encoder_latency_ms = last_input_timestamp_ms_ -
+ last_output_timestamp_ms_;
+ if (frames_in_queue_ > MAX_ENCODER_Q_SIZE ||
+ encoder_latency_ms > MAX_ENCODER_LATENCY_MS) {
+ ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms <<
+ " ms. Q size: " << frames_in_queue_ << ". TS: " <<
+ (int)(current_timestamp_us_ / 1000) << ". Fps: " << last_set_fps_ <<
+ ". Consecutive drops: " << consecutive_full_queue_frame_drops_ ;
+ current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+ consecutive_full_queue_frame_drops_++;
+ if (consecutive_full_queue_frame_drops_ >=
+ ENCODER_STALL_FRAMEDROP_THRESHOLD) {
+ ALOGE << "Encoder got stuck. Reset.";
+ ResetCodecOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ frames_dropped_media_encoder_++;
+ OnDroppedFrame();
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+ }
+ consecutive_full_queue_frame_drops_ = 0;
+
+ VideoFrame input_frame = frame;
+ if (scale_) {
+ // Check framerate before spatial resolution change.
+ quality_scaler_.OnEncodeFrame(frame);
+ const webrtc::QualityScaler::Resolution scaled_resolution =
+ quality_scaler_.GetScaledResolution();
+ if (scaled_resolution.width != frame.width() ||
+ scaled_resolution.height != frame.height()) {
+ if (frame.native_handle() != nullptr) {
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
+ static_cast<AndroidTextureBuffer*>(
+ frame.video_frame_buffer().get())->ScaleAndRotate(
+ scaled_resolution.width,
+ scaled_resolution.height,
+ webrtc::kVideoRotation_0));
+ input_frame.set_video_frame_buffer(scaled_buffer);
+ } else {
+ input_frame = quality_scaler_.GetScaledFrame(frame);
+ }
+ }
+ }
+
+ if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
+ ALOGE << "Failed to reconfigure encoder.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ // Save the time when the input frame is sent to the encoder.
+ frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
+
+ const bool key_frame =
+ frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
+ bool encode_status = true;
+ if (!input_frame.native_handle()) {
+ int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
+ j_dequeue_input_buffer_method_);
+ CHECK_EXCEPTION(jni);
+ if (j_input_buffer_index == -1) {
+ // Video codec falls behind - no input buffer available.
+ ALOGW << "Encoder drop frame - no input buffers available";
+ frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
+ current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+ frames_dropped_media_encoder_++;
+ OnDroppedFrame();
+ return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
+ }
+ if (j_input_buffer_index == -2) {
+ ResetCodecOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame,
+ j_input_buffer_index);
+ } else {
+ encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame);
+ }
+
+ if (!encode_status) {
+ ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp();
+ ResetCodecOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ last_input_timestamp_ms_ =
+ current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec;
+ frames_in_queue_++;
+
+ // Save input image timestamps for later output.
+ timestamps_.push_back(input_frame.timestamp());
+ render_times_ms_.push_back(input_frame.render_time_ms());
+ current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+
+ if (!DeliverPendingOutputs(jni)) {
+ ALOGE << "Failed deliver pending outputs.";
+ ResetCodecOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
+ const webrtc::VideoFrame& frame) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+
+ const bool is_texture_frame = frame.native_handle() != nullptr;
+ const bool reconfigure_due_to_format = is_texture_frame != use_surface_;
+ const bool reconfigure_due_to_size =
+ frame.width() != width_ || frame.height() != height_;
+
+ if (reconfigure_due_to_format) {
+ ALOGD << "Reconfigure encoder due to format change. "
+ << (use_surface_ ?
+ "Reconfiguring to encode from byte buffer." :
+ "Reconfiguring to encode from texture.");
+ LogStatistics(true);
+ }
+ if (reconfigure_due_to_size) {
+ ALOGW << "Reconfigure encoder due to frame resolution change from "
+ << width_ << " x " << height_ << " to " << frame.width() << " x "
+ << frame.height();
+ LogStatistics(true);
+ width_ = frame.width();
+ height_ = frame.height();
+ }
+
+ if (!reconfigure_due_to_format && !reconfigure_due_to_size)
+ return true;
+
+ ReleaseOnCodecThread();
+
+ return InitEncodeOnCodecThread(width_, height_, 0, 0, is_texture_frame) ==
+ WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ RTC_CHECK(!use_surface_);
+
+ jobject j_input_buffer = input_buffers_[input_buffer_index];
+ uint8_t* yuv_buffer =
+ reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
+ CHECK_EXCEPTION(jni);
+ RTC_CHECK(yuv_buffer) << "Indirect buffer??";
+ RTC_CHECK(!libyuv::ConvertFromI420(
+ frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
+ frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
+ frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
+ yuv_buffer, width_, width_, height_, encoder_fourcc_))
+ << "ConvertFromI420 failed";
+
+ bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+ j_encode_buffer_method_,
+ key_frame,
+ input_buffer_index,
+ yuv_size_,
+ current_timestamp_us_);
+ CHECK_EXCEPTION(jni);
+ return encode_status;
+}
+
+bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ RTC_CHECK(use_surface_);
+ NativeHandleImpl* handle =
+ static_cast<NativeHandleImpl*>(frame.native_handle());
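+ // The 4x4 texture transform (16 floats) from the SurfaceTexture is copied
+ // into a Java float[] below and passed to encodeTexture().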
+ jfloatArray sampling_matrix = jni->NewFloatArray(16);
+ jni->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
+
+ bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+ j_encode_texture_method_,
+ key_frame,
+ handle->oes_texture_id,
+ sampling_matrix,
+ current_timestamp_us_);
+ CHECK_EXCEPTION(jni);
+ return encode_status;
+}
+
+int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
+ webrtc::EncodedImageCallback* callback) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ callback_ = callback;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ if (!inited_) {
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ALOGD << "EncoderReleaseOnCodecThread: Frames received: " <<
+ frames_received_ << ". Encoded: " << frames_encoded_ <<
+ ". Dropped: " << frames_dropped_media_encoder_;
+ ScopedLocalRefFrame local_ref_frame(jni);
+ for (size_t i = 0; i < input_buffers_.size(); ++i)
+ jni->DeleteGlobalRef(input_buffers_[i]);
+ input_buffers_.clear();
+ jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
+ CHECK_EXCEPTION(jni);
+ rtc::MessageQueueManager::Clear(this);
+ inited_ = false;
+ use_surface_ = false;
+ ALOGD << "EncoderReleaseOnCodecThread done.";
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
+ uint32_t frame_rate) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ?
+ frame_rate : MAX_ALLOWED_VIDEO_FPS;
+ if (last_set_bitrate_kbps_ == new_bit_rate &&
+ last_set_fps_ == frame_rate) {
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+ if (scale_) {
+ quality_scaler_.ReportFramerate(frame_rate);
+ }
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ if (new_bit_rate > 0) {
+ last_set_bitrate_kbps_ = new_bit_rate;
+ }
+ if (frame_rate > 0) {
+ last_set_fps_ = frame_rate;
+ }
+ bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+ j_set_rates_method_,
+ last_set_bitrate_kbps_,
+ last_set_fps_);
+ CHECK_EXCEPTION(jni);
+ if (!ret) {
+ ResetCodecOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
+ JNIEnv* jni,
+ jobject j_output_buffer_info) {
+ return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
+}
+
+jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
+ JNIEnv* jni,
+ jobject j_output_buffer_info) {
+ return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
+}
+
+bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
+ JNIEnv* jni,
+ jobject j_output_buffer_info) {
+ return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
+}
+
+jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
+ JNIEnv* jni,
+ jobject j_output_buffer_info) {
+ return GetLongField(
+ jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
+}
+
+bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ while (true) {
+ jobject j_output_buffer_info = jni->CallObjectMethod(
+ *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
+ CHECK_EXCEPTION(jni);
+ if (IsNull(jni, j_output_buffer_info)) {
+ break;
+ }
+
+ int output_buffer_index =
+ GetOutputBufferInfoIndex(jni, j_output_buffer_info);
+ if (output_buffer_index == -1) {
+ ResetCodecOnCodecThread();
+ return false;
+ }
+
+ // Get the output buffer and the key frame flag.
+ jobject j_output_buffer =
+ GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
+ bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
+
+ // Get frame timestamps from a queue - for non-config frames only.
+ int64_t frame_encoding_time_ms = 0;
+ last_output_timestamp_ms_ =
+ GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
+ 1000;
+ if (frames_in_queue_ > 0) {
+ output_timestamp_ = timestamps_.front();
+ timestamps_.erase(timestamps_.begin());
+ output_render_time_ms_ = render_times_ms_.front();
+ render_times_ms_.erase(render_times_ms_.begin());
+ frame_encoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
+ frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
+ frames_in_queue_--;
+ }
+
+ // Extract payload.
+ size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
+ uint8_t* payload = reinterpret_cast<uint8_t*>(
+ jni->GetDirectBufferAddress(j_output_buffer));
+ CHECK_EXCEPTION(jni);
+
+ if (frames_encoded_ < kMaxEncodedLogFrames) {
+ int current_latency =
+ (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_);
+ ALOGD << "Encoder frame out # " << frames_encoded_ <<
+ ". Key: " << key_frame <<
+ ". Size: " << payload_size <<
+ ". TS: " << (int)last_output_timestamp_ms_ <<
+ ". Latency: " << current_latency <<
+ ". EncTime: " << frame_encoding_time_ms;
+ }
+
+ // Callback - return encoded frame.
+ int32_t callback_status = 0;
+ if (callback_) {
+ scoped_ptr<webrtc::EncodedImage> image(
+ new webrtc::EncodedImage(payload, payload_size, payload_size));
+ image->_encodedWidth = width_;
+ image->_encodedHeight = height_;
+ image->_timeStamp = output_timestamp_;
+ image->capture_time_ms_ = output_render_time_ms_;
+ image->_frameType =
+ (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
+ image->_completeFrame = true;
+ image->adapt_reason_.quality_resolution_downscales =
+ scale_ ? quality_scaler_.downscale_shift() : -1;
+
+ webrtc::CodecSpecificInfo info;
+ memset(&info, 0, sizeof(info));
+ info.codecType = codecType_;
+ if (codecType_ == kVideoCodecVP8) {
+ info.codecSpecific.VP8.pictureId = picture_id_;
+ info.codecSpecific.VP8.nonReference = false;
+ info.codecSpecific.VP8.simulcastIdx = 0;
+ info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
+ info.codecSpecific.VP8.layerSync = false;
+ info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
+ info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
+ } else if (codecType_ == kVideoCodecVP9) {
+ if (key_frame) {
+ gof_idx_ = 0;
+ }
+ info.codecSpecific.VP9.picture_id = picture_id_;
+ info.codecSpecific.VP9.inter_pic_predicted = !key_frame;
+ info.codecSpecific.VP9.flexible_mode = false;
+ info.codecSpecific.VP9.ss_data_available = key_frame;
+ info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++;
+ info.codecSpecific.VP9.temporal_idx = webrtc::kNoTemporalIdx;
+ info.codecSpecific.VP9.spatial_idx = webrtc::kNoSpatialIdx;
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.inter_layer_predicted = false;
+ info.codecSpecific.VP9.gof_idx =
+ static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
+ info.codecSpecific.VP9.num_spatial_layers = 1;
+ info.codecSpecific.VP9.spatial_layer_resolution_present = false;
+ if (info.codecSpecific.VP9.ss_data_available) {
+ info.codecSpecific.VP9.spatial_layer_resolution_present = true;
+ info.codecSpecific.VP9.width[0] = width_;
+ info.codecSpecific.VP9.height[0] = height_;
+ info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
+ }
+ }
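+ // The picture ID is 15 bits on the wire, hence the wrap at 0x7FFF.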
+ picture_id_ = (picture_id_ + 1) & 0x7FFF;
+
+ // Generate a header describing a single fragment.
+ webrtc::RTPFragmentationHeader header;
+ memset(&header, 0, sizeof(header));
+ if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) {
+ header.VerifyAndAllocateFragmentationHeader(1);
+ header.fragmentationOffset[0] = 0;
+ header.fragmentationLength[0] = image->_length;
+ header.fragmentationPlType[0] = 0;
+ header.fragmentationTimeDiff[0] = 0;
+ if (codecType_ == kVideoCodecVP8 && scale_) {
+ int qp;
+ if (webrtc::vp8::GetQp(payload, payload_size, &qp)) {
+ current_acc_qp_ += qp;
+ quality_scaler_.ReportQP(qp);
+ }
+ }
+ } else if (codecType_ == kVideoCodecH264) {
+ if (scale_) {
+ h264_bitstream_parser_.ParseBitstream(payload, payload_size);
+ int qp;
+ if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
+ current_acc_qp_ += qp;
+ quality_scaler_.ReportQP(qp);
+ }
+ }
+ // For H.264 search for start codes.
+ int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
+ int32_t scPositionsLength = 0;
+ int32_t scPosition = 0;
+ while (scPositionsLength < MAX_NALUS_PERFRAME) {
+ int32_t naluPosition = NextNaluPosition(
+ payload + scPosition, payload_size - scPosition);
+ if (naluPosition < 0) {
+ break;
+ }
+ scPosition += naluPosition;
+ scPositions[scPositionsLength++] = scPosition;
+ scPosition += H264_SC_LENGTH;
+ }
+ if (scPositionsLength == 0) {
+ ALOGE << "Start code is not found!";
+ ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1]
+ << " " << image->_buffer[2] << " " << image->_buffer[3]
+ << " " << image->_buffer[4] << " " << image->_buffer[5];
+ ResetCodecOnCodecThread();
+ return false;
+ }
+ scPositions[scPositionsLength] = payload_size;
+ header.VerifyAndAllocateFragmentationHeader(scPositionsLength);
+ for (int32_t i = 0; i < scPositionsLength; i++) {
+ header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH;
+ header.fragmentationLength[i] =
+ scPositions[i + 1] - header.fragmentationOffset[i];
+ header.fragmentationPlType[i] = 0;
+ header.fragmentationTimeDiff[i] = 0;
+ }
+ }
+
+ callback_status = callback_->Encoded(*image, &info, &header);
+ }
+
+ // Return output buffer back to the encoder.
+ bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+ j_release_output_buffer_method_,
+ output_buffer_index);
+ CHECK_EXCEPTION(jni);
+ if (!success) {
+ ResetCodecOnCodecThread();
+ return false;
+ }
+
+ // Calculate and print encoding statistics - every 3 seconds.
+ frames_encoded_++;
+ current_frames_++;
+ current_bytes_ += payload_size;
+ current_encoding_time_ms_ += frame_encoding_time_ms;
+ LogStatistics(false);
+
+ if (callback_status > 0) {
+ drop_next_input_frame_ = true;
+ // Theoretically could handle callback_status<0 here, but unclear what
+ // that would mean for us.
+ }
+ }
+ return true;
+}
+
+void MediaCodecVideoEncoder::LogStatistics(bool force_log) {
+ int statistic_time_ms = GetCurrentTimeMs() - stat_start_time_ms_;
+ if ((statistic_time_ms >= kMediaCodecStatisticsIntervalMs || force_log) &&
+ current_frames_ > 0 && statistic_time_ms > 0) {
+ int current_bitrate = current_bytes_ * 8 / statistic_time_ms;
+ int current_fps =
+ (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
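+ // Worked example (illustrative values): 750000 bytes over 3000 ms gives
+ // 750000 * 8 / 3000 = 2000 kbps; 90 frames over 3000 ms gives
+ // (90 * 1000 + 1500) / 3000 = 30 fps, rounded to the nearest integer.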
+ ALOGD << "Encoded frames: " << frames_encoded_ <<
+ ". Bitrate: " << current_bitrate <<
+ ", target: " << last_set_bitrate_kbps_ << " kbps" <<
+ ", fps: " << current_fps <<
+ ", encTime: " << (current_encoding_time_ms_ / current_frames_) <<
+ ". QP: " << (current_acc_qp_ / current_frames_) <<
+ " for last " << statistic_time_ms << " ms.";
+ stat_start_time_ms_ = GetCurrentTimeMs();
+ current_frames_ = 0;
+ current_bytes_ = 0;
+ current_acc_qp_ = 0;
+ current_encoding_time_ms_ = 0;
+ }
+}
+
+int32_t MediaCodecVideoEncoder::NextNaluPosition(
+ uint8_t *buffer, size_t buffer_size) {
+ if (buffer_size < H264_SC_LENGTH) {
+ return -1;
+ }
+ uint8_t *head = buffer;
+ // Set end buffer pointer to 4 bytes before actual buffer end so we can
+ // access head[1], head[2] and head[3] in a loop without buffer overrun.
+ uint8_t *end = buffer + buffer_size - H264_SC_LENGTH;
+
+ while (head < end) {
+ if (head[0]) {
+ head++;
+ continue;
+ }
+ if (head[1]) { // got 00xx
+ head += 2;
+ continue;
+ }
+ if (head[2]) { // got 0000xx
+ head += 3;
+ continue;
+ }
+ if (head[3] != 0x01) { // got 000000xx
+ head++; // xx != 1, continue searching.
+ continue;
+ }
+ return (int32_t)(head - buffer);
+ }
+ return -1;
+}
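+
+// Illustrative behavior (example values, not from the original code): for
+// payload = {0x00, 0x00, 0x00, 0x01, 0x67, ...}, NextNaluPosition(payload,
+// payload_size) returns 0; if |buffer| contains no 00 00 00 01 sequence, it
+// returns -1.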
+
+void MediaCodecVideoEncoder::OnDroppedFrame() {
+ // Report dropped frame to quality_scaler_.
+ if (scale_)
+ quality_scaler_.ReportDroppedFrame();
+}
+
+int MediaCodecVideoEncoder::GetTargetFramerate() {
+ return scale_ ? quality_scaler_.GetTargetFramerate() : -1;
+}
+
+const char* MediaCodecVideoEncoder::ImplementationName() const {
+ return "MediaCodec";
+}
+
+MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
+ supported_codecs_.clear();
+
+ bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
+ j_encoder_class,
+ GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z"));
+ CHECK_EXCEPTION(jni);
+ if (is_vp8_hw_supported) {
+ ALOGD << "VP8 HW Encoder supported.";
+ supported_codecs_.push_back(VideoCodec(kVideoCodecVP8, "VP8",
+ MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+ }
+
+ bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
+ j_encoder_class,
+ GetStaticMethodID(jni, j_encoder_class, "isVp9HwSupported", "()Z"));
+ CHECK_EXCEPTION(jni);
+ if (is_vp9_hw_supported) {
+ ALOGD << "VP9 HW Encoder supported.";
+ supported_codecs_.push_back(VideoCodec(kVideoCodecVP9, "VP9",
+ MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+ }
+
+ bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
+ j_encoder_class,
+ GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
+ CHECK_EXCEPTION(jni);
+ if (is_h264_hw_supported) {
+ ALOGD << "H.264 HW Encoder supported.";
+ supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264",
+ MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+ }
+}
+
+MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {
+ ALOGD << "MediaCodecVideoEncoderFactory dtor";
+}
+
+void MediaCodecVideoEncoderFactory::SetEGLContext(
+ JNIEnv* jni, jobject render_egl_context) {
+ ALOGD << "MediaCodecVideoEncoderFactory::SetEGLContext";
+ if (!egl_base_.CreateEglBase(jni, render_egl_context)) {
+ ALOGW << "Invalid EGL context - HW surface encoding is disabled.";
+ }
+}
+
+webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
+ VideoCodecType type) {
+ if (supported_codecs_.empty()) {
+ ALOGW << "No HW video encoder for type " << (int)type;
+ return nullptr;
+ }
+ for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin();
+ it != supported_codecs_.end(); ++it) {
+ if (it->type == type) {
+ ALOGD << "Create HW video encoder for type " << (int)type <<
+ " (" << it->name << ").";
+ return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type,
+ egl_base_.egl_base_context());
+ }
+ }
+ ALOGW << "Can not find HW video encoder for type " << (int)type;
+ return nullptr;
+}
+
+const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
+MediaCodecVideoEncoderFactory::codecs() const {
+ return supported_codecs_;
+}
+
+void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
+ webrtc::VideoEncoder* encoder) {
+ ALOGD << "Destroy video encoder.";
+ delete encoder;
+}
+
+} // namespace webrtc_jni
+
diff --git a/webrtc/api/java/jni/androidmediaencoder_jni.h b/webrtc/api/java/jni/androidmediaencoder_jni.h
new file mode 100644
index 0000000..e96a489
--- /dev/null
+++ b/webrtc/api/java/jni/androidmediaencoder_jni.h
@@ -0,0 +1,63 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
+
+#include <vector>
+
+#include "webrtc/api/java/jni/eglbase_jni.h"
+#include "webrtc/media/webrtc/webrtcvideoencoderfactory.h"
+
+namespace webrtc_jni {
+
+// Implementation of an Android MediaCodec-based encoder factory.
+class MediaCodecVideoEncoderFactory
+ : public cricket::WebRtcVideoEncoderFactory {
+ public:
+ MediaCodecVideoEncoderFactory();
+ virtual ~MediaCodecVideoEncoderFactory();
+
+ void SetEGLContext(JNIEnv* jni, jobject render_egl_context);
+
+ // WebRtcVideoEncoderFactory implementation.
+ webrtc::VideoEncoder* CreateVideoEncoder(webrtc::VideoCodecType type)
+ override;
+ const std::vector<VideoCodec>& codecs() const override;
+ void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) override;
+
+ private:
+ EglBase egl_base_;
+
+ // Empty if platform support is lacking; const after the ctor returns.
+ std::vector<VideoCodec> supported_codecs_;
+};
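+
+// Illustrative usage sketch (call-site names are assumptions; the actual
+// wiring lives in the JNI glue that sets up the PeerConnectionFactory):
+//   MediaCodecVideoEncoderFactory factory;
+//   factory.SetEGLContext(jni, render_egl_context);
+//   webrtc::VideoEncoder* encoder =
+//       factory.CreateVideoEncoder(webrtc::kVideoCodecVP8);
+//   ...
+//   factory.DestroyVideoEncoder(encoder);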
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
diff --git a/webrtc/api/java/jni/androidnetworkmonitor_jni.cc b/webrtc/api/java/jni/androidnetworkmonitor_jni.cc
new file mode 100644
index 0000000..a38fa11
--- /dev/null
+++ b/webrtc/api/java/jni/androidnetworkmonitor_jni.cc
@@ -0,0 +1,384 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/java/jni/androidnetworkmonitor_jni.h"
+
+#include <dlfcn.h>
+
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/ipaddress.h"
+
+namespace webrtc_jni {
+
+jobject AndroidNetworkMonitor::application_context_ = nullptr;
+
+static NetworkType GetNetworkTypeFromJava(JNIEnv* jni, jobject j_network_type) {
+ std::string enum_name =
+ GetJavaEnumName(jni, "org/webrtc/NetworkMonitorAutoDetect$ConnectionType",
+ j_network_type);
+ if (enum_name == "CONNECTION_UNKNOWN") {
+ return NetworkType::NETWORK_UNKNOWN;
+ }
+ if (enum_name == "CONNECTION_ETHERNET") {
+ return NetworkType::NETWORK_ETHERNET;
+ }
+ if (enum_name == "CONNECTION_WIFI") {
+ return NetworkType::NETWORK_WIFI;
+ }
+ if (enum_name == "CONNECTION_4G") {
+ return NetworkType::NETWORK_4G;
+ }
+ if (enum_name == "CONNECTION_3G") {
+ return NetworkType::NETWORK_3G;
+ }
+ if (enum_name == "CONNECTION_2G") {
+ return NetworkType::NETWORK_2G;
+ }
+ if (enum_name == "CONNECTION_BLUETOOTH") {
+ return NetworkType::NETWORK_BLUETOOTH;
+ }
+ if (enum_name == "CONNECTION_NONE") {
+ return NetworkType::NETWORK_NONE;
+ }
+ ASSERT(false);
+ return NetworkType::NETWORK_UNKNOWN;
+}
+
+static rtc::AdapterType AdapterTypeFromNetworkType(NetworkType network_type) {
+ switch (network_type) {
+ case NETWORK_UNKNOWN:
+ RTC_DCHECK(false) << "Unknown network type";
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+ case NETWORK_ETHERNET:
+ return rtc::ADAPTER_TYPE_ETHERNET;
+ case NETWORK_WIFI:
+ return rtc::ADAPTER_TYPE_WIFI;
+ case NETWORK_4G:
+ case NETWORK_3G:
+ case NETWORK_2G:
+ return rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_BLUETOOTH:
+ // There is no corresponding adapter type for Bluetooth networks;
+ // map them to VPN for now.
+ return rtc::ADAPTER_TYPE_VPN;
+ default:
+ RTC_DCHECK(false) << "Invalid network type " << network_type;
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+ }
+}
+
+static rtc::IPAddress GetIPAddressFromJava(JNIEnv* jni, jobject j_ip_address) {
+ jclass j_ip_address_class = GetObjectClass(jni, j_ip_address);
+ jfieldID j_address_id = GetFieldID(jni, j_ip_address_class, "address", "[B");
+ jbyteArray j_addresses =
+ static_cast<jbyteArray>(GetObjectField(jni, j_ip_address, j_address_id));
+ size_t address_length = jni->GetArrayLength(j_addresses);
+ jbyte* addr_array = jni->GetByteArrayElements(j_addresses, nullptr);
+ CHECK_EXCEPTION(jni) << "Error during GetIPAddressFromJava";
+ if (address_length == 4) {
+ // IPv4
+ struct in_addr ip4_addr;
+ memcpy(&ip4_addr.s_addr, addr_array, 4);
+ jni->ReleaseByteArrayElements(j_addresses, addr_array, JNI_ABORT);
+ return rtc::IPAddress(ip4_addr);
+ }
+ // IPv6
+ RTC_CHECK(address_length == 16);
+ struct in6_addr ip6_addr;
+ memcpy(ip6_addr.s6_addr, addr_array, address_length);
+ jni->ReleaseByteArrayElements(j_addresses, addr_array, JNI_ABORT);
+ return rtc::IPAddress(ip6_addr);
+}
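+
+// Illustrative: a Java IPAddress whose |address| byte array holds
+// {192, 168, 1, 5} maps to the IPv4 rtc::IPAddress 192.168.1.5 above, while a
+// 16-byte array is interpreted as IPv6.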
+
+static void GetIPAddressesFromJava(JNIEnv* jni,
+ jobjectArray j_ip_addresses,
+ std::vector<rtc::IPAddress>* ip_addresses) {
+ ip_addresses->clear();
+ size_t num_addresses = jni->GetArrayLength(j_ip_addresses);
+ CHECK_EXCEPTION(jni) << "Error during GetArrayLength";
+ for (size_t i = 0; i < num_addresses; ++i) {
+ jobject j_ip_address = jni->GetObjectArrayElement(j_ip_addresses, i);
+ CHECK_EXCEPTION(jni) << "Error during GetObjectArrayElement";
+ rtc::IPAddress ip = GetIPAddressFromJava(jni, j_ip_address);
+ ip_addresses->push_back(ip);
+ }
+}
+
+static NetworkInformation GetNetworkInformationFromJava(
+ JNIEnv* jni,
+ jobject j_network_info) {
+ jclass j_network_info_class = GetObjectClass(jni, j_network_info);
+ jfieldID j_interface_name_id =
+ GetFieldID(jni, j_network_info_class, "name", "Ljava/lang/String;");
+ jfieldID j_handle_id = GetFieldID(jni, j_network_info_class, "handle", "I");
+ jfieldID j_type_id =
+ GetFieldID(jni, j_network_info_class, "type",
+ "Lorg/webrtc/NetworkMonitorAutoDetect$ConnectionType;");
+ jfieldID j_ip_addresses_id =
+ GetFieldID(jni, j_network_info_class, "ipAddresses",
+ "[Lorg/webrtc/NetworkMonitorAutoDetect$IPAddress;");
+
+ NetworkInformation network_info;
+ network_info.interface_name = JavaToStdString(
+ jni, GetStringField(jni, j_network_info, j_interface_name_id));
+ network_info.handle =
+ static_cast<NetworkHandle>(GetIntField(jni, j_network_info, j_handle_id));
+ network_info.type = GetNetworkTypeFromJava(
+ jni, GetObjectField(jni, j_network_info, j_type_id));
+ jobjectArray j_ip_addresses = static_cast<jobjectArray>(
+ GetObjectField(jni, j_network_info, j_ip_addresses_id));
+ GetIPAddressesFromJava(jni, j_ip_addresses, &network_info.ip_addresses);
+ return network_info;
+}
+
+std::string NetworkInformation::ToString() const {
+ std::stringstream ss;
+ ss << "NetInfo[name " << interface_name << "; handle " << handle << "; type "
+ << type << "; address";
+ for (const rtc::IPAddress& address : ip_addresses) {
+ ss << " " << address.ToString();
+ }
+ ss << "]";
+ return ss.str();
+}
+
+// static
+void AndroidNetworkMonitor::SetAndroidContext(JNIEnv* jni, jobject context) {
+ if (application_context_) {
+ jni->DeleteGlobalRef(application_context_);
+ }
+ application_context_ = NewGlobalRef(jni, context);
+}
+
+AndroidNetworkMonitor::AndroidNetworkMonitor()
+ : j_network_monitor_class_(jni(),
+ FindClass(jni(), "org/webrtc/NetworkMonitor")),
+ j_network_monitor_(
+ jni(),
+ jni()->CallStaticObjectMethod(
+ *j_network_monitor_class_,
+ GetStaticMethodID(
+ jni(),
+ *j_network_monitor_class_,
+ "init",
+ "(Landroid/content/Context;)Lorg/webrtc/NetworkMonitor;"),
+ application_context_)) {
+ ASSERT(application_context_ != nullptr);
+ CHECK_EXCEPTION(jni()) << "Error during NetworkMonitor.init";
+}
+
+void AndroidNetworkMonitor::Start() {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ if (started_) {
+ return;
+ }
+ started_ = true;
+
+ // This is subtle but deliberate: registering |this| as the NetworkBinder
+ // lets the SocketServer bind the sockets it creates to a particular
+ // network.
+ worker_thread()->socketserver()->set_network_binder(this);
+
+ jmethodID m =
+ GetMethodID(jni(), *j_network_monitor_class_, "startMonitoring", "(J)V");
+ jni()->CallVoidMethod(*j_network_monitor_, m, jlongFromPointer(this));
+ CHECK_EXCEPTION(jni()) << "Error during CallVoidMethod";
+}
+
+void AndroidNetworkMonitor::Stop() {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ if (!started_) {
+ return;
+ }
+ started_ = false;
+
+ // Once the network monitor stops, it will clear all network information and
+ // it won't find the network handle to bind anyway.
+ if (worker_thread()->socketserver()->network_binder() == this) {
+ worker_thread()->socketserver()->set_network_binder(nullptr);
+ }
+
+ jmethodID m =
+ GetMethodID(jni(), *j_network_monitor_class_, "stopMonitoring", "(J)V");
+ jni()->CallVoidMethod(*j_network_monitor_, m, jlongFromPointer(this));
+ CHECK_EXCEPTION(jni()) << "Error during NetworkMonitor.stopMonitoring";
+
+ network_handle_by_address_.clear();
+ network_info_by_handle_.clear();
+}
+
+int AndroidNetworkMonitor::BindSocketToNetwork(int socket_fd,
+ const rtc::IPAddress& address) {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ // Android versions prior to Lollipop do not support binding sockets to
+ // networks. Execution should not reach this point on such versions, since
+ // |network_handle_by_address_| is only populated on Lollipop and above.
+ // TODO(honghaiz): Add an Android version check so that we do not look for
+ // a handle on pre-Lollipop versions.
+ auto iter = network_handle_by_address_.find(address);
+ if (iter == network_handle_by_address_.end()) {
+ return rtc::NETWORK_BIND_ADDRESS_NOT_FOUND;
+ }
+ NetworkHandle network_handle = iter->second;
+
+ // NOTE: This does rely on Android implementation details, but
+ // these details are unlikely to change.
+ typedef int (*SetNetworkForSocket)(unsigned netId, int socketFd);
+ static SetNetworkForSocket setNetworkForSocket;
+ // This is not thread-safe, but we only run this on the worker thread.
+ if (setNetworkForSocket == nullptr) {
+ // Android's netd client library should always be loaded in our address
+ // space as it shims libc functions like connect().
+ const std::string net_library_path = "libnetd_client.so";
+ void* lib = dlopen(net_library_path.c_str(), RTLD_LAZY);
+ if (lib == nullptr) {
+ LOG(LS_ERROR) << "Library " << net_library_path << " not found!";
+ return rtc::NETWORK_BIND_NOT_IMPLEMENTED;
+ }
+ setNetworkForSocket = reinterpret_cast<SetNetworkForSocket>(
+ dlsym(lib, "setNetworkForSocket"));
+ }
+ if (setNetworkForSocket == nullptr) {
+ LOG(LS_ERROR) << "Symbol setNetworkForSocket not found ";
+ return rtc::NETWORK_BIND_NOT_IMPLEMENTED;
+ }
+ int rv = setNetworkForSocket(network_handle, socket_fd);
+ // If the network has since disconnected, |rv| will be ENONET. Surface this
+ // as NETWORK_BIND_NETWORK_CHANGED rather than the less descriptive
+ // NETWORK_BIND_FAILURE.
+ if (rv == 0) {
+ return rtc::NETWORK_BIND_SUCCESS;
+ }
+ if (rv == ENONET) {
+ return rtc::NETWORK_BIND_NETWORK_CHANGED;
+ }
+ return rtc::NETWORK_BIND_FAILURE;
+}
+
+void AndroidNetworkMonitor::OnNetworkConnected(
+ const NetworkInformation& network_info) {
+ LOG(LS_INFO) << "Network connected: " << network_info.ToString();
+ worker_thread()->Invoke<void>(rtc::Bind(
+ &AndroidNetworkMonitor::OnNetworkConnected_w, this, network_info));
+}
+
+void AndroidNetworkMonitor::OnNetworkConnected_w(
+ const NetworkInformation& network_info) {
+ adapter_type_by_name_[network_info.interface_name] =
+ AdapterTypeFromNetworkType(network_info.type);
+ network_info_by_handle_[network_info.handle] = network_info;
+ for (const rtc::IPAddress& address : network_info.ip_addresses) {
+ network_handle_by_address_[address] = network_info.handle;
+ }
+}
+
+void AndroidNetworkMonitor::OnNetworkDisconnected(NetworkHandle handle) {
+ LOG(LS_INFO) << "Network disconnected for handle " << handle;
+ worker_thread()->Invoke<void>(
+ rtc::Bind(&AndroidNetworkMonitor::OnNetworkDisconnected_w, this, handle));
+}
+
+void AndroidNetworkMonitor::OnNetworkDisconnected_w(NetworkHandle handle) {
+ auto iter = network_info_by_handle_.find(handle);
+ if (iter != network_info_by_handle_.end()) {
+ for (const rtc::IPAddress& address : iter->second.ip_addresses) {
+ network_handle_by_address_.erase(address);
+ }
+ network_info_by_handle_.erase(iter);
+ }
+}
+
+void AndroidNetworkMonitor::SetNetworkInfos(
+ const std::vector<NetworkInformation>& network_infos) {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ network_handle_by_address_.clear();
+ network_info_by_handle_.clear();
+ for (const NetworkInformation& network : network_infos) {
+ OnNetworkConnected_w(network);
+ }
+}
+
+rtc::AdapterType AndroidNetworkMonitor::GetAdapterType(
+ const std::string& if_name) {
+ auto iter = adapter_type_by_name_.find(if_name);
+ if (iter == adapter_type_by_name_.end()) {
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+ }
+ return iter->second;
+}
+
+rtc::NetworkMonitorInterface*
+AndroidNetworkMonitorFactory::CreateNetworkMonitor() {
+ return new AndroidNetworkMonitor();
+}
+
+JOW(void, NetworkMonitor_nativeNotifyConnectionTypeChanged)(
+ JNIEnv* jni, jobject j_monitor, jlong j_native_monitor) {
+ rtc::NetworkMonitorInterface* network_monitor =
+ reinterpret_cast<rtc::NetworkMonitorInterface*>(j_native_monitor);
+ network_monitor->OnNetworksChanged();
+}
+
+JOW(void, NetworkMonitor_nativeNotifyOfActiveNetworkList)(
+ JNIEnv* jni, jobject j_monitor, jlong j_native_monitor,
+ jobjectArray j_network_infos) {
+ AndroidNetworkMonitor* network_monitor =
+ reinterpret_cast<AndroidNetworkMonitor*>(j_native_monitor);
+ std::vector<NetworkInformation> network_infos;
+ size_t num_networks = jni->GetArrayLength(j_network_infos);
+ for (size_t i = 0; i < num_networks; ++i) {
+ jobject j_network_info = jni->GetObjectArrayElement(j_network_infos, i);
+ CHECK_EXCEPTION(jni) << "Error during GetObjectArrayElement";
+ network_infos.push_back(GetNetworkInformationFromJava(jni, j_network_info));
+ }
+ network_monitor->SetNetworkInfos(network_infos);
+}
+
+JOW(void, NetworkMonitor_nativeNotifyOfNetworkConnect)(
+ JNIEnv* jni, jobject j_monitor, jlong j_native_monitor,
+ jobject j_network_info) {
+ AndroidNetworkMonitor* network_monitor =
+ reinterpret_cast<AndroidNetworkMonitor*>(j_native_monitor);
+ NetworkInformation network_info =
+ GetNetworkInformationFromJava(jni, j_network_info);
+ network_monitor->OnNetworkConnected(network_info);
+}
+
+JOW(void, NetworkMonitor_nativeNotifyOfNetworkDisconnect)(
+ JNIEnv* jni, jobject j_monitor, jlong j_native_monitor,
+ jint network_handle) {
+ AndroidNetworkMonitor* network_monitor =
+ reinterpret_cast<AndroidNetworkMonitor*>(j_native_monitor);
+ network_monitor->OnNetworkDisconnected(
+ static_cast<NetworkHandle>(network_handle));
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/androidnetworkmonitor_jni.h b/webrtc/api/java/jni/androidnetworkmonitor_jni.h
new file mode 100644
index 0000000..220a5bc
--- /dev/null
+++ b/webrtc/api/java/jni/androidnetworkmonitor_jni.h
@@ -0,0 +1,108 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDNETWORKMONITOR_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDNETWORKMONITOR_JNI_H_
+
+#include "webrtc/base/networkmonitor.h"
+
+#include <map>
+
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/thread_checker.h"
+
+namespace webrtc_jni {
+
+typedef uint32_t NetworkHandle;
+
+// C++ equivalent of the Java enum NetworkMonitorAutoDetect.ConnectionType.
+enum NetworkType {
+ NETWORK_UNKNOWN,
+ NETWORK_ETHERNET,
+ NETWORK_WIFI,
+ NETWORK_4G,
+ NETWORK_3G,
+ NETWORK_2G,
+ NETWORK_BLUETOOTH,
+ NETWORK_NONE
+};
+
+// This information is collected from the Android OS so that the native code
+// can get the network type and handle (Android network ID) for each
+// interface.
+struct NetworkInformation {
+ std::string interface_name;
+ NetworkHandle handle;
+ NetworkType type;
+ std::vector<rtc::IPAddress> ip_addresses;
+
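+ // Example output (illustrative):
+ //   "NetInfo[name wlan0; handle 100; type 2; address 192.168.1.5]"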
+ std::string ToString() const;
+};
+
+class AndroidNetworkMonitor : public rtc::NetworkMonitorBase,
+ public rtc::NetworkBinderInterface {
+ public:
+ AndroidNetworkMonitor();
+
+ static void SetAndroidContext(JNIEnv* jni, jobject context);
+
+ void Start() override;
+ void Stop() override;
+
+ int BindSocketToNetwork(int socket_fd,
+ const rtc::IPAddress& address) override;
+ rtc::AdapterType GetAdapterType(const std::string& if_name) override;
+ void OnNetworkConnected(const NetworkInformation& network_info);
+ void OnNetworkDisconnected(NetworkHandle network_handle);
+ void SetNetworkInfos(const std::vector<NetworkInformation>& network_infos);
+
+ private:
+ JNIEnv* jni() { return AttachCurrentThreadIfNeeded(); }
+
+ void OnNetworkConnected_w(const NetworkInformation& network_info);
+ void OnNetworkDisconnected_w(NetworkHandle network_handle);
+
+ ScopedGlobalRef<jclass> j_network_monitor_class_;
+ ScopedGlobalRef<jobject> j_network_monitor_;
+ rtc::ThreadChecker thread_checker_;
+ static jobject application_context_;
+ bool started_ = false;
+ std::map<std::string, rtc::AdapterType> adapter_type_by_name_;
+ std::map<rtc::IPAddress, NetworkHandle> network_handle_by_address_;
+ std::map<NetworkHandle, NetworkInformation> network_info_by_handle_;
+};
+
+class AndroidNetworkMonitorFactory : public rtc::NetworkMonitorFactory {
+ public:
+ AndroidNetworkMonitorFactory() {}
+
+ rtc::NetworkMonitorInterface* CreateNetworkMonitor() override;
+};
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_ANDROIDNETWORKMONITOR_JNI_H_
diff --git a/webrtc/api/java/jni/androidvideocapturer_jni.cc b/webrtc/api/java/jni/androidvideocapturer_jni.cc
new file mode 100644
index 0000000..a636d62
--- /dev/null
+++ b/webrtc/api/java/jni/androidvideocapturer_jni.cc
@@ -0,0 +1,246 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "webrtc/api/java/jni/androidvideocapturer_jni.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/api/java/jni/surfacetexturehelper_jni.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "webrtc/base/bind.h"
+
+namespace webrtc_jni {
+
+jobject AndroidVideoCapturerJni::application_context_ = nullptr;
+
+// static
+int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni,
+ jobject application_context) {
+ if (application_context_) {
+ jni->DeleteGlobalRef(application_context_);
+ }
+ application_context_ = NewGlobalRef(jni, application_context);
+
+ return 0;
+}
+
+AndroidVideoCapturerJni::AndroidVideoCapturerJni(
+ JNIEnv* jni,
+ jobject j_video_capturer,
+ jobject j_surface_texture_helper)
+ : j_video_capturer_(jni, j_video_capturer),
+ j_video_capturer_class_(
+ jni, FindClass(jni, "org/webrtc/VideoCapturerAndroid")),
+ j_observer_class_(
+ jni,
+ FindClass(jni,
+ "org/webrtc/VideoCapturerAndroid$NativeObserver")),
+ surface_texture_helper_(new rtc::RefCountedObject<SurfaceTextureHelper>(
+ jni, j_surface_texture_helper)),
+ capturer_(nullptr) {
+ LOG(LS_INFO) << "AndroidVideoCapturerJni ctor";
+ thread_checker_.DetachFromThread();
+}
+
+AndroidVideoCapturerJni::~AndroidVideoCapturerJni() {
+ LOG(LS_INFO) << "AndroidVideoCapturerJni dtor";
+ jni()->CallVoidMethod(
+ *j_video_capturer_,
+ GetMethodID(jni(), *j_video_capturer_class_, "release", "()V"));
+ CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.release()";
+}
+
+void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
+ webrtc::AndroidVideoCapturer* capturer) {
+ LOG(LS_INFO) << "AndroidVideoCapturerJni start";
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ {
+ rtc::CritScope cs(&capturer_lock_);
+ RTC_CHECK(capturer_ == nullptr);
+ RTC_CHECK(invoker_.get() == nullptr);
+ capturer_ = capturer;
+ invoker_.reset(new rtc::GuardedAsyncInvoker());
+ }
+ jobject j_frame_observer =
+ jni()->NewObject(*j_observer_class_,
+ GetMethodID(jni(), *j_observer_class_, "<init>", "(J)V"),
+ jlongFromPointer(this));
+ CHECK_EXCEPTION(jni()) << "error during NewObject";
+
+ jmethodID m = GetMethodID(
+ jni(), *j_video_capturer_class_, "startCapture",
+ "(IIILandroid/content/Context;"
+ "Lorg/webrtc/VideoCapturerAndroid$CapturerObserver;)V");
+ jni()->CallVoidMethod(*j_video_capturer_,
+ m, width, height,
+ framerate,
+ application_context_,
+ j_frame_observer);
+ CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.startCapture";
+}
+
+void AndroidVideoCapturerJni::Stop() {
+ LOG(LS_INFO) << "AndroidVideoCapturerJni stop";
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ {
+ rtc::CritScope cs(&capturer_lock_);
+ // Destroying |invoker_| will cancel all pending calls to |capturer_|.
+ invoker_ = nullptr;
+ capturer_ = nullptr;
+ }
+ jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
+ "stopCapture", "()V");
+ jni()->CallVoidMethod(*j_video_capturer_, m);
+ CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.stopCapture";
+ LOG(LS_INFO) << "AndroidVideoCapturerJni stop done";
+}
+
+template <typename... Args>
+void AndroidVideoCapturerJni::AsyncCapturerInvoke(
+ const char* method_name,
+ void (webrtc::AndroidVideoCapturer::*method)(Args...),
+ typename Identity<Args>::type... args) {
+ rtc::CritScope cs(&capturer_lock_);
+ if (!invoker_) {
+ LOG(LS_WARNING) << method_name << "() called for closed capturer.";
+ return;
+ }
+ invoker_->AsyncInvoke<void>(rtc::Bind(method, capturer_, args...));
+}
+
+std::string AndroidVideoCapturerJni::GetSupportedFormats() {
+ jmethodID m =
+ GetMethodID(jni(), *j_video_capturer_class_,
+ "getSupportedFormatsAsJson", "()Ljava/lang/String;");
+ jstring j_json_caps =
+ (jstring) jni()->CallObjectMethod(*j_video_capturer_, m);
+ CHECK_EXCEPTION(jni()) << "error during supportedFormatsAsJson";
+ return JavaToStdString(jni(), j_json_caps);
+}
+
+void AndroidVideoCapturerJni::OnCapturerStarted(bool success) {
+ LOG(LS_INFO) << "AndroidVideoCapturerJni capture started: " << success;
+ AsyncCapturerInvoke("OnCapturerStarted",
+ &webrtc::AndroidVideoCapturer::OnCapturerStarted,
+ success);
+}
+
+void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
+ int length,
+ int width,
+ int height,
+ int rotation,
+ int64_t timestamp_ns) {
+ const uint8_t* y_plane = static_cast<uint8_t*>(video_frame);
+ const uint8_t* vu_plane = y_plane + width * height;
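+ // NV21 layout (illustrative): a 4x4 frame arrives as 16 Y bytes followed by
+ // 8 interleaved V/U bytes - width * height * 3 / 2 bytes in total - so
+ // |vu_plane| starts at byte offset width * height.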
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+ buffer_pool_.CreateBuffer(width, height);
+ libyuv::NV21ToI420(
+ y_plane, width,
+ vu_plane, width,
+ buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
+ buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
+ buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
+ width, height);
+ AsyncCapturerInvoke("OnIncomingFrame",
+ &webrtc::AndroidVideoCapturer::OnIncomingFrame,
+ buffer, rotation, timestamp_ns);
+}
+
+void AndroidVideoCapturerJni::OnTextureFrame(int width,
+ int height,
+ int rotation,
+ int64_t timestamp_ns,
+ const NativeHandleImpl& handle) {
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
+ surface_texture_helper_->CreateTextureFrame(width, height, handle));
+
+ AsyncCapturerInvoke("OnIncomingFrame",
+ &webrtc::AndroidVideoCapturer::OnIncomingFrame,
+ buffer, rotation, timestamp_ns);
+}
+
+void AndroidVideoCapturerJni::OnOutputFormatRequest(int width,
+ int height,
+ int fps) {
+ AsyncCapturerInvoke("OnOutputFormatRequest",
+ &webrtc::AndroidVideoCapturer::OnOutputFormatRequest,
+ width, height, fps);
+}
+
+JNIEnv* AndroidVideoCapturerJni::jni() { return AttachCurrentThreadIfNeeded(); }
+
+JOW(void,
+ VideoCapturerAndroid_00024NativeObserver_nativeOnByteBufferFrameCaptured)
+ (JNIEnv* jni, jclass, jlong j_capturer, jbyteArray j_frame, jint length,
+ jint width, jint height, jint rotation, jlong timestamp) {
+ jboolean is_copy = true;
+ jbyte* bytes = jni->GetByteArrayElements(j_frame, &is_copy);
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
+ ->OnMemoryBufferFrame(bytes, length, width, height, rotation, timestamp);
+ jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
+}
+
+JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnTextureFrameCaptured)
+ (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
+ jint j_oes_texture_id, jfloatArray j_transform_matrix,
+ jint j_rotation, jlong j_timestamp) {
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
+ ->OnTextureFrame(j_width, j_height, j_rotation, j_timestamp,
+ NativeHandleImpl(jni, j_oes_texture_id,
+ j_transform_matrix));
+}
+
+JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted)
+ (JNIEnv* jni, jclass, jlong j_capturer, jboolean j_success) {
+ LOG(LS_INFO) << "NativeObserver_nativeCapturerStarted";
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnCapturerStarted(
+ j_success);
+}
+
+JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnOutputFormatRequest)
+ (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
+ jint j_fps) {
+ LOG(LS_INFO) << "NativeObserver_nativeOnOutputFormatRequest";
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnOutputFormatRequest(
+ j_width, j_height, j_fps);
+}
+
+JOW(jlong, VideoCapturerAndroid_nativeCreateVideoCapturer)
+ (JNIEnv* jni, jclass,
+ jobject j_video_capturer, jobject j_surface_texture_helper) {
+ rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
+ new rtc::RefCountedObject<AndroidVideoCapturerJni>(
+ jni, j_video_capturer, j_surface_texture_helper);
+ rtc::scoped_ptr<cricket::VideoCapturer> capturer(
+ new webrtc::AndroidVideoCapturer(delegate));
+ // Caller takes ownership of the cricket::VideoCapturer* pointer.
+ return jlongFromPointer(capturer.release());
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/androidvideocapturer_jni.h b/webrtc/api/java/jni/androidvideocapturer_jni.h
new file mode 100644
index 0000000..bf611f5
--- /dev/null
+++ b/webrtc/api/java/jni/androidvideocapturer_jni.h
@@ -0,0 +1,116 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
+
+#include <string>
+
+#include "webrtc/api/androidvideocapturer.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/asyncinvoker.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
+
+namespace webrtc_jni {
+
+struct NativeHandleImpl;
+class SurfaceTextureHelper;
+
+// AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
+// The purpose of the delegate is to hide the JNI specifics from the C++-only
+// AndroidVideoCapturer.
+class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
+ public:
+ static int SetAndroidObjects(JNIEnv* jni, jobject application_context);
+
+ AndroidVideoCapturerJni(JNIEnv* jni,
+ jobject j_video_capturer,
+ jobject j_surface_texture_helper);
+
+ void Start(int width, int height, int framerate,
+ webrtc::AndroidVideoCapturer* capturer) override;
+ void Stop() override;
+
+ std::string GetSupportedFormats() override;
+
+ // Called from VideoCapturerAndroid::NativeObserver on a Java thread.
+ void OnCapturerStarted(bool success);
+ void OnMemoryBufferFrame(void* video_frame, int length, int width,
+ int height, int rotation, int64_t timestamp_ns);
+ void OnTextureFrame(int width, int height, int rotation, int64_t timestamp_ns,
+ const NativeHandleImpl& handle);
+ void OnOutputFormatRequest(int width, int height, int fps);
+
+ protected:
+ ~AndroidVideoCapturerJni();
+
+ private:
+ JNIEnv* jni();
+
+ // Identity<> keeps the trailing |args| of AsyncCapturerInvoke out of
+ // template argument deduction, so Args is deduced solely from the
+ // member-function pointer.
+ template <typename T>
+ struct Identity {
+ typedef T type;
+ };
+
+ // Helper function to make safe asynchronous calls to |capturer_|. The calls
+ // are not guaranteed to be delivered.
+ template <typename... Args>
+ void AsyncCapturerInvoke(
+ const char* method_name,
+ void (webrtc::AndroidVideoCapturer::*method)(Args...),
+ typename Identity<Args>::type... args);
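+ //
+ // Illustrative call (mirrors OnCapturerStarted() in the .cc file):
+ //   AsyncCapturerInvoke("OnCapturerStarted",
+ //                       &webrtc::AndroidVideoCapturer::OnCapturerStarted,
+ //                       success);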
+
+ const ScopedGlobalRef<jobject> j_video_capturer_;
+ const ScopedGlobalRef<jclass> j_video_capturer_class_;
+ const ScopedGlobalRef<jclass> j_observer_class_;
+
+ // Used on the Java thread running the camera.
+ webrtc::I420BufferPool buffer_pool_;
+ rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
+ rtc::ThreadChecker thread_checker_;
+
+ // |capturer_| is guaranteed to be a valid pointer from a call to
+ // AndroidVideoCapturerDelegate::Start() until
+ // AndroidVideoCapturerDelegate::Stop().
+ rtc::CriticalSection capturer_lock_;
+ webrtc::AndroidVideoCapturer* capturer_ GUARDED_BY(capturer_lock_);
+ // |invoker_| is used to communicate with |capturer_| on the thread Start() is
+ // called on.
+ rtc::scoped_ptr<rtc::GuardedAsyncInvoker> invoker_ GUARDED_BY(capturer_lock_);
+
+ static jobject application_context_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(AndroidVideoCapturerJni);
+};
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
diff --git a/webrtc/api/java/jni/classreferenceholder.cc b/webrtc/api/java/jni/classreferenceholder.cc
new file mode 100644
index 0000000..0625cc2
--- /dev/null
+++ b/webrtc/api/java/jni/classreferenceholder.cc
@@ -0,0 +1,152 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+#include "webrtc/api/java/jni/classreferenceholder.h"
+
+#include "webrtc/api/java/jni/jni_helpers.h"
+
+namespace webrtc_jni {
+
+// ClassReferenceHolder holds global references to the Java classes used by
+// the native code in webrtc/api.
+class ClassReferenceHolder {
+ public:
+ explicit ClassReferenceHolder(JNIEnv* jni);
+ ~ClassReferenceHolder();
+
+ void FreeReferences(JNIEnv* jni);
+ jclass GetClass(const std::string& name);
+
+ private:
+ void LoadClass(JNIEnv* jni, const std::string& name);
+
+ std::map<std::string, jclass> classes_;
+};
+
+// Allocated in LoadGlobalClassReferenceHolder(),
+// freed in FreeGlobalClassReferenceHolder().
+static ClassReferenceHolder* g_class_reference_holder = nullptr;
+
+void LoadGlobalClassReferenceHolder() {
+ RTC_CHECK(g_class_reference_holder == nullptr);
+ g_class_reference_holder = new ClassReferenceHolder(GetEnv());
+}
+
+void FreeGlobalClassReferenceHolder() {
+ g_class_reference_holder->FreeReferences(AttachCurrentThreadIfNeeded());
+ delete g_class_reference_holder;
+ g_class_reference_holder = nullptr;
+}
+
+ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
+ LoadClass(jni, "android/graphics/SurfaceTexture");
+ LoadClass(jni, "java/nio/ByteBuffer");
+ LoadClass(jni, "java/util/ArrayList");
+ LoadClass(jni, "org/webrtc/AudioTrack");
+ LoadClass(jni, "org/webrtc/CameraEnumerator");
+ LoadClass(jni, "org/webrtc/Camera2Enumerator");
+ LoadClass(jni, "org/webrtc/CameraEnumerationAndroid");
+ LoadClass(jni, "org/webrtc/DataChannel");
+ LoadClass(jni, "org/webrtc/DataChannel$Buffer");
+ LoadClass(jni, "org/webrtc/DataChannel$Init");
+ LoadClass(jni, "org/webrtc/DataChannel$State");
+ LoadClass(jni, "org/webrtc/EglBase");
+ LoadClass(jni, "org/webrtc/EglBase$Context");
+ LoadClass(jni, "org/webrtc/EglBase14$Context");
+ LoadClass(jni, "org/webrtc/IceCandidate");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
+ LoadClass(jni, "org/webrtc/MediaSource$State");
+ LoadClass(jni, "org/webrtc/MediaStream");
+ LoadClass(jni, "org/webrtc/MediaStreamTrack$State");
+ LoadClass(jni, "org/webrtc/NetworkMonitor");
+ LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$ConnectionType");
+ LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$IPAddress");
+ LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$NetworkInformation");
+ LoadClass(jni, "org/webrtc/PeerConnectionFactory");
+ LoadClass(jni, "org/webrtc/PeerConnection$BundlePolicy");
+ LoadClass(jni, "org/webrtc/PeerConnection$ContinualGatheringPolicy");
+ LoadClass(jni, "org/webrtc/PeerConnection$RtcpMuxPolicy");
+ LoadClass(jni, "org/webrtc/PeerConnection$IceConnectionState");
+ LoadClass(jni, "org/webrtc/PeerConnection$IceGatheringState");
+ LoadClass(jni, "org/webrtc/PeerConnection$IceTransportsType");
+ LoadClass(jni, "org/webrtc/PeerConnection$TcpCandidatePolicy");
+ LoadClass(jni, "org/webrtc/PeerConnection$KeyType");
+ LoadClass(jni, "org/webrtc/PeerConnection$SignalingState");
+ LoadClass(jni, "org/webrtc/RtpReceiver");
+ LoadClass(jni, "org/webrtc/RtpSender");
+ LoadClass(jni, "org/webrtc/SessionDescription");
+ LoadClass(jni, "org/webrtc/SessionDescription$Type");
+ LoadClass(jni, "org/webrtc/StatsReport");
+ LoadClass(jni, "org/webrtc/StatsReport$Value");
+ LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
+ LoadClass(jni, "org/webrtc/VideoCapturerAndroid");
+ LoadClass(jni, "org/webrtc/VideoCapturerAndroid$NativeObserver");
+ LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame");
+ LoadClass(jni, "org/webrtc/VideoTrack");
+}
+
+ClassReferenceHolder::~ClassReferenceHolder() {
+ RTC_CHECK(classes_.empty()) << "Must call FreeReferences() before dtor!";
+}
+
+void ClassReferenceHolder::FreeReferences(JNIEnv* jni) {
+ for (std::map<std::string, jclass>::const_iterator it = classes_.begin();
+ it != classes_.end(); ++it) {
+ jni->DeleteGlobalRef(it->second);
+ }
+ classes_.clear();
+}
+
+jclass ClassReferenceHolder::GetClass(const std::string& name) {
+ std::map<std::string, jclass>::iterator it = classes_.find(name);
+ RTC_CHECK(it != classes_.end()) << "Unexpected GetClass() call for: " << name;
+ return it->second;
+}
+
+void ClassReferenceHolder::LoadClass(JNIEnv* jni, const std::string& name) {
+ jclass localRef = jni->FindClass(name.c_str());
+ CHECK_EXCEPTION(jni) << "error during FindClass: " << name;
+ RTC_CHECK(localRef) << name;
+ jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
+ CHECK_EXCEPTION(jni) << "error during NewGlobalRef: " << name;
+ RTC_CHECK(globalRef) << name;
+ bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
+ RTC_CHECK(inserted) << "Duplicate class name: " << name;
+}
+
+// Returns a global reference guaranteed to be valid for the lifetime of the
+// process.
+jclass FindClass(JNIEnv* jni, const char* name) {
+ return g_class_reference_holder->GetClass(name);
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/classreferenceholder.h b/webrtc/api/java/jni/classreferenceholder.h
new file mode 100644
index 0000000..5edf614
--- /dev/null
+++ b/webrtc/api/java/jni/classreferenceholder.h
@@ -0,0 +1,59 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// Android's FindClass() is trickier than usual because the app-specific
+// ClassLoader is not consulted when there is no app-specific frame on the
+// stack. Consequently, we look up all classes once, when the library is
+// loaded (see LoadGlobalClassReferenceHolder()).
+// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
+
+#ifndef WEBRTC_API_JAVA_JNI_CLASSREFERENCEHOLDER_H_
+#define WEBRTC_API_JAVA_JNI_CLASSREFERENCEHOLDER_H_
+
+#include <jni.h>
+#include <map>
+#include <string>
+
+namespace webrtc_jni {
+
+// LoadGlobalClassReferenceHolder must be called in JNI_OnLoad.
+void LoadGlobalClassReferenceHolder();
+// FreeGlobalClassReferenceHolder must be called in JNI_OnUnload.
+void FreeGlobalClassReferenceHolder();
+
+// Returns a global reference guaranteed to be valid for the lifetime of the
+// process.
+jclass FindClass(JNIEnv* jni, const char* name);
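+// Example (illustrative): the class must be among those registered by
+// LoadGlobalClassReferenceHolder(), e.g.
+//   jclass sdp_class = FindClass(jni, "org/webrtc/SessionDescription");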
+
+// Convenience macro defining JNI-accessible methods in the org.webrtc package.
+// Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter.
+#define JOW(rettype, name) extern "C" rettype JNIEXPORT JNICALL \
+ Java_org_webrtc_##name
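+//
+// Example (illustrative; |Foo_nativeBar| is a hypothetical name):
+//   JOW(void, Foo_nativeBar)(JNIEnv* jni, jclass) { ... }
+// expands to
+//   extern "C" void JNIEXPORT JNICALL
+//   Java_org_webrtc_Foo_nativeBar(JNIEnv* jni, jclass) { ... }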
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_CLASSREFERENCEHOLDER_H_
diff --git a/webrtc/api/java/jni/eglbase_jni.cc b/webrtc/api/java/jni/eglbase_jni.cc
new file mode 100644
index 0000000..26eeeb3
--- /dev/null
+++ b/webrtc/api/java/jni/eglbase_jni.cc
@@ -0,0 +1,90 @@
+/*
+ * libjingle
+ * Copyright 2016 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "webrtc/api/java/jni/eglbase_jni.h"
+
+#include "webrtc/api/java/jni/androidmediacodeccommon.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+
+namespace webrtc_jni {
+
+EglBase::EglBase() {
+}
+
+EglBase::~EglBase() {
+ if (egl_base_) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ jni->DeleteGlobalRef(egl_base_context_);
+ egl_base_context_ = nullptr;
+ jni->CallVoidMethod(egl_base_,
+ GetMethodID(jni,
+ FindClass(jni, "org/webrtc/EglBase"),
+ "release", "()V"));
+ jni->DeleteGlobalRef(egl_base_);
+ }
+}
+
+bool EglBase::CreateEglBase(JNIEnv* jni, jobject egl_context) {
+ if (egl_base_) {
+ jni->DeleteGlobalRef(egl_base_context_);
+ egl_base_context_ = nullptr;
+ jni->CallVoidMethod(egl_base_,
+ GetMethodID(jni,
+ FindClass(jni, "org/webrtc/EglBase"),
+ "release", "()V"));
+ jni->DeleteGlobalRef(egl_base_);
+ egl_base_ = nullptr;
+ }
+
+ if (IsNull(jni, egl_context))
+ return false;
+
+ jobject egl_base = jni->CallStaticObjectMethod(
+ FindClass(jni, "org/webrtc/EglBase"),
+ GetStaticMethodID(jni,
+ FindClass(jni, "org/webrtc/EglBase"),
+ "create",
+ "(Lorg/webrtc/EglBase$Context;)Lorg/webrtc/EglBase;"),
+ egl_context);
+ if (CheckException(jni))
+ return false;
+
+ egl_base_ = jni->NewGlobalRef(egl_base);
+ egl_base_context_ = jni->NewGlobalRef(
+ jni->CallObjectMethod(
+ egl_base_,
+ GetMethodID(jni,
+ FindClass(jni, "org/webrtc/EglBase"),
+ "getEglBaseContext",
+ "()Lorg/webrtc/EglBase$Context;")));
+ RTC_CHECK(egl_base_context_);
+ return true;
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/eglbase_jni.h b/webrtc/api/java/jni/eglbase_jni.h
new file mode 100644
index 0000000..de7e39e
--- /dev/null
+++ b/webrtc/api/java/jni/eglbase_jni.h
@@ -0,0 +1,60 @@
+/*
+ * libjingle
+ * Copyright 2016 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_EGLBASE_JNI_H_
+#define WEBRTC_API_JAVA_JNI_EGLBASE_JNI_H_
+
+#include <jni.h>
+
+#include "webrtc/base/constructormagic.h"
+
+namespace webrtc_jni {
+
+// Helper class used for creating a Java instance of org/webrtc/EglBase.
+class EglBase {
+ public:
+ EglBase();
+ ~EglBase();
+
+ // Creates a new Java EglBase instance. |egl_base_context| must be a valid
+ // EglBase$Context.
+ // Returns false if |egl_base_context| is a null Java object or if an
+ // exception occurs in Java.
+ bool CreateEglBase(JNIEnv* jni, jobject egl_base_context);
+ jobject egl_base_context() const { return egl_base_context_; }
+
+ private:
+ jobject egl_base_ = nullptr; // instance of org/webrtc/EglBase
+ jobject egl_base_context_ = nullptr; // instance of EglBase$Context
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(EglBase);
+};
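+
+// Sketch of intended use (illustrative; |j_context| is assumed to be a
+// jobject referencing an org/webrtc/EglBase$Context):
+//   EglBase egl_base;
+//   if (egl_base.CreateEglBase(jni, j_context)) {
+//     jobject ctx = egl_base.egl_base_context();
+//   }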
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_EGLBASE_JNI_H_
diff --git a/webrtc/api/java/jni/jni_helpers.cc b/webrtc/api/java/jni/jni_helpers.cc
new file mode 100644
index 0000000..b07a9c5
--- /dev/null
+++ b/webrtc/api/java/jni/jni_helpers.cc
@@ -0,0 +1,296 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+#include "webrtc/api/java/jni/jni_helpers.h"
+
+#include "webrtc/api/java/jni/classreferenceholder.h"
+
+#include <asm/unistd.h>
+#include <sys/prctl.h>
+#include <sys/syscall.h>
+#include <unistd.h>
+
+namespace webrtc_jni {
+
+static JavaVM* g_jvm = nullptr;
+
+static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT;
+
+// Key for per-thread JNIEnv* data. Non-NULL in threads attached to |g_jvm| by
+// AttachCurrentThreadIfNeeded(), NULL in unattached threads and threads that
+// were attached by the JVM because of a Java->native call.
+static pthread_key_t g_jni_ptr;
+
+JavaVM *GetJVM() {
+ RTC_CHECK(g_jvm) << "JNI_OnLoad failed to run?";
+ return g_jvm;
+}
+
+// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
+JNIEnv* GetEnv() {
+ void* env = NULL;
+ jint status = g_jvm->GetEnv(&env, JNI_VERSION_1_6);
+ RTC_CHECK(((env != NULL) && (status == JNI_OK)) ||
+ ((env == NULL) && (status == JNI_EDETACHED)))
+ << "Unexpected GetEnv return: " << status << ":" << env;
+ return reinterpret_cast<JNIEnv*>(env);
+}
+
+static void ThreadDestructor(void* prev_jni_ptr) {
+ // This function only runs on threads where |g_jni_ptr| is non-NULL, meaning
+ // we were responsible for originally attaching the thread, so are responsible
+ // for detaching it now. However, because some JVM implementations (notably
+ // Oracle's http://goo.gl/eHApYT) also use the pthread_key_create mechanism,
+// the JVM's accounting info for this thread may already be wiped out by the
+ // time this is called. Thus it may appear we are already detached even though
+ // it was our responsibility to detach! Oh well.
+ if (!GetEnv())
+ return;
+
+ RTC_CHECK(GetEnv() == prev_jni_ptr)
+ << "Detaching from another thread: " << prev_jni_ptr << ":" << GetEnv();
+ jint status = g_jvm->DetachCurrentThread();
+ RTC_CHECK(status == JNI_OK) << "Failed to detach thread: " << status;
+ RTC_CHECK(!GetEnv()) << "Detaching was a successful no-op???";
+}
+
+static void CreateJNIPtrKey() {
+ RTC_CHECK(!pthread_key_create(&g_jni_ptr, &ThreadDestructor))
+ << "pthread_key_create";
+}
+
+jint InitGlobalJniVariables(JavaVM *jvm) {
+ RTC_CHECK(!g_jvm) << "InitGlobalJniVariables!";
+ g_jvm = jvm;
+ RTC_CHECK(g_jvm) << "InitGlobalJniVariables handed NULL?";
+
+ RTC_CHECK(!pthread_once(&g_jni_ptr_once, &CreateJNIPtrKey)) << "pthread_once";
+
+ JNIEnv* jni = nullptr;
+ if (jvm->GetEnv(reinterpret_cast<void**>(&jni), JNI_VERSION_1_6) != JNI_OK)
+ return -1;
+
+ return JNI_VERSION_1_6;
+}
+
+// Return thread ID as a string.
+static std::string GetThreadId() {
+ char buf[21]; // Big enough to hold a kuint64max plus terminating NULL.
+ RTC_CHECK_LT(snprintf(buf, sizeof(buf), "%ld",
+ static_cast<long>(syscall(__NR_gettid))),
+ sizeof(buf))
+ << "Thread id is bigger than uint64??";
+ return std::string(buf);
+}
+
+// Return the current thread's name.
+static std::string GetThreadName() {
+ char name[17] = {0};
+ if (prctl(PR_GET_NAME, name) != 0)
+ return std::string("<noname>");
+ return std::string(name);
+}
+
+// Return a |JNIEnv*| usable on this thread. Attaches to |g_jvm| if necessary.
+JNIEnv* AttachCurrentThreadIfNeeded() {
+ JNIEnv* jni = GetEnv();
+ if (jni)
+ return jni;
+ RTC_CHECK(!pthread_getspecific(g_jni_ptr))
+ << "TLS has a JNIEnv* but not attached?";
+
+ std::string name(GetThreadName() + " - " + GetThreadId());
+ JavaVMAttachArgs args;
+ args.version = JNI_VERSION_1_6;
+ args.name = &name[0];
+ args.group = NULL;
+ // Deal with difference in signatures between Oracle's jni.h and Android's.
+#ifdef _JAVASOFT_JNI_H_ // Oracle's jni.h violates the JNI spec!
+ void* env = NULL;
+#else
+ JNIEnv* env = NULL;
+#endif
+ RTC_CHECK(!g_jvm->AttachCurrentThread(&env, &args))
+ << "Failed to attach thread";
+ RTC_CHECK(env) << "AttachCurrentThread handed back NULL!";
+ jni = reinterpret_cast<JNIEnv*>(env);
+ RTC_CHECK(!pthread_setspecific(g_jni_ptr, jni)) << "pthread_setspecific";
+ return jni;
+}
+
+// Return a |jlong| that will correctly convert back to |ptr|. This is needed
+// because the alternative (of silently passing a 32-bit pointer to a vararg
+// function expecting a 64-bit param) picks up garbage in the high 32 bits.
+jlong jlongFromPointer(void* ptr) {
+ static_assert(sizeof(intptr_t) <= sizeof(jlong),
+ "Time to rethink the use of jlongs");
+ // Going through intptr_t to be obvious about the definedness of the
+ // conversion from pointer to integral type. intptr_t to jlong is a standard
+ // widening by the static_assert above.
+ jlong ret = reinterpret_cast<intptr_t>(ptr);
+ RTC_DCHECK(reinterpret_cast<void*>(ret) == ptr);
+ return ret;
+}
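+
+// Illustrative round trip (the reverse cast is what native code does when
+// Java hands the jlong back across JNI):
+//   jlong handle = jlongFromPointer(ptr);
+//   void* ptr2 = reinterpret_cast<void*>(handle);  // ptr2 == ptr.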
+
+// JNIEnv-helper methods that RTC_CHECK success: no Java exception thrown and
+// found object/class/method/field is non-null.
+jmethodID GetMethodID(
+ JNIEnv* jni, jclass c, const std::string& name, const char* signature) {
+ jmethodID m = jni->GetMethodID(c, name.c_str(), signature);
+ CHECK_EXCEPTION(jni) << "error during GetMethodID: " << name << ", "
+ << signature;
+ RTC_CHECK(m) << name << ", " << signature;
+ return m;
+}
+
+jmethodID GetStaticMethodID(
+ JNIEnv* jni, jclass c, const char* name, const char* signature) {
+ jmethodID m = jni->GetStaticMethodID(c, name, signature);
+ CHECK_EXCEPTION(jni) << "error during GetStaticMethodID: " << name << ", "
+ << signature;
+ RTC_CHECK(m) << name << ", " << signature;
+ return m;
+}
+
+jfieldID GetFieldID(
+ JNIEnv* jni, jclass c, const char* name, const char* signature) {
+ jfieldID f = jni->GetFieldID(c, name, signature);
+ CHECK_EXCEPTION(jni) << "error during GetFieldID";
+ RTC_CHECK(f) << name << ", " << signature;
+ return f;
+}
+
+jclass GetObjectClass(JNIEnv* jni, jobject object) {
+ jclass c = jni->GetObjectClass(object);
+ CHECK_EXCEPTION(jni) << "error during GetObjectClass";
+ RTC_CHECK(c) << "GetObjectClass returned NULL";
+ return c;
+}
+
+jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id) {
+ jobject o = jni->GetObjectField(object, id);
+ CHECK_EXCEPTION(jni) << "error during GetObjectField";
+ RTC_CHECK(o) << "GetObjectField returned NULL";
+ return o;
+}
+
+jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id) {
+ return static_cast<jstring>(GetObjectField(jni, object, id));
+}
+
+jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id) {
+ jlong l = jni->GetLongField(object, id);
+ CHECK_EXCEPTION(jni) << "error during GetLongField";
+ return l;
+}
+
+jint GetIntField(JNIEnv* jni, jobject object, jfieldID id) {
+ jint i = jni->GetIntField(object, id);
+ CHECK_EXCEPTION(jni) << "error during GetIntField";
+ return i;
+}
+
+bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id) {
+ jboolean b = jni->GetBooleanField(object, id);
+ CHECK_EXCEPTION(jni) << "error during GetBooleanField";
+ return b;
+}
+
+// Java references to "null" can only be distinguished as such in C++ by
+// creating a local reference, so this helper wraps that logic.
+bool IsNull(JNIEnv* jni, jobject obj) {
+ ScopedLocalRefFrame local_ref_frame(jni);
+ return jni->NewLocalRef(obj) == NULL;
+}
+
+// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
+jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native) {
+ jstring jstr = jni->NewStringUTF(native.c_str());
+ CHECK_EXCEPTION(jni) << "error during NewStringUTF";
+ return jstr;
+}
+
+// Given a (UTF-16) jstring return a new UTF-8 native string.
+std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
+ const char* chars = jni->GetStringUTFChars(j_string, NULL);
+ CHECK_EXCEPTION(jni) << "Error during GetStringUTFChars";
+ std::string str(chars, jni->GetStringUTFLength(j_string));
+ CHECK_EXCEPTION(jni) << "Error during GetStringUTFLength";
+ jni->ReleaseStringUTFChars(j_string, chars);
+ CHECK_EXCEPTION(jni) << "Error during ReleaseStringUTFChars";
+ return str;
+}
+
+// Return the (singleton) Java Enum object corresponding to |index|.
+jobject JavaEnumFromIndex(JNIEnv* jni, jclass state_class,
+ const std::string& state_class_name, int index) {
+ jmethodID state_values_id = GetStaticMethodID(
+ jni, state_class, "values", ("()[L" + state_class_name + ";").c_str());
+ jobjectArray state_values = static_cast<jobjectArray>(
+ jni->CallStaticObjectMethod(state_class, state_values_id));
+ CHECK_EXCEPTION(jni) << "error during CallStaticObjectMethod";
+ jobject ret = jni->GetObjectArrayElement(state_values, index);
+ CHECK_EXCEPTION(jni) << "error during GetObjectArrayElement";
+ return ret;
+}
+
+std::string GetJavaEnumName(JNIEnv* jni,
+ const std::string& className,
+ jobject j_enum) {
+ jclass enumClass = FindClass(jni, className.c_str());
+ jmethodID nameMethod =
+ GetMethodID(jni, enumClass, "name", "()Ljava/lang/String;");
+ jstring name =
+ reinterpret_cast<jstring>(jni->CallObjectMethod(j_enum, nameMethod));
+ CHECK_EXCEPTION(jni) << "error during CallObjectMethod for " << className
+ << ".name";
+ return JavaToStdString(jni, name);
+}
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o) {
+ jobject ret = jni->NewGlobalRef(o);
+ CHECK_EXCEPTION(jni) << "error during NewGlobalRef";
+ RTC_CHECK(ret);
+ return ret;
+}
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o) {
+ jni->DeleteGlobalRef(o);
+ CHECK_EXCEPTION(jni) << "error during DeleteGlobalRef";
+}
+
+// Scope Java local references to the lifetime of this object. Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
+ScopedLocalRefFrame::ScopedLocalRefFrame(JNIEnv* jni) : jni_(jni) {
+ RTC_CHECK(!jni_->PushLocalFrame(0)) << "Failed to PushLocalFrame";
+}
+ScopedLocalRefFrame::~ScopedLocalRefFrame() {
+ jni_->PopLocalFrame(NULL);
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/jni_helpers.h b/webrtc/api/java/jni/jni_helpers.h
new file mode 100644
index 0000000..5498158
--- /dev/null
+++ b/webrtc/api/java/jni/jni_helpers.h
@@ -0,0 +1,146 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// This file contains convenience functions and classes for JNI.
+// Before using any of the methods, InitGlobalJniVariables must be called.
+
+#ifndef WEBRTC_API_JAVA_JNI_JNI_HELPERS_H_
+#define WEBRTC_API_JAVA_JNI_JNI_HELPERS_H_
+
+#include <jni.h>
+#include <string>
+
+#include "webrtc/base/checks.h"
+
+// Abort the process if |jni| has a Java exception pending.
+// This macro uses the comma operator to execute ExceptionDescribe
+// and ExceptionClear ignoring their return values and sending ""
+// to the error stream.
+#define CHECK_EXCEPTION(jni) \
+ RTC_CHECK(!jni->ExceptionCheck()) \
+ << (jni->ExceptionDescribe(), jni->ExceptionClear(), "")
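+//
+// Typical use (illustrative), immediately after any JNIEnv call:
+//   jobject o = jni->CallObjectMethod(j_obj, m_id);
+//   CHECK_EXCEPTION(jni) << "error during CallObjectMethod";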
+
+// Helper that calls ptr->Release() and aborts the process with a useful
+// message if that didn't actually delete *ptr because of extra refcounts.
+#define CHECK_RELEASE(ptr) \
+ RTC_CHECK_EQ(0, (ptr)->Release()) << "Unexpected refcount."
+
+namespace webrtc_jni {
+
+jint InitGlobalJniVariables(JavaVM *jvm);
+
+// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
+JNIEnv* GetEnv();
+
+JavaVM *GetJVM();
+
+// Return a |JNIEnv*| usable on this thread. Attaches to |g_jvm| if necessary.
+JNIEnv* AttachCurrentThreadIfNeeded();
+
+// Return a |jlong| that will correctly convert back to |ptr|. This is needed
+// because the alternative (of silently passing a 32-bit pointer to a vararg
+// function expecting a 64-bit param) picks up garbage in the high 32 bits.
+jlong jlongFromPointer(void* ptr);
+
+// JNIEnv-helper methods that RTC_CHECK success: no Java exception thrown and
+// found object/class/method/field is non-null.
+jmethodID GetMethodID(
+ JNIEnv* jni, jclass c, const std::string& name, const char* signature);
+
+jmethodID GetStaticMethodID(
+ JNIEnv* jni, jclass c, const char* name, const char* signature);
+
+jfieldID GetFieldID(JNIEnv* jni, jclass c, const char* name,
+ const char* signature);
+
+jclass GetObjectClass(JNIEnv* jni, jobject object);
+
+jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id);
+
+jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id);
+
+jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id);
+
+jint GetIntField(JNIEnv* jni, jobject object, jfieldID id);
+
+bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id);
+
+// Java references to "null" can only be distinguished as such in C++ by
+// creating a local reference, so this helper wraps that logic.
+bool IsNull(JNIEnv* jni, jobject obj);
+
+// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
+jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native);
+
+// Given a (UTF-16) jstring return a new UTF-8 native string.
+std::string JavaToStdString(JNIEnv* jni, const jstring& j_string);
+
+// Return the (singleton) Java Enum object corresponding to |index|.
+jobject JavaEnumFromIndex(JNIEnv* jni, jclass state_class,
+ const std::string& state_class_name, int index);
+
+// Returns the name of a Java enum.
+std::string GetJavaEnumName(JNIEnv* jni,
+ const std::string& className,
+ jobject j_enum);
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o);
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o);
+
+// Scope Java local references to the lifetime of this object. Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
+class ScopedLocalRefFrame {
+ public:
+ explicit ScopedLocalRefFrame(JNIEnv* jni);
+ ~ScopedLocalRefFrame();
+
+ private:
+ JNIEnv* jni_;
+};
+
+// Scoped holder for global Java refs.
+template<class T> // T is jclass, jobject, jintArray, etc.
+class ScopedGlobalRef {
+ public:
+ ScopedGlobalRef(JNIEnv* jni, T obj)
+ : obj_(static_cast<T>(jni->NewGlobalRef(obj))) {}
+ ~ScopedGlobalRef() {
+ DeleteGlobalRef(AttachCurrentThreadIfNeeded(), obj_);
+ }
+ T operator*() const {
+ return obj_;
+ }
+ private:
+ T obj_;
+};
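+
+// Illustrative use of the two scoped holders above (|j_obj| is a
+// hypothetical jobject received from Java):
+//   void OnNativeCallback(jobject j_obj) {
+//     JNIEnv* jni = AttachCurrentThreadIfNeeded();
+//     ScopedLocalRefFrame local_ref_frame(jni);  // Frees locals on return.
+//     ScopedGlobalRef<jobject> global_obj(jni, j_obj);  // Freed in its dtor.
+//     // ... use *global_obj ...
+//   }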
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_JNI_HELPERS_H_
diff --git a/webrtc/api/java/jni/jni_onload.cc b/webrtc/api/java/jni/jni_onload.cc
new file mode 100644
index 0000000..af2804d
--- /dev/null
+++ b/webrtc/api/java/jni/jni_onload.cc
@@ -0,0 +1,55 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/ssladapter.h"
+
+namespace webrtc_jni {
+
+extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) {
+ jint ret = InitGlobalJniVariables(jvm);
+ RTC_DCHECK_GE(ret, 0);
+ if (ret < 0)
+ return -1;
+
+ RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+ LoadGlobalClassReferenceHolder();
+
+ return ret;
+}
+
+extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) {
+ FreeGlobalClassReferenceHolder();
+ RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/native_handle_impl.cc b/webrtc/api/java/jni/native_handle_impl.cc
new file mode 100644
index 0000000..8ec549c
--- /dev/null
+++ b/webrtc/api/java/jni/native_handle_impl.cc
@@ -0,0 +1,186 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/java/jni/native_handle_impl.h"
+
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/keep_ref_until_done.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+using webrtc::NativeHandleBuffer;
+
+namespace {
+
+void RotateMatrix(float a[16], webrtc::VideoRotation rotation) {
+ // Texture coordinates are in the range 0 to 1. The transformation of the last
+ // row in each rotation matrix is needed for proper translation, e.g., to
+ // mirror x, we don't replace x by -x, but by 1-x.
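+ // E.g. in the kVideoRotation_90 case below, the x row (a[0..3]) is negated
+ // and also added into the translation row (a[12..15]), which implements the
+ // "1 - coordinate" part of the mapping.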
+ switch (rotation) {
+ case webrtc::kVideoRotation_0:
+ break;
+ case webrtc::kVideoRotation_90: {
+ const float ROTATE_90[16] =
+ { a[4], a[5], a[6], a[7],
+ -a[0], -a[1], -a[2], -a[3],
+ a[8], a[9], a[10], a[11],
+ a[0] + a[12], a[1] + a[13], a[2] + a[14], a[3] + a[15]};
+ memcpy(a, ROTATE_90, sizeof(ROTATE_90));
+ } break;
+ case webrtc::kVideoRotation_180: {
+ const float ROTATE_180[16] =
+ { -a[0], -a[1], -a[2], -a[3],
+ -a[4], -a[5], -a[6], -a[7],
+ a[8], a[9], a[10], a[11],
+ a[0] + a[4] + a[12], a[1] + a[5] + a[13], a[2] + a[6] + a[14],
+ a[3] + a[7] + a[15]};
+ memcpy(a, ROTATE_180, sizeof(ROTATE_180));
+ }
+ break;
+ case webrtc::kVideoRotation_270: {
+ const float ROTATE_270[16] =
+ { -a[4], -a[5], -a[6], -a[7],
+ a[0], a[1], a[2], a[3],
+ a[8], a[9], a[10], a[11],
+ a[4] + a[12], a[5] + a[13], a[6] + a[14], a[7] + a[15]};
+ memcpy(a, ROTATE_270, sizeof(ROTATE_270));
+ } break;
+ }
+}
+
+} // anonymous namespace
+
+namespace webrtc_jni {
+
+// Align the buffer to 64 bytes for improved performance, e.g. for SIMD use.
+static const int kBufferAlignment = 64;
+
+NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
+ jint j_oes_texture_id,
+ jfloatArray j_transform_matrix)
+ : oes_texture_id(j_oes_texture_id) {
+ RTC_CHECK_EQ(16, jni->GetArrayLength(j_transform_matrix));
+ jfloat* transform_matrix_ptr =
+ jni->GetFloatArrayElements(j_transform_matrix, nullptr);
+ for (int i = 0; i < 16; ++i) {
+ sampling_matrix[i] = transform_matrix_ptr[i];
+ }
+ jni->ReleaseFloatArrayElements(j_transform_matrix, transform_matrix_ptr, 0);
+}
+
+AndroidTextureBuffer::AndroidTextureBuffer(
+ int width,
+ int height,
+ const NativeHandleImpl& native_handle,
+ jobject surface_texture_helper,
+ const rtc::Callback0<void>& no_longer_used)
+ : webrtc::NativeHandleBuffer(&native_handle_, width, height),
+ native_handle_(native_handle),
+ surface_texture_helper_(surface_texture_helper),
+ no_longer_used_cb_(no_longer_used) {}
+
+AndroidTextureBuffer::~AndroidTextureBuffer() {
+ no_longer_used_cb_();
+}
+
+rtc::scoped_refptr<webrtc::VideoFrameBuffer>
+AndroidTextureBuffer::NativeToI420Buffer() {
+ int uv_width = (width()+7) / 8;
+ int stride = 8 * uv_width;
+ int uv_height = (height()+1)/2;
+ size_t size = stride * (height() + uv_height);
+ // The data is owned by the frame, and the normal case is that the
+ // data is deleted by the frame's destructor callback.
+ //
+ // TODO(nisse): Use an I420BufferPool. We then need to extend that
+ // class, and I420Buffer, to support our memory layout.
+ rtc::scoped_ptr<uint8_t, webrtc::AlignedFreeDeleter> yuv_data(
+ static_cast<uint8_t*>(webrtc::AlignedMalloc(size, kBufferAlignment)));
+ // See SurfaceTextureHelper.java for the required layout.
+ uint8_t* y_data = yuv_data.get();
+ uint8_t* u_data = y_data + height() * stride;
+ uint8_t* v_data = u_data + stride/2;
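+ // Resulting layout (as implied by the pointer arithmetic above): height()
+ // rows of Y, followed by uv_height rows of |stride| bytes that each hold a
+ // U half-row in the first stride/2 bytes and the matching V half-row in the
+ // second stride/2 bytes.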
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> copy =
+ new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
+ width(), height(),
+ y_data, stride,
+ u_data, stride,
+ v_data, stride,
+ rtc::Bind(&webrtc::AlignedFree, yuv_data.release()));
+
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ jmethodID transform_mid = GetMethodID(
+ jni,
+ GetObjectClass(jni, surface_texture_helper_),
+ "textureToYUV",
+ "(Ljava/nio/ByteBuffer;IIII[F)V");
+
+ jobject byte_buffer = jni->NewDirectByteBuffer(y_data, size);
+
+ // TODO(nisse): Keep java transform matrix around.
+ jfloatArray sampling_matrix = jni->NewFloatArray(16);
+ jni->SetFloatArrayRegion(sampling_matrix, 0, 16,
+ native_handle_.sampling_matrix);
+
+ jni->CallVoidMethod(surface_texture_helper_,
+ transform_mid,
+ byte_buffer, width(), height(), stride,
+ native_handle_.oes_texture_id, sampling_matrix);
+ CHECK_EXCEPTION(jni) << "textureToYUV throwed an exception";
+
+ return copy;
+}
+
+rtc::scoped_refptr<AndroidTextureBuffer>
+AndroidTextureBuffer::ScaleAndRotate(int dst_width,
+ int dst_height,
+ webrtc::VideoRotation rotation) {
+ if (width() == dst_width && height() == dst_height &&
+ rotation == webrtc::kVideoRotation_0) {
+ return this;
+ }
+ int rotated_width = (rotation % 180 == 0) ? dst_width : dst_height;
+ int rotated_height = (rotation % 180 == 0) ? dst_height : dst_width;
+
+ // Here we use Bind magic to add a reference count to |this| until the newly
+ // created AndroidTextureBuffer is destroyed.
+ rtc::scoped_refptr<AndroidTextureBuffer> buffer(
+ new rtc::RefCountedObject<AndroidTextureBuffer>(
+ rotated_width, rotated_height, native_handle_,
+ surface_texture_helper_, rtc::KeepRefUntilDone(this)));
+
+ RotateMatrix(buffer->native_handle_.sampling_matrix, rotation);
+ return buffer;
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/native_handle_impl.h b/webrtc/api/java/jni/native_handle_impl.h
new file mode 100644
index 0000000..4203bdf
--- /dev/null
+++ b/webrtc/api/java/jni/native_handle_impl.h
@@ -0,0 +1,77 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
+#define WEBRTC_API_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
+
+#include <jni.h>
+
+#include "webrtc/common_video/include/video_frame_buffer.h"
+#include "webrtc/common_video/rotation.h"
+
+namespace webrtc_jni {
+
+// Wrapper for texture object.
+struct NativeHandleImpl {
+ NativeHandleImpl(JNIEnv* jni,
+ jint j_oes_texture_id,
+ jfloatArray j_transform_matrix);
+
+ const int oes_texture_id;
+ float sampling_matrix[16];
+};
+
+class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
+ public:
+ AndroidTextureBuffer(int width,
+ int height,
+ const NativeHandleImpl& native_handle,
+ jobject surface_texture_helper,
+ const rtc::Callback0<void>& no_longer_used);
+ ~AndroidTextureBuffer();
+ rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
+
+ rtc::scoped_refptr<AndroidTextureBuffer> ScaleAndRotate(
+ int dst_width,
+ int dst_height,
+ webrtc::VideoRotation rotation);
+
+ private:
+ NativeHandleImpl native_handle_;
+ // Raw object pointer, relying on the caller, i.e.,
+ // AndroidVideoCapturerJni or the C++ SurfaceTextureHelper, to keep
+ // a global reference. TODO(nisse): Make this a reference to the C++
+ // SurfaceTextureHelper instead, but that requires some refactoring
+ // of AndroidVideoCapturerJni.
+ jobject surface_texture_helper_;
+ rtc::Callback0<void> no_longer_used_cb_;
+};
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
diff --git a/webrtc/api/java/jni/peerconnection_jni.cc b/webrtc/api/java/jni/peerconnection_jni.cc
new file mode 100644
index 0000000..1160b2b
--- /dev/null
+++ b/webrtc/api/java/jni/peerconnection_jni.cc
@@ -0,0 +1,2058 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// Hints for future visitors:
+// This entire file is an implementation detail of the org.webrtc Java package,
+// the most interesting bits of which are org.webrtc.PeerConnection{,Factory}.
+// The layout of this file is roughly:
+// - various helper C++ functions & classes that wrap Java counterparts and
+// expose a C++ interface that can be passed to the C++ PeerConnection APIs
+// - implementations of methods declared "static" in the Java package (named
+// things like Java_org_webrtc_OMG_Can_This_Name_Be_Any_Longer, prescribed by
+// the JNI spec).
+//
+// Lifecycle notes: objects are owned where they will be called; in other words
+// FooObservers are owned by C++-land, and user-callable objects (e.g.
+// PeerConnection and VideoTrack) are owned by Java-land.
+// When this file allocates C++ RefCountInterfaces it AddRef()s an artificial
+// ref simulating the jlong held in Java-land, and then Release()s the ref in
+// the respective free call. Sometimes this AddRef is implicit in the
+// construction of a scoped_refptr<> which is then .release()d.
+// Any persistent (non-local) references from C++ to Java must be global or weak
+// (in which case they must be checked before use)!
+//
+// Exception notes: pretty much all JNI calls can throw Java exceptions, so each
+// call through a JNIEnv* pointer needs to be followed by an ExceptionCheck()
+// call. In this file this is done in CHECK_EXCEPTION, making for much easier
+// debugging in case of failure (the alternative is to wait for control to
+// return to the Java frame that called code in this file, at which point it's
+// impossible to tell which JNI call broke).
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include <limits>
+#include <utility>
+
+#include "webrtc/api/androidvideocapturer.h"
+#include "webrtc/api/dtlsidentitystore.h"
+#include "webrtc/api/java/jni/androidmediadecoder_jni.h"
+#include "webrtc/api/java/jni/androidmediaencoder_jni.h"
+#include "webrtc/api/java/jni/androidnetworkmonitor_jni.h"
+#include "webrtc/api/java/jni/androidvideocapturer_jni.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/api/mediaconstraintsinterface.h"
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/api/rtpreceiverinterface.h"
+#include "webrtc/api/rtpsenderinterface.h"
+#include "webrtc/api/videosourceinterface.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/event_tracer.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/logsinks.h"
+#include "webrtc/base/messagequeue.h"
+#include "webrtc/base/networkmonitor.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/media/base/videocapturer.h"
+#include "webrtc/media/base/videorenderer.h"
+#include "webrtc/media/devices/videorendererfactory.h"
+#include "webrtc/media/webrtc/webrtcvideodecoderfactory.h"
+#include "webrtc/media/webrtc/webrtcvideoencoderfactory.h"
+#include "webrtc/modules/video_render/video_render_internal.h"
+#include "webrtc/system_wrappers/include/field_trial_default.h"
+#include "webrtc/system_wrappers/include/logcat_trace_context.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/voice_engine/include/voe_base.h"
+
+using cricket::WebRtcVideoDecoderFactory;
+using cricket::WebRtcVideoEncoderFactory;
+using rtc::Bind;
+using rtc::Thread;
+using rtc::ThreadManager;
+using rtc::scoped_ptr;
+using webrtc::AudioSourceInterface;
+using webrtc::AudioTrackInterface;
+using webrtc::AudioTrackVector;
+using webrtc::CreateSessionDescriptionObserver;
+using webrtc::DataBuffer;
+using webrtc::DataChannelInit;
+using webrtc::DataChannelInterface;
+using webrtc::DataChannelObserver;
+using webrtc::IceCandidateInterface;
+using webrtc::LogcatTraceContext;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaSourceInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::MediaStreamTrackInterface;
+using webrtc::PeerConnectionFactoryInterface;
+using webrtc::PeerConnectionInterface;
+using webrtc::PeerConnectionObserver;
+using webrtc::RtpReceiverInterface;
+using webrtc::RtpSenderInterface;
+using webrtc::SessionDescriptionInterface;
+using webrtc::SetSessionDescriptionObserver;
+using webrtc::StatsObserver;
+using webrtc::StatsReport;
+using webrtc::StatsReports;
+using webrtc::VideoRendererInterface;
+using webrtc::VideoSourceInterface;
+using webrtc::VideoTrackInterface;
+using webrtc::VideoTrackVector;
+using webrtc::kVideoCodecVP8;
+
+namespace webrtc_jni {
+
+// Field trials initialization string
+static char *field_trials_init_string = NULL;
+
+// Set in PeerConnectionFactory_initializeAndroidGlobals().
+static bool factory_static_initialized = false;
+static bool video_hw_acceleration_enabled = true;
+
+// Return the (singleton) Java Enum object corresponding to |index|;
+// |state_class_fragment| is something like "MediaSource$State".
+static jobject JavaEnumFromIndex(
+ JNIEnv* jni, const std::string& state_class_fragment, int index) {
+ const std::string state_class = "org/webrtc/" + state_class_fragment;
+ return JavaEnumFromIndex(jni, FindClass(jni, state_class.c_str()),
+ state_class, index);
+}
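+
+// Illustrative call, matching the uses further below in this file:
+//   jobject state = JavaEnumFromIndex(jni, "PeerConnection$SignalingState",
+//                                     new_state);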
+
+static DataChannelInit JavaDataChannelInitToNative(
+ JNIEnv* jni, jobject j_init) {
+ DataChannelInit init;
+
+ jclass j_init_class = FindClass(jni, "org/webrtc/DataChannel$Init");
+ jfieldID ordered_id = GetFieldID(jni, j_init_class, "ordered", "Z");
+ jfieldID max_retransmit_time_id =
+ GetFieldID(jni, j_init_class, "maxRetransmitTimeMs", "I");
+ jfieldID max_retransmits_id =
+ GetFieldID(jni, j_init_class, "maxRetransmits", "I");
+ jfieldID protocol_id =
+ GetFieldID(jni, j_init_class, "protocol", "Ljava/lang/String;");
+ jfieldID negotiated_id = GetFieldID(jni, j_init_class, "negotiated", "Z");
+ jfieldID id_id = GetFieldID(jni, j_init_class, "id", "I");
+
+ init.ordered = GetBooleanField(jni, j_init, ordered_id);
+ init.maxRetransmitTime = GetIntField(jni, j_init, max_retransmit_time_id);
+ init.maxRetransmits = GetIntField(jni, j_init, max_retransmits_id);
+ init.protocol = JavaToStdString(
+ jni, GetStringField(jni, j_init, protocol_id));
+ init.negotiated = GetBooleanField(jni, j_init, negotiated_id);
+ init.id = GetIntField(jni, j_init, id_id);
+
+ return init;
+}
+
+class ConstraintsWrapper;
+
+// Adapter between the C++ PeerConnectionObserver interface and the Java
+// PeerConnection.Observer interface. Wraps an instance of the Java interface
+// and dispatches C++ callbacks to Java.
+class PCOJava : public PeerConnectionObserver {
+ public:
+ PCOJava(JNIEnv* jni, jobject j_observer)
+ : j_observer_global_(jni, j_observer),
+ j_observer_class_(jni, GetObjectClass(jni, *j_observer_global_)),
+ j_media_stream_class_(jni, FindClass(jni, "org/webrtc/MediaStream")),
+ j_media_stream_ctor_(GetMethodID(
+ jni, *j_media_stream_class_, "<init>", "(J)V")),
+ j_audio_track_class_(jni, FindClass(jni, "org/webrtc/AudioTrack")),
+ j_audio_track_ctor_(GetMethodID(
+ jni, *j_audio_track_class_, "<init>", "(J)V")),
+ j_video_track_class_(jni, FindClass(jni, "org/webrtc/VideoTrack")),
+ j_video_track_ctor_(GetMethodID(
+ jni, *j_video_track_class_, "<init>", "(J)V")),
+ j_data_channel_class_(jni, FindClass(jni, "org/webrtc/DataChannel")),
+ j_data_channel_ctor_(GetMethodID(
+ jni, *j_data_channel_class_, "<init>", "(J)V")) {
+ }
+
+ virtual ~PCOJava() {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ while (!remote_streams_.empty())
+ DisposeRemoteStream(remote_streams_.begin());
+ }
+
+ void OnIceCandidate(const IceCandidateInterface* candidate) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ std::string sdp;
+ RTC_CHECK(candidate->ToString(&sdp)) << "got so far: " << sdp;
+ jclass candidate_class = FindClass(jni(), "org/webrtc/IceCandidate");
+ jmethodID ctor = GetMethodID(jni(), candidate_class,
+ "<init>", "(Ljava/lang/String;ILjava/lang/String;)V");
+ jstring j_mid = JavaStringFromStdString(jni(), candidate->sdp_mid());
+ jstring j_sdp = JavaStringFromStdString(jni(), sdp);
+ jobject j_candidate = jni()->NewObject(
+ candidate_class, ctor, j_mid, candidate->sdp_mline_index(), j_sdp);
+ CHECK_EXCEPTION(jni()) << "error during NewObject";
+ jmethodID m = GetMethodID(jni(), *j_observer_class_,
+ "onIceCandidate", "(Lorg/webrtc/IceCandidate;)V");
+ jni()->CallVoidMethod(*j_observer_global_, m, j_candidate);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jmethodID m = GetMethodID(
+ jni(), *j_observer_class_, "onSignalingChange",
+ "(Lorg/webrtc/PeerConnection$SignalingState;)V");
+ jobject new_state_enum =
+ JavaEnumFromIndex(jni(), "PeerConnection$SignalingState", new_state);
+ jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jmethodID m = GetMethodID(
+ jni(), *j_observer_class_, "onIceConnectionChange",
+ "(Lorg/webrtc/PeerConnection$IceConnectionState;)V");
+ jobject new_state_enum = JavaEnumFromIndex(
+ jni(), "PeerConnection$IceConnectionState", new_state);
+ jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnIceConnectionReceivingChange(bool receiving) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jmethodID m = GetMethodID(
+ jni(), *j_observer_class_, "onIceConnectionReceivingChange", "(Z)V");
+ jni()->CallVoidMethod(*j_observer_global_, m, receiving);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jmethodID m = GetMethodID(
+ jni(), *j_observer_class_, "onIceGatheringChange",
+ "(Lorg/webrtc/PeerConnection$IceGatheringState;)V");
+ jobject new_state_enum = JavaEnumFromIndex(
+ jni(), "PeerConnection$IceGatheringState", new_state);
+ jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnAddStream(MediaStreamInterface* stream) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ // Java MediaStream holds one reference. Corresponding Release() is in
+ // MediaStream_free, triggered by MediaStream.dispose().
+ stream->AddRef();
+ jobject j_stream =
+ jni()->NewObject(*j_media_stream_class_, j_media_stream_ctor_,
+ reinterpret_cast<jlong>(stream));
+ CHECK_EXCEPTION(jni()) << "error during NewObject";
+
+ for (const auto& track : stream->GetAudioTracks()) {
+ jstring id = JavaStringFromStdString(jni(), track->id());
+ // Java AudioTrack holds one reference. Corresponding Release() is in
+ // MediaStreamTrack_free, triggered by AudioTrack.dispose().
+ track->AddRef();
+ jobject j_track =
+ jni()->NewObject(*j_audio_track_class_, j_audio_track_ctor_,
+ reinterpret_cast<jlong>(track.get()), id);
+ CHECK_EXCEPTION(jni()) << "error during NewObject";
+ jfieldID audio_tracks_id = GetFieldID(jni(),
+ *j_media_stream_class_,
+ "audioTracks",
+ "Ljava/util/LinkedList;");
+ jobject audio_tracks = GetObjectField(jni(), j_stream, audio_tracks_id);
+ jmethodID add = GetMethodID(jni(),
+ GetObjectClass(jni(), audio_tracks),
+ "add",
+ "(Ljava/lang/Object;)Z");
+ jboolean added = jni()->CallBooleanMethod(audio_tracks, add, j_track);
+ CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
+ RTC_CHECK(added);
+ }
+
+ for (const auto& track : stream->GetVideoTracks()) {
+ jstring id = JavaStringFromStdString(jni(), track->id());
+ // Java VideoTrack holds one reference. Corresponding Release() is in
+ // MediaStreamTrack_free, triggered by VideoTrack.dispose().
+ track->AddRef();
+ jobject j_track =
+ jni()->NewObject(*j_video_track_class_, j_video_track_ctor_,
+ reinterpret_cast<jlong>(track.get()), id);
+ CHECK_EXCEPTION(jni()) << "error during NewObject";
+ jfieldID video_tracks_id = GetFieldID(jni(),
+ *j_media_stream_class_,
+ "videoTracks",
+ "Ljava/util/LinkedList;");
+ jobject video_tracks = GetObjectField(jni(), j_stream, video_tracks_id);
+ jmethodID add = GetMethodID(jni(),
+ GetObjectClass(jni(), video_tracks),
+ "add",
+ "(Ljava/lang/Object;)Z");
+ jboolean added = jni()->CallBooleanMethod(video_tracks, add, j_track);
+ CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
+ RTC_CHECK(added);
+ }
+ remote_streams_[stream] = NewGlobalRef(jni(), j_stream);
+
+ jmethodID m = GetMethodID(jni(), *j_observer_class_, "onAddStream",
+ "(Lorg/webrtc/MediaStream;)V");
+ jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnRemoveStream(MediaStreamInterface* stream) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream);
+ RTC_CHECK(it != remote_streams_.end()) << "unexpected stream: " << std::hex
+ << stream;
+ jobject j_stream = it->second;
+ jmethodID m = GetMethodID(jni(), *j_observer_class_, "onRemoveStream",
+ "(Lorg/webrtc/MediaStream;)V");
+ jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ DisposeRemoteStream(it);
+ }
+
+ void OnDataChannel(DataChannelInterface* channel) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jobject j_channel = jni()->NewObject(
+ *j_data_channel_class_, j_data_channel_ctor_, (jlong)channel);
+ CHECK_EXCEPTION(jni()) << "error during NewObject";
+
+ jmethodID m = GetMethodID(jni(), *j_observer_class_, "onDataChannel",
+ "(Lorg/webrtc/DataChannel;)V");
+ jni()->CallVoidMethod(*j_observer_global_, m, j_channel);
+
+ // Channel is now owned by Java object, and will be freed from
+ // DataChannel.dispose(). Important that this be done _after_ the
+ // CallVoidMethod above as Java code might call back into native code and be
+ // surprised to see a refcount of 2.
+ int bumped_count = channel->AddRef();
+ RTC_CHECK(bumped_count == 2) << "Unexpected refcount OnDataChannel";
+
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnRenegotiationNeeded() override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jmethodID m =
+ GetMethodID(jni(), *j_observer_class_, "onRenegotiationNeeded", "()V");
+ jni()->CallVoidMethod(*j_observer_global_, m);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void SetConstraints(ConstraintsWrapper* constraints) {
+ RTC_CHECK(!constraints_.get()) << "constraints already set!";
+ constraints_.reset(constraints);
+ }
+
+ const ConstraintsWrapper* constraints() { return constraints_.get(); }
+
+ private:
+ typedef std::map<MediaStreamInterface*, jobject> NativeToJavaStreamsMap;
+
+ void DisposeRemoteStream(const NativeToJavaStreamsMap::iterator& it) {
+ jobject j_stream = it->second;
+ remote_streams_.erase(it);
+ jni()->CallVoidMethod(
+ j_stream, GetMethodID(jni(), *j_media_stream_class_, "dispose", "()V"));
+ CHECK_EXCEPTION(jni()) << "error during MediaStream.dispose()";
+ DeleteGlobalRef(jni(), j_stream);
+ }
+
+ JNIEnv* jni() {
+ return AttachCurrentThreadIfNeeded();
+ }
+
+ const ScopedGlobalRef<jobject> j_observer_global_;
+ const ScopedGlobalRef<jclass> j_observer_class_;
+ const ScopedGlobalRef<jclass> j_media_stream_class_;
+ const jmethodID j_media_stream_ctor_;
+ const ScopedGlobalRef<jclass> j_audio_track_class_;
+ const jmethodID j_audio_track_ctor_;
+ const ScopedGlobalRef<jclass> j_video_track_class_;
+ const jmethodID j_video_track_ctor_;
+ const ScopedGlobalRef<jclass> j_data_channel_class_;
+ const jmethodID j_data_channel_ctor_;
+ // C++ -> Java remote streams. The stored jobjects are global refs and must be
+ // manually deleted upon removal. Use DisposeRemoteStream().
+ NativeToJavaStreamsMap remote_streams_;
+ scoped_ptr<ConstraintsWrapper> constraints_;
+};
+
+// Wrapper for a Java MediaConstraints object. Copies all needed data so when
+// the constructor returns the Java object is no longer needed.
+class ConstraintsWrapper : public MediaConstraintsInterface {
+ public:
+ ConstraintsWrapper(JNIEnv* jni, jobject j_constraints) {
+ PopulateConstraintsFromJavaPairList(
+ jni, j_constraints, "mandatory", &mandatory_);
+ PopulateConstraintsFromJavaPairList(
+ jni, j_constraints, "optional", &optional_);
+ }
+
+ virtual ~ConstraintsWrapper() {}
+
+ // MediaConstraintsInterface.
+ const Constraints& GetMandatory() const override { return mandatory_; }
+
+ const Constraints& GetOptional() const override { return optional_; }
+
+ private:
+ // Helper for translating a List<Pair<String, String>> to a Constraints.
+ static void PopulateConstraintsFromJavaPairList(
+ JNIEnv* jni, jobject j_constraints,
+ const char* field_name, Constraints* field) {
+ jfieldID j_id = GetFieldID(jni,
+ GetObjectClass(jni, j_constraints), field_name, "Ljava/util/List;");
+ jobject j_list = GetObjectField(jni, j_constraints, j_id);
+ jmethodID j_iterator_id = GetMethodID(jni,
+ GetObjectClass(jni, j_list), "iterator", "()Ljava/util/Iterator;");
+ jobject j_iterator = jni->CallObjectMethod(j_list, j_iterator_id);
+ CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+ jmethodID j_has_next = GetMethodID(jni,
+ GetObjectClass(jni, j_iterator), "hasNext", "()Z");
+ jmethodID j_next = GetMethodID(jni,
+ GetObjectClass(jni, j_iterator), "next", "()Ljava/lang/Object;");
+ while (jni->CallBooleanMethod(j_iterator, j_has_next)) {
+ CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+ jobject entry = jni->CallObjectMethod(j_iterator, j_next);
+ CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+ jmethodID get_key = GetMethodID(jni,
+ GetObjectClass(jni, entry), "getKey", "()Ljava/lang/String;");
+ jstring j_key = reinterpret_cast<jstring>(
+ jni->CallObjectMethod(entry, get_key));
+ CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+ jmethodID get_value = GetMethodID(jni,
+ GetObjectClass(jni, entry), "getValue", "()Ljava/lang/String;");
+ jstring j_value = reinterpret_cast<jstring>(
+ jni->CallObjectMethod(entry, get_value));
+ CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+ field->push_back(Constraint(JavaToStdString(jni, j_key),
+ JavaToStdString(jni, j_value)));
+ }
+ CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+ }
+
+ Constraints mandatory_;
+ Constraints optional_;
+};
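+
+// Illustrative sketch (not part of the original CL): once constructed, the
+// wrapper is consumed through the plain MediaConstraintsInterface with no
+// further JNI access, since all pairs were copied out of the Java object.
+static void LogMandatoryConstraintsForIllustration(
+    const MediaConstraintsInterface* constraints) {
+  for (const auto& constraint : constraints->GetMandatory()) {
+    LOG(LS_INFO) << "mandatory: " << constraint.key << " = "
+                 << constraint.value;
+  }
+}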
+
+static jobject JavaSdpFromNativeSdp(
+ JNIEnv* jni, const SessionDescriptionInterface* desc) {
+ std::string sdp;
+ RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp;
+ jstring j_description = JavaStringFromStdString(jni, sdp);
+
+ jclass j_type_class = FindClass(
+ jni, "org/webrtc/SessionDescription$Type");
+ jmethodID j_type_from_canonical = GetStaticMethodID(
+ jni, j_type_class, "fromCanonicalForm",
+ "(Ljava/lang/String;)Lorg/webrtc/SessionDescription$Type;");
+ jstring j_type_string = JavaStringFromStdString(jni, desc->type());
+ jobject j_type = jni->CallStaticObjectMethod(
+ j_type_class, j_type_from_canonical, j_type_string);
+ CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+
+ jclass j_sdp_class = FindClass(jni, "org/webrtc/SessionDescription");
+ jmethodID j_sdp_ctor = GetMethodID(
+ jni, j_sdp_class, "<init>",
+ "(Lorg/webrtc/SessionDescription$Type;Ljava/lang/String;)V");
+ jobject j_sdp = jni->NewObject(
+ j_sdp_class, j_sdp_ctor, j_type, j_description);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ return j_sdp;
+}
+
+template <class T> // T is one of {Create,Set}SessionDescriptionObserver.
+class SdpObserverWrapper : public T {
+ public:
+ SdpObserverWrapper(JNIEnv* jni, jobject j_observer,
+ ConstraintsWrapper* constraints)
+ : constraints_(constraints),
+ j_observer_global_(jni, j_observer),
+ j_observer_class_(jni, GetObjectClass(jni, j_observer)) {
+ }
+
+ virtual ~SdpObserverWrapper() {}
+
+ // Can't mark override because of templating.
+ virtual void OnSuccess() {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jmethodID m = GetMethodID(jni(), *j_observer_class_, "onSetSuccess", "()V");
+ jni()->CallVoidMethod(*j_observer_global_, m);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ // Can't mark override because of templating.
+ virtual void OnSuccess(SessionDescriptionInterface* desc) {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jmethodID m = GetMethodID(
+ jni(), *j_observer_class_, "onCreateSuccess",
+ "(Lorg/webrtc/SessionDescription;)V");
+ jobject j_sdp = JavaSdpFromNativeSdp(jni(), desc);
+ jni()->CallVoidMethod(*j_observer_global_, m, j_sdp);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ protected:
+ // Common implementation for failure of Set & Create types, distinguished by
+ // |op| being "Set" or "Create".
+ void DoOnFailure(const std::string& op, const std::string& error) {
+ jmethodID m = GetMethodID(jni(), *j_observer_class_, "on" + op + "Failure",
+ "(Ljava/lang/String;)V");
+ jstring j_error_string = JavaStringFromStdString(jni(), error);
+ jni()->CallVoidMethod(*j_observer_global_, m, j_error_string);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ JNIEnv* jni() {
+ return AttachCurrentThreadIfNeeded();
+ }
+
+ private:
+ scoped_ptr<ConstraintsWrapper> constraints_;
+ const ScopedGlobalRef<jobject> j_observer_global_;
+ const ScopedGlobalRef<jclass> j_observer_class_;
+};
+
+class CreateSdpObserverWrapper
+ : public SdpObserverWrapper<CreateSessionDescriptionObserver> {
+ public:
+ CreateSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
+ ConstraintsWrapper* constraints)
+ : SdpObserverWrapper(jni, j_observer, constraints) {}
+
+ void OnFailure(const std::string& error) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ SdpObserverWrapper::DoOnFailure(std::string("Create"), error);
+ }
+};
+
+class SetSdpObserverWrapper
+ : public SdpObserverWrapper<SetSessionDescriptionObserver> {
+ public:
+ SetSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
+ ConstraintsWrapper* constraints)
+ : SdpObserverWrapper(jni, j_observer, constraints) {}
+
+ void OnFailure(const std::string& error) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ SdpObserverWrapper::DoOnFailure(std::string("Set"), error);
+ }
+};
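+
+// Illustrative sketch (not part of the original CL): both wrappers derive
+// from refcounted observer interfaces, so call sites instantiate them through
+// rtc::RefCountedObject, exactly as PeerConnection_createOffer does below.
+static rtc::scoped_refptr<CreateSdpObserverWrapper>
+WrapCreateSdpObserverForIllustration(JNIEnv* jni, jobject j_observer,
+                                     ConstraintsWrapper* constraints) {
+  // The wrapper takes ownership of |constraints| via its scoped_ptr member.
+  return new rtc::RefCountedObject<CreateSdpObserverWrapper>(
+      jni, j_observer, constraints);
+}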
+
+// Adapter for a Java DataChannel$Observer presenting a C++ DataChannelObserver
+// and dispatching the callback from C++ back to Java.
+class DataChannelObserverWrapper : public DataChannelObserver {
+ public:
+ DataChannelObserverWrapper(JNIEnv* jni, jobject j_observer)
+ : j_observer_global_(jni, j_observer),
+ j_observer_class_(jni, GetObjectClass(jni, j_observer)),
+ j_buffer_class_(jni, FindClass(jni, "org/webrtc/DataChannel$Buffer")),
+ j_on_buffered_amount_change_mid_(GetMethodID(
+ jni, *j_observer_class_, "onBufferedAmountChange", "(J)V")),
+ j_on_state_change_mid_(
+ GetMethodID(jni, *j_observer_class_, "onStateChange", "()V")),
+ j_on_message_mid_(GetMethodID(jni, *j_observer_class_, "onMessage",
+ "(Lorg/webrtc/DataChannel$Buffer;)V")),
+ j_buffer_ctor_(GetMethodID(jni, *j_buffer_class_, "<init>",
+ "(Ljava/nio/ByteBuffer;Z)V")) {}
+
+ virtual ~DataChannelObserverWrapper() {}
+
+ void OnBufferedAmountChange(uint64_t previous_amount) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jni()->CallVoidMethod(*j_observer_global_, j_on_buffered_amount_change_mid_,
+ previous_amount);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnStateChange() override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jni()->CallVoidMethod(*j_observer_global_, j_on_state_change_mid_);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnMessage(const DataBuffer& buffer) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jobject byte_buffer = jni()->NewDirectByteBuffer(
+ const_cast<char*>(buffer.data.data<char>()), buffer.data.size());
+ jobject j_buffer = jni()->NewObject(*j_buffer_class_, j_buffer_ctor_,
+ byte_buffer, buffer.binary);
+ jni()->CallVoidMethod(*j_observer_global_, j_on_message_mid_, j_buffer);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ private:
+ JNIEnv* jni() {
+ return AttachCurrentThreadIfNeeded();
+ }
+
+ const ScopedGlobalRef<jobject> j_observer_global_;
+ const ScopedGlobalRef<jclass> j_observer_class_;
+ const ScopedGlobalRef<jclass> j_buffer_class_;
+ const jmethodID j_on_buffered_amount_change_mid_;
+ const jmethodID j_on_state_change_mid_;
+ const jmethodID j_on_message_mid_;
+ const jmethodID j_buffer_ctor_;
+};
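+
+// Illustrative sketch (not part of the original CL): the Java side owns the
+// wrapper through the register/unregister natives below; unregistering must
+// precede deletion so no callback races the destructor.
+static void DataChannelObserverLifetimeForIllustration(
+    JNIEnv* jni, DataChannelInterface* data_channel, jobject j_observer) {
+  DataChannelObserverWrapper* observer =
+      new DataChannelObserverWrapper(jni, j_observer);
+  data_channel->RegisterObserver(observer);  // Callbacks dispatch to Java.
+  data_channel->UnregisterObserver();
+  delete observer;
+}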
+
+// Adapter for a Java StatsObserver presenting a C++ StatsObserver and
+// dispatching the callback from C++ back to Java.
+class StatsObserverWrapper : public StatsObserver {
+ public:
+ StatsObserverWrapper(JNIEnv* jni, jobject j_observer)
+ : j_observer_global_(jni, j_observer),
+ j_observer_class_(jni, GetObjectClass(jni, j_observer)),
+ j_stats_report_class_(jni, FindClass(jni, "org/webrtc/StatsReport")),
+ j_stats_report_ctor_(GetMethodID(
+ jni, *j_stats_report_class_, "<init>",
+ "(Ljava/lang/String;Ljava/lang/String;D"
+ "[Lorg/webrtc/StatsReport$Value;)V")),
+ j_value_class_(jni, FindClass(
+ jni, "org/webrtc/StatsReport$Value")),
+ j_value_ctor_(GetMethodID(
+ jni, *j_value_class_, "<init>",
+ "(Ljava/lang/String;Ljava/lang/String;)V")) {
+ }
+
+ virtual ~StatsObserverWrapper() {}
+
+ void OnComplete(const StatsReports& reports) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jobjectArray j_reports = ReportsToJava(jni(), reports);
+ jmethodID m = GetMethodID(jni(), *j_observer_class_, "onComplete",
+ "([Lorg/webrtc/StatsReport;)V");
+ jni()->CallVoidMethod(*j_observer_global_, m, j_reports);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ private:
+ jobjectArray ReportsToJava(
+ JNIEnv* jni, const StatsReports& reports) {
+ jobjectArray reports_array = jni->NewObjectArray(
+ reports.size(), *j_stats_report_class_, NULL);
+ int i = 0;
+ for (const auto* report : reports) {
+ ScopedLocalRefFrame local_ref_frame(jni);
+ jstring j_id = JavaStringFromStdString(jni, report->id()->ToString());
+ jstring j_type = JavaStringFromStdString(jni, report->TypeToString());
+ jobjectArray j_values = ValuesToJava(jni, report->values());
+ jobject j_report = jni->NewObject(*j_stats_report_class_,
+ j_stats_report_ctor_,
+ j_id,
+ j_type,
+ report->timestamp(),
+ j_values);
+ jni->SetObjectArrayElement(reports_array, i++, j_report);
+ }
+ return reports_array;
+ }
+
+ jobjectArray ValuesToJava(JNIEnv* jni, const StatsReport::Values& values) {
+ jobjectArray j_values = jni->NewObjectArray(
+ values.size(), *j_value_class_, NULL);
+ int i = 0;
+ for (const auto& it : values) {
+ ScopedLocalRefFrame local_ref_frame(jni);
+ // Should we use the '.name' enum value here instead of converting the
+ // name to a string?
+ jstring j_name = JavaStringFromStdString(jni, it.second->display_name());
+ jstring j_value = JavaStringFromStdString(jni, it.second->ToString());
+ jobject j_element_value =
+ jni->NewObject(*j_value_class_, j_value_ctor_, j_name, j_value);
+ jni->SetObjectArrayElement(j_values, i++, j_element_value);
+ }
+ return j_values;
+ }
+
+ JNIEnv* jni() {
+ return AttachCurrentThreadIfNeeded();
+ }
+
+ const ScopedGlobalRef<jobject> j_observer_global_;
+ const ScopedGlobalRef<jclass> j_observer_class_;
+ const ScopedGlobalRef<jclass> j_stats_report_class_;
+ const jmethodID j_stats_report_ctor_;
+ const ScopedGlobalRef<jclass> j_value_class_;
+ const jmethodID j_value_ctor_;
+};
+
+// Adapter presenting a cricket::VideoRenderer as a
+// webrtc::VideoRendererInterface.
+class VideoRendererWrapper : public VideoRendererInterface {
+ public:
+ static VideoRendererWrapper* Create(cricket::VideoRenderer* renderer) {
+ if (renderer)
+ return new VideoRendererWrapper(renderer);
+ return NULL;
+ }
+
+ virtual ~VideoRendererWrapper() {}
+
+ void RenderFrame(const cricket::VideoFrame* video_frame) override {
+ ScopedLocalRefFrame local_ref_frame(AttachCurrentThreadIfNeeded());
+ renderer_->RenderFrame(video_frame->GetCopyWithRotationApplied());
+ }
+
+ private:
+ explicit VideoRendererWrapper(cricket::VideoRenderer* renderer)
+ : renderer_(renderer) {}
+ scoped_ptr<cricket::VideoRenderer> renderer_;
+};
+
+// Wrapper dispatching webrtc::VideoRendererInterface to a Java VideoRenderer
+// instance.
+class JavaVideoRendererWrapper : public VideoRendererInterface {
+ public:
+ JavaVideoRendererWrapper(JNIEnv* jni, jobject j_callbacks)
+ : j_callbacks_(jni, j_callbacks),
+ j_render_frame_id_(GetMethodID(
+ jni, GetObjectClass(jni, j_callbacks), "renderFrame",
+ "(Lorg/webrtc/VideoRenderer$I420Frame;)V")),
+ j_frame_class_(jni,
+ FindClass(jni, "org/webrtc/VideoRenderer$I420Frame")),
+ j_i420_frame_ctor_id_(GetMethodID(
+ jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;J)V")),
+ j_texture_frame_ctor_id_(GetMethodID(
+ jni, *j_frame_class_, "<init>",
+ "(IIII[FJ)V")),
+ j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
+ CHECK_EXCEPTION(jni);
+ }
+
+ virtual ~JavaVideoRendererWrapper() {}
+
+ void RenderFrame(const cricket::VideoFrame* video_frame) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jobject j_frame = (video_frame->GetNativeHandle() != nullptr)
+ ? CricketToJavaTextureFrame(video_frame)
+ : CricketToJavaI420Frame(video_frame);
+ // |j_callbacks_| is responsible for releasing |j_frame| with
+ // VideoRenderer.renderFrameDone().
+ jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
+ CHECK_EXCEPTION(jni());
+ }
+
+ private:
+  // Make a shallow copy of |frame| to be used with Java. Ownership passes to
+  // the Java side, which must release the frame with
+  // VideoRenderer.releaseNativeFrame().
+ static jlong javaShallowCopy(const cricket::VideoFrame* frame) {
+ return jlongFromPointer(frame->Copy());
+ }
+
+ // Return a VideoRenderer.I420Frame referring to the data in |frame|.
+ jobject CricketToJavaI420Frame(const cricket::VideoFrame* frame) {
+ jintArray strides = jni()->NewIntArray(3);
+ jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
+ strides_array[0] = frame->GetYPitch();
+ strides_array[1] = frame->GetUPitch();
+ strides_array[2] = frame->GetVPitch();
+ jni()->ReleaseIntArrayElements(strides, strides_array, 0);
+ jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL);
+ jobject y_buffer =
+ jni()->NewDirectByteBuffer(const_cast<uint8_t*>(frame->GetYPlane()),
+ frame->GetYPitch() * frame->GetHeight());
+ jobject u_buffer = jni()->NewDirectByteBuffer(
+ const_cast<uint8_t*>(frame->GetUPlane()), frame->GetChromaSize());
+ jobject v_buffer = jni()->NewDirectByteBuffer(
+ const_cast<uint8_t*>(frame->GetVPlane()), frame->GetChromaSize());
+ jni()->SetObjectArrayElement(planes, 0, y_buffer);
+ jni()->SetObjectArrayElement(planes, 1, u_buffer);
+ jni()->SetObjectArrayElement(planes, 2, v_buffer);
+ return jni()->NewObject(
+ *j_frame_class_, j_i420_frame_ctor_id_,
+ frame->GetWidth(), frame->GetHeight(),
+ static_cast<int>(frame->GetVideoRotation()),
+ strides, planes, javaShallowCopy(frame));
+ }
+
+  // Return a VideoRenderer.I420Frame referring to the texture object in
+  // |frame|.
+ jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
+ NativeHandleImpl* handle =
+ reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
+ jfloatArray sampling_matrix = jni()->NewFloatArray(16);
+ jni()->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
+ return jni()->NewObject(
+ *j_frame_class_, j_texture_frame_ctor_id_,
+ frame->GetWidth(), frame->GetHeight(),
+ static_cast<int>(frame->GetVideoRotation()),
+ handle->oes_texture_id, sampling_matrix, javaShallowCopy(frame));
+ }
+
+ JNIEnv* jni() {
+ return AttachCurrentThreadIfNeeded();
+ }
+
+ ScopedGlobalRef<jobject> j_callbacks_;
+ jmethodID j_render_frame_id_;
+ ScopedGlobalRef<jclass> j_frame_class_;
+ jmethodID j_i420_frame_ctor_id_;
+ jmethodID j_texture_frame_ctor_id_;
+ ScopedGlobalRef<jclass> j_byte_buffer_class_;
+};
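+
+// Illustrative sketch (not part of the original CL): a wrapped renderer is
+// attached and detached via the VideoTrack natives further below, and must be
+// removed from the track before it is deleted.
+static void AttachJavaRendererForIllustration(
+    JNIEnv* jni, VideoTrackInterface* track, jobject j_callbacks) {
+  JavaVideoRendererWrapper* renderer =
+      new JavaVideoRendererWrapper(jni, j_callbacks);
+  track->AddRenderer(renderer);  // RenderFrame() now dispatches to Java.
+  track->RemoveRenderer(renderer);
+  delete renderer;
+}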
+
+static DataChannelInterface* ExtractNativeDC(JNIEnv* jni, jobject j_dc) {
+ jfieldID native_dc_id = GetFieldID(jni,
+ GetObjectClass(jni, j_dc), "nativeDataChannel", "J");
+ jlong j_d = GetLongField(jni, j_dc, native_dc_id);
+ return reinterpret_cast<DataChannelInterface*>(j_d);
+}
+
+JOW(jlong, DataChannel_registerObserverNative)(
+ JNIEnv* jni, jobject j_dc, jobject j_observer) {
+ scoped_ptr<DataChannelObserverWrapper> observer(
+ new DataChannelObserverWrapper(jni, j_observer));
+ ExtractNativeDC(jni, j_dc)->RegisterObserver(observer.get());
+ return jlongFromPointer(observer.release());
+}
+
+JOW(void, DataChannel_unregisterObserverNative)(
+ JNIEnv* jni, jobject j_dc, jlong native_observer) {
+ ExtractNativeDC(jni, j_dc)->UnregisterObserver();
+ delete reinterpret_cast<DataChannelObserverWrapper*>(native_observer);
+}
+
+JOW(jstring, DataChannel_label)(JNIEnv* jni, jobject j_dc) {
+ return JavaStringFromStdString(jni, ExtractNativeDC(jni, j_dc)->label());
+}
+
+JOW(jobject, DataChannel_state)(JNIEnv* jni, jobject j_dc) {
+ return JavaEnumFromIndex(
+ jni, "DataChannel$State", ExtractNativeDC(jni, j_dc)->state());
+}
+
+JOW(jlong, DataChannel_bufferedAmount)(JNIEnv* jni, jobject j_dc) {
+ uint64_t buffered_amount = ExtractNativeDC(jni, j_dc)->buffered_amount();
+ RTC_CHECK_LE(buffered_amount, std::numeric_limits<int64_t>::max())
+ << "buffered_amount overflowed jlong!";
+ return static_cast<jlong>(buffered_amount);
+}
+
+JOW(void, DataChannel_close)(JNIEnv* jni, jobject j_dc) {
+ ExtractNativeDC(jni, j_dc)->Close();
+}
+
+JOW(jboolean, DataChannel_sendNative)(JNIEnv* jni, jobject j_dc,
+ jbyteArray data, jboolean binary) {
+ jbyte* bytes = jni->GetByteArrayElements(data, NULL);
+ bool ret = ExtractNativeDC(jni, j_dc)->Send(DataBuffer(
+ rtc::Buffer(bytes, jni->GetArrayLength(data)),
+ binary));
+ jni->ReleaseByteArrayElements(data, bytes, JNI_ABORT);
+ return ret;
+}
+
+JOW(void, DataChannel_dispose)(JNIEnv* jni, jobject j_dc) {
+ CHECK_RELEASE(ExtractNativeDC(jni, j_dc));
+}
+
+JOW(void, Logging_nativeEnableTracing)(
+ JNIEnv* jni, jclass, jstring j_path, jint nativeLevels,
+ jint nativeSeverity) {
+ std::string path = JavaToStdString(jni, j_path);
+ if (nativeLevels != webrtc::kTraceNone) {
+ webrtc::Trace::set_level_filter(nativeLevels);
+ if (path != "logcat:") {
+ RTC_CHECK_EQ(0, webrtc::Trace::SetTraceFile(path.c_str(), false))
+ << "SetTraceFile failed";
+ } else {
+ // Intentionally leak this to avoid needing to reason about its lifecycle.
+ // It keeps no state and functions only as a dispatch point.
+ static LogcatTraceContext* g_trace_callback = new LogcatTraceContext();
+ }
+ }
+ if (nativeSeverity >= rtc::LS_SENSITIVE && nativeSeverity <= rtc::LS_ERROR) {
+ rtc::LogMessage::LogToDebug(
+ static_cast<rtc::LoggingSeverity>(nativeSeverity));
+ }
+}
+
+JOW(void, Logging_nativeEnableLogThreads)(JNIEnv* jni, jclass) {
+ rtc::LogMessage::LogThreads(true);
+}
+
+JOW(void, Logging_nativeEnableLogTimeStamps)(JNIEnv* jni, jclass) {
+ rtc::LogMessage::LogTimestamps(true);
+}
+
+JOW(void, Logging_nativeLog)(
+ JNIEnv* jni, jclass, jint j_severity, jstring j_tag, jstring j_message) {
+ std::string message = JavaToStdString(jni, j_message);
+ std::string tag = JavaToStdString(jni, j_tag);
+ LOG_TAG(static_cast<rtc::LoggingSeverity>(j_severity), tag) << message;
+}
+
+JOW(void, PeerConnection_freePeerConnection)(JNIEnv*, jclass, jlong j_p) {
+ CHECK_RELEASE(reinterpret_cast<PeerConnectionInterface*>(j_p));
+}
+
+JOW(void, PeerConnection_freeObserver)(JNIEnv*, jclass, jlong j_p) {
+ PCOJava* p = reinterpret_cast<PCOJava*>(j_p);
+ delete p;
+}
+
+JOW(void, MediaSource_free)(JNIEnv*, jclass, jlong j_p) {
+ CHECK_RELEASE(reinterpret_cast<MediaSourceInterface*>(j_p));
+}
+
+JOW(void, VideoCapturer_free)(JNIEnv*, jclass, jlong j_p) {
+ delete reinterpret_cast<cricket::VideoCapturer*>(j_p);
+}
+
+JOW(void, VideoRenderer_freeWrappedVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
+ delete reinterpret_cast<JavaVideoRendererWrapper*>(j_p);
+}
+
+JOW(void, VideoRenderer_releaseNativeFrame)(
+ JNIEnv* jni, jclass, jlong j_frame_ptr) {
+ delete reinterpret_cast<const cricket::VideoFrame*>(j_frame_ptr);
+}
+
+JOW(void, MediaStreamTrack_free)(JNIEnv*, jclass, jlong j_p) {
+ reinterpret_cast<MediaStreamTrackInterface*>(j_p)->Release();
+}
+
+JOW(jboolean, MediaStream_nativeAddAudioTrack)(
+ JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
+ reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
+}
+
+JOW(jboolean, MediaStream_nativeAddVideoTrack)(
+ JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)
+ ->AddTrack(reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
+}
+
+JOW(jboolean, MediaStream_nativeRemoveAudioTrack)(
+ JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
+ reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
+}
+
+JOW(jboolean, MediaStream_nativeRemoveVideoTrack)(
+ JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
+ reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
+}
+
+JOW(jstring, MediaStream_nativeLabel)(JNIEnv* jni, jclass, jlong j_p) {
+ return JavaStringFromStdString(
+ jni, reinterpret_cast<MediaStreamInterface*>(j_p)->label());
+}
+
+JOW(void, MediaStream_free)(JNIEnv*, jclass, jlong j_p) {
+ CHECK_RELEASE(reinterpret_cast<MediaStreamInterface*>(j_p));
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateObserver)(
+ JNIEnv * jni, jclass, jobject j_observer) {
+ return (jlong)new PCOJava(jni, j_observer);
+}
+
+JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
+ JNIEnv* jni, jclass, jobject context,
+ jboolean initialize_audio, jboolean initialize_video,
+ jboolean video_hw_acceleration) {
+ bool failure = false;
+ video_hw_acceleration_enabled = video_hw_acceleration;
+ AndroidNetworkMonitor::SetAndroidContext(jni, context);
+ if (!factory_static_initialized) {
+ if (initialize_video) {
+ failure |= AndroidVideoCapturerJni::SetAndroidObjects(jni, context);
+ }
+ if (initialize_audio)
+ failure |= webrtc::VoiceEngine::SetAndroidObjects(GetJVM(), context);
+ factory_static_initialized = true;
+ }
+ return !failure;
+}
+
+JOW(void, PeerConnectionFactory_initializeFieldTrials)(
+ JNIEnv* jni, jclass, jstring j_trials_init_string) {
+ field_trials_init_string = NULL;
+ if (j_trials_init_string != NULL) {
+ const char* init_string =
+ jni->GetStringUTFChars(j_trials_init_string, NULL);
+ int init_string_length = jni->GetStringUTFLength(j_trials_init_string);
+ field_trials_init_string = new char[init_string_length + 1];
+ rtc::strcpyn(field_trials_init_string, init_string_length + 1, init_string);
+ jni->ReleaseStringUTFChars(j_trials_init_string, init_string);
+ LOG(LS_INFO) << "initializeFieldTrials: " << field_trials_init_string;
+ }
+ webrtc::field_trial::InitFieldTrialsFromString(field_trials_init_string);
+}
+
+JOW(void, PeerConnectionFactory_initializeInternalTracer)(JNIEnv* jni, jclass) {
+ rtc::tracing::SetupInternalTracer();
+}
+
+JOW(jboolean, PeerConnectionFactory_startInternalTracingCapture)(
+ JNIEnv* jni, jclass, jstring j_event_tracing_filename) {
+ if (!j_event_tracing_filename)
+ return false;
+
+ const char* init_string =
+ jni->GetStringUTFChars(j_event_tracing_filename, NULL);
+ LOG(LS_INFO) << "Starting internal tracing to: " << init_string;
+ bool ret = rtc::tracing::StartInternalCapture(init_string);
+ jni->ReleaseStringUTFChars(j_event_tracing_filename, init_string);
+ return ret;
+}
+
+JOW(void, PeerConnectionFactory_stopInternalTracingCapture)(
+ JNIEnv* jni, jclass) {
+ rtc::tracing::StopInternalCapture();
+}
+
+JOW(void, PeerConnectionFactory_shutdownInternalTracer)(JNIEnv* jni, jclass) {
+ rtc::tracing::ShutdownInternalTracer();
+}
+
+// Helper struct for working around the fact that CreatePeerConnectionFactory()
+// comes in two flavors: either entirely automagical (constructing its own
+// threads and deleting them on teardown, but no external codec factory support)
+// or entirely manual (requires caller to delete threads after factory
+// teardown). This struct takes ownership of its ctor's arguments to present a
+// single thing for Java to hold and eventually free.
+class OwnedFactoryAndThreads {
+ public:
+ OwnedFactoryAndThreads(Thread* worker_thread,
+ Thread* signaling_thread,
+ WebRtcVideoEncoderFactory* encoder_factory,
+ WebRtcVideoDecoderFactory* decoder_factory,
+ rtc::NetworkMonitorFactory* network_monitor_factory,
+ PeerConnectionFactoryInterface* factory)
+ : worker_thread_(worker_thread),
+ signaling_thread_(signaling_thread),
+ encoder_factory_(encoder_factory),
+ decoder_factory_(decoder_factory),
+ network_monitor_factory_(network_monitor_factory),
+ factory_(factory) {}
+
+ ~OwnedFactoryAndThreads() {
+ CHECK_RELEASE(factory_);
+ if (network_monitor_factory_ != nullptr) {
+ rtc::NetworkMonitorFactory::ReleaseFactory(network_monitor_factory_);
+ }
+ }
+
+ PeerConnectionFactoryInterface* factory() { return factory_; }
+ WebRtcVideoEncoderFactory* encoder_factory() { return encoder_factory_; }
+ WebRtcVideoDecoderFactory* decoder_factory() { return decoder_factory_; }
+ rtc::NetworkMonitorFactory* network_monitor_factory() {
+ return network_monitor_factory_;
+ }
+ void clear_network_monitor_factory() { network_monitor_factory_ = nullptr; }
+ void InvokeJavaCallbacksOnFactoryThreads();
+
+ private:
+ void JavaCallbackOnFactoryThreads();
+
+ const scoped_ptr<Thread> worker_thread_;
+ const scoped_ptr<Thread> signaling_thread_;
+ WebRtcVideoEncoderFactory* encoder_factory_;
+ WebRtcVideoDecoderFactory* decoder_factory_;
+ rtc::NetworkMonitorFactory* network_monitor_factory_;
+ PeerConnectionFactoryInterface* factory_; // Const after ctor except dtor.
+};
+
+void OwnedFactoryAndThreads::JavaCallbackOnFactoryThreads() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ jclass j_factory_class = FindClass(jni, "org/webrtc/PeerConnectionFactory");
+ jmethodID m = nullptr;
+ if (Thread::Current() == worker_thread_) {
+ LOG(LS_INFO) << "Worker thread JavaCallback";
+ m = GetStaticMethodID(jni, j_factory_class, "onWorkerThreadReady", "()V");
+ }
+ if (Thread::Current() == signaling_thread_) {
+ LOG(LS_INFO) << "Signaling thread JavaCallback";
+ m = GetStaticMethodID(
+ jni, j_factory_class, "onSignalingThreadReady", "()V");
+ }
+ if (m != nullptr) {
+ jni->CallStaticVoidMethod(j_factory_class, m);
+ CHECK_EXCEPTION(jni) << "error during JavaCallback::CallStaticVoidMethod";
+ }
+}
+
+void OwnedFactoryAndThreads::InvokeJavaCallbacksOnFactoryThreads() {
+ LOG(LS_INFO) << "InvokeJavaCallbacksOnFactoryThreads.";
+ worker_thread_->Invoke<void>(
+ Bind(&OwnedFactoryAndThreads::JavaCallbackOnFactoryThreads, this));
+ signaling_thread_->Invoke<void>(
+ Bind(&OwnedFactoryAndThreads::JavaCallbackOnFactoryThreads, this));
+}
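+
+// Illustrative sketch (not part of the original CL): the jlong handed to Java
+// by nativeCreatePeerConnectionFactory below is a pointer to this struct, so
+// teardown is a single delete that releases the factory and joins the owned
+// threads through the scoped_ptr members.
+static void FreeOwnedFactoryForIllustration(jlong j_native_factory) {
+  delete reinterpret_cast<OwnedFactoryAndThreads*>(j_native_factory);
+}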
+
+PeerConnectionFactoryInterface::Options ParseOptionsFromJava(JNIEnv* jni,
+ jobject options) {
+ jclass options_class = jni->GetObjectClass(options);
+ jfieldID network_ignore_mask_field =
+ jni->GetFieldID(options_class, "networkIgnoreMask", "I");
+ int network_ignore_mask =
+ jni->GetIntField(options, network_ignore_mask_field);
+
+ jfieldID disable_encryption_field =
+ jni->GetFieldID(options_class, "disableEncryption", "Z");
+ bool disable_encryption =
+ jni->GetBooleanField(options, disable_encryption_field);
+
+ jfieldID disable_network_monitor_field =
+ jni->GetFieldID(options_class, "disableNetworkMonitor", "Z");
+ bool disable_network_monitor =
+ jni->GetBooleanField(options, disable_network_monitor_field);
+
+ PeerConnectionFactoryInterface::Options native_options;
+
+  // This doesn't necessarily match the C++ version of this struct; feel free
+  // to add more parameters as necessary.
+ native_options.network_ignore_mask = network_ignore_mask;
+ native_options.disable_encryption = disable_encryption;
+ native_options.disable_network_monitor = disable_network_monitor;
+ return native_options;
+}
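+
+// Illustrative sketch (not part of the original CL): extending the parser to
+// a hypothetical new boolean field on the Java Options class would follow the
+// same reflection pattern as the fields above.
+static bool ParseHypotheticalBoolOptionForIllustration(
+    JNIEnv* jni, jobject options, const char* field_name) {
+  jclass options_class = jni->GetObjectClass(options);
+  jfieldID field_id = jni->GetFieldID(options_class, field_name, "Z");
+  return jni->GetBooleanField(options, field_id);
+}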
+
+JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)(
+ JNIEnv* jni, jclass, jobject joptions) {
+ // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
+ // ThreadManager only WrapCurrentThread()s the thread where it is first
+ // created. Since the semantics around when auto-wrapping happens in
+ // webrtc/base/ are convoluted, we simply wrap here to avoid having to think
+ // about ramifications of auto-wrapping there.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+ webrtc::Trace::CreateTrace();
+ Thread* worker_thread = new Thread();
+ worker_thread->SetName("worker_thread", NULL);
+ Thread* signaling_thread = new Thread();
+ signaling_thread->SetName("signaling_thread", NULL);
+ RTC_CHECK(worker_thread->Start() && signaling_thread->Start())
+ << "Failed to start threads";
+ WebRtcVideoEncoderFactory* encoder_factory = nullptr;
+ WebRtcVideoDecoderFactory* decoder_factory = nullptr;
+ rtc::NetworkMonitorFactory* network_monitor_factory = nullptr;
+
+ PeerConnectionFactoryInterface::Options options;
+ bool has_options = joptions != NULL;
+ if (has_options) {
+ options = ParseOptionsFromJava(jni, joptions);
+ }
+
+ if (video_hw_acceleration_enabled) {
+ encoder_factory = new MediaCodecVideoEncoderFactory();
+ decoder_factory = new MediaCodecVideoDecoderFactory();
+ }
+  // Create the network_monitor_factory unless the options are provided and
+  // disable_network_monitor therein is set to true.
+ if (!(has_options && options.disable_network_monitor)) {
+ network_monitor_factory = new AndroidNetworkMonitorFactory();
+ rtc::NetworkMonitorFactory::SetFactory(network_monitor_factory);
+ }
+
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ webrtc::CreatePeerConnectionFactory(worker_thread,
+ signaling_thread,
+ NULL,
+ encoder_factory,
+ decoder_factory));
+ RTC_CHECK(factory) << "Failed to create the peer connection factory; "
+ << "WebRTC/libjingle init likely failed on this device";
+ // TODO(honghaiz): Maybe put the options as the argument of
+ // CreatePeerConnectionFactory.
+ if (has_options) {
+ factory->SetOptions(options);
+ }
+ OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads(
+ worker_thread, signaling_thread,
+ encoder_factory, decoder_factory,
+ network_monitor_factory, factory.release());
+ owned_factory->InvokeJavaCallbacksOnFactoryThreads();
+ return jlongFromPointer(owned_factory);
+}
+
+JOW(void, PeerConnectionFactory_nativeFreeFactory)(JNIEnv*, jclass, jlong j_p) {
+ delete reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
+ if (field_trials_init_string) {
+ webrtc::field_trial::InitFieldTrialsFromString(NULL);
+    delete[] field_trials_init_string;  // Allocated with new[] above.
+ field_trials_init_string = NULL;
+ }
+ webrtc::Trace::ReturnTrace();
+}
+
+static PeerConnectionFactoryInterface* factoryFromJava(jlong j_p) {
+ return reinterpret_cast<OwnedFactoryAndThreads*>(j_p)->factory();
+}
+
+JOW(void, PeerConnectionFactory_nativeThreadsCallbacks)(
+ JNIEnv*, jclass, jlong j_p) {
+ OwnedFactoryAndThreads *factory =
+ reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
+ factory->InvokeJavaCallbacksOnFactoryThreads();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateLocalMediaStream)(
+ JNIEnv* jni, jclass, jlong native_factory, jstring label) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ rtc::scoped_refptr<MediaStreamInterface> stream(
+ factory->CreateLocalMediaStream(JavaToStdString(jni, label)));
+ return (jlong)stream.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)(
+ JNIEnv* jni, jclass, jlong native_factory, jlong native_capturer,
+ jobject j_constraints) {
+ scoped_ptr<ConstraintsWrapper> constraints(
+ new ConstraintsWrapper(jni, j_constraints));
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ rtc::scoped_refptr<VideoSourceInterface> source(
+ factory->CreateVideoSource(
+ reinterpret_cast<cricket::VideoCapturer*>(native_capturer),
+ constraints.get()));
+ return (jlong)source.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateVideoTrack)(
+ JNIEnv* jni, jclass, jlong native_factory, jstring id,
+ jlong native_source) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ rtc::scoped_refptr<VideoTrackInterface> track(
+ factory->CreateVideoTrack(
+ JavaToStdString(jni, id),
+ reinterpret_cast<VideoSourceInterface*>(native_source)));
+ return (jlong)track.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateAudioSource)(
+ JNIEnv* jni, jclass, jlong native_factory, jobject j_constraints) {
+ scoped_ptr<ConstraintsWrapper> constraints(
+ new ConstraintsWrapper(jni, j_constraints));
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ rtc::scoped_refptr<AudioSourceInterface> source(
+ factory->CreateAudioSource(constraints.get()));
+ return (jlong)source.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateAudioTrack)(
+ JNIEnv* jni, jclass, jlong native_factory, jstring id,
+ jlong native_source) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ rtc::scoped_refptr<AudioTrackInterface> track(factory->CreateAudioTrack(
+ JavaToStdString(jni, id),
+ reinterpret_cast<AudioSourceInterface*>(native_source)));
+ return (jlong)track.release();
+}
+
+JOW(jboolean, PeerConnectionFactory_nativeStartAecDump)(
+ JNIEnv* jni, jclass, jlong native_factory, jint file,
+ jint filesize_limit_bytes) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ return factory->StartAecDump(file, filesize_limit_bytes);
+}
+
+JOW(void, PeerConnectionFactory_nativeStopAecDump)(
+ JNIEnv* jni, jclass, jlong native_factory) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ factory->StopAecDump();
+}
+
+JOW(jboolean, PeerConnectionFactory_nativeStartRtcEventLog)(
+ JNIEnv* jni, jclass, jlong native_factory, jint file) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ return factory->StartRtcEventLog(file);
+}
+
+JOW(void, PeerConnectionFactory_nativeStopRtcEventLog)(
+ JNIEnv* jni, jclass, jlong native_factory) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ factory->StopRtcEventLog();
+}
+
+JOW(void, PeerConnectionFactory_nativeSetOptions)(
+ JNIEnv* jni, jclass, jlong native_factory, jobject options) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ PeerConnectionFactoryInterface::Options options_to_set =
+ ParseOptionsFromJava(jni, options);
+ factory->SetOptions(options_to_set);
+
+ if (options_to_set.disable_network_monitor) {
+ OwnedFactoryAndThreads* owner =
+ reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+ if (owner->network_monitor_factory()) {
+ rtc::NetworkMonitorFactory::ReleaseFactory(
+ owner->network_monitor_factory());
+ owner->clear_network_monitor_factory();
+ }
+ }
+}
+
+JOW(void, PeerConnectionFactory_nativeSetVideoHwAccelerationOptions)(
+ JNIEnv* jni, jclass, jlong native_factory, jobject local_egl_context,
+ jobject remote_egl_context) {
+ OwnedFactoryAndThreads* owned_factory =
+ reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+
+ jclass j_eglbase14_context_class =
+ FindClass(jni, "org/webrtc/EglBase14$Context");
+
+ MediaCodecVideoEncoderFactory* encoder_factory =
+ static_cast<MediaCodecVideoEncoderFactory*>
+ (owned_factory->encoder_factory());
+ if (encoder_factory &&
+ jni->IsInstanceOf(local_egl_context, j_eglbase14_context_class)) {
+ LOG(LS_INFO) << "Set EGL context for HW encoding.";
+ encoder_factory->SetEGLContext(jni, local_egl_context);
+ }
+
+ MediaCodecVideoDecoderFactory* decoder_factory =
+ static_cast<MediaCodecVideoDecoderFactory*>
+ (owned_factory->decoder_factory());
+ if (decoder_factory &&
+ jni->IsInstanceOf(remote_egl_context, j_eglbase14_context_class)) {
+ LOG(LS_INFO) << "Set EGL context for HW decoding.";
+ decoder_factory->SetEGLContext(jni, remote_egl_context);
+ }
+}
+
+static PeerConnectionInterface::IceTransportsType
+JavaIceTransportsTypeToNativeType(JNIEnv* jni, jobject j_ice_transports_type) {
+ std::string enum_name = GetJavaEnumName(
+ jni, "org/webrtc/PeerConnection$IceTransportsType",
+ j_ice_transports_type);
+
+ if (enum_name == "ALL")
+ return PeerConnectionInterface::kAll;
+
+ if (enum_name == "RELAY")
+ return PeerConnectionInterface::kRelay;
+
+ if (enum_name == "NOHOST")
+ return PeerConnectionInterface::kNoHost;
+
+ if (enum_name == "NONE")
+ return PeerConnectionInterface::kNone;
+
+ RTC_CHECK(false) << "Unexpected IceTransportsType enum_name " << enum_name;
+ return PeerConnectionInterface::kAll;
+}
+
+static PeerConnectionInterface::BundlePolicy
+JavaBundlePolicyToNativeType(JNIEnv* jni, jobject j_bundle_policy) {
+ std::string enum_name = GetJavaEnumName(
+ jni, "org/webrtc/PeerConnection$BundlePolicy",
+ j_bundle_policy);
+
+ if (enum_name == "BALANCED")
+ return PeerConnectionInterface::kBundlePolicyBalanced;
+
+ if (enum_name == "MAXBUNDLE")
+ return PeerConnectionInterface::kBundlePolicyMaxBundle;
+
+ if (enum_name == "MAXCOMPAT")
+ return PeerConnectionInterface::kBundlePolicyMaxCompat;
+
+ RTC_CHECK(false) << "Unexpected BundlePolicy enum_name " << enum_name;
+ return PeerConnectionInterface::kBundlePolicyBalanced;
+}
+
+static PeerConnectionInterface::RtcpMuxPolicy
+JavaRtcpMuxPolicyToNativeType(JNIEnv* jni, jobject j_rtcp_mux_policy) {
+ std::string enum_name = GetJavaEnumName(
+ jni, "org/webrtc/PeerConnection$RtcpMuxPolicy",
+ j_rtcp_mux_policy);
+
+ if (enum_name == "NEGOTIATE")
+ return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+
+ if (enum_name == "REQUIRE")
+ return PeerConnectionInterface::kRtcpMuxPolicyRequire;
+
+ RTC_CHECK(false) << "Unexpected RtcpMuxPolicy enum_name " << enum_name;
+ return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+}
+
+static PeerConnectionInterface::TcpCandidatePolicy
+JavaTcpCandidatePolicyToNativeType(
+ JNIEnv* jni, jobject j_tcp_candidate_policy) {
+ std::string enum_name = GetJavaEnumName(
+ jni, "org/webrtc/PeerConnection$TcpCandidatePolicy",
+ j_tcp_candidate_policy);
+
+ if (enum_name == "ENABLED")
+ return PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+
+ if (enum_name == "DISABLED")
+ return PeerConnectionInterface::kTcpCandidatePolicyDisabled;
+
+ RTC_CHECK(false) << "Unexpected TcpCandidatePolicy enum_name " << enum_name;
+ return PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+}
+
+static rtc::KeyType JavaKeyTypeToNativeType(JNIEnv* jni, jobject j_key_type) {
+ std::string enum_name = GetJavaEnumName(
+ jni, "org/webrtc/PeerConnection$KeyType", j_key_type);
+
+ if (enum_name == "RSA")
+ return rtc::KT_RSA;
+ if (enum_name == "ECDSA")
+ return rtc::KT_ECDSA;
+
+ RTC_CHECK(false) << "Unexpected KeyType enum_name " << enum_name;
+ return rtc::KT_ECDSA;
+}
+
+static PeerConnectionInterface::ContinualGatheringPolicy
+JavaContinualGatheringPolicyToNativeType(JNIEnv* jni,
+                                         jobject j_gathering_policy) {
+ std::string enum_name = GetJavaEnumName(
+ jni, "org/webrtc/PeerConnection$ContinualGatheringPolicy",
+ j_gathering_policy);
+ if (enum_name == "GATHER_ONCE")
+ return PeerConnectionInterface::GATHER_ONCE;
+
+ if (enum_name == "GATHER_CONTINUALLY")
+ return PeerConnectionInterface::GATHER_CONTINUALLY;
+
+ RTC_CHECK(false) << "Unexpected ContinualGatheringPolicy enum name "
+ << enum_name;
+ return PeerConnectionInterface::GATHER_ONCE;
+}
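+
+// Illustrative sketch (not part of the original CL): every converter above
+// shares one shape, so a converter for a hypothetical new Java enum would
+// read the name via GetJavaEnumName and branch on the string:
+//
+//   std::string enum_name = GetJavaEnumName(
+//       jni, "org/webrtc/PeerConnection$SomePolicy", j_some_policy);
+//   if (enum_name == "FOO")
+//     return PeerConnectionInterface::kFoo;
+//   RTC_CHECK(false) << "Unexpected SomePolicy enum_name " << enum_name;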
+
+static void JavaIceServersToJsepIceServers(
+ JNIEnv* jni, jobject j_ice_servers,
+ PeerConnectionInterface::IceServers* ice_servers) {
+ jclass list_class = GetObjectClass(jni, j_ice_servers);
+ jmethodID iterator_id = GetMethodID(
+ jni, list_class, "iterator", "()Ljava/util/Iterator;");
+ jobject iterator = jni->CallObjectMethod(j_ice_servers, iterator_id);
+ CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+ jmethodID iterator_has_next = GetMethodID(
+ jni, GetObjectClass(jni, iterator), "hasNext", "()Z");
+ jmethodID iterator_next = GetMethodID(
+ jni, GetObjectClass(jni, iterator), "next", "()Ljava/lang/Object;");
+ while (jni->CallBooleanMethod(iterator, iterator_has_next)) {
+ CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+ jobject j_ice_server = jni->CallObjectMethod(iterator, iterator_next);
+ CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+ jclass j_ice_server_class = GetObjectClass(jni, j_ice_server);
+ jfieldID j_ice_server_uri_id =
+ GetFieldID(jni, j_ice_server_class, "uri", "Ljava/lang/String;");
+ jfieldID j_ice_server_username_id =
+ GetFieldID(jni, j_ice_server_class, "username", "Ljava/lang/String;");
+ jfieldID j_ice_server_password_id =
+ GetFieldID(jni, j_ice_server_class, "password", "Ljava/lang/String;");
+ jstring uri = reinterpret_cast<jstring>(
+ GetObjectField(jni, j_ice_server, j_ice_server_uri_id));
+ jstring username = reinterpret_cast<jstring>(
+ GetObjectField(jni, j_ice_server, j_ice_server_username_id));
+ jstring password = reinterpret_cast<jstring>(
+ GetObjectField(jni, j_ice_server, j_ice_server_password_id));
+ PeerConnectionInterface::IceServer server;
+ server.uri = JavaToStdString(jni, uri);
+ server.username = JavaToStdString(jni, username);
+ server.password = JavaToStdString(jni, password);
+ ice_servers->push_back(server);
+ }
+ CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+}
+
+static void JavaRTCConfigurationToJsepRTCConfiguration(
+ JNIEnv* jni,
+ jobject j_rtc_config,
+ PeerConnectionInterface::RTCConfiguration* rtc_config) {
+ jclass j_rtc_config_class = GetObjectClass(jni, j_rtc_config);
+
+ jfieldID j_ice_transports_type_id = GetFieldID(
+ jni, j_rtc_config_class, "iceTransportsType",
+ "Lorg/webrtc/PeerConnection$IceTransportsType;");
+ jobject j_ice_transports_type = GetObjectField(
+ jni, j_rtc_config, j_ice_transports_type_id);
+
+ jfieldID j_bundle_policy_id = GetFieldID(
+ jni, j_rtc_config_class, "bundlePolicy",
+ "Lorg/webrtc/PeerConnection$BundlePolicy;");
+ jobject j_bundle_policy = GetObjectField(
+ jni, j_rtc_config, j_bundle_policy_id);
+
+ jfieldID j_rtcp_mux_policy_id = GetFieldID(
+ jni, j_rtc_config_class, "rtcpMuxPolicy",
+ "Lorg/webrtc/PeerConnection$RtcpMuxPolicy;");
+ jobject j_rtcp_mux_policy = GetObjectField(
+ jni, j_rtc_config, j_rtcp_mux_policy_id);
+
+ jfieldID j_tcp_candidate_policy_id = GetFieldID(
+ jni, j_rtc_config_class, "tcpCandidatePolicy",
+ "Lorg/webrtc/PeerConnection$TcpCandidatePolicy;");
+ jobject j_tcp_candidate_policy = GetObjectField(
+ jni, j_rtc_config, j_tcp_candidate_policy_id);
+
+ jfieldID j_ice_servers_id = GetFieldID(
+ jni, j_rtc_config_class, "iceServers", "Ljava/util/List;");
+ jobject j_ice_servers = GetObjectField(jni, j_rtc_config, j_ice_servers_id);
+
+ jfieldID j_audio_jitter_buffer_max_packets_id =
+ GetFieldID(jni, j_rtc_config_class, "audioJitterBufferMaxPackets", "I");
+ jfieldID j_audio_jitter_buffer_fast_accelerate_id = GetFieldID(
+ jni, j_rtc_config_class, "audioJitterBufferFastAccelerate", "Z");
+
+ jfieldID j_ice_connection_receiving_timeout_id =
+ GetFieldID(jni, j_rtc_config_class, "iceConnectionReceivingTimeout", "I");
+
+ jfieldID j_ice_backup_candidate_pair_ping_interval_id = GetFieldID(
+ jni, j_rtc_config_class, "iceBackupCandidatePairPingInterval", "I");
+
+ jfieldID j_continual_gathering_policy_id =
+ GetFieldID(jni, j_rtc_config_class, "continualGatheringPolicy",
+ "Lorg/webrtc/PeerConnection$ContinualGatheringPolicy;");
+ jobject j_continual_gathering_policy =
+ GetObjectField(jni, j_rtc_config, j_continual_gathering_policy_id);
+
+ rtc_config->type =
+ JavaIceTransportsTypeToNativeType(jni, j_ice_transports_type);
+ rtc_config->bundle_policy =
+ JavaBundlePolicyToNativeType(jni, j_bundle_policy);
+ rtc_config->rtcp_mux_policy =
+ JavaRtcpMuxPolicyToNativeType(jni, j_rtcp_mux_policy);
+ rtc_config->tcp_candidate_policy =
+ JavaTcpCandidatePolicyToNativeType(jni, j_tcp_candidate_policy);
+ JavaIceServersToJsepIceServers(jni, j_ice_servers, &rtc_config->servers);
+ rtc_config->audio_jitter_buffer_max_packets =
+ GetIntField(jni, j_rtc_config, j_audio_jitter_buffer_max_packets_id);
+ rtc_config->audio_jitter_buffer_fast_accelerate = GetBooleanField(
+ jni, j_rtc_config, j_audio_jitter_buffer_fast_accelerate_id);
+ rtc_config->ice_connection_receiving_timeout =
+ GetIntField(jni, j_rtc_config, j_ice_connection_receiving_timeout_id);
+ rtc_config->ice_backup_candidate_pair_ping_interval = GetIntField(
+ jni, j_rtc_config, j_ice_backup_candidate_pair_ping_interval_id);
+ rtc_config->continual_gathering_policy =
+ JavaContinualGatheringPolicyToNativeType(
+ jni, j_continual_gathering_policy);
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnection)(
+ JNIEnv *jni, jclass, jlong factory, jobject j_rtc_config,
+ jobject j_constraints, jlong observer_p) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> f(
+ reinterpret_cast<PeerConnectionFactoryInterface*>(
+ factoryFromJava(factory)));
+
+ PeerConnectionInterface::RTCConfiguration rtc_config;
+ JavaRTCConfigurationToJsepRTCConfiguration(jni, j_rtc_config, &rtc_config);
+
+ jclass j_rtc_config_class = GetObjectClass(jni, j_rtc_config);
+ jfieldID j_key_type_id = GetFieldID(jni, j_rtc_config_class, "keyType",
+ "Lorg/webrtc/PeerConnection$KeyType;");
+ jobject j_key_type = GetObjectField(jni, j_rtc_config, j_key_type_id);
+
+ // Create ECDSA certificate.
+ if (JavaKeyTypeToNativeType(jni, j_key_type) == rtc::KT_ECDSA) {
+ scoped_ptr<rtc::SSLIdentity> ssl_identity(
+ rtc::SSLIdentity::Generate(webrtc::kIdentityName, rtc::KT_ECDSA));
+ if (ssl_identity.get()) {
+ rtc_config.certificates.push_back(
+ rtc::RTCCertificate::Create(std::move(ssl_identity)));
+ LOG(LS_INFO) << "ECDSA certificate created.";
+ } else {
+ // Failing to create certificate should not abort peer connection
+ // creation. Instead default encryption (currently RSA) will be used.
+ LOG(LS_WARNING) <<
+ "Failed to generate SSLIdentity. Default encryption will be used.";
+ }
+ }
+
+ PCOJava* observer = reinterpret_cast<PCOJava*>(observer_p);
+ observer->SetConstraints(new ConstraintsWrapper(jni, j_constraints));
+ rtc::scoped_refptr<PeerConnectionInterface> pc(f->CreatePeerConnection(
+ rtc_config, observer->constraints(), NULL, NULL, observer));
+ return (jlong)pc.release();
+}
+
+static rtc::scoped_refptr<PeerConnectionInterface> ExtractNativePC(
+ JNIEnv* jni, jobject j_pc) {
+ jfieldID native_pc_id = GetFieldID(jni,
+ GetObjectClass(jni, j_pc), "nativePeerConnection", "J");
+ jlong j_p = GetLongField(jni, j_pc, native_pc_id);
+ return rtc::scoped_refptr<PeerConnectionInterface>(
+ reinterpret_cast<PeerConnectionInterface*>(j_p));
+}
+
+JOW(jobject, PeerConnection_getLocalDescription)(JNIEnv* jni, jobject j_pc) {
+ const SessionDescriptionInterface* sdp =
+ ExtractNativePC(jni, j_pc)->local_description();
+ return sdp ? JavaSdpFromNativeSdp(jni, sdp) : NULL;
+}
+
+JOW(jobject, PeerConnection_getRemoteDescription)(JNIEnv* jni, jobject j_pc) {
+ const SessionDescriptionInterface* sdp =
+ ExtractNativePC(jni, j_pc)->remote_description();
+ return sdp ? JavaSdpFromNativeSdp(jni, sdp) : NULL;
+}
+
+JOW(jobject, PeerConnection_createDataChannel)(
+ JNIEnv* jni, jobject j_pc, jstring j_label, jobject j_init) {
+ DataChannelInit init = JavaDataChannelInitToNative(jni, j_init);
+ rtc::scoped_refptr<DataChannelInterface> channel(
+ ExtractNativePC(jni, j_pc)->CreateDataChannel(
+ JavaToStdString(jni, j_label), &init));
+  // Mustn't pass channel.get() directly through NewObject: on 32-bit targets
+  // the pointer would be read as a 64-bit vararg, pulling in memory beyond
+  // the 32-bit parameter. Widen it to jlong explicitly instead.
+ jlong nativeChannelPtr = jlongFromPointer(channel.get());
+ RTC_CHECK(nativeChannelPtr) << "Failed to create DataChannel";
+ jclass j_data_channel_class = FindClass(jni, "org/webrtc/DataChannel");
+ jmethodID j_data_channel_ctor = GetMethodID(
+ jni, j_data_channel_class, "<init>", "(J)V");
+ jobject j_channel = jni->NewObject(
+ j_data_channel_class, j_data_channel_ctor, nativeChannelPtr);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ // Channel is now owned by Java object, and will be freed from there.
+ int bumped_count = channel->AddRef();
+ RTC_CHECK(bumped_count == 2) << "Unexpected refcount";
+ return j_channel;
+}
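+
+// Illustrative sketch (not part of the original CL): the widening above is
+// the general rule for any native pointer passed through JNI varargs against
+// a "(J)V"-style signature:
+//
+//   jlong native_ptr = jlongFromPointer(native_object);  // explicit 64-bit
+//   jni->NewObject(clazz, ctor_taking_jlong, native_ptr);
+//
+// Passing native_object directly would be read as a 64-bit vararg even on
+// 32-bit targets, pulling in adjacent garbage bits.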
+
+JOW(void, PeerConnection_createOffer)(
+ JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
+ ConstraintsWrapper* constraints =
+ new ConstraintsWrapper(jni, j_constraints);
+ rtc::scoped_refptr<CreateSdpObserverWrapper> observer(
+ new rtc::RefCountedObject<CreateSdpObserverWrapper>(
+ jni, j_observer, constraints));
+ ExtractNativePC(jni, j_pc)->CreateOffer(observer, constraints);
+}
+
+JOW(void, PeerConnection_createAnswer)(
+ JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
+ ConstraintsWrapper* constraints =
+ new ConstraintsWrapper(jni, j_constraints);
+ rtc::scoped_refptr<CreateSdpObserverWrapper> observer(
+ new rtc::RefCountedObject<CreateSdpObserverWrapper>(
+ jni, j_observer, constraints));
+ ExtractNativePC(jni, j_pc)->CreateAnswer(observer, constraints);
+}
+
+// Helper to create a SessionDescriptionInterface from a SessionDescription.
+static SessionDescriptionInterface* JavaSdpToNativeSdp(
+ JNIEnv* jni, jobject j_sdp) {
+ jfieldID j_type_id = GetFieldID(
+ jni, GetObjectClass(jni, j_sdp), "type",
+ "Lorg/webrtc/SessionDescription$Type;");
+ jobject j_type = GetObjectField(jni, j_sdp, j_type_id);
+ jmethodID j_canonical_form_id = GetMethodID(
+ jni, GetObjectClass(jni, j_type), "canonicalForm",
+ "()Ljava/lang/String;");
+ jstring j_type_string = (jstring)jni->CallObjectMethod(
+ j_type, j_canonical_form_id);
+ CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+ std::string std_type = JavaToStdString(jni, j_type_string);
+
+ jfieldID j_description_id = GetFieldID(
+ jni, GetObjectClass(jni, j_sdp), "description", "Ljava/lang/String;");
+ jstring j_description = (jstring)GetObjectField(jni, j_sdp, j_description_id);
+ std::string std_description = JavaToStdString(jni, j_description);
+
+ return webrtc::CreateSessionDescription(
+ std_type, std_description, NULL);
+}
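+
+// Illustrative sketch (not part of the original CL): paired with
+// JavaSdpFromNativeSdp above, this yields a round trip driven entirely by the
+// canonical type string and the SDP text.
+static SessionDescriptionInterface* RoundTripSdpForIllustration(
+    JNIEnv* jni, const SessionDescriptionInterface* desc) {
+  jobject j_sdp = JavaSdpFromNativeSdp(jni, desc);
+  return JavaSdpToNativeSdp(jni, j_sdp);
+}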
+
+JOW(void, PeerConnection_setLocalDescription)(
+ JNIEnv* jni, jobject j_pc,
+ jobject j_observer, jobject j_sdp) {
+ rtc::scoped_refptr<SetSdpObserverWrapper> observer(
+ new rtc::RefCountedObject<SetSdpObserverWrapper>(
+ jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
+ ExtractNativePC(jni, j_pc)->SetLocalDescription(
+ observer, JavaSdpToNativeSdp(jni, j_sdp));
+}
+
+JOW(void, PeerConnection_setRemoteDescription)(
+ JNIEnv* jni, jobject j_pc,
+ jobject j_observer, jobject j_sdp) {
+ rtc::scoped_refptr<SetSdpObserverWrapper> observer(
+ new rtc::RefCountedObject<SetSdpObserverWrapper>(
+ jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
+ ExtractNativePC(jni, j_pc)->SetRemoteDescription(
+ observer, JavaSdpToNativeSdp(jni, j_sdp));
+}
+
+JOW(jboolean, PeerConnection_setConfiguration)(
+ JNIEnv* jni, jobject j_pc, jobject j_rtc_config) {
+ PeerConnectionInterface::RTCConfiguration rtc_config;
+ JavaRTCConfigurationToJsepRTCConfiguration(jni, j_rtc_config, &rtc_config);
+ return ExtractNativePC(jni, j_pc)->SetConfiguration(rtc_config);
+}
+
+JOW(jboolean, PeerConnection_nativeAddIceCandidate)(
+ JNIEnv* jni, jobject j_pc, jstring j_sdp_mid,
+ jint j_sdp_mline_index, jstring j_candidate_sdp) {
+ std::string sdp_mid = JavaToStdString(jni, j_sdp_mid);
+ std::string sdp = JavaToStdString(jni, j_candidate_sdp);
+ scoped_ptr<IceCandidateInterface> candidate(
+ webrtc::CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, NULL));
+ return ExtractNativePC(jni, j_pc)->AddIceCandidate(candidate.get());
+}
+
+JOW(jboolean, PeerConnection_nativeAddLocalStream)(
+ JNIEnv* jni, jobject j_pc, jlong native_stream) {
+ return ExtractNativePC(jni, j_pc)->AddStream(
+ reinterpret_cast<MediaStreamInterface*>(native_stream));
+}
+
+JOW(void, PeerConnection_nativeRemoveLocalStream)(
+ JNIEnv* jni, jobject j_pc, jlong native_stream) {
+ ExtractNativePC(jni, j_pc)->RemoveStream(
+ reinterpret_cast<MediaStreamInterface*>(native_stream));
+}
+
+JOW(jobject, PeerConnection_nativeCreateSender)(
+ JNIEnv* jni, jobject j_pc, jstring j_kind, jstring j_stream_id) {
+ jclass j_rtp_sender_class = FindClass(jni, "org/webrtc/RtpSender");
+ jmethodID j_rtp_sender_ctor =
+ GetMethodID(jni, j_rtp_sender_class, "<init>", "(J)V");
+
+ std::string kind = JavaToStdString(jni, j_kind);
+ std::string stream_id = JavaToStdString(jni, j_stream_id);
+ rtc::scoped_refptr<RtpSenderInterface> sender =
+ ExtractNativePC(jni, j_pc)->CreateSender(kind, stream_id);
+ if (!sender.get()) {
+ return nullptr;
+ }
+ jlong nativeSenderPtr = jlongFromPointer(sender.get());
+ jobject j_sender =
+ jni->NewObject(j_rtp_sender_class, j_rtp_sender_ctor, nativeSenderPtr);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ // Sender is now owned by the Java object, and will be freed from
+ // RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
+ sender->AddRef();
+ return j_sender;
+}
+
+JOW(jobject, PeerConnection_nativeGetSenders)(JNIEnv* jni, jobject j_pc) {
+ jclass j_array_list_class = FindClass(jni, "java/util/ArrayList");
+ jmethodID j_array_list_ctor =
+ GetMethodID(jni, j_array_list_class, "<init>", "()V");
+ jmethodID j_array_list_add =
+ GetMethodID(jni, j_array_list_class, "add", "(Ljava/lang/Object;)Z");
+ jobject j_senders = jni->NewObject(j_array_list_class, j_array_list_ctor);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+
+ jclass j_rtp_sender_class = FindClass(jni, "org/webrtc/RtpSender");
+ jmethodID j_rtp_sender_ctor =
+ GetMethodID(jni, j_rtp_sender_class, "<init>", "(J)V");
+
+ auto senders = ExtractNativePC(jni, j_pc)->GetSenders();
+ for (const auto& sender : senders) {
+ jlong nativeSenderPtr = jlongFromPointer(sender.get());
+ jobject j_sender =
+ jni->NewObject(j_rtp_sender_class, j_rtp_sender_ctor, nativeSenderPtr);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ // Sender is now owned by the Java object, and will be freed from
+ // RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
+ sender->AddRef();
+ jni->CallBooleanMethod(j_senders, j_array_list_add, j_sender);
+ CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+ }
+ return j_senders;
+}
+
+JOW(jobject, PeerConnection_nativeGetReceivers)(JNIEnv* jni, jobject j_pc) {
+ jclass j_array_list_class = FindClass(jni, "java/util/ArrayList");
+ jmethodID j_array_list_ctor =
+ GetMethodID(jni, j_array_list_class, "<init>", "()V");
+ jmethodID j_array_list_add =
+ GetMethodID(jni, j_array_list_class, "add", "(Ljava/lang/Object;)Z");
+ jobject j_receivers = jni->NewObject(j_array_list_class, j_array_list_ctor);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+
+ jclass j_rtp_receiver_class = FindClass(jni, "org/webrtc/RtpReceiver");
+ jmethodID j_rtp_receiver_ctor =
+ GetMethodID(jni, j_rtp_receiver_class, "<init>", "(J)V");
+
+ auto receivers = ExtractNativePC(jni, j_pc)->GetReceivers();
+ for (const auto& receiver : receivers) {
+ jlong nativeReceiverPtr = jlongFromPointer(receiver.get());
+ jobject j_receiver = jni->NewObject(j_rtp_receiver_class,
+ j_rtp_receiver_ctor, nativeReceiverPtr);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ // Receiver is now owned by Java object, and will be freed from there.
+ receiver->AddRef();
+ jni->CallBooleanMethod(j_receivers, j_array_list_add, j_receiver);
+ CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+ }
+ return j_receivers;
+}
+
+JOW(jboolean, PeerConnection_nativeGetStats)(
+ JNIEnv* jni, jobject j_pc, jobject j_observer, jlong native_track) {
+ rtc::scoped_refptr<StatsObserverWrapper> observer(
+ new rtc::RefCountedObject<StatsObserverWrapper>(jni, j_observer));
+ return ExtractNativePC(jni, j_pc)->GetStats(
+ observer,
+ reinterpret_cast<MediaStreamTrackInterface*>(native_track),
+ PeerConnectionInterface::kStatsOutputLevelStandard);
+}
+
+JOW(jobject, PeerConnection_signalingState)(JNIEnv* jni, jobject j_pc) {
+ PeerConnectionInterface::SignalingState state =
+ ExtractNativePC(jni, j_pc)->signaling_state();
+ return JavaEnumFromIndex(jni, "PeerConnection$SignalingState", state);
+}
+
+JOW(jobject, PeerConnection_iceConnectionState)(JNIEnv* jni, jobject j_pc) {
+ PeerConnectionInterface::IceConnectionState state =
+ ExtractNativePC(jni, j_pc)->ice_connection_state();
+ return JavaEnumFromIndex(jni, "PeerConnection$IceConnectionState", state);
+}
+
+JOW(jobject, PeerConnection_iceGatheringState)(JNIEnv* jni, jobject j_pc) {
+ PeerConnectionInterface::IceGatheringState state =
+ ExtractNativePC(jni, j_pc)->ice_gathering_state();
+ return JavaEnumFromIndex(jni, "PeerConnection$IceGatheringState", state);
+}
+
+JOW(void, PeerConnection_close)(JNIEnv* jni, jobject j_pc) {
+  ExtractNativePC(jni, j_pc)->Close();
+}
+
+JOW(jobject, MediaSource_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
+ rtc::scoped_refptr<MediaSourceInterface> p(
+ reinterpret_cast<MediaSourceInterface*>(j_p));
+ return JavaEnumFromIndex(jni, "MediaSource$State", p->state());
+}
+
+JOW(jlong, VideoRenderer_nativeWrapVideoRenderer)(
+ JNIEnv* jni, jclass, jobject j_callbacks) {
+ scoped_ptr<JavaVideoRendererWrapper> renderer(
+ new JavaVideoRendererWrapper(jni, j_callbacks));
+ return (jlong)renderer.release();
+}
+
+JOW(void, VideoRenderer_nativeCopyPlane)(
+ JNIEnv *jni, jclass, jobject j_src_buffer, jint width, jint height,
+ jint src_stride, jobject j_dst_buffer, jint dst_stride) {
+ size_t src_size = jni->GetDirectBufferCapacity(j_src_buffer);
+ size_t dst_size = jni->GetDirectBufferCapacity(j_dst_buffer);
+ RTC_CHECK(src_stride >= width) << "Wrong source stride " << src_stride;
+ RTC_CHECK(dst_stride >= width) << "Wrong destination stride " << dst_stride;
+ RTC_CHECK(src_size >= src_stride * height)
+ << "Insufficient source buffer capacity " << src_size;
+ RTC_CHECK(dst_size >= dst_stride * height)
+ << "Isufficient destination buffer capacity " << dst_size;
+ uint8_t *src =
+ reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer));
+ uint8_t *dst =
+ reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_buffer));
+ if (src_stride == dst_stride) {
+ memcpy(dst, src, src_stride * height);
+ } else {
+ for (int i = 0; i < height; i++) {
+ memcpy(dst, src, width);
+ src += src_stride;
+ dst += dst_stride;
+ }
+ }
+}
+
+JOW(void, VideoSource_stop)(JNIEnv* jni, jclass, jlong j_p) {
+ reinterpret_cast<VideoSourceInterface*>(j_p)->Stop();
+}
+
+JOW(void, VideoSource_restart)(
+ JNIEnv* jni, jclass, jlong j_p_source, jlong j_p_format) {
+ reinterpret_cast<VideoSourceInterface*>(j_p_source)->Restart();
+}
+
+JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {
+ return JavaStringFromStdString(
+ jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());
+}
+
+JOW(jstring, MediaStreamTrack_nativeKind)(JNIEnv* jni, jclass, jlong j_p) {
+ return JavaStringFromStdString(
+ jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->kind());
+}
+
+JOW(jboolean, MediaStreamTrack_nativeEnabled)(JNIEnv* jni, jclass, jlong j_p) {
+ return reinterpret_cast<MediaStreamTrackInterface*>(j_p)->enabled();
+}
+
+JOW(jobject, MediaStreamTrack_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
+ return JavaEnumFromIndex(
+ jni,
+ "MediaStreamTrack$State",
+ reinterpret_cast<MediaStreamTrackInterface*>(j_p)->state());
+}
+
+JOW(jboolean, MediaStreamTrack_nativeSetState)(
+ JNIEnv* jni, jclass, jlong j_p, jint j_new_state) {
+ MediaStreamTrackInterface::TrackState new_state =
+ (MediaStreamTrackInterface::TrackState)j_new_state;
+ return reinterpret_cast<MediaStreamTrackInterface*>(j_p)
+ ->set_state(new_state);
+}
+
+JOW(jboolean, MediaStreamTrack_nativeSetEnabled)(
+ JNIEnv* jni, jclass, jlong j_p, jboolean enabled) {
+ return reinterpret_cast<MediaStreamTrackInterface*>(j_p)
+ ->set_enabled(enabled);
+}
+
+JOW(void, VideoTrack_nativeAddRenderer)(
+ JNIEnv* jni, jclass,
+ jlong j_video_track_pointer, jlong j_renderer_pointer) {
+ reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)->AddRenderer(
+ reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer));
+}
+
+JOW(void, VideoTrack_nativeRemoveRenderer)(
+ JNIEnv* jni, jclass,
+ jlong j_video_track_pointer, jlong j_renderer_pointer) {
+ reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)->RemoveRenderer(
+ reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer));
+}
+
+JOW(jlong, CallSessionFileRotatingLogSink_nativeAddSink)(
+ JNIEnv* jni, jclass,
+ jstring j_dirPath, jint j_maxFileSize, jint j_severity) {
+ std::string dir_path = JavaToStdString(jni, j_dirPath);
+ rtc::CallSessionFileRotatingLogSink* sink =
+ new rtc::CallSessionFileRotatingLogSink(dir_path, j_maxFileSize);
+ if (!sink->Init()) {
+ LOG_V(rtc::LoggingSeverity::LS_WARNING) <<
+ "Failed to init CallSessionFileRotatingLogSink for path " << dir_path;
+ delete sink;
+ return 0;
+ }
+ rtc::LogMessage::AddLogToStream(
+ sink, static_cast<rtc::LoggingSeverity>(j_severity));
+ return jlongFromPointer(sink);
+}
+
+JOW(void, CallSessionFileRotatingLogSink_nativeDeleteSink)(
+ JNIEnv* jni, jclass, jlong j_sink) {
+ rtc::CallSessionFileRotatingLogSink* sink =
+ reinterpret_cast<rtc::CallSessionFileRotatingLogSink*>(j_sink);
+ rtc::LogMessage::RemoveLogToStream(sink);
+ delete sink;
+}
+
+JOW(jbyteArray, CallSessionFileRotatingLogSink_nativeGetLogData)(
+ JNIEnv* jni, jclass, jstring j_dirPath) {
+ std::string dir_path = JavaToStdString(jni, j_dirPath);
+ rtc::scoped_ptr<rtc::CallSessionFileRotatingStream> stream(
+ new rtc::CallSessionFileRotatingStream(dir_path));
+ if (!stream->Open()) {
+ LOG_V(rtc::LoggingSeverity::LS_WARNING) <<
+ "Failed to open CallSessionFileRotatingStream for path " << dir_path;
+ return jni->NewByteArray(0);
+ }
+ size_t log_size = 0;
+ if (!stream->GetSize(&log_size) || log_size == 0) {
+ LOG_V(rtc::LoggingSeverity::LS_WARNING) <<
+ "CallSessionFileRotatingStream returns 0 size for path " << dir_path;
+ return jni->NewByteArray(0);
+ }
+
+ size_t read = 0;
+ rtc::scoped_ptr<jbyte[]> buffer(new jbyte[log_size]);
+ stream->ReadAll(buffer.get(), log_size, &read, nullptr);
+
+ jbyteArray result = jni->NewByteArray(read);
+ jni->SetByteArrayRegion(result, 0, read, buffer.get());
+
+ return result;
+}
+
+JOW(jboolean, RtpSender_nativeSetTrack)(JNIEnv* jni,
+ jclass,
+ jlong j_rtp_sender_pointer,
+ jlong j_track_pointer) {
+ return reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->SetTrack(reinterpret_cast<MediaStreamTrackInterface*>(j_track_pointer));
+}
+
+JOW(jlong, RtpSender_nativeGetTrack)(JNIEnv* jni,
+ jclass,
+ jlong j_rtp_sender_pointer,
+ jlong j_track_pointer) {
+ return jlongFromPointer(
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->track()
+ .release());
+}
+
+JOW(jstring, RtpSender_nativeId)(
+ JNIEnv* jni, jclass, jlong j_rtp_sender_pointer) {
+ return JavaStringFromStdString(
+ jni, reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)->id());
+}
+
+JOW(void, RtpSender_free)(JNIEnv* jni, jclass, jlong j_rtp_sender_pointer) {
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)->Release();
+}
+
+JOW(jlong, RtpReceiver_nativeGetTrack)(JNIEnv* jni,
+ jclass,
+ jlong j_rtp_receiver_pointer,
+ jlong j_track_pointer) {
+ return jlongFromPointer(
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+ ->track()
+ .release());
+}
+
+JOW(jstring, RtpReceiver_nativeId)(
+ JNIEnv* jni, jclass, jlong j_rtp_receiver_pointer) {
+ return JavaStringFromStdString(
+ jni,
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)->id());
+}
+
+JOW(void, RtpReceiver_free)(JNIEnv* jni, jclass, jlong j_rtp_receiver_pointer) {
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)->Release();
+}
+
+} // namespace webrtc_jni
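
For orientation, the RtpReceiver_native* entry points above bind to a Java
peer roughly like the sketch below. This is a hedged reconstruction for
illustration only (the real org.webrtc.RtpReceiver is added elsewhere in this
CL); it shows the ownership rule stated in nativeGetReceivers(): the native
object is AddRef()'d before being handed to Java, and the Java side balances
that with free(), which calls Release().

    package org.webrtc;

    // Hypothetical reconstruction of the Java peer; the native method names
    // follow the JOW() bindings above.
    public class RtpReceiver {
      final long nativeRtpReceiver;  // AddRef()'d in nativeGetReceivers().
      private final MediaStreamTrack cachedTrack;

      public RtpReceiver(long nativeRtpReceiver) {
        this.nativeRtpReceiver = nativeRtpReceiver;
        // nativeGetTrack() releases a scoped_refptr into a raw pointer that
        // the Java track wrapper takes ownership of.
        cachedTrack = new MediaStreamTrack(nativeGetTrack(nativeRtpReceiver, 0));
      }

      public MediaStreamTrack track() {
        return cachedTrack;
      }

      public String id() {
        return nativeId(nativeRtpReceiver);
      }

      public void dispose() {
        cachedTrack.dispose();
        free(nativeRtpReceiver);  // Balances the AddRef() done in C++.
      }

      private static native long nativeGetTrack(long rtpReceiverPointer, long trackPointer);
      private static native String nativeId(long rtpReceiverPointer);
      private static native void free(long rtpReceiverPointer);
    }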
diff --git a/webrtc/api/java/jni/surfacetexturehelper_jni.cc b/webrtc/api/java/jni/surfacetexturehelper_jni.cc
new file mode 100644
index 0000000..335081d
--- /dev/null
+++ b/webrtc/api/java/jni/surfacetexturehelper_jni.cc
@@ -0,0 +1,68 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+
+#include "webrtc/api/java/jni/surfacetexturehelper_jni.h"
+
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+
+namespace webrtc_jni {
+
+SurfaceTextureHelper::SurfaceTextureHelper(
+ JNIEnv* jni, jobject surface_texture_helper)
+ : j_surface_texture_helper_(jni, surface_texture_helper),
+ j_return_texture_method_(
+ GetMethodID(jni,
+ FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+ "returnTextureFrame",
+ "()V")) {
+ CHECK_EXCEPTION(jni) << "error during initialization of SurfaceTextureHelper";
+}
+
+SurfaceTextureHelper::~SurfaceTextureHelper() {
+}
+
+void SurfaceTextureHelper::ReturnTextureFrame() const {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ jni->CallVoidMethod(*j_surface_texture_helper_, j_return_texture_method_);
+
+ CHECK_EXCEPTION(jni)
+ << "error during SurfaceTextureHelper.returnTextureFrame";
+}
+
+rtc::scoped_refptr<webrtc::VideoFrameBuffer>
+SurfaceTextureHelper::CreateTextureFrame(int width, int height,
+ const NativeHandleImpl& native_handle) {
+ return new rtc::RefCountedObject<AndroidTextureBuffer>(
+ width, height, native_handle, *j_surface_texture_helper_,
+ rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this));
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/surfacetexturehelper_jni.h b/webrtc/api/java/jni/surfacetexturehelper_jni.h
new file mode 100644
index 0000000..8953b02
--- /dev/null
+++ b/webrtc/api/java/jni/surfacetexturehelper_jni.h
@@ -0,0 +1,79 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_SURFACETEXTUREHELPER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_SURFACETEXTUREHELPER_JNI_H_
+
+#include <jni.h>
+
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
+
+namespace webrtc_jni {
+
+// Helper class to create and synchronize access to an Android SurfaceTexture.
+// It is used for creating webrtc::VideoFrameBuffers from a SurfaceTexture when
+// the SurfaceTexture has been updated.
+// When the VideoFrameBuffer is released, this class returns the buffer to the
+// Java SurfaceTextureHelper so it can be updated safely. The VideoFrameBuffer
+// can be released on an arbitrary thread.
+// SurfaceTextureHelper is reference counted to make sure that it is not
+// destroyed while a VideoFrameBuffer is in use.
+// This class is the C++ counterpart of the Java class SurfaceTextureHelper.
+// Usage:
+// 1. Create a Java instance of SurfaceTextureHelper.
+// 2. Create an instance of this class.
+// 3. Register a listener on the Java SurfaceTextureHelper and start producing
+// new buffers.
+// 4. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
+class SurfaceTextureHelper : public rtc::RefCountInterface {
+ public:
+ SurfaceTextureHelper(JNIEnv* jni, jobject surface_texture_helper);
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
+ int width,
+ int height,
+ const NativeHandleImpl& native_handle);
+
+ protected:
+ ~SurfaceTextureHelper();
+
+ private:
+ // May be called on arbitrary thread.
+ void ReturnTextureFrame() const;
+
+ const ScopedGlobalRef<jobject> j_surface_texture_helper_;
+ const jmethodID j_return_texture_method_;
+};
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_SURFACETEXTUREHELPER_JNI_H_
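
The Java-side contract this helper relies on can be sketched as follows. This
is a minimal sketch, assuming a SurfaceTextureHelper instance obtained from
the application (its factory method is not part of this excerpt); it uses only
the Java methods referenced elsewhere in this CL (setListener(),
returnTextureFrame(), disconnect()).

    package org.webrtc;

    // Consumes texture frames and returns each one so the SurfaceTexture can
    // be updated with the next frame. Releasing a C++ VideoFrameBuffer made
    // by CreateTextureFrame() triggers the same returnTextureFrame().
    class FrameConsumer implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
      private final SurfaceTextureHelper helper;

      FrameConsumer(SurfaceTextureHelper helper) {
        this.helper = helper;
        helper.setListener(this);  // Step 3 of the usage comment above.
      }

      @Override
      public void onTextureFrameAvailable(
          int oesTextureId, float[] transformMatrix, long timestampNs) {
        // Use the OES texture here, then hand it back.
        helper.returnTextureFrame();
      }

      void shutdown() {
        // Blocks until any onTextureFrameAvailable() in progress completes.
        helper.disconnect();
      }
    }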
diff --git a/webrtc/api/java/src/org/webrtc/AudioSource.java b/webrtc/api/java/src/org/webrtc/AudioSource.java
new file mode 100644
index 0000000..06177a6
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/AudioSource.java
@@ -0,0 +1,38 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/**
+ * Java wrapper for a C++ AudioSourceInterface. Used as the source for one or
+ * more {@code AudioTrack} objects.
+ */
+public class AudioSource extends MediaSource {
+ public AudioSource(long nativeSource) {
+ super(nativeSource);
+ }
+}
diff --git a/webrtc/api/java/src/org/webrtc/AudioTrack.java b/webrtc/api/java/src/org/webrtc/AudioTrack.java
new file mode 100644
index 0000000..3200080
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/AudioTrack.java
@@ -0,0 +1,35 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ AudioTrackInterface */
+public class AudioTrack extends MediaStreamTrack {
+ public AudioTrack(long nativeTrack) {
+ super(nativeTrack);
+ }
+}
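
AudioSource and AudioTrack are normally created through a
PeerConnectionFactory. The snippet below is a sketch assuming a factory that
has already been initialized for Android; the factory methods themselves are
not part of this excerpt.

    // Creates a local audio track backed by the C++ AudioSourceInterface.
    void createLocalAudio() {
      PeerConnectionFactory factory = new PeerConnectionFactory();
      AudioSource source = factory.createAudioSource(new MediaConstraints());
      AudioTrack track = factory.createAudioTrack("audio0", source);
      track.setEnabled(true);  // Backed by MediaStreamTrack_nativeSetEnabled().
    }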
diff --git a/webrtc/api/java/src/org/webrtc/CallSessionFileRotatingLogSink.java b/webrtc/api/java/src/org/webrtc/CallSessionFileRotatingLogSink.java
new file mode 100644
index 0000000..f7032a7
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/CallSessionFileRotatingLogSink.java
@@ -0,0 +1,57 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+public class CallSessionFileRotatingLogSink {
+ static {
+ System.loadLibrary("jingle_peerconnection_so");
+ }
+
+ private long nativeSink;
+
+ public static byte[] getLogData(String dirPath) {
+ return nativeGetLogData(dirPath);
+ }
+
+ public CallSessionFileRotatingLogSink(
+ String dirPath, int maxFileSize, Logging.Severity severity) {
+ nativeSink = nativeAddSink(dirPath, maxFileSize, severity.ordinal());
+ }
+
+ public void dispose() {
+ if (nativeSink != 0) {
+ nativeDeleteSink(nativeSink);
+ nativeSink = 0;
+ }
+ }
+
+ private static native long nativeAddSink(
+ String dirPath, int maxFileSize, int severity);
+ private static native void nativeDeleteSink(long nativeSink);
+ private static native byte[] nativeGetLogData(String dirPath);
+}
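
Typical use of the sink, as a sketch; the directory path, the Android
|context|, and the LS_INFO severity constant are assumptions for illustration:

    // Start rotating native rtc::LogMessage output to files under |dirPath|.
    String dirPath = context.getFilesDir().getAbsolutePath() + "/webrtc_logs";
    CallSessionFileRotatingLogSink sink = new CallSessionFileRotatingLogSink(
        dirPath, 10 * 1024 * 1024, Logging.Severity.LS_INFO);
    // ... run the call ...
    sink.dispose();  // Removes and deletes the native sink.
    byte[] logData = CallSessionFileRotatingLogSink.getLogData(dirPath);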
diff --git a/webrtc/api/java/src/org/webrtc/DataChannel.java b/webrtc/api/java/src/org/webrtc/DataChannel.java
new file mode 100644
index 0000000..1866098
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/DataChannel.java
@@ -0,0 +1,143 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/** Java wrapper for a C++ DataChannelInterface. */
+public class DataChannel {
+ /** Java wrapper for WebIDL RTCDataChannel. */
+ public static class Init {
+ public boolean ordered = true;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int maxRetransmitTimeMs = -1;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int maxRetransmits = -1;
+ public String protocol = "";
+ public boolean negotiated = false;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int id = -1;
+
+ public Init() {}
+
+ // Called only by native code.
+ private Init(
+ boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
+ String protocol, boolean negotiated, int id) {
+ this.ordered = ordered;
+ this.maxRetransmitTimeMs = maxRetransmitTimeMs;
+ this.maxRetransmits = maxRetransmits;
+ this.protocol = protocol;
+ this.negotiated = negotiated;
+ this.id = id;
+ }
+ }
+
+ /** Java version of C++ DataBuffer. The atom of data in a DataChannel. */
+ public static class Buffer {
+ /** The underlying data. */
+ public final ByteBuffer data;
+
+ /**
+ * Indicates whether |data| contains UTF-8 text or "binary data"
+ * (i.e. anything else).
+ */
+ public final boolean binary;
+
+ public Buffer(ByteBuffer data, boolean binary) {
+ this.data = data;
+ this.binary = binary;
+ }
+ }
+
+ /** Java version of C++ DataChannelObserver. */
+ public interface Observer {
+ /** The data channel's bufferedAmount has changed. */
+ public void onBufferedAmountChange(long previousAmount);
+ /** The data channel state has changed. */
+ public void onStateChange();
+ /**
+ * A data buffer was successfully received. NOTE: |buffer.data| will be
+ * freed once this function returns so callers who want to use the data
+ * asynchronously must make sure to copy it first.
+ */
+ public void onMessage(Buffer buffer);
+ }
+
+ /** Keep in sync with DataChannelInterface::DataState. */
+ public enum State { CONNECTING, OPEN, CLOSING, CLOSED };
+
+ private final long nativeDataChannel;
+ private long nativeObserver;
+
+ public DataChannel(long nativeDataChannel) {
+ this.nativeDataChannel = nativeDataChannel;
+ }
+
+ /** Register |observer|, replacing any previously-registered observer. */
+ public void registerObserver(Observer observer) {
+ if (nativeObserver != 0) {
+ unregisterObserverNative(nativeObserver);
+ }
+ nativeObserver = registerObserverNative(observer);
+ }
+ private native long registerObserverNative(Observer observer);
+
+ /** Unregister the (only) observer. */
+ public void unregisterObserver() {
+ unregisterObserverNative(nativeObserver);
+ }
+ private native void unregisterObserverNative(long nativeObserver);
+
+ public native String label();
+
+ public native State state();
+
+ /**
+ * Return the number of bytes of application data (UTF-8 text and binary data)
+ * that have been queued using SendBuffer but have not yet been transmitted
+ * to the network.
+ */
+ public native long bufferedAmount();
+
+ /** Close the channel. */
+ public native void close();
+
+ /** Send |data| to the remote peer; return success. */
+ public boolean send(Buffer buffer) {
+ // TODO(fischman): this could be cleverer about avoiding copies if the
+ // ByteBuffer is direct and/or is backed by an array.
+ byte[] data = new byte[buffer.data.remaining()];
+ buffer.data.get(data);
+ return sendNative(data, buffer.binary);
+ }
+ private native boolean sendNative(byte[] data, boolean binary);
+
+ /** Dispose of native resources attached to this channel. */
+ public native void dispose();
+}
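
A sketch of typical use, assuming an existing org.webrtc.PeerConnection |pc|
and an imported java.nio.ByteBuffer (createDataChannel() is not part of this
excerpt):

    DataChannel.Init init = new DataChannel.Init();
    init.ordered = true;
    DataChannel channel = pc.createDataChannel("chat", init);
    channel.registerObserver(new DataChannel.Observer() {
      @Override public void onBufferedAmountChange(long previousAmount) {}
      @Override public void onStateChange() {}
      @Override public void onMessage(DataChannel.Buffer buffer) {
        // |buffer.data| is freed once this returns; copy before async use.
        ByteBuffer copy = ByteBuffer.allocate(buffer.data.remaining());
        copy.put(buffer.data);
      }
    });
    // Send a UTF-8 text message (binary == false).
    ByteBuffer payload = ByteBuffer.wrap("hello".getBytes());
    channel.send(new DataChannel.Buffer(payload, false));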
diff --git a/webrtc/api/java/src/org/webrtc/IceCandidate.java b/webrtc/api/java/src/org/webrtc/IceCandidate.java
new file mode 100644
index 0000000..eb42ce4
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/IceCandidate.java
@@ -0,0 +1,48 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/**
+ * Representation of a single ICE Candidate, mirroring
+ * {@code IceCandidateInterface} in the C++ API.
+ */
+public class IceCandidate {
+ public final String sdpMid;
+ public final int sdpMLineIndex;
+ public final String sdp;
+
+ public IceCandidate(String sdpMid, int sdpMLineIndex, String sdp) {
+ this.sdpMid = sdpMid;
+ this.sdpMLineIndex = sdpMLineIndex;
+ this.sdp = sdp;
+ }
+
+ public String toString() {
+ return sdpMid + ":" + sdpMLineIndex + ":" + sdp;
+ }
+}
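
Candidate trickling, as a minimal sketch; |pc| and the signaling channel are
application-side assumptions, not part of this CL:

    // Local candidates arrive via PeerConnection.Observer.onIceCandidate().
    void onLocalCandidate(IceCandidate candidate) {
      signalingChannel.send(candidate.toString());  // "audio:0:candidate:..."
    }

    // Remote candidates are applied to the PeerConnection.
    void onRemoteCandidate(String sdpMid, int sdpMLineIndex, String sdp) {
      pc.addIceCandidate(new IceCandidate(sdpMid, sdpMLineIndex, sdp));
    }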
diff --git a/webrtc/api/java/src/org/webrtc/MediaCodecVideoDecoder.java b/webrtc/api/java/src/org/webrtc/MediaCodecVideoDecoder.java
new file mode 100644
index 0000000..1288d41
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -0,0 +1,701 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.os.Build;
+import android.os.SystemClock;
+import android.view.Surface;
+
+import org.webrtc.Logging;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+import java.util.Queue;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
+// This class is an implementation detail of the Java PeerConnection API.
+@SuppressWarnings("deprecation")
+public class MediaCodecVideoDecoder {
+ // This class is constructed, operated, and destroyed by its C++ incarnation,
+ // so the class and its methods have non-public visibility. The API this
+ // class exposes aims to mimic the webrtc::VideoDecoder API as closely as
+ // possible to minimize the amount of translation work necessary.
+
+ private static final String TAG = "MediaCodecVideoDecoder";
+
+ // Tracks webrtc::VideoCodecType.
+ public enum VideoCodecType {
+ VIDEO_CODEC_VP8,
+ VIDEO_CODEC_VP9,
+ VIDEO_CODEC_H264
+ }
+
+ // Timeout for input buffer dequeue.
+ private static final int DEQUEUE_INPUT_TIMEOUT = 500000;
+ // Timeout for codec releasing.
+ private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
+ // Max number of output buffers queued before starting to drop decoded frames.
+ private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
+ // Active running decoder instance. Set in initDecode() (called from native
+ // code) and reset to null in the release() call.
+ private static MediaCodecVideoDecoder runningInstance = null;
+ private static MediaCodecVideoDecoderErrorCallback errorCallback = null;
+ private static int codecErrors = 0;
+ // List of disabled codec types - can be set from application.
+ private static Set<String> hwDecoderDisabledTypes = new HashSet<String>();
+
+ private Thread mediaCodecThread;
+ private MediaCodec mediaCodec;
+ private ByteBuffer[] inputBuffers;
+ private ByteBuffer[] outputBuffers;
+ private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+ private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
+ private static final String H264_MIME_TYPE = "video/avc";
+ // List of supported HW VP8 decoders.
+ private static final String[] supportedVp8HwCodecPrefixes =
+ {"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel." };
+ // List of supported HW VP9 decoders.
+ private static final String[] supportedVp9HwCodecPrefixes =
+ {"OMX.qcom.", "OMX.Exynos." };
+ // List of supported HW H.264 decoders.
+ private static final String[] supportedH264HwCodecPrefixes =
+ {"OMX.qcom.", "OMX.Intel." };
+ // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+ // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+ private static final int
+ COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
+ // Allowable color formats supported by codec - in order of preference.
+ private static final List<Integer> supportedColorList = Arrays.asList(
+ CodecCapabilities.COLOR_FormatYUV420Planar,
+ CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+ CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+ COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
+ private int colorFormat;
+ private int width;
+ private int height;
+ private int stride;
+ private int sliceHeight;
+ private boolean hasDecodedFirstFrame;
+ private final Queue<TimeStamps> decodeStartTimeMs = new LinkedList<TimeStamps>();
+ private boolean useSurface;
+
+ // The below variables are only used when decoding to a Surface.
+ private TextureListener textureListener;
+ private int droppedFrames;
+ private Surface surface = null;
+ private final Queue<DecodedOutputBuffer>
+ dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
+
+ // MediaCodec error handler - invoked when a critical error happens that may
+ // prevent further use of the MediaCodec API. Currently it means that one of
+ // the MediaCodec instances is hanging and can no longer be used.
+ public static interface MediaCodecVideoDecoderErrorCallback {
+ void onMediaCodecVideoDecoderCriticalError(int codecErrors);
+ }
+
+ public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) {
+ Logging.d(TAG, "Set error callback");
+ MediaCodecVideoDecoder.errorCallback = errorCallback;
+ }
+
+ // Functions to disable HW decoding - can be called from applications for
+ // platforms with known HW decoding problems.
+ public static void disableVp8HwCodec() {
+ Logging.w(TAG, "VP8 decoding is disabled by application.");
+ hwDecoderDisabledTypes.add(VP8_MIME_TYPE);
+ }
+
+ public static void disableVp9HwCodec() {
+ Logging.w(TAG, "VP9 decoding is disabled by application.");
+ hwDecoderDisabledTypes.add(VP9_MIME_TYPE);
+ }
+
+ public static void disableH264HwCodec() {
+ Logging.w(TAG, "H.264 decoding is disabled by application.");
+ hwDecoderDisabledTypes.add(H264_MIME_TYPE);
+ }
+
+ // Functions to query if HW decoding is supported.
+ public static boolean isVp8HwSupported() {
+ return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE) &&
+ (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null);
+ }
+
+ public static boolean isVp9HwSupported() {
+ return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE) &&
+ (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null);
+ }
+
+ public static boolean isH264HwSupported() {
+ return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE) &&
+ (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null);
+ }
+
+ public static void printStackTrace() {
+ if (runningInstance != null && runningInstance.mediaCodecThread != null) {
+ StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
+ if (mediaCodecStackTraces.length > 0) {
+ Logging.d(TAG, "MediaCodecVideoDecoder stacks trace:");
+ for (StackTraceElement stackTrace : mediaCodecStackTraces) {
+ Logging.d(TAG, stackTrace.toString());
+ }
+ }
+ }
+ }
+
+ // Helper struct for findDecoder() below.
+ private static class DecoderProperties {
+ public DecoderProperties(String codecName, int colorFormat) {
+ this.codecName = codecName;
+ this.colorFormat = colorFormat;
+ }
+ public final String codecName; // OpenMax component name for the codec.
+ public final int colorFormat; // Color format supported by codec.
+ }
+
+ private static DecoderProperties findDecoder(
+ String mime, String[] supportedCodecPrefixes) {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+ return null; // MediaCodec.setParameters is missing.
+ }
+ Logging.d(TAG, "Trying to find HW decoder for mime " + mime);
+ for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+ MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
+ if (info.isEncoder()) {
+ continue;
+ }
+ String name = null;
+ for (String mimeType : info.getSupportedTypes()) {
+ if (mimeType.equals(mime)) {
+ name = info.getName();
+ break;
+ }
+ }
+ if (name == null) {
+ continue; // No HW support in this codec; try the next one.
+ }
+ Logging.d(TAG, "Found candidate decoder " + name);
+
+ // Check if this is a supported decoder.
+ boolean supportedCodec = false;
+ for (String codecPrefix : supportedCodecPrefixes) {
+ if (name.startsWith(codecPrefix)) {
+ supportedCodec = true;
+ break;
+ }
+ }
+ if (!supportedCodec) {
+ continue;
+ }
+
+ // Check if codec supports either yuv420 or nv12.
+ CodecCapabilities capabilities =
+ info.getCapabilitiesForType(mime);
+ for (int colorFormat : capabilities.colorFormats) {
+ Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
+ }
+ for (int supportedColorFormat : supportedColorList) {
+ for (int codecColorFormat : capabilities.colorFormats) {
+ if (codecColorFormat == supportedColorFormat) {
+ // Found supported HW decoder.
+ Logging.d(TAG, "Found target decoder " + name +
+ ". Color: 0x" + Integer.toHexString(codecColorFormat));
+ return new DecoderProperties(name, codecColorFormat);
+ }
+ }
+ }
+ }
+ Logging.d(TAG, "No HW decoder found for mime " + mime);
+ return null; // No HW decoder.
+ }
+
+ private void checkOnMediaCodecThread() throws IllegalStateException {
+ if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
+ throw new IllegalStateException(
+ "MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
+ " but is now called on " + Thread.currentThread());
+ }
+ }
+
+ // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
+ private boolean initDecode(
+ VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
+ if (mediaCodecThread != null) {
+ throw new RuntimeException("Forgot to release()?");
+ }
+ useSurface = (surfaceTextureHelper != null);
+ String mime = null;
+ String[] supportedCodecPrefixes = null;
+ if (type == VideoCodecType.VIDEO_CODEC_VP8) {
+ mime = VP8_MIME_TYPE;
+ supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
+ } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
+ mime = VP9_MIME_TYPE;
+ supportedCodecPrefixes = supportedVp9HwCodecPrefixes;
+ } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
+ mime = H264_MIME_TYPE;
+ supportedCodecPrefixes = supportedH264HwCodecPrefixes;
+ } else {
+ throw new RuntimeException("Non supported codec " + type);
+ }
+ DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
+ if (properties == null) {
+ throw new RuntimeException("Cannot find HW decoder for " + type);
+ }
+ Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
+ ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
+ ". Use Surface: " + useSurface);
+ runningInstance = this; // Decoder is now running and can be queried for stack traces.
+ mediaCodecThread = Thread.currentThread();
+ try {
+ this.width = width;
+ this.height = height;
+ stride = width;
+ sliceHeight = height;
+
+ if (useSurface) {
+ textureListener = new TextureListener(surfaceTextureHelper);
+ surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+ }
+
+ MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
+ if (!useSurface) {
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
+ }
+ Logging.d(TAG, " Format: " + format);
+ mediaCodec =
+ MediaCodecVideoEncoder.createByCodecName(properties.codecName);
+ if (mediaCodec == null) {
+ Logging.e(TAG, "Can not create media decoder");
+ return false;
+ }
+ mediaCodec.configure(format, surface, null, 0);
+ mediaCodec.start();
+ colorFormat = properties.colorFormat;
+ outputBuffers = mediaCodec.getOutputBuffers();
+ inputBuffers = mediaCodec.getInputBuffers();
+ decodeStartTimeMs.clear();
+ hasDecodedFirstFrame = false;
+ dequeuedSurfaceOutputBuffers.clear();
+ droppedFrames = 0;
+ Logging.d(TAG, "Input buffers: " + inputBuffers.length +
+ ". Output buffers: " + outputBuffers.length);
+ return true;
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "initDecode failed", e);
+ return false;
+ }
+ }
+
+ private void release() {
+ Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
+ checkOnMediaCodecThread();
+
+ // Run MediaCodec stop() and release() on a separate thread since
+ // MediaCodec.stop() may sometimes hang.
+ final CountDownLatch releaseDone = new CountDownLatch(1);
+
+ Runnable runMediaCodecRelease = new Runnable() {
+ @Override
+ public void run() {
+ try {
+ Logging.d(TAG, "Java releaseDecoder on release thread");
+ mediaCodec.stop();
+ mediaCodec.release();
+ Logging.d(TAG, "Java releaseDecoder on release thread done");
+ } catch (Exception e) {
+ Logging.e(TAG, "Media decoder release failed", e);
+ }
+ releaseDone.countDown();
+ }
+ };
+ new Thread(runMediaCodecRelease).start();
+
+ if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ Logging.e(TAG, "Media decoder release timeout");
+ codecErrors++;
+ if (errorCallback != null) {
+ Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
+ errorCallback.onMediaCodecVideoDecoderCriticalError(codecErrors);
+ }
+ }
+
+ mediaCodec = null;
+ mediaCodecThread = null;
+ runningInstance = null;
+ if (useSurface) {
+ surface.release();
+ surface = null;
+ textureListener.release();
+ }
+ Logging.d(TAG, "Java releaseDecoder done");
+ }
+
+ // Dequeue an input buffer and return its index, -1 if no input buffer is
+ // available, or -2 if the codec is no longer operative.
+ private int dequeueInputBuffer() {
+ checkOnMediaCodecThread();
+ try {
+ return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "dequeueIntputBuffer failed", e);
+ return -2;
+ }
+ }
+
+ private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStampUs,
+ long timeStampMs, long ntpTimeStampMs) {
+ checkOnMediaCodecThread();
+ try {
+ inputBuffers[inputBufferIndex].position(0);
+ inputBuffers[inputBufferIndex].limit(size);
+ decodeStartTimeMs.add(new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs,
+ ntpTimeStampMs));
+ mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStampUs, 0);
+ return true;
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "decode failed", e);
+ return false;
+ }
+ }
+
+ private static class TimeStamps {
+ public TimeStamps(long decodeStartTimeMs, long timeStampMs, long ntpTimeStampMs) {
+ this.decodeStartTimeMs = decodeStartTimeMs;
+ this.timeStampMs = timeStampMs;
+ this.ntpTimeStampMs = ntpTimeStampMs;
+ }
+ // Time when this frame was queued for decoding.
+ private final long decodeStartTimeMs;
+ // Only used for bookkeeping in Java. Stores C++ inputImage._timeStamp value for input frame.
+ private final long timeStampMs;
+ // Only used for bookkeeping in Java. Stores C++ inputImage.ntp_time_ms_ value for input frame.
+ private final long ntpTimeStampMs;
+ }
+
+ // Helper struct for dequeueOutputBuffer() below.
+ private static class DecodedOutputBuffer {
+ public DecodedOutputBuffer(int index, int offset, int size, long presentationTimeStampMs,
+ long timeStampMs, long ntpTimeStampMs, long decodeTime, long endDecodeTime) {
+ this.index = index;
+ this.offset = offset;
+ this.size = size;
+ this.presentationTimeStampMs = presentationTimeStampMs;
+ this.timeStampMs = timeStampMs;
+ this.ntpTimeStampMs = ntpTimeStampMs;
+ this.decodeTimeMs = decodeTime;
+ this.endDecodeTimeMs = endDecodeTime;
+ }
+
+ private final int index;
+ private final int offset;
+ private final int size;
+ // Presentation timestamp returned in dequeueOutputBuffer call.
+ private final long presentationTimeStampMs;
+ // C++ inputImage._timeStamp value for output frame.
+ private final long timeStampMs;
+ // C++ inputImage.ntp_time_ms_ value for output frame.
+ private final long ntpTimeStampMs;
+ // Number of ms it took to decode this frame.
+ private final long decodeTimeMs;
+ // System time when this frame decoding finished.
+ private final long endDecodeTimeMs;
+ }
+
+ // Helper struct for dequeueTextureBuffer() below.
+ private static class DecodedTextureBuffer {
+ private final int textureID;
+ private final float[] transformMatrix;
+ // Presentation timestamp returned in dequeueOutputBuffer call.
+ private final long presentationTimeStampMs;
+ // C++ inputImage._timeStamp value for output frame.
+ private final long timeStampMs;
+ // C++ inputImage.ntp_time_ms_ value for output frame.
+ private final long ntpTimeStampMs;
+ // Number of ms it took to decode this frame.
+ private final long decodeTimeMs;
+ // Interval from when the frame finished decoding until this buffer has been created.
+ // Since there is only one texture, this interval depends on the time from when
+ // a frame is decoded and provided to C++ until that frame is returned to the MediaCodec
+ // so that the texture can be updated with the next decoded frame.
+ private final long frameDelayMs;
+
+ // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
+ // that was dropped.
+ public DecodedTextureBuffer(int textureID, float[] transformMatrix,
+ long presentationTimeStampMs, long timeStampMs, long ntpTimeStampMs, long decodeTimeMs,
+ long frameDelay) {
+ this.textureID = textureID;
+ this.transformMatrix = transformMatrix;
+ this.presentationTimeStampMs = presentationTimeStampMs;
+ this.timeStampMs = timeStampMs;
+ this.ntpTimeStampMs = ntpTimeStampMs;
+ this.decodeTimeMs = decodeTimeMs;
+ this.frameDelayMs = frameDelay;
+ }
+ }
+
+ // Poll-based texture listener.
+ private static class TextureListener
+ implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
+ private final Object newFrameLock = new Object();
+ // |bufferToRender| is non-null while waiting for the transition from
+ // addBufferToRender() to onTextureFrameAvailable().
+ private DecodedOutputBuffer bufferToRender;
+ private DecodedTextureBuffer renderedBuffer;
+
+ public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ surfaceTextureHelper.setListener(this);
+ }
+
+ public void addBufferToRender(DecodedOutputBuffer buffer) {
+ if (bufferToRender != null) {
+ Logging.e(TAG,
+ "Unexpected addBufferToRender() called while waiting for a texture.");
+ throw new IllegalStateException("Waiting for a texture.");
+ }
+ bufferToRender = buffer;
+ }
+
+ public boolean isWaitingForTexture() {
+ synchronized (newFrameLock) {
+ return bufferToRender != null;
+ }
+ }
+
+ // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
+ @Override
+ public void onTextureFrameAvailable(
+ int oesTextureId, float[] transformMatrix, long timestampNs) {
+ synchronized (newFrameLock) {
+ if (renderedBuffer != null) {
+ Logging.e(TAG,
+ "Unexpected onTextureFrameAvailable() called while already holding a texture.");
+ throw new IllegalStateException("Already holding a texture.");
+ }
+ // |timestampNs| is always zero on some Android versions.
+ renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix,
+ bufferToRender.presentationTimeStampMs, bufferToRender.timeStampMs,
+ bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs,
+ SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs);
+ bufferToRender = null;
+ newFrameLock.notifyAll();
+ }
+ }
+
+ // Dequeues and returns a DecodedTextureBuffer if available, or null otherwise.
+ public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) {
+ synchronized (newFrameLock) {
+ if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) {
+ try {
+ newFrameLock.wait(timeoutMs);
+ } catch (InterruptedException e) {
+ // Restore the interrupted status by reinterrupting the thread.
+ Thread.currentThread().interrupt();
+ }
+ }
+ DecodedTextureBuffer returnedBuffer = renderedBuffer;
+ renderedBuffer = null;
+ return returnedBuffer;
+ }
+ }
+
+ public void release() {
+ // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
+ // progress is done. Therefore, the call to disconnect() must be outside any synchronized
+ // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
+ surfaceTextureHelper.disconnect();
+ synchronized (newFrameLock) {
+ if (renderedBuffer != null) {
+ surfaceTextureHelper.returnTextureFrame();
+ renderedBuffer = null;
+ }
+ }
+ }
+ }
+
+ // Returns null if no decoded buffer is available, and otherwise a DecodedOutputBuffer.
+ // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
+ // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
+ // upon codec error.
+ private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
+ checkOnMediaCodecThread();
+ if (decodeStartTimeMs.isEmpty()) {
+ return null;
+ }
+ // Drain the decoder until receiving a decoded buffer or hitting
+ // MediaCodec.INFO_TRY_AGAIN_LATER.
+ final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ while (true) {
+ final int result = mediaCodec.dequeueOutputBuffer(
+ info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
+ switch (result) {
+ case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
+ outputBuffers = mediaCodec.getOutputBuffers();
+ Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
+ if (hasDecodedFirstFrame) {
+ throw new RuntimeException("Unexpected output buffer change event.");
+ }
+ break;
+ case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
+ MediaFormat format = mediaCodec.getOutputFormat();
+ Logging.d(TAG, "Decoder format changed: " + format.toString());
+ int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
+ int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
+ if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
+ throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
+ height + ". New " + new_width + "*" + new_height);
+ }
+ width = format.getInteger(MediaFormat.KEY_WIDTH);
+ height = format.getInteger(MediaFormat.KEY_HEIGHT);
+
+ if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
+ colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+ Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
+ if (!supportedColorList.contains(colorFormat)) {
+ throw new IllegalStateException("Non supported color format: " + colorFormat);
+ }
+ }
+ if (format.containsKey("stride")) {
+ stride = format.getInteger("stride");
+ }
+ if (format.containsKey("slice-height")) {
+ sliceHeight = format.getInteger("slice-height");
+ }
+ Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
+ stride = Math.max(width, stride);
+ sliceHeight = Math.max(height, sliceHeight);
+ break;
+ case MediaCodec.INFO_TRY_AGAIN_LATER:
+ return null;
+ default:
+ hasDecodedFirstFrame = true;
+ TimeStamps timeStamps = decodeStartTimeMs.remove();
+ return new DecodedOutputBuffer(result,
+ info.offset,
+ info.size,
+ TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs),
+ timeStamps.timeStampMs,
+ timeStamps.ntpTimeStampMs,
+ SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs,
+ SystemClock.elapsedRealtime());
+ }
+ }
+ }
+
+ // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
+ // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
+ // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
+ // upon codec error. If |dequeueTimeoutMs| > 0, the oldest decoded frame will be dropped if
+ // a frame can't be returned.
+ private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
+ checkOnMediaCodecThread();
+ if (!useSurface) {
+ throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
+ }
+ DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
+ if (outputBuffer != null) {
+ dequeuedSurfaceOutputBuffers.add(outputBuffer);
+ }
+
+ maybeRenderDecodedTextureBuffer();
+ // Check if there is a texture ready now by waiting at most |dequeueTimeoutMs|.
+ DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs);
+ if (renderedBuffer != null) {
+ maybeRenderDecodedTextureBuffer();
+ return renderedBuffer;
+ }
+
+ if (dequeuedSurfaceOutputBuffers.size()
+ >= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)
+ || (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty())) {
+ ++droppedFrames;
+ // Drop the oldest frame still in dequeuedSurfaceOutputBuffers.
+ // The oldest frame is owned by |textureListener| and can't be dropped since
+ // mediaCodec.releaseOutputBuffer has already been called.
+ final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove();
+ if (dequeueTimeoutMs > 0) {
+ // TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to
+ // return the one and only texture even if it does not render.
+ Logging.w(TAG, "Draining decoder. Dropping frame with TS: "
+ + droppedFrame.presentationTimeStampMs +
+ ". Total number of dropped frames: " + droppedFrames);
+ } else {
+ Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size() +
+ ". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs +
+ ". Total number of dropped frames: " + droppedFrames);
+ }
+
+ mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */);
+ return new DecodedTextureBuffer(0, null,
+ droppedFrame.presentationTimeStampMs, droppedFrame.timeStampMs,
+ droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
+ SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs);
+ }
+ return null;
+ }
+
+ private void maybeRenderDecodedTextureBuffer() {
+ if (dequeuedSurfaceOutputBuffers.isEmpty() || textureListener.isWaitingForTexture()) {
+ return;
+ }
+ // Get the first frame in the queue and render to the decoder output surface.
+ final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove();
+ textureListener.addBufferToRender(buffer);
+ mediaCodec.releaseOutputBuffer(buffer.index, true /* render */);
+ }
+
+ // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
+ // non-surface decoding.
+ // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
+ // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
+ // MediaCodec.CodecException upon codec error.
+ private void returnDecodedOutputBuffer(int index)
+ throws IllegalStateException, MediaCodec.CodecException {
+ checkOnMediaCodecThread();
+ if (useSurface) {
+ throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
+ }
+ mediaCodec.releaseOutputBuffer(index, false /* render */);
+ }
+}
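
The application-facing surface of this class is just the static configuration
and query methods; everything else is driven from C++. A short sketch using
only the methods defined above:

    // Prefer HW VP8 decode when available; otherwise fall back in the app.
    if (!MediaCodecVideoDecoder.isVp8HwSupported()) {
      // Use a software decoder instead.
    }
    // Get notified about hanging codec instances, e.g. to disable HW decode.
    MediaCodecVideoDecoder.setErrorCallback(
        new MediaCodecVideoDecoder.MediaCodecVideoDecoderErrorCallback() {
          @Override
          public void onMediaCodecVideoDecoderCriticalError(int codecErrors) {
            MediaCodecVideoDecoder.disableVp8HwCodec();
          }
        });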
diff --git a/webrtc/api/java/src/org/webrtc/MediaCodecVideoEncoder.java b/webrtc/api/java/src/org/webrtc/MediaCodecVideoEncoder.java
new file mode 100644
index 0000000..8b8ee71
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/MediaCodecVideoEncoder.java
@@ -0,0 +1,602 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Bundle;
+import android.view.Surface;
+
+import org.webrtc.Logging;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+// Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder.
+// This class is an implementation detail of the Java PeerConnection API.
+@TargetApi(19)
+@SuppressWarnings("deprecation")
+public class MediaCodecVideoEncoder {
+ // This class is constructed, operated, and destroyed by its C++ incarnation,
+ // so the class and its methods have non-public visibility. The API this
+ // class exposes aims to mimic the webrtc::VideoEncoder API as closely as
+ // possible to minimize the amount of translation work necessary.
+
+ private static final String TAG = "MediaCodecVideoEncoder";
+
+ // Tracks webrtc::VideoCodecType.
+ public enum VideoCodecType {
+ VIDEO_CODEC_VP8,
+ VIDEO_CODEC_VP9,
+ VIDEO_CODEC_H264
+ }
+
+ private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec release.
+ private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
+ // Active running encoder instance. Set in initEncode() (called from native code)
+ // and reset to null in release().
+ private static MediaCodecVideoEncoder runningInstance = null;
+ private static MediaCodecVideoEncoderErrorCallback errorCallback = null;
+ private static int codecErrors = 0;
+ // List of disabled codec types - can be set from application.
+ private static Set<String> hwEncoderDisabledTypes = new HashSet<String>();
+
+ private Thread mediaCodecThread;
+ private MediaCodec mediaCodec;
+ private ByteBuffer[] outputBuffers;
+ private EglBase14 eglBase;
+ private int width;
+ private int height;
+ private Surface inputSurface;
+ private GlRectDrawer drawer;
+ private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+ private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
+ private static final String H264_MIME_TYPE = "video/avc";
+ // List of supported HW VP8 codecs.
+ private static final String[] supportedVp8HwCodecPrefixes =
+ {"OMX.qcom.", "OMX.Intel." };
+ // List of supported HW VP9 codecs.
+ private static final String[] supportedVp9HwCodecPrefixes =
+ {"OMX.qcom."};
+ // List of supported HW H.264 codecs.
+ private static final String[] supportedH264HwCodecPrefixes =
+ {"OMX.qcom." };
+ // List of devices with poor H.264 encoder quality.
+ private static final String[] H264_HW_EXCEPTION_MODELS = new String[] {
+ // The HW H.264 encoder on the devices below has poor bitrate control - the
+ // actual bitrate deviates a lot from the target value.
+ "SAMSUNG-SGH-I337",
+ "Nexus 7",
+ "Nexus 4"
+ };
+
+ // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
+ // in OMX_Video.h
+ private static final int VIDEO_ControlRateConstant = 2;
+ // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+ // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+ private static final int
+ COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
+ // Allowable color formats supported by codec - in order of preference.
+ private static final int[] supportedColorList = {
+ CodecCapabilities.COLOR_FormatYUV420Planar,
+ CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+ CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+ COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
+ };
+ private static final int[] supportedSurfaceColorList = {
+ CodecCapabilities.COLOR_FormatSurface
+ };
+ private VideoCodecType type;
+ private int colorFormat; // Used by native code.
+
+ // SPS and PPS NALs (Config frame) for H.264.
+ private ByteBuffer configData = null;
+
+ // MediaCodec error handler - invoked when a critical error happens which may
+ // prevent further use of the MediaCodec API. Currently this means that one of
+ // the MediaCodec instances is hanging and can no longer be used.
+ public static interface MediaCodecVideoEncoderErrorCallback {
+ void onMediaCodecVideoEncoderCriticalError(int codecErrors);
+ }
+
+ public static void setErrorCallback(MediaCodecVideoEncoderErrorCallback errorCallback) {
+ Logging.d(TAG, "Set error callback");
+ MediaCodecVideoEncoder.errorCallback = errorCallback;
+ }
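+
+ // Usage sketch (illustrative, not part of the encoder itself): an application
+ // can register a callback to react to repeated codec hangs, e.g. by falling
+ // back to software encoding. The fallback policy is an application assumption.
+ //
+ //   MediaCodecVideoEncoder.setErrorCallback(
+ //       new MediaCodecVideoEncoder.MediaCodecVideoEncoderErrorCallback() {
+ //         @Override
+ //         public void onMediaCodecVideoEncoderCriticalError(int codecErrors) {
+ //           // E.g. disable HW codecs and renegotiate with SW encoding.
+ //         }
+ //       });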
+
+ // Functions to disable HW encoding - can be called from applications for
+ // platforms which have known HW encoding problems.
+ public static void disableVp8HwCodec() {
+ Logging.w(TAG, "VP8 encoding is disabled by application.");
+ hwEncoderDisabledTypes.add(VP8_MIME_TYPE);
+ }
+
+ public static void disableVp9HwCodec() {
+ Logging.w(TAG, "VP9 encoding is disabled by application.");
+ hwEncoderDisabledTypes.add(VP9_MIME_TYPE);
+ }
+
+ public static void disableH264HwCodec() {
+ Logging.w(TAG, "H.264 encoding is disabled by application.");
+ hwEncoderDisabledTypes.add(H264_MIME_TYPE);
+ }
+
+ // Functions to query if HW encoding is supported.
+ public static boolean isVp8HwSupported() {
+ return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) &&
+ (findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, supportedColorList) != null);
+ }
+
+ public static boolean isVp9HwSupported() {
+ return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) &&
+ (findHwEncoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes, supportedColorList) != null);
+ }
+
+ public static boolean isH264HwSupported() {
+ return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) &&
+ (findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes, supportedColorList) != null);
+ }
+
+ public static boolean isVp8HwSupportedUsingTextures() {
+ return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) && (findHwEncoder(
+ VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, supportedSurfaceColorList) != null);
+ }
+
+ public static boolean isVp9HwSupportedUsingTextures() {
+ return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) && (findHwEncoder(
+ VP9_MIME_TYPE, supportedVp9HwCodecPrefixes, supportedSurfaceColorList) != null);
+ }
+
+ public static boolean isH264HwSupportedUsingTextures() {
+ return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) && (findHwEncoder(
+ H264_MIME_TYPE, supportedH264HwCodecPrefixes, supportedSurfaceColorList) != null);
+ }
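+
+ // Usage sketch (illustrative): applications typically gate HW encoding on
+ // these queries before advertising a codec; the fallback branch is an
+ // application assumption.
+ //
+ //   if (MediaCodecVideoEncoder.isVp8HwSupported()) {
+ //     // Offer HW-accelerated VP8.
+ //   } else {
+ //     // Fall back to the software VP8 encoder.
+ //   }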
+
+ // Helper struct for findHwEncoder() below.
+ private static class EncoderProperties {
+ public EncoderProperties(String codecName, int colorFormat) {
+ this.codecName = codecName;
+ this.colorFormat = colorFormat;
+ }
+ public final String codecName; // OpenMax component name for HW codec.
+ public final int colorFormat; // Color format supported by codec.
+ }
+
+ private static EncoderProperties findHwEncoder(
+ String mime, String[] supportedHwCodecPrefixes, int[] colorList) {
+ // MediaCodec.setParameters is missing for JB and below, so the bitrate
+ // cannot be adjusted dynamically.
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+ return null;
+ }
+
+ // Check if device is in H.264 exception list.
+ if (mime.equals(H264_MIME_TYPE)) {
+ List<String> exceptionModels = Arrays.asList(H264_HW_EXCEPTION_MODELS);
+ if (exceptionModels.contains(Build.MODEL)) {
+ Logging.w(TAG, "Model: " + Build.MODEL + " has black listed H.264 encoder.");
+ return null;
+ }
+ }
+
+ for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+ MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
+ if (!info.isEncoder()) {
+ continue;
+ }
+ String name = null;
+ for (String mimeType : info.getSupportedTypes()) {
+ if (mimeType.equals(mime)) {
+ name = info.getName();
+ break;
+ }
+ }
+ if (name == null) {
+ continue; // No HW support in this codec; try the next one.
+ }
+ Logging.v(TAG, "Found candidate encoder " + name);
+
+ // Check if this is a supported HW encoder.
+ boolean supportedCodec = false;
+ for (String hwCodecPrefix : supportedHwCodecPrefixes) {
+ if (name.startsWith(hwCodecPrefix)) {
+ supportedCodec = true;
+ break;
+ }
+ }
+ if (!supportedCodec) {
+ continue;
+ }
+
+ CodecCapabilities capabilities = info.getCapabilitiesForType(mime);
+ for (int colorFormat : capabilities.colorFormats) {
+ Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
+ }
+
+ for (int supportedColorFormat : colorList) {
+ for (int codecColorFormat : capabilities.colorFormats) {
+ if (codecColorFormat == supportedColorFormat) {
+ // Found supported HW encoder.
+ Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name +
+ ". Color: 0x" + Integer.toHexString(codecColorFormat));
+ return new EncoderProperties(name, codecColorFormat);
+ }
+ }
+ }
+ }
+ return null; // No HW encoder.
+ }
+
+ private void checkOnMediaCodecThread() {
+ if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
+ throw new RuntimeException(
+ "MediaCodecVideoEncoder previously operated on " + mediaCodecThread +
+ " but is now called on " + Thread.currentThread());
+ }
+ }
+
+ public static void printStackTrace() {
+ if (runningInstance != null && runningInstance.mediaCodecThread != null) {
+ StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
+ if (mediaCodecStackTraces.length > 0) {
+ Logging.d(TAG, "MediaCodecVideoEncoder stacks trace:");
+ for (StackTraceElement stackTrace : mediaCodecStackTraces) {
+ Logging.d(TAG, stackTrace.toString());
+ }
+ }
+ }
+ }
+
+ static MediaCodec createByCodecName(String codecName) {
+ try {
+ // In the L-SDK this call can throw IOException, so catch a generic
+ // Exception in order to work on both older and newer SDK levels.
+ return MediaCodec.createByCodecName(codecName);
+ } catch (Exception e) {
+ return null;
+ }
+ }
+
+ boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps,
+ EglBase14.Context sharedContext) {
+ final boolean useSurface = sharedContext != null;
+ Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
+ ". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
+
+ this.width = width;
+ this.height = height;
+ if (mediaCodecThread != null) {
+ throw new RuntimeException("Forgot to release()?");
+ }
+ EncoderProperties properties = null;
+ String mime = null;
+ int keyFrameIntervalSec = 0;
+ if (type == VideoCodecType.VIDEO_CODEC_VP8) {
+ mime = VP8_MIME_TYPE;
+ properties = findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes,
+ useSurface ? supportedSurfaceColorList : supportedColorList);
+ keyFrameIntervalSec = 100;
+ } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
+ mime = VP9_MIME_TYPE;
+ properties = findHwEncoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes,
+ useSurface ? supportedSurfaceColorList : supportedColorList);
+ keyFrameIntervalSec = 100;
+ } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
+ mime = H264_MIME_TYPE;
+ properties = findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes,
+ useSurface ? supportedSurfaceColorList : supportedColorList);
+ keyFrameIntervalSec = 20;
+ }
+ if (properties == null) {
+ throw new RuntimeException("Can not find HW encoder for " + type);
+ }
+ runningInstance = this; // Encoder is now running and can be queried for stack traces.
+ colorFormat = properties.colorFormat;
+ Logging.d(TAG, "Color format: " + colorFormat);
+
+ mediaCodecThread = Thread.currentThread();
+ try {
+ MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
+ format.setInteger(MediaFormat.KEY_BIT_RATE, 1000 * kbps);
+ format.setInteger("bitrate-mode", VIDEO_ControlRateConstant);
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
+ format.setInteger(MediaFormat.KEY_FRAME_RATE, fps);
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
+ Logging.d(TAG, " Format: " + format);
+ mediaCodec = createByCodecName(properties.codecName);
+ this.type = type;
+ if (mediaCodec == null) {
+ Logging.e(TAG, "Can not create media encoder");
+ return false;
+ }
+ mediaCodec.configure(
+ format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+
+ if (useSurface) {
+ eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
+ // Create an input surface and keep a reference since we must release the surface when done.
+ inputSurface = mediaCodec.createInputSurface();
+ eglBase.createSurface(inputSurface);
+ drawer = new GlRectDrawer();
+ }
+ mediaCodec.start();
+ outputBuffers = mediaCodec.getOutputBuffers();
+ Logging.d(TAG, "Output buffers: " + outputBuffers.length);
+
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "initEncode failed", e);
+ return false;
+ }
+ return true;
+ }
+
+ ByteBuffer[] getInputBuffers() {
+ ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
+ Logging.d(TAG, "Input buffers: " + inputBuffers.length);
+ return inputBuffers;
+ }
+
+ boolean encodeBuffer(
+ boolean isKeyframe, int inputBuffer, int size,
+ long presentationTimestampUs) {
+ checkOnMediaCodecThread();
+ try {
+ if (isKeyframe) {
+ // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
+ // indicate this in queueInputBuffer() below and guarantee that _this_
+ // frame is encoded as a key frame, but sadly that flag is ignored. Instead,
+ // we request a key frame "soon".
+ Logging.d(TAG, "Sync frame request");
+ Bundle b = new Bundle();
+ b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+ mediaCodec.setParameters(b);
+ }
+ mediaCodec.queueInputBuffer(
+ inputBuffer, 0, size, presentationTimestampUs, 0);
+ return true;
+ }
+ catch (IllegalStateException e) {
+ Logging.e(TAG, "encodeBuffer failed", e);
+ return false;
+ }
+ }
+
+ boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix,
+ long presentationTimestampUs) {
+ checkOnMediaCodecThread();
+ try {
+ if (isKeyframe) {
+ Logging.d(TAG, "Sync frame request");
+ Bundle b = new Bundle();
+ b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+ mediaCodec.setParameters(b);
+ }
+ eglBase.makeCurrent();
+ // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
+ // but it's a workaround for bug webrtc:5147.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ drawer.drawOes(oesTextureId, transformationMatrix, 0, 0, width, height);
+ eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
+ return true;
+ }
+ catch (RuntimeException e) {
+ Logging.e(TAG, "encodeTexture failed", e);
+ return false;
+ }
+ }
+
+ void release() {
+ Logging.d(TAG, "Java releaseEncoder");
+ checkOnMediaCodecThread();
+
+ // Run MediaCodec stop() and release() on a separate thread since sometimes
+ // MediaCodec.stop() may hang.
+ final CountDownLatch releaseDone = new CountDownLatch(1);
+
+ Runnable runMediaCodecRelease = new Runnable() {
+ @Override
+ public void run() {
+ try {
+ Logging.d(TAG, "Java releaseEncoder on release thread");
+ mediaCodec.stop();
+ mediaCodec.release();
+ Logging.d(TAG, "Java releaseEncoder on release thread done");
+ } catch (Exception e) {
+ Logging.e(TAG, "Media encoder release failed", e);
+ }
+ releaseDone.countDown();
+ }
+ };
+ new Thread(runMediaCodecRelease).start();
+
+ if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ Logging.e(TAG, "Media encoder release timeout");
+ codecErrors++;
+ if (errorCallback != null) {
+ Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
+ errorCallback.onMediaCodecVideoEncoderCriticalError(codecErrors);
+ }
+ }
+
+ mediaCodec = null;
+ mediaCodecThread = null;
+ if (drawer != null) {
+ drawer.release();
+ drawer = null;
+ }
+ if (eglBase != null) {
+ eglBase.release();
+ eglBase = null;
+ }
+ if (inputSurface != null) {
+ inputSurface.release();
+ inputSurface = null;
+ }
+ runningInstance = null;
+ Logging.d(TAG, "Java releaseEncoder done");
+ }
+
+ private boolean setRates(int kbps, int frameRateIgnored) {
+ // frameRate argument is ignored - HW encoder is supposed to use
+ // video frame timestamps for bit allocation.
+ checkOnMediaCodecThread();
+ Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + frameRateIgnored);
+ try {
+ Bundle params = new Bundle();
+ params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, 1000 * kbps);
+ mediaCodec.setParameters(params);
+ return true;
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "setRates failed", e);
+ return false;
+ }
+ }
+
+ // Dequeue an input buffer and return its index, -1 if no input buffer is
+ // available, or -2 if the codec is no longer operative.
+ int dequeueInputBuffer() {
+ checkOnMediaCodecThread();
+ try {
+ return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "dequeueIntputBuffer failed", e);
+ return -2;
+ }
+ }
+
+ // Helper struct for dequeueOutputBuffer() below.
+ static class OutputBufferInfo {
+ public OutputBufferInfo(
+ int index, ByteBuffer buffer,
+ boolean isKeyFrame, long presentationTimestampUs) {
+ this.index = index;
+ this.buffer = buffer;
+ this.isKeyFrame = isKeyFrame;
+ this.presentationTimestampUs = presentationTimestampUs;
+ }
+
+ public final int index;
+ public final ByteBuffer buffer;
+ public final boolean isKeyFrame;
+ public final long presentationTimestampUs;
+ }
+
+ // Dequeue and return an output buffer, or null if no output is ready. Return
+ // a fake OutputBufferInfo with index -1 if the codec is no longer operable.
+ OutputBufferInfo dequeueOutputBuffer() {
+ checkOnMediaCodecThread();
+ try {
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
+ // Check if this is a config frame and save the configuration data.
+ if (result >= 0) {
+ boolean isConfigFrame =
+ (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
+ if (isConfigFrame) {
+ Logging.d(TAG, "Config frame generated. Offset: " + info.offset +
+ ". Size: " + info.size);
+ configData = ByteBuffer.allocateDirect(info.size);
+ outputBuffers[result].position(info.offset);
+ outputBuffers[result].limit(info.offset + info.size);
+ configData.put(outputBuffers[result]);
+ // Release buffer back.
+ mediaCodec.releaseOutputBuffer(result, false);
+ // Query next output.
+ result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
+ }
+ }
+ if (result >= 0) {
+ // MediaCodec doesn't care about Buffer position/remaining/etc so we can
+ // mess with them to get a slice and avoid having to pass extra
+ // (BufferInfo-related) parameters back to C++.
+ ByteBuffer outputBuffer = outputBuffers[result].duplicate();
+ outputBuffer.position(info.offset);
+ outputBuffer.limit(info.offset + info.size);
+ // Check key frame flag.
+ boolean isKeyFrame =
+ (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
+ if (isKeyFrame) {
+ Logging.d(TAG, "Sync frame generated");
+ }
+ if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
+ Logging.d(TAG, "Appending config frame of size " + configData.capacity() +
+ " to output buffer with offset " + info.offset + ", size " +
+ info.size);
+ // For an H.264 key frame, append the SPS and PPS NALs at the start.
+ ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(
+ configData.capacity() + info.size);
+ configData.rewind();
+ keyFrameBuffer.put(configData);
+ keyFrameBuffer.put(outputBuffer);
+ keyFrameBuffer.position(0);
+ return new OutputBufferInfo(result, keyFrameBuffer,
+ isKeyFrame, info.presentationTimeUs);
+ } else {
+ return new OutputBufferInfo(result, outputBuffer.slice(),
+ isKeyFrame, info.presentationTimeUs);
+ }
+ } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+ outputBuffers = mediaCodec.getOutputBuffers();
+ return dequeueOutputBuffer();
+ } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ return dequeueOutputBuffer();
+ } else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
+ return null;
+ }
+ throw new RuntimeException("dequeueOutputBuffer: " + result);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "dequeueOutputBuffer failed", e);
+ return new OutputBufferInfo(-1, null, false, -1);
+ }
+ }
+
+ // Release a dequeued output buffer back to the codec for re-use. Return
+ // false if the codec is no longer operable.
+ boolean releaseOutputBuffer(int index) {
+ checkOnMediaCodecThread();
+ try {
+ mediaCodec.releaseOutputBuffer(index, false);
+ return true;
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "releaseOutputBuffer failed", e);
+ return false;
+ }
+ }
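+
+ // Drain-loop sketch (illustrative; in practice this is driven from the native
+ // side on the codec thread). deliverEncodedFrame() is a hypothetical consumer.
+ //
+ //   OutputBufferInfo info = encoder.dequeueOutputBuffer();
+ //   if (info != null && info.index >= 0) {
+ //     deliverEncodedFrame(info.buffer, info.isKeyFrame, info.presentationTimestampUs);
+ //     encoder.releaseOutputBuffer(info.index);
+ //   }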
+}
diff --git a/webrtc/api/java/src/org/webrtc/MediaConstraints.java b/webrtc/api/java/src/org/webrtc/MediaConstraints.java
new file mode 100644
index 0000000..730df35
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/MediaConstraints.java
@@ -0,0 +1,101 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Description of media constraints for {@code MediaStream} and
+ * {@code PeerConnection}.
+ */
+public class MediaConstraints {
+ /** Simple String key/value pair. */
+ public static class KeyValuePair {
+ private final String key;
+ private final String value;
+
+ public KeyValuePair(String key, String value) {
+ this.key = key;
+ this.value = value;
+ }
+
+ public String getKey() {
+ return key;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+ public String toString() {
+ return key + ": " + value;
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (this == other) {
+ return true;
+ }
+ if (other == null || getClass() != other.getClass()) {
+ return false;
+ }
+ KeyValuePair that = (KeyValuePair)other;
+ return key.equals(that.key) && value.equals(that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return key.hashCode() + value.hashCode();
+ }
+ }
+
+ public final List<KeyValuePair> mandatory;
+ public final List<KeyValuePair> optional;
+
+ public MediaConstraints() {
+ mandatory = new LinkedList<KeyValuePair>();
+ optional = new LinkedList<KeyValuePair>();
+ }
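+
+ // Usage sketch (illustrative; the constraint names are application-side
+ // assumptions, not defined by this class):
+ //
+ //   MediaConstraints constraints = new MediaConstraints();
+ //   constraints.mandatory.add(
+ //       new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
+ //   constraints.optional.add(
+ //       new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));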
+
+ private static String stringifyKeyValuePairList(List<KeyValuePair> list) {
+ StringBuilder builder = new StringBuilder("[");
+ for (KeyValuePair pair : list) {
+ if (builder.length() > 1) {
+ builder.append(", ");
+ }
+ builder.append(pair.toString());
+ }
+ return builder.append("]").toString();
+ }
+
+ public String toString() {
+ return "mandatory: " + stringifyKeyValuePairList(mandatory) +
+ ", optional: " + stringifyKeyValuePairList(optional);
+ }
+}
diff --git a/webrtc/api/java/src/org/webrtc/MediaSource.java b/webrtc/api/java/src/org/webrtc/MediaSource.java
new file mode 100644
index 0000000..d79b462
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/MediaSource.java
@@ -0,0 +1,55 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package org.webrtc;
+
+/** Java wrapper for a C++ MediaSourceInterface. */
+public class MediaSource {
+ /** Tracks MediaSourceInterface.SourceState */
+ public enum State {
+ INITIALIZING, LIVE, ENDED, MUTED
+ }
+
+ final long nativeSource; // Package-protected for PeerConnectionFactory.
+
+ public MediaSource(long nativeSource) {
+ this.nativeSource = nativeSource;
+ }
+
+ public State state() {
+ return nativeState(nativeSource);
+ }
+
+ public void dispose() {
+ free(nativeSource);
+ }
+
+ private static native State nativeState(long pointer);
+
+ private static native void free(long nativeSource);
+}
diff --git a/webrtc/api/java/src/org/webrtc/MediaStream.java b/webrtc/api/java/src/org/webrtc/MediaStream.java
new file mode 100644
index 0000000..be00f13
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/MediaStream.java
@@ -0,0 +1,134 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.util.LinkedList;
+
+/** Java wrapper for a C++ MediaStreamInterface. */
+public class MediaStream {
+ public final LinkedList<AudioTrack> audioTracks;
+ public final LinkedList<VideoTrack> videoTracks;
+ public final LinkedList<VideoTrack> preservedVideoTracks;
+ // Package-protected for PeerConnection.
+ final long nativeStream;
+
+ public MediaStream(long nativeStream) {
+ audioTracks = new LinkedList<AudioTrack>();
+ videoTracks = new LinkedList<VideoTrack>();
+ preservedVideoTracks = new LinkedList<VideoTrack>();
+ this.nativeStream = nativeStream;
+ }
+
+ public boolean addTrack(AudioTrack track) {
+ if (nativeAddAudioTrack(nativeStream, track.nativeTrack)) {
+ audioTracks.add(track);
+ return true;
+ }
+ return false;
+ }
+
+ public boolean addTrack(VideoTrack track) {
+ if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
+ videoTracks.add(track);
+ return true;
+ }
+ return false;
+ }
+
+ // Tracks added via addTrack() will be automatically released once
+ // MediaStream.dispose() is called. If a video track needs to be preserved
+ // after the MediaStream is destroyed, it should be added using
+ // addPreservedTrack() instead, as sketched below.
+ public boolean addPreservedTrack(VideoTrack track) {
+ if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
+ preservedVideoTracks.add(track);
+ return true;
+ }
+ return false;
+ }
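+
+ // Usage sketch (illustrative): keep a local video track alive across the
+ // stream's lifetime; |localVideoTrack| is an assumed caller-owned track.
+ //
+ //   mediaStream.addPreservedTrack(localVideoTrack);
+ //   ...
+ //   mediaStream.dispose();     // |localVideoTrack| is not released here.
+ //   localVideoTrack.dispose(); // Caller releases it when done.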
+
+ public boolean removeTrack(AudioTrack track) {
+ if (nativeRemoveAudioTrack(nativeStream, track.nativeTrack)) {
+ audioTracks.remove(track);
+ return true;
+ }
+ return false;
+ }
+
+ public boolean removeTrack(VideoTrack track) {
+ if (nativeRemoveVideoTrack(nativeStream, track.nativeTrack)) {
+ videoTracks.remove(track);
+ preservedVideoTracks.remove(track);
+ return true;
+ }
+ return false;
+ }
+
+ public void dispose() {
+ // Remove and release previously added audio and video tracks.
+ while (!audioTracks.isEmpty()) {
+ AudioTrack track = audioTracks.getFirst();
+ removeTrack(track);
+ track.dispose();
+ }
+ while (!videoTracks.isEmpty()) {
+ VideoTrack track = videoTracks.getFirst();
+ removeTrack(track);
+ track.dispose();
+ }
+ // Remove, but do not release preserved video tracks.
+ while (!preservedVideoTracks.isEmpty()) {
+ removeTrack(preservedVideoTracks.getFirst());
+ }
+ free(nativeStream);
+ }
+
+ public String label() {
+ return nativeLabel(nativeStream);
+ }
+
+ public String toString() {
+ return "[" + label() + ":A=" + audioTracks.size() +
+ ":V=" + videoTracks.size() + "]";
+ }
+
+ private static native boolean nativeAddAudioTrack(
+ long nativeStream, long nativeAudioTrack);
+
+ private static native boolean nativeAddVideoTrack(
+ long nativeStream, long nativeVideoTrack);
+
+ private static native boolean nativeRemoveAudioTrack(
+ long nativeStream, long nativeAudioTrack);
+
+ private static native boolean nativeRemoveVideoTrack(
+ long nativeStream, long nativeVideoTrack);
+
+ private static native String nativeLabel(long nativeStream);
+
+ private static native void free(long nativeStream);
+}
diff --git a/webrtc/api/java/src/org/webrtc/MediaStreamTrack.java b/webrtc/api/java/src/org/webrtc/MediaStreamTrack.java
new file mode 100644
index 0000000..3965069
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/MediaStreamTrack.java
@@ -0,0 +1,86 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ MediaStreamTrackInterface. */
+public class MediaStreamTrack {
+ /** Tracks MediaStreamTrackInterface.TrackState */
+ public enum State {
+ INITIALIZING, LIVE, ENDED, FAILED
+ }
+
+ final long nativeTrack;
+
+ public MediaStreamTrack(long nativeTrack) {
+ this.nativeTrack = nativeTrack;
+ }
+
+ public String id() {
+ return nativeId(nativeTrack);
+ }
+
+ public String kind() {
+ return nativeKind(nativeTrack);
+ }
+
+ public boolean enabled() {
+ return nativeEnabled(nativeTrack);
+ }
+
+ public boolean setEnabled(boolean enable) {
+ return nativeSetEnabled(nativeTrack, enable);
+ }
+
+ public State state() {
+ return nativeState(nativeTrack);
+ }
+
+ public boolean setState(State newState) {
+ return nativeSetState(nativeTrack, newState.ordinal());
+ }
+
+ public void dispose() {
+ free(nativeTrack);
+ }
+
+ private static native String nativeId(long nativeTrack);
+
+ private static native String nativeKind(long nativeTrack);
+
+ private static native boolean nativeEnabled(long nativeTrack);
+
+ private static native boolean nativeSetEnabled(
+ long nativeTrack, boolean enabled);
+
+ private static native State nativeState(long nativeTrack);
+
+ private static native boolean nativeSetState(
+ long nativeTrack, int newState);
+
+ private static native void free(long nativeTrack);
+}
diff --git a/webrtc/api/java/src/org/webrtc/PeerConnection.java b/webrtc/api/java/src/org/webrtc/PeerConnection.java
new file mode 100644
index 0000000..36cd075
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/PeerConnection.java
@@ -0,0 +1,305 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package org.webrtc;
+
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Java-land version of the PeerConnection APIs; wraps the C++ API
+ * http://www.webrtc.org/reference/native-apis, which in turn is inspired by the
+ * JS APIs: http://dev.w3.org/2011/webrtc/editor/webrtc.html and
+ * http://www.w3.org/TR/mediacapture-streams/
+ */
+public class PeerConnection {
+ static {
+ System.loadLibrary("jingle_peerconnection_so");
+ }
+
+ /** Tracks PeerConnectionInterface::IceGatheringState */
+ public enum IceGatheringState { NEW, GATHERING, COMPLETE };
+
+
+ /** Tracks PeerConnectionInterface::IceConnectionState */
+ public enum IceConnectionState {
+ NEW, CHECKING, CONNECTED, COMPLETED, FAILED, DISCONNECTED, CLOSED
+ };
+
+ /** Tracks PeerConnectionInterface::SignalingState */
+ public enum SignalingState {
+ STABLE, HAVE_LOCAL_OFFER, HAVE_LOCAL_PRANSWER, HAVE_REMOTE_OFFER,
+ HAVE_REMOTE_PRANSWER, CLOSED
+ };
+
+ /** Java version of PeerConnectionObserver. */
+ public static interface Observer {
+ /** Triggered when the SignalingState changes. */
+ public void onSignalingChange(SignalingState newState);
+
+ /** Triggered when the IceConnectionState changes. */
+ public void onIceConnectionChange(IceConnectionState newState);
+
+ /** Triggered when the ICE connection receiving status changes. */
+ public void onIceConnectionReceivingChange(boolean receiving);
+
+ /** Triggered when the IceGatheringState changes. */
+ public void onIceGatheringChange(IceGatheringState newState);
+
+ /** Triggered when a new ICE candidate has been found. */
+ public void onIceCandidate(IceCandidate candidate);
+
+ /** Triggered when media is received on a new stream from remote peer. */
+ public void onAddStream(MediaStream stream);
+
+ /** Triggered when a remote peer closes a stream. */
+ public void onRemoveStream(MediaStream stream);
+
+ /** Triggered when a remote peer opens a DataChannel. */
+ public void onDataChannel(DataChannel dataChannel);
+
+ /** Triggered when renegotiation is necessary. */
+ public void onRenegotiationNeeded();
+ }
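+
+ // Implementation sketch (illustrative): only the candidate callback is shown;
+ // a real Observer must implement all of the methods above. The signaling
+ // transport is an application assumption.
+ //
+ //   PeerConnection.Observer observer = new PeerConnection.Observer() {
+ //     @Override
+ //     public void onIceCandidate(IceCandidate candidate) {
+ //       // Forward |candidate| to the remote peer over the signaling channel.
+ //     }
+ //     // ... remaining callbacks stubbed out.
+ //   };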
+
+ /** Java version of PeerConnectionInterface.IceServer. */
+ public static class IceServer {
+ public final String uri;
+ public final String username;
+ public final String password;
+
+ /** Convenience constructor for STUN servers. */
+ public IceServer(String uri) {
+ this(uri, "", "");
+ }
+
+ public IceServer(String uri, String username, String password) {
+ this.uri = uri;
+ this.username = username;
+ this.password = password;
+ }
+
+ public String toString() {
+ return uri + "[" + username + ":" + password + "]";
+ }
+ }
+
+ /** Java version of PeerConnectionInterface.IceTransportsType */
+ public enum IceTransportsType {
+ NONE, RELAY, NOHOST, ALL
+ };
+
+ /** Java version of PeerConnectionInterface.BundlePolicy */
+ public enum BundlePolicy {
+ BALANCED, MAXBUNDLE, MAXCOMPAT
+ };
+
+ /** Java version of PeerConnectionInterface.RtcpMuxPolicy */
+ public enum RtcpMuxPolicy {
+ NEGOTIATE, REQUIRE
+ };
+
+ /** Java version of PeerConnectionInterface.TcpCandidatePolicy */
+ public enum TcpCandidatePolicy {
+ ENABLED, DISABLED
+ };
+
+ /** Java version of rtc::KeyType */
+ public enum KeyType {
+ RSA, ECDSA
+ }
+
+ /** Java version of PeerConnectionInterface.ContinualGatheringPolicy */
+ public enum ContinualGatheringPolicy {
+ GATHER_ONCE, GATHER_CONTINUALLY
+ }
+
+ /** Java version of PeerConnectionInterface.RTCConfiguration */
+ public static class RTCConfiguration {
+ public IceTransportsType iceTransportsType;
+ public List<IceServer> iceServers;
+ public BundlePolicy bundlePolicy;
+ public RtcpMuxPolicy rtcpMuxPolicy;
+ public TcpCandidatePolicy tcpCandidatePolicy;
+ public int audioJitterBufferMaxPackets;
+ public boolean audioJitterBufferFastAccelerate;
+ public int iceConnectionReceivingTimeout;
+ public int iceBackupCandidatePairPingInterval;
+ public KeyType keyType;
+ public ContinualGatheringPolicy continualGatheringPolicy;
+
+ public RTCConfiguration(List<IceServer> iceServers) {
+ iceTransportsType = IceTransportsType.ALL;
+ bundlePolicy = BundlePolicy.BALANCED;
+ rtcpMuxPolicy = RtcpMuxPolicy.NEGOTIATE;
+ tcpCandidatePolicy = TcpCandidatePolicy.ENABLED;
+ this.iceServers = iceServers;
+ audioJitterBufferMaxPackets = 50;
+ audioJitterBufferFastAccelerate = false;
+ iceConnectionReceivingTimeout = -1;
+ iceBackupCandidatePairPingInterval = -1;
+ keyType = KeyType.ECDSA;
+ continualGatheringPolicy = ContinualGatheringPolicy.GATHER_ONCE;
+ }
+ };
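+
+ // Usage sketch (illustrative; the STUN URI is an assumption): construct a
+ // configuration with the defaults above and override selected fields.
+ //
+ //   List<PeerConnection.IceServer> servers = new LinkedList<PeerConnection.IceServer>();
+ //   servers.add(new PeerConnection.IceServer("stun:stun.example.org"));
+ //   PeerConnection.RTCConfiguration config =
+ //       new PeerConnection.RTCConfiguration(servers);
+ //   config.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;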
+
+ private final List<MediaStream> localStreams;
+ private final long nativePeerConnection;
+ private final long nativeObserver;
+ private List<RtpSender> senders;
+ private List<RtpReceiver> receivers;
+
+ PeerConnection(long nativePeerConnection, long nativeObserver) {
+ this.nativePeerConnection = nativePeerConnection;
+ this.nativeObserver = nativeObserver;
+ localStreams = new LinkedList<MediaStream>();
+ senders = new LinkedList<RtpSender>();
+ receivers = new LinkedList<RtpReceiver>();
+ }
+
+ // JsepInterface.
+ public native SessionDescription getLocalDescription();
+
+ public native SessionDescription getRemoteDescription();
+
+ public native DataChannel createDataChannel(
+ String label, DataChannel.Init init);
+
+ public native void createOffer(
+ SdpObserver observer, MediaConstraints constraints);
+
+ public native void createAnswer(
+ SdpObserver observer, MediaConstraints constraints);
+
+ public native void setLocalDescription(
+ SdpObserver observer, SessionDescription sdp);
+
+ public native void setRemoteDescription(
+ SdpObserver observer, SessionDescription sdp);
+
+ public native boolean setConfiguration(RTCConfiguration config);
+
+ public boolean addIceCandidate(IceCandidate candidate) {
+ return nativeAddIceCandidate(
+ candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
+ }
+
+ public boolean addStream(MediaStream stream) {
+ boolean ret = nativeAddLocalStream(stream.nativeStream);
+ if (!ret) {
+ return false;
+ }
+ localStreams.add(stream);
+ return true;
+ }
+
+ public void removeStream(MediaStream stream) {
+ nativeRemoveLocalStream(stream.nativeStream);
+ localStreams.remove(stream);
+ }
+
+ public RtpSender createSender(String kind, String stream_id) {
+ RtpSender new_sender = nativeCreateSender(kind, stream_id);
+ if (new_sender != null) {
+ senders.add(new_sender);
+ }
+ return new_sender;
+ }
+
+ // Note that calling getSenders will dispose of the senders previously
+ // returned (and the same goes for getReceivers).
+ public List<RtpSender> getSenders() {
+ for (RtpSender sender : senders) {
+ sender.dispose();
+ }
+ senders = nativeGetSenders();
+ return Collections.unmodifiableList(senders);
+ }
+
+ public List<RtpReceiver> getReceivers() {
+ for (RtpReceiver receiver : receivers) {
+ receiver.dispose();
+ }
+ receivers = nativeGetReceivers();
+ return Collections.unmodifiableList(receivers);
+ }
+
+ public boolean getStats(StatsObserver observer, MediaStreamTrack track) {
+ return nativeGetStats(observer, (track == null) ? 0 : track.nativeTrack);
+ }
+
+ // TODO(fischman): add support for DTMF-related methods once that API
+ // stabilizes.
+ public native SignalingState signalingState();
+
+ public native IceConnectionState iceConnectionState();
+
+ public native IceGatheringState iceGatheringState();
+
+ public native void close();
+
+ public void dispose() {
+ close();
+ for (MediaStream stream : localStreams) {
+ nativeRemoveLocalStream(stream.nativeStream);
+ stream.dispose();
+ }
+ localStreams.clear();
+ for (RtpSender sender : senders) {
+ sender.dispose();
+ }
+ senders.clear();
+ for (RtpReceiver receiver : receivers) {
+ receiver.dispose();
+ }
+ receivers.clear();
+ freePeerConnection(nativePeerConnection);
+ freeObserver(nativeObserver);
+ }
+
+ private static native void freePeerConnection(long nativePeerConnection);
+
+ private static native void freeObserver(long nativeObserver);
+
+ private native boolean nativeAddIceCandidate(
+ String sdpMid, int sdpMLineIndex, String iceCandidateSdp);
+
+ private native boolean nativeAddLocalStream(long nativeStream);
+
+ private native void nativeRemoveLocalStream(long nativeStream);
+
+ private native boolean nativeGetStats(
+ StatsObserver observer, long nativeTrack);
+
+ private native RtpSender nativeCreateSender(String kind, String stream_id);
+
+ private native List<RtpSender> nativeGetSenders();
+
+ private native List<RtpReceiver> nativeGetReceivers();
+}
diff --git a/webrtc/api/java/src/org/webrtc/PeerConnectionFactory.java b/webrtc/api/java/src/org/webrtc/PeerConnectionFactory.java
new file mode 100644
index 0000000..e6b3205
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/PeerConnectionFactory.java
@@ -0,0 +1,271 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package org.webrtc;
+
+import java.util.List;
+
+/**
+ * Java wrapper for a C++ PeerConnectionFactoryInterface. Main entry point to
+ * the PeerConnection API for clients.
+ */
+public class PeerConnectionFactory {
+ static {
+ System.loadLibrary("jingle_peerconnection_so");
+ }
+
+ private static final String TAG = "PeerConnectionFactory";
+ private final long nativeFactory;
+ private static Thread workerThread;
+ private static Thread signalingThread;
+
+ public static class Options {
+ // Keep in sync with webrtc/base/network.h!
+ static final int ADAPTER_TYPE_UNKNOWN = 0;
+ static final int ADAPTER_TYPE_ETHERNET = 1 << 0;
+ static final int ADAPTER_TYPE_WIFI = 1 << 1;
+ static final int ADAPTER_TYPE_CELLULAR = 1 << 2;
+ static final int ADAPTER_TYPE_VPN = 1 << 3;
+ static final int ADAPTER_TYPE_LOOPBACK = 1 << 4;
+
+ public int networkIgnoreMask;
+ public boolean disableEncryption;
+ public boolean disableNetworkMonitor;
+ }
+
+ // |context| is an android.content.Context object, but we keep it untyped here
+ // to allow building on non-Android platforms.
+ // Callers may specify either |initializeAudio| or |initializeVideo| as false
+ // to skip initializing the respective engine (and avoid the need for the
+ // respective permissions).
+ // |videoHwAcceleration| enables HW video decoding to texture; the shared EGL
+ // context used on the video decoding thread is provided separately via
+ // setVideoHwAccelerationOptions().
+ public static native boolean initializeAndroidGlobals(
+ Object context, boolean initializeAudio, boolean initializeVideo,
+ boolean videoHwAcceleration);
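+
+ // Initialization sketch (illustrative; |appContext| is an assumed Android
+ // application context, passed untyped as described above). Globals must be
+ // initialized before the factory is constructed.
+ //
+ //   PeerConnectionFactory.initializeAndroidGlobals(
+ //       appContext, true /* initializeAudio */, true /* initializeVideo */,
+ //       true /* videoHwAcceleration */);
+ //   PeerConnectionFactory factory =
+ //       new PeerConnectionFactory(new PeerConnectionFactory.Options());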
+
+ // Field trial initialization. Must be called before PeerConnectionFactory
+ // is created.
+ public static native void initializeFieldTrials(String fieldTrialsInitString);
+ // Internal tracing initialization. Must be called before PeerConnectionFactory is created to
+ // prevent racing with tracing code.
+ public static native void initializeInternalTracer();
+ // Internal tracing shutdown, called to prevent resource leaks. Must be called after
+ // PeerConnectionFactory is gone to prevent races with code performing tracing.
+ public static native void shutdownInternalTracer();
+ // Start/stop internal capturing of internal tracing.
+ public static native boolean startInternalTracingCapture(String tracing_filename);
+ public static native void stopInternalTracingCapture();
+
+ @Deprecated
+ public PeerConnectionFactory() {
+ this(null);
+ }
+
+ public PeerConnectionFactory(Options options) {
+ nativeFactory = nativeCreatePeerConnectionFactory(options);
+ if (nativeFactory == 0) {
+ throw new RuntimeException("Failed to initialize PeerConnectionFactory!");
+ }
+ }
+
+ public PeerConnection createPeerConnection(
+ PeerConnection.RTCConfiguration rtcConfig,
+ MediaConstraints constraints,
+ PeerConnection.Observer observer) {
+ long nativeObserver = nativeCreateObserver(observer);
+ if (nativeObserver == 0) {
+ return null;
+ }
+ long nativePeerConnection = nativeCreatePeerConnection(
+ nativeFactory, rtcConfig, constraints, nativeObserver);
+ if (nativePeerConnection == 0) {
+ return null;
+ }
+ return new PeerConnection(nativePeerConnection, nativeObserver);
+ }
+
+ public PeerConnection createPeerConnection(
+ List<PeerConnection.IceServer> iceServers,
+ MediaConstraints constraints,
+ PeerConnection.Observer observer) {
+ PeerConnection.RTCConfiguration rtcConfig =
+ new PeerConnection.RTCConfiguration(iceServers);
+ return createPeerConnection(rtcConfig, constraints, observer);
+ }
+
+ public MediaStream createLocalMediaStream(String label) {
+ return new MediaStream(
+ nativeCreateLocalMediaStream(nativeFactory, label));
+ }
+
+ public VideoSource createVideoSource(
+ VideoCapturer capturer, MediaConstraints constraints) {
+ return new VideoSource(nativeCreateVideoSource(
+ nativeFactory, capturer.takeNativeVideoCapturer(), constraints));
+ }
+
+ public VideoTrack createVideoTrack(String id, VideoSource source) {
+ return new VideoTrack(nativeCreateVideoTrack(
+ nativeFactory, id, source.nativeSource));
+ }
+
+ public AudioSource createAudioSource(MediaConstraints constraints) {
+ return new AudioSource(nativeCreateAudioSource(nativeFactory, constraints));
+ }
+
+ public AudioTrack createAudioTrack(String id, AudioSource source) {
+ return new AudioTrack(nativeCreateAudioTrack(
+ nativeFactory, id, source.nativeSource));
+ }
+
+ // Starts recording an AEC dump. Ownership of the file is transferred to the
+ // native code. If an AEC dump is already in progress, it will be stopped and
+ // a new one will start using the provided file.
+ public boolean startAecDump(int file_descriptor, int filesize_limit_bytes) {
+ return nativeStartAecDump(nativeFactory, file_descriptor, filesize_limit_bytes);
+ }
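+
+ // Usage sketch (illustrative; the ParcelFileDescriptor plumbing and the size
+ // limit are application-side assumptions):
+ //
+ //   int fd = ParcelFileDescriptor.open(dumpFile,
+ //       ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
+ //           | ParcelFileDescriptor.MODE_TRUNCATE).detachFd();
+ //   factory.startAecDump(fd, 64 * 1024 * 1024 /* 64 MB limit */);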
+
+ // Stops recording an AEC dump. If no AEC dump is currently being recorded,
+ // this call will have no effect.
+ public void stopAecDump() {
+ nativeStopAecDump(nativeFactory);
+ }
+
+ // Starts recording an RTC event log. Ownership of the file is transferred to
+ // the native code. If an RTC event log is already being recorded, it will be
+ // stopped and a new one will start using the provided file.
+ public boolean startRtcEventLog(int file_descriptor) {
+ return nativeStartRtcEventLog(nativeFactory, file_descriptor);
+ }
+
+ // Stops recording an RTC event log. If no RTC event log is currently being
+ // recorded, this call will have no effect.
+ public void stopRtcEventLog() {
+ nativeStopRtcEventLog(nativeFactory);
+ }
+
+ @Deprecated
+ public void setOptions(Options options) {
+ nativeSetOptions(nativeFactory, options);
+ }
+
+ /** Sets the EGL context used by HW video encoding and decoding.
+ *
+ * @param localEGLContext An instance of EglBase.Context.
+ * Must be the same as used by VideoCapturerAndroid and any local
+ * video renderer.
+ * @param remoteEGLContext An instance of EglBase.Context.
+ * Must be the same as used by any remote video renderer.
+ */
+ public void setVideoHwAccelerationOptions(Object localEGLContext, Object remoteEGLContext) {
+ nativeSetVideoHwAccelerationOptions(nativeFactory, localEGLContext, remoteEGLContext);
+ }
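+
+ // Usage sketch (illustrative): share one EGL context with both the capturer
+ // side and the remote renderers; |eglContext| is an assumed EglBase.Context
+ // owned by the application.
+ //
+ //   factory.setVideoHwAccelerationOptions(eglContext, eglContext);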
+
+ public void dispose() {
+ nativeFreeFactory(nativeFactory);
+ signalingThread = null;
+ workerThread = null;
+ }
+
+ public void threadsCallbacks() {
+ nativeThreadsCallbacks(nativeFactory);
+ }
+
+ private static void printStackTrace(Thread thread, String threadName) {
+ if (thread != null) {
+ StackTraceElement[] stackTraces = thread.getStackTrace();
+ if (stackTraces.length > 0) {
+ Logging.d(TAG, threadName + " stack trace:");
+ for (StackTraceElement stackTrace : stackTraces) {
+ Logging.d(TAG, stackTrace.toString());
+ }
+ }
+ }
+ }
+
+ public static void printStackTraces() {
+ printStackTrace(workerThread, "Worker thread");
+ printStackTrace(signalingThread, "Signaling thread");
+ }
+
+ private static void onWorkerThreadReady() {
+ workerThread = Thread.currentThread();
+ Logging.d(TAG, "onWorkerThreadReady");
+ }
+
+ private static void onSignalingThreadReady() {
+ signalingThread = Thread.currentThread();
+ Logging.d(TAG, "onSignalingThreadReady");
+ }
+
+ private static native long nativeCreatePeerConnectionFactory(Options options);
+
+ private static native long nativeCreateObserver(
+ PeerConnection.Observer observer);
+
+ private static native long nativeCreatePeerConnection(
+ long nativeFactory, PeerConnection.RTCConfiguration rtcConfig,
+ MediaConstraints constraints, long nativeObserver);
+
+ private static native long nativeCreateLocalMediaStream(
+ long nativeFactory, String label);
+
+ private static native long nativeCreateVideoSource(
+ long nativeFactory, long nativeVideoCapturer,
+ MediaConstraints constraints);
+
+ private static native long nativeCreateVideoTrack(
+ long nativeFactory, String id, long nativeVideoSource);
+
+ private static native long nativeCreateAudioSource(
+ long nativeFactory, MediaConstraints constraints);
+
+ private static native long nativeCreateAudioTrack(
+ long nativeFactory, String id, long nativeSource);
+
+ private static native boolean nativeStartAecDump(
+ long nativeFactory, int file_descriptor, int filesize_limit_bytes);
+
+ private static native void nativeStopAecDump(long nativeFactory);
+
+ private static native boolean nativeStartRtcEventLog(long nativeFactory, int file_descriptor);
+
+ private static native void nativeStopRtcEventLog(long nativeFactory);
+
+ @Deprecated
+ public native void nativeSetOptions(long nativeFactory, Options options);
+
+ private static native void nativeSetVideoHwAccelerationOptions(
+ long nativeFactory, Object localEGLContext, Object remoteEGLContext);
+
+ private static native void nativeThreadsCallbacks(long nativeFactory);
+
+ private static native void nativeFreeFactory(long nativeFactory);
+}
diff --git a/webrtc/api/java/src/org/webrtc/RtpReceiver.java b/webrtc/api/java/src/org/webrtc/RtpReceiver.java
new file mode 100644
index 0000000..597f441
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/RtpReceiver.java
@@ -0,0 +1,63 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ RtpReceiverInterface. */
+public class RtpReceiver {
+ final long nativeRtpReceiver;
+
+ private MediaStreamTrack cachedTrack;
+
+ public RtpReceiver(long nativeRtpReceiver) {
+ this.nativeRtpReceiver = nativeRtpReceiver;
+ long track = nativeGetTrack(nativeRtpReceiver);
+ // We can assume that an RtpReceiver always has an associated track.
+ cachedTrack = new MediaStreamTrack(track);
+ }
+
+ public MediaStreamTrack track() {
+ return cachedTrack;
+ }
+
+ public String id() {
+ return nativeId(nativeRtpReceiver);
+ }
+
+ public void dispose() {
+ cachedTrack.dispose();
+ free(nativeRtpReceiver);
+ }
+
+ // This should increment the reference count of the track.
+ // Will be released in dispose().
+ private static native long nativeGetTrack(long nativeRtpReceiver);
+
+ private static native String nativeId(long nativeRtpReceiver);
+
+ private static native void free(long nativeRtpReceiver);
+}
diff --git a/webrtc/api/java/src/org/webrtc/RtpSender.java b/webrtc/api/java/src/org/webrtc/RtpSender.java
new file mode 100644
index 0000000..9ac2e70
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/RtpSender.java
@@ -0,0 +1,88 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ RtpSenderInterface. */
+public class RtpSender {
+ final long nativeRtpSender;
+
+ private MediaStreamTrack cachedTrack;
+ private boolean ownsTrack = true;
+
+ public RtpSender(long nativeRtpSender) {
+ this.nativeRtpSender = nativeRtpSender;
+ long track = nativeGetTrack(nativeRtpSender);
+ // It may be possible for an RtpSender to be created without a track.
+ cachedTrack = (track == 0) ? null : new MediaStreamTrack(track);
+ }
+
+ // If |takeOwnership| is true, the RtpSender takes ownership of the track
+ // from the caller, and will auto-dispose of it when no longer needed.
+ // |takeOwnership| should only be used if the caller owns the track; it is
+ // not appropriate when the track is owned by, for example, another RtpSender
+ // or a MediaStream.
+ public boolean setTrack(MediaStreamTrack track, boolean takeOwnership) {
+ if (!nativeSetTrack(nativeRtpSender,
+ (track == null) ? 0 : track.nativeTrack)) {
+ return false;
+ }
+ if (cachedTrack != null && ownsTrack) {
+ cachedTrack.dispose();
+ }
+ cachedTrack = track;
+ ownsTrack = takeOwnership;
+ return true;
+ }
+
+ public MediaStreamTrack track() {
+ return cachedTrack;
+ }
+
+ public String id() {
+ return nativeId(nativeRtpSender);
+ }
+
+ public void dispose() {
+ if (cachedTrack != null && ownsTrack) {
+ cachedTrack.dispose();
+ }
+ free(nativeRtpSender);
+ }
+
+ private static native boolean nativeSetTrack(long nativeRtpSender,
+ long nativeTrack);
+
+ // This should increment the reference count of the track.
+ // Will be released in dispose() or setTrack().
+ private static native long nativeGetTrack(long nativeRtpSender);
+
+ private static native String nativeId(long nativeRtpSender);
+
+ private static native void free(long nativeRtpSender);
+}
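
The ownership contract of setTrack() above is easiest to see in a sketch.
`factory` (a PeerConnectionFactory), `source` and `sender` are assumed to
exist; only the setTrack()/dispose() behavior comes from this file:

    // Hypothetical objects: factory, source, sender.
    VideoTrack track = factory.createVideoTrack("video0", source);
    // takeOwnership=true is correct only because this code created the track
    // and hands it off; the sender will dispose it in dispose() or setTrack().
    if (!sender.setTrack(track, true)) {
      track.dispose();  // the sender refused the track, so we still own it
    }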
diff --git a/webrtc/api/java/src/org/webrtc/SdpObserver.java b/webrtc/api/java/src/org/webrtc/SdpObserver.java
new file mode 100644
index 0000000..779bf1b
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/SdpObserver.java
@@ -0,0 +1,43 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Interface for observing SDP-related events. */
+public interface SdpObserver {
+ /** Called on success of Create{Offer,Answer}(). */
+ public void onCreateSuccess(SessionDescription sdp);
+
+ /** Called on success of Set{Local,Remote}Description(). */
+ public void onSetSuccess();
+
+ /** Called on error of Create{Offer,Answer}(). */
+ public void onCreateFailure(String error);
+
+ /** Called on error of Set{Local,Remote}Description(). */
+ public void onSetFailure(String error);
+}
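
A sketch of a minimal implementation, e.g. for passing to the (assumed)
PeerConnection.createOffer()/setLocalDescription() entry points:

    class LoggingSdpObserver implements SdpObserver {
      @Override
      public void onCreateSuccess(SessionDescription sdp) {
        System.out.println("Created " + sdp.type.canonicalForm());
      }
      @Override
      public void onSetSuccess() {
        System.out.println("Description applied");
      }
      @Override
      public void onCreateFailure(String error) {
        System.err.println("Create failed: " + error);
      }
      @Override
      public void onSetFailure(String error) {
        System.err.println("Set failed: " + error);
      }
    }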
diff --git a/webrtc/api/java/src/org/webrtc/SessionDescription.java b/webrtc/api/java/src/org/webrtc/SessionDescription.java
new file mode 100644
index 0000000..c3dfcd4
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/SessionDescription.java
@@ -0,0 +1,58 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.util.Locale;
+
+/**
+ * Description of an RFC 4566 Session.
+ * SDPs are passed as serialized Strings in Java-land and are materialized
+ * to SessionDescriptionInterface as appropriate in the JNI layer.
+ */
+public class SessionDescription {
+ /** Java-land enum version of SessionDescriptionInterface's type() string. */
+  public enum Type {
+ OFFER, PRANSWER, ANSWER;
+
+    public String canonicalForm() {
+      // Use a fixed locale: the default locale can change case mappings
+      // (e.g. Turkish dotless i), and these strings go on the wire.
+      return name().toLowerCase(Locale.US);
+    }
+
+    public static Type fromCanonicalForm(String canonical) {
+      return Type.valueOf(Type.class, canonical.toUpperCase(Locale.US));
+    }
+ }
+
+ public final Type type;
+ public final String description;
+
+ public SessionDescription(Type type, String description) {
+ this.type = type;
+ this.description = description;
+ }
+}
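
The Type enum round-trips between Java constants and the lowercase strings
used on the wire. A small sketch, with `sdpString` standing in for real SDP:

    SessionDescription offer =
        new SessionDescription(SessionDescription.Type.OFFER, sdpString);
    String canonical = offer.type.canonicalForm();             // "offer"
    SessionDescription.Type parsed =
        SessionDescription.Type.fromCanonicalForm(canonical);  // Type.OFFER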
diff --git a/webrtc/api/java/src/org/webrtc/StatsObserver.java b/webrtc/api/java/src/org/webrtc/StatsObserver.java
new file mode 100644
index 0000000..99223ad
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/StatsObserver.java
@@ -0,0 +1,34 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Interface for observing Stats reports (see webrtc::StatsObserver). */
+public interface StatsObserver {
+  /** Called when the reports are ready. */
+ public void onComplete(StatsReport[] reports);
+}
diff --git a/webrtc/api/java/src/org/webrtc/StatsReport.java b/webrtc/api/java/src/org/webrtc/StatsReport.java
new file mode 100644
index 0000000..6e32543
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/StatsReport.java
@@ -0,0 +1,74 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java version of webrtc::StatsReport. */
+public class StatsReport {
+
+ /** Java version of webrtc::StatsReport::Value. */
+ public static class Value {
+ public final String name;
+ public final String value;
+
+ public Value(String name, String value) {
+ this.name = name;
+ this.value = value;
+ }
+
+    @Override
+    public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append("[").append(name).append(": ").append(value).append("]");
+ return builder.toString();
+ }
+ }
+
+ public final String id;
+ public final String type;
+ // Time since 1970-01-01T00:00:00Z in milliseconds.
+ public final double timestamp;
+ public final Value[] values;
+
+ public StatsReport(String id, String type, double timestamp, Value[] values) {
+ this.id = id;
+ this.type = type;
+ this.timestamp = timestamp;
+ this.values = values;
+ }
+
+  @Override
+  public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append("id: ").append(id).append(", type: ").append(type)
+ .append(", timestamp: ").append(timestamp).append(", values: ");
+ for (int i = 0; i < values.length; ++i) {
+ builder.append(values[i].toString()).append(", ");
+ }
+ return builder.toString();
+ }
+}
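
How the two stats types above fit together, assuming the getStats() entry
point on PeerConnection (`pc` is hypothetical; a null track asks for
session-wide stats, mirroring the native API):

    pc.getStats(new StatsObserver() {
      @Override
      public void onComplete(StatsReport[] reports) {
        // Called from a native thread; keep the work light.
        for (StatsReport report : reports) {
          System.out.println(report);  // StatsReport.toString() above
        }
      }
    }, null);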
diff --git a/webrtc/api/java/src/org/webrtc/VideoCapturer.java b/webrtc/api/java/src/org/webrtc/VideoCapturer.java
new file mode 100644
index 0000000..ad41053
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/VideoCapturer.java
@@ -0,0 +1,62 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java version of cricket::VideoCapturer. */
+// TODO(perkj): Merge VideoCapturer and VideoCapturerAndroid.
+public class VideoCapturer {
+ private long nativeVideoCapturer;
+
+ protected VideoCapturer() {
+ }
+
+ // Sets |nativeCapturer| to be owned by VideoCapturer.
+ protected void setNativeCapturer(long nativeCapturer) {
+ this.nativeVideoCapturer = nativeCapturer;
+ }
+
+ // Package-visible for PeerConnectionFactory.
+ long takeNativeVideoCapturer() {
+ if (nativeVideoCapturer == 0) {
+ throw new RuntimeException("Capturer can only be taken once!");
+ }
+ long ret = nativeVideoCapturer;
+ nativeVideoCapturer = 0;
+ return ret;
+ }
+
+ public void dispose() {
+ // No-op iff this capturer is owned by a source (see comment on
+ // PeerConnectionFactoryInterface::CreateVideoSource()).
+ if (nativeVideoCapturer != 0) {
+ free(nativeVideoCapturer);
+ }
+ }
+
+ private static native void free(long nativeVideoCapturer);
+}
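
The take-once semantics above matter when handing a capturer to a source. A
sketch, where `factory`, `capturer` and `constraints` are assumed to exist and
createVideoSource() internally calls takeNativeVideoCapturer():

    VideoSource source = factory.createVideoSource(capturer, constraints);
    // The source now owns the native capturer, so this is a harmless no-op:
    capturer.dispose();
    // A second createVideoSource(capturer, ...) would throw
    // "Capturer can only be taken once!".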
diff --git a/webrtc/api/java/src/org/webrtc/VideoRenderer.java b/webrtc/api/java/src/org/webrtc/VideoRenderer.java
new file mode 100644
index 0000000..c14802e
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/VideoRenderer.java
@@ -0,0 +1,170 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Java version of VideoRendererInterface. Clients define their own rendering
+ * behavior by passing a Callbacks object to the constructor; the wrapped
+ * native renderer delivers each incoming frame to Callbacks.renderFrame(),
+ * and every delivered frame must be returned via renderFrameDone().
+ */
+public class VideoRenderer {
+ /**
+ * Java version of cricket::VideoFrame. Frames are only constructed from native code and test
+ * code.
+ */
+ public static class I420Frame {
+ public final int width;
+ public final int height;
+ public final int[] yuvStrides;
+ public ByteBuffer[] yuvPlanes;
+ public final boolean yuvFrame;
+ // Matrix that transforms standard coordinates to their proper sampling locations in
+ // the texture. This transform compensates for any properties of the video source that
+ // cause it to appear different from a normalized texture. This matrix does not take
+ // |rotationDegree| into account.
+ public final float[] samplingMatrix;
+ public int textureId;
+ // Frame pointer in C++.
+ private long nativeFramePointer;
+
+    // rotationDegree is the number of degrees the frame must be rotated
+    // clockwise to be rendered correctly.
+ public int rotationDegree;
+
+ /**
+ * Construct a frame of the given dimensions with the specified planar data.
+ */
+ I420Frame(int width, int height, int rotationDegree, int[] yuvStrides, ByteBuffer[] yuvPlanes,
+ long nativeFramePointer) {
+ this.width = width;
+ this.height = height;
+ this.yuvStrides = yuvStrides;
+ this.yuvPlanes = yuvPlanes;
+ this.yuvFrame = true;
+ this.rotationDegree = rotationDegree;
+ this.nativeFramePointer = nativeFramePointer;
+ if (rotationDegree % 90 != 0) {
+ throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
+ }
+ // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
+ // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
+ // bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
+ // matrix.
+ samplingMatrix = new float[] {
+ 1, 0, 0, 0,
+ 0, -1, 0, 0,
+ 0, 0, 1, 0,
+ 0, 1, 0, 1};
+ }
+
+ /**
+     * Construct a texture frame of the given dimensions, with its data held
+     * in a SurfaceTexture.
+ */
+ I420Frame(int width, int height, int rotationDegree, int textureId, float[] samplingMatrix,
+ long nativeFramePointer) {
+ this.width = width;
+ this.height = height;
+ this.yuvStrides = null;
+ this.yuvPlanes = null;
+ this.samplingMatrix = samplingMatrix;
+ this.textureId = textureId;
+ this.yuvFrame = false;
+ this.rotationDegree = rotationDegree;
+ this.nativeFramePointer = nativeFramePointer;
+ if (rotationDegree % 90 != 0) {
+ throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
+ }
+ }
+
+ public int rotatedWidth() {
+ return (rotationDegree % 180 == 0) ? width : height;
+ }
+
+ public int rotatedHeight() {
+ return (rotationDegree % 180 == 0) ? height : width;
+ }
+
+ @Override
+ public String toString() {
+ return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] +
+ ":" + yuvStrides[2];
+ }
+ }
+
+ // Helper native function to do a video frame plane copying.
+ public static native void nativeCopyPlane(ByteBuffer src, int width,
+ int height, int srcStride, ByteBuffer dst, int dstStride);
+
+ /** The real meat of VideoRendererInterface. */
+  public interface Callbacks {
+    // |frame| might have pending rotation, and implementations of Callbacks
+    // should handle that by applying the rotation during rendering. The
+    // callee is responsible for signaling when it is done with |frame| by
+    // calling renderFrameDone(frame).
+ public void renderFrame(I420Frame frame);
+ }
+
+ /**
+ * This must be called after every renderFrame() to release the frame.
+ */
+ public static void renderFrameDone(I420Frame frame) {
+ frame.yuvPlanes = null;
+ frame.textureId = 0;
+ if (frame.nativeFramePointer != 0) {
+ releaseNativeFrame(frame.nativeFramePointer);
+ frame.nativeFramePointer = 0;
+ }
+ }
+
+ long nativeVideoRenderer;
+
+ public VideoRenderer(Callbacks callbacks) {
+ nativeVideoRenderer = nativeWrapVideoRenderer(callbacks);
+ }
+
+ private VideoRenderer(long nativeVideoRenderer) {
+ this.nativeVideoRenderer = nativeVideoRenderer;
+ }
+
+ public void dispose() {
+ if (nativeVideoRenderer == 0) {
+ // Already disposed.
+ return;
+ }
+
+ freeWrappedVideoRenderer(nativeVideoRenderer);
+ nativeVideoRenderer = 0;
+ }
+
+ private static native long nativeWrapVideoRenderer(Callbacks callbacks);
+ private static native void freeWrappedVideoRenderer(long nativeVideoRenderer);
+ private static native void releaseNativeFrame(long nativeFramePointer);
+}
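
A sketch of the Callbacks contract, showing the one hard rule: every frame
handed to renderFrame() must be returned with renderFrameDone():

    class DroppingCallbacks implements VideoRenderer.Callbacks {
      @Override
      public void renderFrame(VideoRenderer.I420Frame frame) {
        // A real renderer would draw here, honoring frame.rotationDegree
        // (see rotatedWidth()/rotatedHeight()) and, for texture frames,
        // frame.samplingMatrix.
        VideoRenderer.renderFrameDone(frame);  // mandatory for every frame
      }
    }

    VideoRenderer renderer = new VideoRenderer(new DroppingCallbacks());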
diff --git a/webrtc/api/java/src/org/webrtc/VideoSource.java b/webrtc/api/java/src/org/webrtc/VideoSource.java
new file mode 100644
index 0000000..7151748
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/VideoSource.java
@@ -0,0 +1,63 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package org.webrtc;
+
+/**
+ * Java version of VideoSourceInterface, extended with stop/restart
+ * functionality to allow explicit control of the camera device on android,
+ * where there is no support for multiple open capture devices and the cost of
+ * holding a camera open (even if MediaStreamTrack.setEnabled(false) is muting
+ * its output to the encoder) can be too high to bear.
+ */
+public class VideoSource extends MediaSource {
+
+ public VideoSource(long nativeSource) {
+ super(nativeSource);
+ }
+
+ // Stop capture feeding this source.
+ public void stop() {
+ stop(nativeSource);
+ }
+
+  // Restart capture feeding this source. stop() must have been called since
+  // the last call to restart() (if any). Note that this isn't "start()";
+  // sources are running by default when created.
+ public void restart() {
+ restart(nativeSource);
+ }
+
+ @Override
+ public void dispose() {
+ super.dispose();
+ }
+
+ private static native void stop(long nativeSource);
+ private static native void restart(long nativeSource);
+}
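
The stop()/restart() pair is intended for lifecycle handling on Android. A
sketch inside a hypothetical Activity with a `videoSource` field:

    @Override
    protected void onPause() {
      super.onPause();
      videoSource.stop();      // release the camera while backgrounded
    }

    @Override
    protected void onResume() {
      super.onResume();
      videoSource.restart();   // legal only after a preceding stop()
    }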
diff --git a/webrtc/api/java/src/org/webrtc/VideoTrack.java b/webrtc/api/java/src/org/webrtc/VideoTrack.java
new file mode 100644
index 0000000..7333a90
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/VideoTrack.java
@@ -0,0 +1,68 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.util.LinkedList;
+
+/** Java version of VideoTrackInterface. */
+public class VideoTrack extends MediaStreamTrack {
+ private final LinkedList<VideoRenderer> renderers =
+ new LinkedList<VideoRenderer>();
+
+ public VideoTrack(long nativeTrack) {
+ super(nativeTrack);
+ }
+
+ public void addRenderer(VideoRenderer renderer) {
+ renderers.add(renderer);
+ nativeAddRenderer(nativeTrack, renderer.nativeVideoRenderer);
+ }
+
+ public void removeRenderer(VideoRenderer renderer) {
+ if (!renderers.remove(renderer)) {
+ return;
+ }
+ nativeRemoveRenderer(nativeTrack, renderer.nativeVideoRenderer);
+ renderer.dispose();
+ }
+
+ public void dispose() {
+ while (!renderers.isEmpty()) {
+ removeRenderer(renderers.getFirst());
+ }
+ super.dispose();
+ }
+
+ private static native void free(long nativeTrack);
+
+ private static native void nativeAddRenderer(
+ long nativeTrack, long nativeRenderer);
+
+ private static native void nativeRemoveRenderer(
+ long nativeTrack, long nativeRenderer);
+}
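
Renderer lifetime is tied to the track above: removeRenderer() (and dispose())
dispose the VideoRenderer for you. A sketch, with `videoTrack` and `callbacks`
assumed to exist:

    VideoRenderer renderer = new VideoRenderer(callbacks);
    videoTrack.addRenderer(renderer);
    // ... later:
    videoTrack.removeRenderer(renderer);  // also disposes `renderer`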